From 5ba30a8badd95fe13f6f12e19371c4f849f8d2fa Mon Sep 17 00:00:00 2001 From: Ivan Moreno Date: Fri, 14 Oct 2022 11:22:18 -0700 Subject: [PATCH 001/495] Added tagging of converted files --- plugins/module_utils/encode.py | 33 ++++++++++++++++++++++++++++++++- plugins/modules/zos_encode.py | 1 + 2 files changed, 33 insertions(+), 1 deletion(-) diff --git a/plugins/module_utils/encode.py b/plugins/module_utils/encode.py index fdaeacdb4..9b5c44a02 100644 --- a/plugins/module_utils/encode.py +++ b/plugins/module_utils/encode.py @@ -1,4 +1,4 @@ -# Copyright (c) IBM Corporation 2020 +# Copyright (c) IBM Corporation 2020, 2022 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -453,6 +453,25 @@ def mvs_convert_encoding( return convert_rc + def uss_tag_encoding(self, file_path, tag): + """Tag the file/directory specified with the given code set. + If `file_path` is a directory, all of the files and subdirectories will + be tagged recursively. + + Arguments: + file_path {str} -- Absolute file path to tag. + tag {str} -- Code set to tag the file/directory. + + Raises: + TaggingError: When the chtag command fails. + """ + is_dir = os.path.isdir(file_path) + + tag_cmd = "chtag -{0}c {1} {2}".format("R" if is_dir else "t", tag, file_path) + rc, out, err = self.module.run_command(tag_cmd) + if rc != 0: + raise TaggingError(file_path, tag, rc, out, err) + class EncodeError(Exception): def __init__(self, message): @@ -460,6 +479,18 @@ def __init__(self, message): super(EncodeError, self).__init__(self.msg) +class TaggingError(Exception): + def __init__(self, file_path, tag, rc, stdout, stderr): + self.msg = 'An error occurred during tagging of {0} to {1}'.format( + file_path, + tag + ) + self.rc = rc + self.stdout = stdout + self.stderr = stderr + super(TaggingError, self).__init__(self.msg) + + class MoveFileError(Exception): def __init__(self, src, dest, e): self.msg = "Failed when moving {0} to {1}: {2}".format(src, dest, e) diff --git a/plugins/modules/zos_encode.py b/plugins/modules/zos_encode.py index 2f28768f4..21ca9ae63 100644 --- a/plugins/modules/zos_encode.py +++ b/plugins/modules/zos_encode.py @@ -494,6 +494,7 @@ def run_module(): convert_rc = eu.uss_convert_encoding_prev( src, dest, from_encoding, to_encoding ) + eu.uss_tag_encoding(dest, to_encoding) else: convert_rc = eu.mvs_convert_encoding( src, From 13e2c3bc314feda59b12031d414302070cbb7007 Mon Sep 17 00:00:00 2001 From: Ivan Moreno Date: Tue, 18 Oct 2022 09:10:07 -0700 Subject: [PATCH 002/495] Updated tests for zos_encode --- plugins/modules/zos_encode.py | 13 +++++- .../modules/test_zos_encode_func.py | 41 +++++++++++++++++++ 2 files changed, 53 insertions(+), 1 deletion(-) diff --git a/plugins/modules/zos_encode.py b/plugins/modules/zos_encode.py index 21ca9ae63..0fdbb47e5 100644 --- a/plugins/modules/zos_encode.py +++ b/plugins/modules/zos_encode.py @@ -494,7 +494,6 @@ def run_module(): convert_rc = eu.uss_convert_encoding_prev( src, dest, from_encoding, to_encoding ) - eu.uss_tag_encoding(dest, to_encoding) else: convert_rc = eu.mvs_convert_encoding( src, @@ -506,10 +505,22 @@ def run_module(): ) if convert_rc: + if is_uss_dest: + eu.uss_tag_encoding(dest, to_encoding) + changed = True result = dict(changed=changed, src=src, dest=dest, backup_name=backup_name) else: result = dict(src=src, dest=dest, changed=changed, backup_name=backup_name) + except encode.TaggingError as e: + module.fail_json( + msg=e.msg, + rc=e.rc, + stdout=e.stdout, + stderr=e.stderr, + stdout_lines=e.stdout.splitlines(), + stderr_lines=e.stderr.splitlines(), + ) except Exception as e: module.fail_json(msg=repr(e), **result) diff --git a/tests/functional/modules/test_zos_encode_func.py b/tests/functional/modules/test_zos_encode_func.py index 970fc8944..5f1e8cfbf 100644 --- a/tests/functional/modules/test_zos_encode_func.py +++ b/tests/functional/modules/test_zos_encode_func.py @@ -14,6 +14,7 @@ from __future__ import absolute_import, division, print_function from shellescape import quote from pprint import pprint +from os import path __metaclass__ = type @@ -126,6 +127,10 @@ def test_uss_encoding_conversion_without_dest(ansible_zos_module): assert result.get("dest") == USS_FILE assert result.get("backup_name") is None assert result.get("changed") is True + + tag_results = hosts.all.shell(cmd="ls -T {0}".format(USS_FILE)) + for result in tag_results.contacted.values(): + assert TO_ENCODING in result.get("stdout") finally: hosts.all.file(path=USS_FILE, state="absent") @@ -149,8 +154,13 @@ def test_uss_encoding_conversion_when_dest_not_exists_01(ansible_zos_module): assert result.get("dest") == USS_NONE_FILE assert result.get("backup_name") is None assert result.get("changed") is True + + tag_results = hosts.all.shell(cmd="ls -T {0}".format(USS_NONE_FILE)) + for result in tag_results.contacted.values(): + assert TO_ENCODING in result.get("stdout") finally: hosts.all.file(path=USS_FILE, state="absent") + hosts.all.file(path=USS_NONE_FILE, state="absent") def test_uss_encoding_conversion_when_dest_not_exists_02(ansible_zos_module): @@ -193,6 +203,10 @@ def test_uss_encoding_conversion_uss_file_to_uss_file(ansible_zos_module): assert result.get("dest") == USS_DEST_FILE assert result.get("backup_name") is None assert result.get("changed") is True + + tag_results = hosts.all.shell(cmd="ls -T {0}".format(USS_DEST_FILE)) + for result in tag_results.contacted.values(): + assert FROM_ENCODING in result.get("stdout") finally: hosts.all.file(path=USS_FILE, state="absent") hosts.all.file(path=USS_DEST_FILE, state="absent") @@ -217,6 +231,10 @@ def test_uss_encoding_conversion_uss_file_to_uss_path(ansible_zos_module): assert result.get("dest") == USS_DEST_PATH assert result.get("backup_name") is None assert result.get("changed") is True + + tag_results = hosts.all.shell(cmd="ls -T {0}/{1}".format(USS_DEST_PATH, path.basename(USS_FILE))) + for result in tag_results.contacted.values(): + assert FROM_ENCODING in result.get("stdout") finally: hosts.all.file(path=USS_FILE, state="absent") hosts.all.file(path=USS_DEST_PATH, state="absent") @@ -244,6 +262,12 @@ def test_uss_encoding_conversion_uss_path_to_uss_path(ansible_zos_module): assert result.get("dest") == USS_DEST_PATH assert result.get("backup_name") is not None assert result.get("changed") is True + + tag_results = hosts.all.shell(cmd="ls -T {0}".format(USS_DEST_PATH)) + for result in tag_results.contacted.values(): + assert FROM_ENCODING in result.get("stdout") + assert TO_ENCODING not in result.get("stdout") + assert "untagged" not in result.get("stdout") finally: hosts.all.file(path=USS_PATH, state="absent") hosts.all.file(path=USS_DEST_PATH, state="absent") @@ -292,6 +316,10 @@ def test_uss_encoding_conversion_mvs_ps_to_uss_file(ansible_zos_module): assert result.get("dest") == USS_DEST_FILE assert result.get("backup_name") is not None assert result.get("changed") is True + + tag_results = hosts.all.shell(cmd="ls -T {0}".format(USS_DEST_FILE)) + for result in tag_results.contacted.values(): + assert TO_ENCODING in result.get("stdout") finally: hosts.all.file(path=USS_DEST_FILE, state="absent") hosts.all.file(path=result.get("backup_name"), state="absent") @@ -369,6 +397,10 @@ def test_uss_encoding_conversion_mvs_pds_member_to_uss_file(ansible_zos_module): assert result.get("dest") == USS_DEST_FILE assert result.get("backup_name") is not None assert result.get("changed") is True + + tag_results = hosts.all.shell(cmd="ls -T {0}".format(USS_DEST_FILE)) + for result in tag_results.contacted.values(): + assert TO_ENCODING in result.get("stdout") finally: hosts.all.file(path=USS_DEST_FILE, state="absent") hosts.all.file(path=result.get("backup_name"), state="absent") @@ -417,6 +449,11 @@ def test_uss_encoding_conversion_mvs_pds_to_uss_path(ansible_zos_module): assert result.get("dest") == USS_DEST_PATH assert result.get("backup_name") is None assert result.get("changed") is True + + tag_results = hosts.all.shell(cmd="ls -T {0}".format(USS_DEST_PATH)) + for result in tag_results.contacted.values(): + assert FROM_ENCODING in result.get("stdout") + assert "untagged" not in result.get("stdout") finally: hosts.all.file(path=USS_DEST_PATH, state="absent") @@ -498,6 +535,10 @@ def test_uss_encoding_conversion_mvs_vsam_to_uss_file(ansible_zos_module): # print(cat_result.contacted.values()) # for uss_file_result in cat_result.contacted.values(): # assert TEST_DATA in uss_file_result.get("stdout") + + tag_results = hosts.all.shell(cmd="ls -T {0}".format(USS_DEST_FILE)) + for result in tag_results.contacted.values(): + assert TO_ENCODING in result.get("stdout") finally: hosts.all.file(path=USS_DEST_FILE, state="absent") hosts.all.file(path=result.get("backup_name"), state="absent") From 2d5c8c3dba91562584b1cdee021ffbd263b4d428 Mon Sep 17 00:00:00 2001 From: Ivan Moreno Date: Mon, 24 Oct 2022 17:34:49 -0700 Subject: [PATCH 003/495] Added restoration of PDS/PDSE members when module fails zos_copy can now track which members got overwritten or newly added to a partitioned data set, and restore them accordingly. This commit includes a refactorization of the copy_to_pdse method to simplify the restoration work. --- plugins/modules/zos_copy.py | 162 +++++++++++++++++++++++++----------- 1 file changed, 113 insertions(+), 49 deletions(-) diff --git a/plugins/modules/zos_copy.py b/plugins/modules/zos_copy.py index 6d7fd98bc..9c3ae782d 100644 --- a/plugins/modules/zos_copy.py +++ b/plugins/modules/zos_copy.py @@ -1259,6 +1259,8 @@ def copy_to_pdse( dest_member {str, optional} -- Name of destination member in data set """ new_src = conv_path or temp_path or src + src_members = [] + dest_members = [] if src_ds_type == "USS": if os.path.isfile(new_src): @@ -1267,36 +1269,17 @@ def copy_to_pdse( else: path, dirs, files = next(os.walk(new_src)) - for file in files: - full_file_path = os.path.normpath(path + "/" + file) - - if dest_member: - dest_copy_name = "{0}({1})".format(dest, dest_member) - else: - dest_copy_name = "{0}({1})".format(dest, data_set.DataSet.get_member_name_from_file(file)) - - result = self.copy_to_member(full_file_path, dest_copy_name) + src_members = [os.path.normpath("{0}/{1}".format(path, file)) for file in files] + dest_members = [ + dest_member if dest_member + else data_set.DataSet.get_member_name_from_file(file) + for file in files + ] - if result["rc"] != 0: - msg = "Unable to copy file {0} to data set member {1}".format(file, dest_copy_name) - raise CopyOperationError( - msg=msg, - rc=result["rc"], - stdout=result["out"], - stderr=result["err"] - ) elif src_ds_type in data_set.DataSet.MVS_SEQ: - dest_copy_name = "{0}({1})".format(dest, dest_member) - result = self.copy_to_member(new_src, dest_copy_name) + src_members = [new_src] + dest_members = [dest_member] - if result["rc"] != 0: - msg = "Unable to copy data set {0} to data set member {1}".format(new_src, dest_copy_name) - raise CopyOperationError( - msg=msg, - rc=result["rc"], - stdout=result["out"], - stderr=result["err"] - ) else: members = [] src_data_set_name = data_set.extract_dsname(new_src) @@ -1306,23 +1289,39 @@ def copy_to_pdse( else: members = datasets.list_members(new_src) - for member in members: - copy_src = "{0}({1})".format(src_data_set_name, member) - if dest_member: - dest_copy_name = "{0}({1})".format(dest, dest_member) - else: - dest_copy_name = "{0}({1})".format(dest, member) + src_members = ["{0}({1})".format(src_data_set_name, member) for member in members] + dest_members = [ + dest_member if dest_member + else member + for member in members + ] - result = self.copy_to_member(copy_src, dest_copy_name) + existing_members = datasets.list_members(dest) + overwritten_members = [] + new_members = [] - if result["rc"] != 0: - msg = "Unable to copy data set member {0} to data set member {1}".format(new_src, dest_copy_name) - raise CopyOperationError( - msg=msg, - rc=result["rc"], - stdout=result["out"], - stderr=result["err"] - ) + for src_member, destination_member in zip(src_members, dest_members): + if destination_member in existing_members: + overwritten_members.append(destination_member) + else: + new_members.append(destination_member) + + result = self.copy_to_member(src_member, "{0}({1})".format(dest, destination_member)) + + if result["rc"] != 0: + msg = "Unable to copy source {0} to data set member {1}({2})".format( + new_src, + dest, + destination_member + ) + raise CopyOperationError( + msg=msg, + rc=result["rc"], + stdout=result["out"], + stderr=result["err"], + overwritten_members=overwritten_members, + new_members=new_members + ) def copy_to_member( self, @@ -1558,7 +1557,15 @@ def backup_data(ds_name, ds_type, backup_name, tmphlq=None): ) -def restore_backup(dest, backup, dest_type, use_backup, volume=None): +def restore_backup( + dest, + backup, + dest_type, + use_backup, + volume=None, + members_to_restore=None, + members_to_delete=None +): """Restores a destination file/directory/data set by using a given backup. Arguments: @@ -1569,6 +1576,10 @@ def restore_backup(dest, backup, dest_type, use_backup, volume=None): tries to use an empty data set, and in that case a new data set is allocated instead of copied. volume (str, optional) -- Volume where the data set should be. + members_to_restore (list, optional) -- List of members of a PDS/PDSE that were overwritten + and need to be restored. + members_to_delete (list, optional) -- List of members of a PDS/PDSE that need to be erased + because they were newly added. """ volumes = [volume] if volume else None @@ -1581,15 +1592,57 @@ def restore_backup(dest, backup, dest_type, use_backup, volume=None): shutil.rmtree(dest, ignore_errors=True) shutil.copytree(backup, dest) else: - data_set.DataSet.ensure_absent(dest, volumes) - if dest_type in data_set.DataSet.MVS_VSAM: + data_set.DataSet.ensure_absent(dest, volumes) repro_cmd = """ REPRO - INDATASET('{0}') - OUTDATASET('{1}')""".format(backup.upper(), dest.upper()) idcams(repro_cmd, authorized=True) + elif dest_type in data_set.DataSet.MVS_SEQ: + response = datasets._copy(backup, dest) + if response.rc != 0: + raise CopyOperationError( + "An error ocurred while restoring {0} from {1}".format(dest, backup), + response.rc, + response.stdout_response, + response.stderr_response + ) else: - datasets.copy(backup, dest) + # TODO: check that new dest also gets erased when it's newly allocated. + # TODO: check other restore_backup path (when allocation fails) + if not members_to_restore: + members_to_restore = [] + if not members_to_delete: + members_to_delete = [] + + for member in members_to_restore: + response = datasets._copy( + "{0}({1})".format(backup, member), + "{0}({1})".format(dest, member) + ) + + if response.rc != 0: + raise CopyOperationError( + "Error ocurred while restoring {0}({1}) from backup {2}".format( + dest, + member, + backup + ), + response.rc, + response.stdout_response, + response.stderr_response + ) + + for member in members_to_delete: + response = datasets._delete_members("{0}({1})".format(dest, member)) + + if response.rc != 0: + raise CopyOperationError( + "Error while deleting {0}({1}) after copy failure".format(dest, member), + response.rc, + response.stdout_response, + response.stderr_response + ) else: data_set.DataSet.ensure_absent(dest, volumes) @@ -2180,7 +2233,7 @@ def run_module(module, arg_def): emergency_backup = tempfile.mkdtemp() emergency_backup = backup_data(dest, dest_ds_type, emergency_backup, tmphlq) else: - emergency_backup = backup_data(dest, dest_ds_type, None, tmphlq) + emergency_backup = backup_data(dest_name, dest_ds_type, None, tmphlq) # If dest is an empty data set, instead create a data set to # use as a model when restoring. else: @@ -2308,7 +2361,14 @@ def run_module(module, arg_def): except CopyOperationError as err: if dest_exists: - restore_backup(dest_name, emergency_backup, dest_ds_type, use_backup) + restore_backup( + dest_name, + emergency_backup, + dest_ds_type, + use_backup, + members_to_restore=err.overwritten_members, + members_to_delete=err.new_members + ) raise err finally: if dest_exists: @@ -2504,7 +2564,9 @@ def __init__( stderr=None, stdout_lines=None, stderr_lines=None, - cmd=None + cmd=None, + overwritten_members=None, + new_members=None ): self.json_args = dict( msg=msg, @@ -2515,6 +2577,8 @@ def __init__( stderr_lines=stderr_lines, cmd=cmd, ) + self.overwritten_members = overwritten_members + self.new_members = new_members super().__init__(msg) From c04f506fb97dc7ecc75527d35e3329d32cca8751 Mon Sep 17 00:00:00 2001 From: Ivan Moreno Date: Tue, 25 Oct 2022 15:33:57 -0700 Subject: [PATCH 004/495] Fixed cleanup of dest when module fails --- plugins/modules/zos_copy.py | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/plugins/modules/zos_copy.py b/plugins/modules/zos_copy.py index 9c3ae782d..f89e12d90 100644 --- a/plugins/modules/zos_copy.py +++ b/plugins/modules/zos_copy.py @@ -1608,8 +1608,6 @@ def restore_backup( response.stderr_response ) else: - # TODO: check that new dest also gets erased when it's newly allocated. - # TODO: check other restore_backup path (when allocation fails) if not members_to_restore: members_to_restore = [] if not members_to_delete: @@ -2253,11 +2251,15 @@ def run_module(module, arg_def): dest_data_set=dest_data_set, volume=volume ) + raise Exception() except Exception as err: if dest_exists: restore_backup(dest_name, emergency_backup, dest_ds_type, use_backup) erase_backup(emergency_backup, dest_ds_type) - module.fail_json(msg="Unable to allocate destination data set: {0}".format(str(err))) + module.fail_json( + msg="Unable to allocate destination data set: {0}".format(str(err)), + dest_exists=dest_exists + ) # ******************************************************************** # Encoding conversion is only valid if the source is a local file, @@ -2369,6 +2371,7 @@ def run_module(module, arg_def): members_to_restore=err.overwritten_members, members_to_delete=err.new_members ) + err.json_args["dest_exists"] = dest_exists raise err finally: if dest_exists: @@ -2565,6 +2568,7 @@ def __init__( stdout_lines=None, stderr_lines=None, cmd=None, + dest_exists=None, overwritten_members=None, new_members=None ): @@ -2576,6 +2580,7 @@ def __init__( stdout_lines=stdout_lines, stderr_lines=stderr_lines, cmd=cmd, + dest_exists=dest_exists, ) self.overwritten_members = overwritten_members self.new_members = new_members From 9fae4e54caa268c16fe44de8e93fb7f8a26aebfc Mon Sep 17 00:00:00 2001 From: Ivan Moreno Date: Wed, 26 Oct 2022 13:01:11 -0700 Subject: [PATCH 005/495] Removed exception used for debugging --- plugins/modules/zos_copy.py | 1 - 1 file changed, 1 deletion(-) diff --git a/plugins/modules/zos_copy.py b/plugins/modules/zos_copy.py index f89e12d90..b316dfa16 100644 --- a/plugins/modules/zos_copy.py +++ b/plugins/modules/zos_copy.py @@ -2251,7 +2251,6 @@ def run_module(module, arg_def): dest_data_set=dest_data_set, volume=volume ) - raise Exception() except Exception as err: if dest_exists: restore_backup(dest_name, emergency_backup, dest_ds_type, use_backup) From 626d1f19474c676cf4da47f4b87529378e8b667f Mon Sep 17 00:00:00 2001 From: Ivan Moreno Date: Wed, 26 Oct 2022 13:07:32 -0700 Subject: [PATCH 006/495] Added pytest markers --- tests/pytest.ini | 3 +++ 1 file changed, 3 insertions(+) diff --git a/tests/pytest.ini b/tests/pytest.ini index fd7be108f..b354e0cf8 100644 --- a/tests/pytest.ini +++ b/tests/pytest.ini @@ -5,3 +5,6 @@ python_functions = test_* markers = ds: dataset test cases. uss: uss test cases. + seq: sequential data sets test cases. + pdse: partitioned data sets test cases. + vsam: VSAM data sets test cases. From 4cbcc53071a622945103f3d407c52d227a38f4c5 Mon Sep 17 00:00:00 2001 From: Oscar Fernando Flores Garcia Date: Thu, 10 Nov 2022 12:23:41 -0600 Subject: [PATCH 007/495] solved pep8 issue --- plugins/modules/zos_encode.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/plugins/modules/zos_encode.py b/plugins/modules/zos_encode.py index 0fdbb47e5..a4a92a985 100644 --- a/plugins/modules/zos_encode.py +++ b/plugins/modules/zos_encode.py @@ -514,12 +514,12 @@ def run_module(): result = dict(src=src, dest=dest, changed=changed, backup_name=backup_name) except encode.TaggingError as e: module.fail_json( - msg=e.msg, - rc=e.rc, - stdout=e.stdout, - stderr=e.stderr, - stdout_lines=e.stdout.splitlines(), - stderr_lines=e.stderr.splitlines(), + msg=e.msg, + rc=e.rc, + stdout=e.stdout, + stderr=e.stderr, + stdout_lines=e.stdout.splitlines(), + stderr_lines=e.stderr.splitlines(), ) except Exception as e: module.fail_json(msg=repr(e), **result) From 98315a49d6ec631ba53eb9414087fa67046507a6 Mon Sep 17 00:00:00 2001 From: Ivan Moreno Date: Mon, 14 Nov 2022 16:38:01 -0700 Subject: [PATCH 008/495] Added more information to error when restoration fails --- plugins/modules/zos_copy.py | 17 +++++++++++++---- 1 file changed, 13 insertions(+), 4 deletions(-) diff --git a/plugins/modules/zos_copy.py b/plugins/modules/zos_copy.py index b316dfa16..f984e9195 100644 --- a/plugins/modules/zos_copy.py +++ b/plugins/modules/zos_copy.py @@ -1613,30 +1613,39 @@ def restore_backup( if not members_to_delete: members_to_delete = [] - for member in members_to_restore: + for i, member in enumerate(members_to_restore): response = datasets._copy( "{0}({1})".format(backup, member), "{0}({1})".format(dest, member) ) if response.rc != 0: + # In case of a failure, we'll assume that all past + # members in the list (with index < i) were restored successfully. raise CopyOperationError( - "Error ocurred while restoring {0}({1}) from backup {2}".format( + "Error ocurred while restoring {0}({1}) from backup {2}.".format( dest, member, backup + ) + " Members restored: {0}. Members that didn't get restored: {1}".format( + members_to_restore[:i], + members_to_restore[i:] ), response.rc, response.stdout_response, response.stderr_response ) - for member in members_to_delete: + for i, member in enumerate(members_to_delete): response = datasets._delete_members("{0}({1})".format(dest, member)) if response.rc != 0: raise CopyOperationError( - "Error while deleting {0}({1}) after copy failure".format(dest, member), + "Error while deleting {0}({1}) after copy failure.".format(dest, member) + + " Members deleted: {0}. Members not able to be deleted: {1}".format( + members_to_delete[:i], + members_to_delete[i:] + ), response.rc, response.stdout_response, response.stderr_response From e3a270f1f11b78a319b62e83e6e8a2221de15ca9 Mon Sep 17 00:00:00 2001 From: Demetri Date: Tue, 29 Nov 2022 13:59:07 -0800 Subject: [PATCH 009/495] Update make file with new features to manage the nodes (#563) * Update make file with new features to manage the nodes Signed-off-by: ddimatos * Correct typo Signed-off-by: ddimatos * updated makefile to allow for no password in the clear and added a new host Signed-off-by: ddimatos Signed-off-by: ddimatos --- Makefile | 283 ++++++++++++++++++---- make.env.encrypt | 449 +++++++++++++++++++++-------------- scripts/mount-shr.sh.encrypt | 71 ++++++ scripts/profile-shr.encrypt | 197 +++++++++++++++ 4 files changed, 767 insertions(+), 233 deletions(-) create mode 100644 scripts/mount-shr.sh.encrypt create mode 100644 scripts/profile-shr.encrypt diff --git a/Makefile b/Makefile index 3a95c29fb..da868e7b9 100644 --- a/Makefile +++ b/Makefile @@ -47,42 +47,133 @@ divider="====================================================================" .PHONY: help Makefile -## Encrypt the `make.env` configuration file as `make.env.encrypt` with user specified password +## Encrypt the configuration files with a `.encrypt` suffix for files +## [make.env, mount-shr.sh, profile-shr] with user specified password. +## If no password is provided, you will be prompted to enter a password for each +## file being encrypted. ## Example: +## $ make encrypt password= ## $ make encrypt ## Note: This is not a common operation, unless you tend to edit the configuration, avoid using this feature. encrypt: @# -------------------------------------------------------------------------- - @# Check to see if there is a make.env if not exit before deleting the - @# encrypted make.env.encrypt + @# Check to see if there is an unencrypted file(s) to encrypt, you would not + @# want to delete the encrypted version if the original unecrypted is not + @# present as there would be no recovery process then. @# -------------------------------------------------------------------------- @if test ! -e make.env; then \ - echo "No configuration file 'make.env' found in $(CURR_DIR) "; \ + echo "File 'make.env' could not be found in $(CURR_DIR)"; \ exit 1; \ fi + @if test ! -e scripts/mount-shr.sh; then \ + echo "File 'mount-shr.sh' could not be found in $(CURR_DIR)/scripts. "; \ + exit 1; \ + fi + + @if test ! -e scripts/profile-shr; then \ + echo "File 'profile-shr' could not found in $(CURR_DIR)/scripts. "; \ + exit 1; \ + fi + + @# -------------------------------------------------------------------------- + @# Check to see if there an encrypted version of the file, if so delete it + @# so it can be encrypted. + @# -------------------------------------------------------------------------- + @if test -e make.env.encrypt; then \ - echo "Remvoing file 'make.env.encrypt' found in $(CURR_DIR)."; \ + echo "Removing encrypted file 'make.env.encrypt' in $(CURR_DIR)."; \ rm -rf make.env.encrypt; \ fi - @openssl bf -a -in make.env > make.env.encrypt - @rm -f make.env + @if test -e scripts/mount-shr.sh.encrypt; then \ + echo "Remvoing encrypted file 'scripts/mount-shr.sh.encrypt' in $(CURR_DIR)/scripts."; \ + rm -rf scripts/mount-shr.sh.encrypt; \ + fi + + @if test -e scripts/profile-shr.encrypt; then \ + echo "Remvoing encrypted file 'scripts/profile-shr.encrypt' in $(CURR_DIR)/scripts."; \ + rm -rf scripts/profile-shr.encrypt; \ + fi + + @# -------------------------------------------------------------------------- + @# Encrypt the files since we have verified the uncrypted versions exist + @# Note: we should move make.env to scripts as well + @# -------------------------------------------------------------------------- + + ifdef password + @echo "${password}" | openssl bf -a -in scripts/mount-shr.sh -out scripts/mount-shr.sh.encrypt -pass stdin + # @openssl bf -a -in scripts/mount-shr.sh > scripts/mount-shr.sh.encrypt + @rm -f scripts/mount-shr.sh + + @echo "${password}" | openssl bf -a -in scripts/profile-shr -out scripts/profile-shr.encrypt -pass stdin + # @openssl bf -a -in scripts/profile-shr > scripts/profile-shr.encrypt + @rm -f scripts/profile-shr -## Decrypt the `make.env.encrypt` configuration file as `make.env` with user specified password + @echo "${password}" | openssl bf -a -in make.env -out make.env.encrypt -pass stdin + # @openssl bf -a -in make.env > make.env.encrypt + @rm -f make.env + else + @openssl bf -a -in scripts/mount-shr.sh -out scripts/mount-shr.sh.encrypt + # @openssl bf -a -in scripts/mount-shr.sh > scripts/mount-shr.sh.encrypt + @rm -f scripts/mount-shr.sh + + @openssl bf -a -in scripts/profile-shr -out scripts/profile-shr.encrypt + # @openssl bf -a -in scripts/profile-shr > scripts/profile-shr.encrypt + @rm -f scripts/profile-shr + + @openssl bf -a -in make.env -out make.env.encrypt + # @openssl bf -a -in make.env > make.env.encrypt + @rm -f make.env + endif +## Decrypt all scripts used with this Makefile using the user specified password +## Files include: ["mount-shr.sh", "profile-shr", "make.env"] +## If no password is provided, you will be prompted to enter a password for each +## file being decrypted. ## Example: +## $ make encrypt password= ## $ make decrypt decrypt: @# -------------------------------------------------------------------------- - @# Check configuration exits + @# Check configuration files exit @# -------------------------------------------------------------------------- + @if test ! -e scripts/mount-shr.sh.encrypt; then \ + echo "File 'mount-shr.sh.encrypt' not found in scripts/mount-shr.sh.encrypt"; \ + exit 1; \ + fi + + @if test ! -e scripts/profile-shr.encrypt; then \ + echo "File 'scripts/profile-shr.encrypt' not found in scripts/profile-shr.encrypt"; \ + exit 1; \ + fi + @if test ! -e make.env.encrypt; then \ - echo "No configuration file 'make.env.encrypt' found in $(CURR_DIR) "; \ + echo "File 'make.env.encrypt' not found in $(CURR_DIR)"; \ exit 1; \ fi - @openssl bf -d -a -in make.env.encrypt > make.env - @chmod 700 make.env + @# ------------------------------------------------------------------------- + @# Decrypt configuration files + @# ------------------------------------------------------------------------- + ifdef password + @echo "${password}" | openssl bf -d -a -in scripts/mount-shr.sh.encrypt -out scripts/mount-shr.sh -pass stdin + @chmod 700 scripts/mount-shr.sh + + @echo "${password}" | openssl bf -d -a -in scripts/profile-shr.encrypt -out scripts/profile-shr -pass stdin + @chmod 700 scripts/profile-shr + + @echo "${password}" | openssl bf -d -a -in make.env.encrypt -out make.env -pass stdin + @chmod 700 make.env + else + @openssl bf -d -a -in scripts/mount-shr.sh.encrypt -out scripts/mount-shr.sh + @chmod 700 scripts/mount-shr.sh + + @openssl bf -d -a -in scripts/profile-shr.encrypt -out scripts/profile-shr + @chmod 700 scripts/profile-shr + + @openssl bf -d -a -in make.env.encrypt -out make.env + @chmod 700 make.env + endif # ============================================================================== # Set up your venv, currently its hard coded to `venv` and designed to look first @@ -98,34 +189,41 @@ decrypt: ## $ make vsetup req=tests/requirements.txt vsetup: + @# ------------------------------------------------------------------------- + @# Create the virtual environment directory if it does not exist + @# ------------------------------------------------------------------------- @if test ! -d $(VENV); then \ echo $(divider); \ - echo "Creating python virtual environment 'venv'."; \ + echo "Creating python virtual environment directory $(VENV)."; \ echo $(divider); \ $(HOST_PYTHON) -m venv $(VENV); \ else \ echo "Virtual environment already exists, no changes made."; \ fi - @if test ! -e $(VENV)/make.env; then \ + @# ------------------------------------------------------------------------- + @# Check if files exist in venv, if they do we should not decrypt/replace + @# them as they could have edits and risk losing them. + @# ------------------------------------------------------------------------- + + @if test ! -e $(VENV)/make.env && \ + test ! -e $(VENV)/mount-shr.sh && \ + test ! -e $(VENV)/profile-shr; then \ echo $(divider); \ - echo "Decrypting configuration file into $(VENV)/make.env."; \ + echo "Decrypting files into $(VENV)."; \ echo $(divider); \ make decrypt; \ mv make.env $(VENV)/; \ + mv scripts/mount-shr.sh $(VENV)/; \ + mv scripts/profile-shr $(VENV)/; \ else \ - echo "Configuration file $(VENV)/make.env already exists, no changes made."; \ - fi - - @if test -e $(VENV)/requirements.txt; then \ - echo "Requirements file $(VENV)/requirements.txt already exists, no new packages installed."; \ - exit 1; \ + echo "Files $(VENV)/[make.env, mount-shr.sh,profile-shr] already exist, no changes made."; \ fi ifdef req @if test -f ${req}; then \ echo $(divider); \ - echo "Installing user provided python requirements into 'venv'."; \ + echo "Installing user provided python requirements into $(VENV)."; \ echo $(divider); \ cp ${req} ${VENV}/requirements.txt; \ . $(VENV_BIN)/activate && pip install -r $(VENV)/requirements.txt; \ @@ -133,7 +231,7 @@ vsetup: else @if test ! -e $(VENV)/requirements.txt; then \ echo $(divider); \ - echo "Installing python requirements into 'venv'."; \ + echo "Installing default python requirements into $(VENV)."; \ echo $(divider); \ echo $$(${VENV}/./make.env --req)>${VENV}/requirements.txt; \ . $(VENV_BIN)/activate && pip install -r $(VENV)/requirements.txt; \ @@ -143,7 +241,8 @@ vsetup: endif # ============================================================================== -# Normally you don't need to activate your venv, but should you want to, you can +# You don't need to activate your venv with this Makefile, but should you want +# to, you can with vstart. # ============================================================================== ## Start the venv if you plan to work in a python virtual environment ## Example: @@ -177,6 +276,10 @@ vstop: ## Example: ## $ make build build: + @echo $(divider) + @echo "Building Ansible collection based on local branch and installing." + @echo $(divider) + @. $(VENV_BIN)/activate && rm -rf ibm-ibm_zos_core-*.tar.gz && \ ansible-galaxy collection build && \ ansible-galaxy collection install -f ibm-ibm_zos_core-* @@ -233,6 +336,7 @@ test: @# -------------------------------------------------------------------------- @# Check configuration was created in venv/config.yml, else error and exit @# -------------------------------------------------------------------------- + @if test ! -e $(VENV)/config.yml; then \ echo "No configuration created in $(VENV)/config.yml "; \ exit 1; \ @@ -347,11 +451,15 @@ install: ## Example: ## $ make version version: + @echo $(divider) + @echo "Obtaining Ansible collection version installed on this controller." + @echo $(divider) + @cat ~/.ansible/collections/ansible_collections/ibm/ibm_zos_core/MANIFEST.json \ |grep version|cut -d ':' -f 2 | sed "s/,*$\//g" | tr -d '"'; # ============================================================================== -# Check the version of the ibm_zos_core collection installed +# Print the configuration used to connect to the managed node for functional tests # ============================================================================== ## Print the contents of the config file (venv/config.yml) which is used to ## connect to the managed z/OS node to run functional tests on. This will only @@ -368,20 +476,50 @@ printConfig: fi # ============================================================================== -# Check the version of the ibm_zos_core collection installed +# Print the make.env contents # ============================================================================== ## Print the contents of the venv/make.env, this only works if ## you have set up a venv using `make vsetup` because a password is required to ## decrypt and a decrypted copy will be placed in the venv. ## Example: -## $ make printenv -printenv: +## $ make printEnv +printEnv: @if test -e $(VENV)/make.env; then \ cat $(VENV)/make.env; \ else \ echo "No configuration was found, consider creating a venv using `make vsetup` first."; \ fi +# ============================================================================== +# Print the make.env contents +# ============================================================================== +## Print the contents of the venv/mount-shr.sh, this only works if +## you have set up a venv using `make vsetup` because a password is required to +## decrypt and a decrypted copy will be placed in the venv. +## Example: +## $ make printMount +printMount: + @if test -e $(VENV)/mount-shr.sh; then \ + cat $(VENV)/mount-shr.sh; \ + else \ + echo "No configuration was found, consider creating a venv using `make vsetup` first."; \ + fi + +# ============================================================================== +# Print the make.env contents +# ============================================================================== +## Print the contents of the venv/profile-shr, this only works if +## you have set up a venv using `make vsetup` because a password is required to +## decrypt and a decrypted copy will be placed in the venv. +## Example: +## $ make printEnv +printProfile: + @if test -e $(VENV)/profile-shr; then \ + cat $(VENV)/profile-shr; \ + else \ + echo "No configuration was found, consider creating a venv using `make vsetup` first."; \ + fi + # ============================================================================== # Cleanup and teardown based on user selection # ============================================================================== @@ -396,11 +534,6 @@ printenv: ## $ make clean level=all ## $ make clean level=min clean: - @echo $(divider) - @echo "Deleting venv" - @echo $(divider) - @rm -rf $(VENV) - ifdef level ifeq ($(level),all) @echo $(divider) @@ -416,7 +549,11 @@ clean: ifeq ($(level),min) @echo $(divider); @echo "Minimum teardown selected."; + @echo "Deleting files = [make.env, mount-shr.sh, profile-shr]."; @echo $(divider); + @rm -rf $(VENV)/make.env + @rm -rf $(VENV)/mount-shr.sh + @rm -rf $(VENV)/profile-shr endif @if test -e tests/functional/modules/test_module_security.txt; then \ @@ -426,17 +563,23 @@ clean: mv -f tests/functional/modules/test_module_security.txt tests/functional/modules/test_module_security.py; \ fi - @if test -e make.env; then \ - echo $(divider); \ - echo "Encrypting 'make.env' to 'make.env.encrypt'"; \ - echo $(divider); \ - make encrypt; \ - fi + # Unsure really need or even want to do this as part of cleanup + # @if test -e make.env; then \ + # echo $(divider); \ + # echo "Found uncrypted files, encrypting them."; \ + # echo $(divider); \ + # make encrypt; \ + # fi else - @echo "No level has been set for this target, please set a level." + @echo $(divider) + @echo "Default teardown, deleting $(VENV)" + @echo $(divider) + @rm -rf $(VENV) endif -## Copy your ssh key to a `host` or the default which is your username. You must +## Copy your ssh key to a `host` or the default which is your username. If you are +## copying a key to a production server, a second key will be copied used by the +# jenkins node, this minimizes the number of times you must copy a key. You must ## have set up a venv `venv` as that is where the environment script and configurations ## get written to manage this make file. It avoids continued decryption prompts to ## force users to set up the venv via `vsetup` @@ -444,14 +587,49 @@ clean: ## host - choose from a known host or don't set a value for the default operation ## which is to user your username to look up your default system ## Example: -## $ make copyKey host=ec01132a +## $ make copyKey host=ec33012a ## $ make copyKey copyKey: + @echo $(divider) + @echo "Copying SSH keys to the managed node authorized_keys." + @echo $(divider) + ifdef host - ${VENV}/./make.env --cert ${host} + @${VENV}/./make.env --cert ${host} else - $(eval username := $(shell whoami)) - ${VENV}/./make.env --cert ${username} + @$(eval username := $(shell whoami)) + @${VENV}/./make.env --cert ${username} + endif + +## Copy your ssh key to a `host` or the default which is your username. Then +## copy the super share mount script and profile for the mounts, execute the +## mount script and exit, upon rmote ssh, `profile-shr` will be located +## at `/u/${user} where user is defined in the make.env `host_list`. You must +## have set up a venv `venv` as that is where the environment script and configurations +## get written to manage this make file. It avoids continued decryption prompts to +## force users to set up the venv via `vsetup` +## Options: +## host - choose from a known host or don't set a value for the default operation +## which is to user your username to look up your default system +## Example: +## $ make mountProfile host=ec33012a +## $ make mountProfile +mountProfile: + ifdef host + @make copyKey host=${host} + @echo $(divider) + @echo "Copying mount script to managed node and executing." + @echo "Copying profile-shr to managed node." + @echo $(divider) + @${VENV}/./make.env --files "${host}" "${VENV}/mount-shr.sh" "${VENV}/profile-shr" + else + @make copyKey + @echo $(divider) + @echo "Copying mount script to managed node and executing." + @echo "Copying profile-shr to managed node." + @echo $(divider) + @$(eval username := $(shell whoami)) + @${VENV}/./make.env --files ${username} $(VENV)/mount-shr.sh $(VENV)/profile-shr endif ## Display the z/OS managed nodes available and configured. This will show which @@ -459,20 +637,20 @@ copyKey: ## Example: ## $ make printTargets printTargets: - ${VENV}/./make.env --targets + @${VENV}/./make.env --targets ## Build the changelog, this should be a release activity otherwise the generated ## files should not be checked in. ## Example: -## $ make buildChangelog -buildChangelog: +## $ make buildChglog +buildChglog: @. $(VENV_BIN)/activate && antsibull-changelog release ## Update the documentation for the collection after module doc changes have been ## made. This simply calls the make file in the docs directory, see the make file ## there for additional options. ## Example: -## $ make buildChangelog +## $ make buildDoc buildDoc: @. $(VENV_BIN)/activate && make -C docs clean @. $(VENV_BIN)/activate && make -C docs module-doc @@ -482,11 +660,14 @@ buildDoc: ## Cleanup and remove geneated doc for the collection if its not going to be ## checked in ## Example: -## $ make buildChangelog +## $ make cleanDoc cleanDoc: @. $(VENV_BIN)/activate && make -C docs clean + # ============================================================================== -# Cleanup and teardown based on user selection +# Self documenting code that when comments are created as expected, the help +# is auto generated. Supports multiline comments when comments are prefixed with +# 2 pound signs and a space, see examples in this makefile. # ============================================================================== ## Help on how how to use this Makefile, options and examples. help: diff --git a/make.env.encrypt b/make.env.encrypt index 678e78381..ad7ae2396 100644 --- a/make.env.encrypt +++ b/make.env.encrypt @@ -1,182 +1,267 @@ -U2FsdGVkX1+h3hd2AlxuoIMQpQFeOb4GeCx4Y0NZ+oWHMlO5G3Ytxln6awGw4REt -UiB8NWzVroF24yJoAC6F2Xwz4MzskKFYsHKLU90061OIfu8I3BCuNGSLz9Hsw/cW -yRwkIdd0VjdVV7Wuk60dHFlHFHrY/kjrwv0lHymLla07ymOV9JUgCJutVdSNzWX0 -1CAXSHUVBeOJ7nuKVnknlanAMNsa2/IDyVE5GkrbE2mYD1cCbapIKhpf9yAiqICf -nl/m8XxZHj+MgssC1PC7JFyMc7mHxWwJdruQUXgLHtcHYDHu8Qd8TDztsOFkdsE5 -v1qRdvTifjOOA3zUE29e1WEHlky6ThtzNa9P/1z4pPMS5r2HUOoLONfgvO+0oeP3 -UhmW26BUuOwAxSIf3L+JZ/ZigUt04LJe3ah9PD5KiN/SokE1Rynwej4yxnGRUMeW -OLj/tsQCnxJf7Tx/p3NcqorAjFt1h7wCjxazoGpxiJJWcaFLyH6j/LSBezpU43uJ -9nCiT6zKsTDNpODntZEVWykIri+2xPfOq/nIYag1KyKdPeVw+PX1rvDkWcS4WoQ7 -5yYj0q580zDaHi7qHrLdhd1bbNaiHxJpgPH/q9goMq5Yt+V32BGUAwnkf546EouZ -ywDfoVwr27QkAfw00Mnu/+SUjAUlCupi+GbgaE5pbKcyBYh1ZqLoJRKkrFkNkJCO -vVFAVPbHsPMHdGg/0hgMTE3KIk4tUMZ4VfngdGdgkGE0p8EnVM3pdrGEpTI63wqb -axw2mNmzLwMSYfl0FJVdcUU85K4kSvyitk3pLvdCoLO3cXMmUPObJXEyLSFh2JpG -qEcn5tPf76Jei52LbtFXBHQmr7Usktx2RLC2y06v9OAfu9godOiHTDRFaPfxNBUQ -OmoL6xmNjdGoIg3veYSIPsSCMgid8AF6xUmOFCYJT+NfyUE/klcWKc5ZQY6c5ukL -GjZ5vlDyyB05ADeQiQSJ1stO7kwl8UduJ/gXfeZ+arOSH3SxZg/WjDewoqKAuMvB -lWzwvtqHeYcfFFNi9KpXEQS1r0YAJREWfQQPg0yK6oxpwnPScOqLS4pkr8ThLzZD -/Qu7/R1H0PiY+aAyTJwXmJLgWl8ZeA+FeORD+YRASzlv10bA+k4DN8iasO/MkR5n -mIilzEJyyYMoQZN1GlafsBkaqg/4fNJV0wtnQBswTWWTr6+DYBSt7gcy6Bt5hbDY -msdL1Jgcqt9HXBKSK5c0L9r4DtCbKaFvyLIjUFKID4/cDqKFAkxoagwGNIpq242g -qkSHxRwyxc4S5PIQvRxf3/hfQErYS6in7s2yYQaa8R+246CRwwnYBx/sl4PXmlsk -rvVeiSGGsl4ZZsU0s50cLCs7jmU2yqeMT6hS/brQkcD0MwzRXbuqa5kaz2LD6vO6 -t/g/YPkMv6FBtnFodEkJYesE+uhe/vA4a0NrlKqWfbBRQai/HCUwwj+uT4V0YiFM -Ib4ZMJq6m1VEcp/+xU0ABPaW9KD5z0olNMgcA4N75gjCueQbQSLh1N4ESzwO98J5 -9Eb9qmldSoprkOdHxZRhPO1KBGFDnnmiuHB7wamimS0Ts17n0JBYqdlCHlFOX91g -ZwYBZpbBt7bSupgNgxISY/RucEcilJJlziaNbSCcYvZKw7VKAbObyXSfaqPeYbS4 -Xe2wsyJMuhEQBqHGhnS2gWsUZqs1PIV1pgmVWd7VaTgrX2WrspVRZSFlP5HITLnp -/VY2xxBxkjTHlLU4qqF8+c6hrgVmIjmeLqfFHU8M9sKu+ZJCOBbE1bOsTAxEZoLw -qYw1Y2KdP/ZAIbzRpmxhAJmPtkWfGrTLMsq00Hauz1u/e6pDIXvWVUZSvk7dDL4+ -29jSWTXI4vqNv4mc4DiB3hqIYUGiUt16YqEIXiSXL+fVM1gQG3qZkJMbCr/y5G39 -DCXe+yk5i6g7gmpxBRwmOwBLgA+4toiZcls/3VG9VtbUPQ+aXYbMKq3puFeDAbDO -zsC3J8o+n39JL3gi7lpstsk5UTVi625k4Hja052wBe8vwx7rnuPPh7TV4NsJpb0O -AbWf0x66vif2kamtRj+bq1aqc+9CSuCGbWt1P/Vh3FG/wOsUrs/60EpXIERFdPxU -0HBg7Obp3gdCzzQIJBDurK2RTO5om6yht6FOwj/RwnH9FudtcE8UyN+BexHpxcqE -uX/2ON/l3Rrn1jfu8xOFWb8bz3LgQbvgWO645b5x4PTbvgezNjGhoYfkRZ9ouM+a -OI0+JCm/w4vMdyumzvJUhqLFdDfDIRxp+z0Qohcr9BnHsOHK5Fm7C3li5rx975ZB -ETDE4kUPr3NkQmPCzICsKmDzwjFaqddR5n9ORaEdAvTxp8B/qlUaVESKvsEbAguI -fHBixQa9VnK6JMG527LGCqC1bQrh75iEXAVdvDgIrnVwSK5ZmnApzJMs0gI/QIoy -cYE+v5yn8SpfBC+NBbDaiUqHU3lPPNiZinlhzO22MI8/1ivl2c0rjwBO1pMqEyWC -VzIv2pwiTBDy4JOWlIEB2SCV8co1MHOypWypE+ccZUfpPZaoNmin/MzbnSe5/tmH -NdI/BqXCv2DVODHT8otpEozYzKylJ4IKMJbS56aQsC+TNLca3uuUNHqSI5dg3/X9 -/kLXZUtUsHt2kvO9md6HnXGIk+kzpPGX5SCmManNxGBOB+a3mxg+/tx4cUHwnLmr -DT/L5BffrDHGn9cFy1XgVKGheLng1xxqQwa9rV5fadepz2QhAzjQ6xH60ojXHpmr -BIp6BF6irlg1w8OLDv5S0VQrNvqnbVByY4NQNeaGcCGLccPmFSrJOuUdPHvdnAo5 -3g4oMcF35661Rqasqv7D9XXSJ0pyEyYg8wcaF2hMVV5e7WMrAMj74RPO7H4VHsqc -rj2UT2Ww9euknHuKYnFteoBCgjMblZiRD3v6Z8fZoXK8Jq1x9OrlYEHevYD2rkDs -VD+0FlwWibs6z14XMUVlk9yYKUYcJnnYytHr8L6/6THJS+dHc/3X3Km4VqgcZwKu -BVs6orcjS1Gr4aowaT8Z9F+R2f/1ouizBf1x3w8BFwIQ0SmIDAPDL6Sa9GcUYNAF -WKNpFI1IVnX2kvMAzonZYM16yFTAWG7ulZmE1+QWp3kxV7RjVCVb8UXIBbCNq6yA -KBUR2sjMGuY8iMBstyesMJLaGS6nqe5kopUxX5otLzuukp2hnyqg6SO50ZagMTqs -faSVEnLRR87KJelbsn9ImN2rlwCEM4djOx7TxFUk5wjzQTaHFg+Ofpp+GILQAQ0C -6Xun97z3IPBAUseyjItg6RNlK/qvlIbwh7a99qTOp5S500r/2Q/MXuIN1xsgJ+Vu -M8dCS9UUDRrlDjX07V4fm78qCe3aCgD4g4r4oGvvrlEl8qeLWhYQQriVCMPpnSFl -17Vb/SAalfP86CaLXpApVrqx7uwcmo3rO6GKTSXdkwQBr6vXRpP2/eknfaiPb1SI -cfOY7HPzjbWYonRZvXlYpE37ibggQlkTPfTQ0D7gKtMyARKjw/TJJnytPFPx9Ilc -5HF9dZPnOPNH5CTpeh2qt1T9piCWF+sYH2W0Tvyy933mfU6g9iOeAxg0IDzJ41C7 -JhodTDjD6Y6a/v6FD1dYQ8V+5ottzkwrWC1I3/anUGvA/jeQe3SDorU0gCf0G079 -MCv3G2eFRz152HKCC7YLpW1wKuaEo5bpyuLRjUF83hNsPKGH5ZwBoWuVnzh0VWnQ -xanccfpJoDRUPkfz1JtIeDZleiIY59EmSZuOvtH+xWche2v/aov+JAODFI8z056c -KYFxLFGBaplYka4DhuYPOjQOyW4uMH+nYQCqF+zErOA5CrNj6rSLQXLN4r5uAqyh -IlpxXcYNKHGGNAS2BrQS136m49QPWC8b+i/zsQKcykRdPaJws11/xQXif+ZMN37g -zEVkOzpv00w+fB+qkCB8IDkjKsR771dYTzZfWE5/z7/D0a2zCC68KBSWWi1bFnG/ -3ELB3LAX6Qe4cLlG0RIA8rE7/Ff3EWd7GBbnQaRqZu2IpZBkl/kAxlaIIkNv70lj -C1jP9k+wPrf7oiRjWWNvWx/E3+S1nZJeU2Xzlq2PG05/2B2PAs3GSe9i3SY/4Ov4 -OO9WR+e2CsWeaQU6b4eQ3IrZEnKiYBcTYwAaXyYq/LKFM9BSu+erGU8V7MbtRjmg -JkjTjbu+1H9SWYwcRAcrTP8gbhiWvpa48Aa2icj9s+5eV8tkhnsqQaRzqYTyX1QF -rNGhq60Oy1jzEjz/jIgyIJJiX6/gbkgIQpawgh75tSpIFDi2ra/AixvFwPHpCCgx -AeHQ/Lzf5l2UzypMkAG7C7PchuFztv9Tb6vHAszyVzOBzS95Lgr66WYQKsfLPQif -GPO8d/XR9aXC/V5uFsPJpPW/gFrFAgAhr8q4MTn4OSL1EWT6VryufQ14OpMVnADa -qG98G11W0SFMNVbuDEd4AtBlbqxRavCejY2AntMDG8RmLOdkpAB8eIOb0OJN/XUY -fm+Ljs+eUiU3TuCMjlUyJMePJJJacb5vO2h70sJikl84O3gAFMzdE3p2aDWtFqX8 -r6K1TYZIWk4afJeIFLn5IftqAf3GJbPvM2qMp8q8hrQhRtc5RUr5Z9xc/WCoP1P5 -GrSBIeS3EEF+KsVgfOLbltfidvI5bmX0nz1OLJv/1Yp9JjDG3LJV+wnKWePNzoDp -eS+XFdvZJY2OWnjhfDA42fVtTdQb+Cfhljdj6XJCbT1Q0KD9o/uN47IK24csXX3v -+xIQRqvslzndrtr7KE3Jwg72exZo497C2T6WpIEw3UCPCiWLfogwKtW+fen05PFp -BkuCAzxZJn1MHDpUimQWoyRWuZuNQ3vztWWnKj9cejYaXIcbHheezb0xqg3gBONQ -gg0DKaSW2ULmIOIOp5DIXnKf9ag3JQpDsKXnjAsKaG0Ii62Y7xtW6vmCkbygjWsG -mIC2ivf1gZfx7+6xdmzXnyMZxeW24/eeANl9QQz9Ai7MmDbJ8yXP/M9FNM2a/DaM -q79MZLZW/t/vNu7yyZYh/6qks94Sk6kmrQQwQLlm3+H9sDDF1MUITtJnf8N2+lx8 -b3uhKPylpooNGJLLEKidhmCfWOQ2um6rzUVyKFizLkFPsJx3k241sIfHHjTB3RDB -+Lfqt31Q9RHSxwi7sXdGO8lZTWILUYqtrp0qpIbyYbCFj4qKCgH3i5G/rB70FadV -t1Mqu079ujuZVtzKZcF4GRqYYE+Kr0zOuwuhchwNHSAIzJjfZZ0rTZYWzuUS9sMi -LMQlcU1Sa7IcZcgiWXsAzrSoE2th8m+jMuzCWTxUUzwoq/3pVPGik63MlcL0qkz2 -0TJ4TIGMA2L/tLdJoZNXwFABIkDLJqQ0isCTUAWi7b/d4iVXpZsOgmgDs/aBJGxG -If7Jrefz/Ho9ROE0C5T80MgeOitfIWcs5rIZdDpMZWzKITE6GYj6NCyu9SE68LEz -VyUawZXZyGLI8GH0pwfUc+dapsum7BRVPw+MrhaY088QuqYTDYOBNxkoM1ylt6eH -63PpW8RoPIQq+RTwdN0WN10ocEm0B+0KAi8FU/fwAjORdDlvsjBAmyMNyPUVOZSf -CO4VrQbHG8hLhMNtH/AOrVmN5jm+BbrmwF/3oyXW8VkYpsEwwvWob0dufgSAOPJQ -5X5vCjArjtsQXFXi1gjJpYB+Ik43b+mS+iwiwk6OB9Shtyfl8DA8lpMZoQpbUnFb -Cx2YWZHr/oJb+Ab4i1c6QC4OEZfSvG4LABlt0NBdIjQk0pgf4rfMSEOwT/5s5F9Y -9bN10XwrYbZUg1TAtEHylxqfSGuRF0cOd5PcTQtx0e65YlTpYA1C1DS6TrWukk30 -S5Hf+bvsws9HDo2Wj1im3NNcBrX8w2q7SJINVOs8pqX5MfD/b0mQdD1fRQPIfmju -tYPqZmdTpsMNT8j9sJ5Fg0yL1wHchGlaw9D29ZDa604UUH4qMzC1m0Wy6QMdLE2P -1AQwDLizq2gTvS+pNp6sEMbO+hKZD4l33Ps5MrPY8rQrUDdUIwJ9jNrvRu776KG4 -uWqCwEB2LNcHXbWF9kARkfMWbXseRZicm6c6UtXCUvWufepCUXN3h9UM0pghJGVq -J+JH5VIfpcb5kml9zdcYtiHAFzTnJXNBD9yKv8KP4yFwqPoUTsV3jO1+RGsxiZ9t -h620KpFVFi1Ass8DMRJAv088Yxj0NG17mgPLPv/NPo13/dZXV1l6bv6H8jmRDyI4 -vcfkVIRhRyujb9PR0Jd3CrMI695yK+zsxScn9sP4KtgP/dbDONfES4jkE+pou5JY -/+oCqcgVkvJ5o4d7m680bnXBv7/5mxAKwBret6LD5VV0HdUU5/VAfWs3yN0v7hw7 -8oFxQ7TG29oE9IrWhhPivKMvcy+yu49wwzJYKSEnoRZRRmvgy97nZLRKJBDK3hT0 -Y/TMXmVamg9IcrV2447mtqSt6Rkd+kIJVZevUJKOiRtOIF7QmX9ZFkXjYU5VVobG -T45WTrFZSMbQFEt3YZggL5e2fWjWeJQX3blio3daEwZHh0gMx2E+k9j2y9sENP0a -GfBdjff44hnylxceS16WSCwERLAhdgwDH8a/oWqmiswAzeIOs8hzFZDFyqvTb0iZ -mepn/wIdxewODHw2gPh89wi8q80fc1cNg5xjV3HoYwkj1LruKglAAnW92VfgzYNR -nd9GiVwnDHdElQ15grVIiMhhZ70DXzqAXf8jW06wMh1lGqLnkxZXAvEkN6aN6vFQ -TodRB0vA6+4W5sT0cDS46ej6XdWVKv1VoWL8h7ZgFozoG0dfy393fNZRL/QKfnla -YmJV7+lHssQEWrRoWGDom85gmuG3pAMLZsPepAsdOIRXRjxLL2BSPMQ/6OzYLCxW -AnBeXyjmrQRJ16rfo0+nPUYVuJ76JvC+EmLEhx9MjiOyEMjvdSqvfHziUrHviWI1 -uncm+h4CI+ZiP/9V52PHUG4+hb0t5TdBVx2p/YKX483XJrfGdr2v8tzTOEl2N+dV -bW7KHTghkhAajBjZ8zKqJTfkq7r1o7Z75y2yPCNprouWf9R5HtjGldmjM1F2P+eZ -Cq4o53ijn9SXmcBIcSFV5XSUe0MNrzDI0pfAhdQuripiT1Oh1XvRgdgghRD/3fuE -kj6pxJVRvVBwJNKMVLiVDZq3wXQZwex+BCiC+83I+uMBTVIG9p+Q5NEE5Zx0YO+L -xhBYjQ6YQ0bBaXrAdxBiy+m4sfCpfzAqujitHJxRYzQ+AeolKYRTYS/ReQk8vWqt -EmJPuKuHEsnWA5dqFMdTazydj7we2OhvspRPkSeMmJ5MqrvM1i379e56u1U/tF1l -GnkkPNPLDHlBkN9O5bAX2DeafbVwpqdgO+M0ea3eeOwEg4vLrQgyJfRoGWZZhjQK -vGoGTjFHmAnrYQqGu0ZAeXvxJB8gajTrxKZzLxVikzqsJyTlYRyQfJZS59jr4MWN -IKjGzO2ggWdTblamM7io6fFSoXnCN0651Kd2vfhy6/ak5q8FeZHN8zHmolzfYjWt -sGUq+uGiFrRjHOUEu7cmwKYusvKZM5Mu3B/aIgH7lg+N0wR7YSZkBtoC9CHKNoVA -GMswVd7d8rL31/AQLwSuzs4vlqEh7OL22XR1ZRHzPBdldMA8wINZn3Ym7Z79mmwP -xB7H6JFd36NO+AR8RzKZSuB2cjJjE5JYC5dBhFec5LWqeU+x7/tgtLAfbdCku5ve -y2RFMVaxQAcVHs5Eb1Z6nlqaTSCJT8iDOAPLKJ7/UqAIImD5YG5sWOcNwvW5MhaU -cXJan7uG86WOtvJeWScNRKqg1JVOCXeCgGWisI3MRsu2UUmIR97sTLHDfUqLRNa5 -qVrVAAU1LaR2AGeA/WMpQy+0u5/YxaPf3q2wPnsqYidSD+6SeBKQXDsYG2/gDath -RUEJeZGNiqMT9u/qIGbSKMKeQYb0bDPYOqQYTKWZWmDE+2i4XFZvxOTUeFUkQpmf -tjHBc4vANCWVqlkW/cYLxpqbVtlxUX26Z8yIkJq5tqm5dgF/O8urtQd2lQ7+pG3V -CtAAmE7pU9NqQ4g8SB7+gKDJF4d1Lzv4k8i4ibZY29GxgR/g4l8Li6ceT5oWIOgn -sPtfzzYZYVOLaYhoyv8Q8Nj+S1cEk7rhsgV5nnBRi4j1b42TAKkKQmTChHymK4IG -wq/Bik4DYNU+vBWihleO5yXGQpLFoPVbSk91dxxkgk2Hj9rpEWTxjmSRFjKCr8Xm -Lo2cgfKeCukiNk18NNig5sjqmkYMfTjqVlv3YBcX6BQVChhXf+EzvhvaASV3+aiQ -ztS6SUMROZznqxwZvANK2MhBN9pJOZthmzYQ22ifmDaxV3FlTODB7sYtOdyS31Nh -447MPBwlZl5aj7xRC2Z66bABMiyVmFTBmuZcqV4qS8fYUmpKryf3On7vyGIiJch4 -PwfqN4rh0J1rGqDUmqZyf7fsPU947G8FgkkT4QlGLoYzvbNk3aVpRIH3cMyWdEGc -0qDbMKXrcDbuPRLe2IG+FvYw1I9EKRnls04oltWMZ127k6Dxz9oSRrpWrR0TwwKo -1cJRRxDTo7zdsn+2Cq5gkuDqsA5UaO8ejRicgdvntUkxeuxToi2Y/Rv/GwTXNehj -6SWqA63sJmwYZEqMyTquy2sxS2Bb6EtBUfHlmoaEKr4jFRih6IaGcY+M4n/tysSC -8KpmDaT+0zgSRKzgksl3cVQxjHbt+vsSXHzl7D+LAahOVRXzgftnryTM47dGO+lg -sCuiEqhLQw2mrUeU9RSppQzVRnOz+4JD2YJr9az6a9L4jHsgG1KymuExePkTWB4r -CUfuAEiRAgkp3i2WwsZwXYt3h9KRAI88YdywxUOmuN6MsFkrHVRTajwCajJ95/62 -dQx47VnnHkPZD5P9a2n9ilUx12bSE7JRvehtfWP9utvokYMRc8bOuXFgLjTJD0GD -UzID74gJx7cayX2+afZXNTMeMIK2aESnmVosHZgRNYEbul7maRYEMl3V8I8wUcEA -QcRaW7YV4uz5tOZBTLipa17/kD6E4hcLoO3xcq+BobxnAFE4cK5VV9mw/1aw80W5 -dk32iuYZhTfd5WGPOCXGYEon8NT+OvK9SMzaL/O/Fwy1N71y/iUZdYFoKaiTGhJJ -ThA0+gXQTxRqu4hwlC9oM12DHuigqarDanGmRyqyGmzbffg2zPGEXpPNV3c+8kmJ -/RctCx5QozpeoUXKlK/OsQBPzSTO32pK9LxyI2AXc9ofIhdCLSA3OrBJOOHyuBwf -SqW9VTCLZ4W2O9LfL62qP+CjxyGi+s0yqOY4Gl7Oq7R2dZXfmvJHQptVL6tYWZ05 -EM8xmJaXkFjgo05JPYAzUsm3f2Sr0UinfZwHnfQcbp1A+fJa15FhW9zbD9mN/y/x -u9jPzS31+4D5KjxdM6zg/NOzkzv5hV62/xVTZwaI8ZZ0MPAOuK+q7n9dO8klKMfo -U23UE9J3sFQxaC0qYD8T4Z/xV/Sv7HsLrQjwB+uf7HxYb/ZvL5UjikWfmJIlIatA -MR6gqDZOCFDipfhzMKAD5Q9UOmNYMxVF9pMHwddJJvqKkajHYwvnPa6nm201GpIn -5K9a4WyCuO7jCpI/HaTl+gZszjSHSXv95cX+LGl5LZM2KLm/D6v/3tfpXBYHnftk -7NQ2DTXI+qUeCR653JbN3E0IhzQWqdGo6Az5D8AhUDp17NqW3q8LYOBwQUTptVOZ -BVWFm0k/myp8hbPgo/ge9OoxWbMof1Vmnd0lCuFvx+VRqeNWlcdEpt1IgzHJCMuL -ubImNmxRpCgqNtS66aJafe5PabEV+35x+BemTvRnGJpIsBxoetNs+uRsLqwW4pUf -uq5K7xhN8VRol4P3mY0vrgNh8gp2HfxuY3mnVP70AdAA0mRlSTdFeUvSox19h5Eh -HsvvQGBb/o2n6HyLR30X7mkR3CvIqTQnltYRAyusprf2RRQncpj/GY87x+PkV+bI -K/H6+E0XkHoIhz82MytnwqicP9B3g95dCGK5TPPj74Cn8b/p+CdaxWTQTVWPgFjN -qWswrgpVAWjYduFCaZRLZvxey61+mcfF60epO31coqzvikONn5mdyBEZC4ClK3X+ -LViuLNl9xsFvl5cAT1WngWawX0rMrtdgHia5FxoQW2sopcZH8rSPJ3hjfTy1+Aqz -Z1gPveuoMQbmjlMxoPz2cqgRiyXBBw02Z2TcEl1JQ2wsSM4goUTSehWEAtMVdXKw -V9fsk6M3Us9+hQ3f6Ma9pIm5FKiO1lTXvxO64m0wVlbalek6Ebt4YcInHvyj4BQI -Tismemd3E2hHXfMYcVJr7Tz8YkrpHZ4U0MMiYd5gY0m9eAaFjAP6KQuJBsRHqSya -f8yoPs+55zcFovtEMn1MfNmcrrejFCGXqCu7pk9q8n9RDmEhM+m65IBjwTJY4iB0 -ZZ+2TirDDg1LcKry2CkLgOQ+6J7KVyYAXw9Mj6i2RyQGTUNa+WOlQLWSWRaWTThO -o6GpZ7J6voetISjVO7IpMNDC2et9/ty3x0/ZClgh8XHnDgwPYk9sPwxqJkB3LMM1 -W6GFsOkUTAOypD6Zf4xdKYS0Sh6Sy9PWctBUY0pqver3biKk35A96LKiVBel/4LU -R65k3zD35NU5tYPAUU/ODrMTCxdMvwT3bYsh+UQ5S86UBak0lJ2wIk2QBBtPJRqE -sDpawEukNOVLdvVloKHNWud9t8xJT1eO3xoQworqMzH77/fYXR2Mu2Qd/X/ZEGiz -6j5WkTMvWeJpr8B4HzqWKuTJHgoB2TH7xdhBZ+XJMA/VUQo6GDmt9MKRdKZn+04S -XzTVe7NA4UHM4U9P4d3/KnzDntNLb4Dni/G5PWZQQDIhnjzRCQqhFCNa7KZxLrNU -w0awDhq5+gbCIbYyncK/5+o2zk6GhfQAaD1i+MllKLuQ7KlmKrcM/tDPGu9nBUfS -gVh2r48B07dsY9v22QNm9O8GNeyXGe/Xk82RChlbL5eDt1ob7Vi0RfP93MVWus/5 -B6bDXSZ1eY4L+AI5yPETNgHKX9DIzPkkeA+tosRFZ9H+7CG0oCdf29hdHU58rPvA -aXqdtwqkM2CTfdYwMvXMnoGI6DnIJhf8nrfRU7ayNR/FFR1wUu4d0zy32ojqWLzv -rQhjZfC/Vl7UgZXu+KzqdNRHdk3LswN3yZPRRQyDuVoD/KPkYyYBq+9AhmtaQBOL -+Jd0Vyj6TqhtE0oq+D+4L+AechA2qjEC7OnhjfqCvxACt/4WQ4hTtXrMt4M1/x00 -D66Q+5KDcZTTN6xNfEw+N4VkJx1qwsRqVRCmhox5nKGblPR/E8tRAgEtUxq4LoYo -/9PBuwEC+FUrFELr73vj2n+1+1Aq4Y/Va+F2m+l6NWg3+zAmRttdgnNafHM94yoE -vGl8e8zN++2P6X0Tl5HMDojncAQtJvLbBC24yYMu68nNMLLqFlr9UkynbyHV7Dq2 -+S9Fy1VL2kWcVtupiQhcD4gysK8T01AJPZDlu2ikgin6WlNSg90SLBqdw4bEZbBB -OCv8xtSVgOqa1kJoR4o1uwPVlVtqkD5KiIOniEdCbNyhSQzr8B+NBIZFglImDVib -w6AyIUy5mVSl1cH6NZXBBmV+QSltvLPpWoFOTA6Xx28bVBfKHp6gqgrcJzk6URjX -k0+1OAyB/4HKdh/NOfkc8fML7+e4vUJ79G5nHYpecow= +U2FsdGVkX18+W4d9i/Pv7SPHW1HcDa1oC4SgUC3uWVpK7kaXkcdk8lV8odYGtLxf +QoKF/usOm0CkUndeJRTcHdPL4wXDkOp6edyjXo7+7DjopKio+g6YH6q/HwTeOIqR +EUajgflZHSDQ3BwBFNnamxthpBUKmLuOqSd/OMD6FeWsKMW7Tm5lJNBH0sfYA7Nr +sNcWUHFgGJeurhKK3KIHqeMo+Yi2NbYXJ/s3FQGdgUyE+C/+DiRiEUsF3Ej8Vz9N +2fQOy9Z7LgPx6pVVzBzxJxPWAnz6W20rKIDD7Cu75Vh5zJ+TbTNNIFbvFg6caCUS +1/ueAvDy0NSJJle3SyXgO5ApV+JNbwRPTkE2zcoFP5EyLG59t/Tks7U/OxjN8vwV +Tl8FJcfc5tLAQnpm0hxVDph2++o0evh81vXQoUH3atNzPaldrCIrt7x4gzUa579j +cfTMiivakat5dgT9ZE30ogjk0Xvk/phox2fkp3t3GFPrk/H1yd9uC9mUjHww+q+C +Qm1H/5ovWpNNp/qX5npZQOJmb543XUqf6Y6gLrfm4rdL+2GYdrR0fGzaapSx+LZd +owZx19Vnj4NRfk8nHRF8NupffBWBQeDhZcXoFESL5D/pvSoChLUwISv/fw9o6W3w +L4LASrZAJ5ltcG0wdDy+D7si1lDOXREMv3j7tymG2Xtuc7MnVjidFgy4bHz4xLAH +zWBDFXVMIq+yRhL5bYAm8ffVvqWBXQz3Qq3LhKVf4x3ET/bl3jNjYVLMdGBskakI +tIDliOgssk1lAKiSzAF3J6tFN30yEHMfeTaHFl+LUGIfcNMtiVspTy0eRsgRaYv/ +Yr7kktcjhDZlv1Dgcd2Bk7VZwC3wV9DL6uMVwLTNwixsmS+FdVVmoo39j+NMMGDe +wD2PcyQdqHmmnGzhT4sIiezFdatMtWtntpguPjst1imr+58Ujd3D2w+LzE5ajx61 +/9+8ewlb1c3ScuzWS/9bFztK7jjL7ar6aI2ce+eTn70OnurJP3Dp9VBHQlz7RsaH +cqK2dyFcfI+Z3UOoTe504i33Tw6jBvdLl+o2VGPJa3CXhvDjPfCo3kdDMl6g+CLh +Bi1FKuAPkK0y5AZdC6ZbVIpv9EpPovkFrkxngNVmDbrAELMtNVIBxLIm89SghSPT +w+oBNBNvlzdkIARsa0JoYjst3YGDBo6NkLoZOcrUO6Ct8OUtXrXC14FBLpct0yUa +5FDX/iPrhKqsl4HSJ+/FuzMfX/TKGaDLcfL5+x1o5r1liFbM/VruPu+4AsFfiSv+ +Rfrb7YCTgyxS7/sTl4iVvVC0jmpA37mgO+g2hEdCv1n49aNQ93jp/rf26A/fGAdH +P+eGXOOJJyRNsr1knSkmpdOpIlo5L0np7AUpAjI7pC3iSDGBu8JXoK3ciuIY13NV +tvitHB7rsX61GHr23ph360Pqkb2PGkkBILUNF8ZOFfYoLskUXAQQitZ7MkGyDXJM +e2tBJ84BBl+xk9SXzOf+RR7oYL/caWZksUD44bn7o6O5rKlyHeaifb4pRLjyZawg +EYvslYtAgsSFEFFDnAObkA4VlpVpfWzPlK7XTUsiMEpLMPv64adYGsvT9JXlPTKw +v1JOWdVz5hqSAqWMVmXKDvwwVnetVhLsSS4bGBDaIKXFgvCiBbNraURnN6/nR6Ve +bIJvsAzcTJ/ln7u+D8LRiD3gjbWLT10wufofi50tyLMI7/0d7dUrtKgDw1XC7+/x +pNnsKRreRCVqQ/dt+fXMcFBhYJ/1e/SIxN7F4bYJKPfs7yuSqdmnO5mdj/hBZMW0 +YoDVjsBTm8EHkGWf2CoAWYJlusIuiF0YjkLbBxFTdlyZS0EMm2HFrioBYSHspWH/ +sA6x/1pxOoIxHO0EEJdaoU9syOwnUS5D3pnfZyVtxIm1gquAx3BXPIyuXXgBFhdZ +GSnOLzLlQMmCRKdlP284GIRg4rJ3s2kAmGS1L0JXhDzNupysiO+X++ztyXPtYjnf +Xmxu2KqDN1a7JS2bKRGwruPdXqtxmLik+alPQ5UDpuG2QR6FFuW4nOfC3R8gNolh +ZsMnr6dtX2vsM5nfb4p5DpE3ZOIMz/PS30pXC3179pw+NYpTuFqoZMyINAVddqWK +VAVkvH49dL++OFERPXOTp6Wa1t5Mo1sHySMx2kftWv5EHWzw2Zs2c5W9yfT1hsLm +uvpdu38IM3niYKueLpGoavUNYDmNivo6C1eca8I9HxZ8TDhk/jqQD0j9lkTg0Wp6 +CG1LNc3QEerwX8kE9yrNefq0d+1MCTsZ3Hb79pLpVYHZBSemGz8BP4OzgA5rrq97 +t5RUPz7E9vkzGLCdb4p9ln/JYY4M2sXpK5lal/L1enFfPb+2Mk5sRmHEwTnO+VSv +PTSf3DH1lIfWnbU6WeQZSohHQFasqsKRIUYWUaucQVEGMEm56bnsrciumwLJhkNd +JSGtWv+zh8H0Qrg61ehxBUM32t923ZX+TKLkO1dHoofvVpmKQYYMCtjQfuqqlIVF +Uuc5RQzJgs/cWYbBseZMMKuGC7KCwlqwPnwHqoT31LTvAtbhxDHAXKukpzfpPaBu +BWKuZwUCuo+KzaInaaABOelUjdZQJt6zVN5/OjeKG/fEqJgAEvSLjOLzAmDtA3tx +6nNTgfewTqUaL3iWhsz2C0Kkg2pHK75djwA19sSv5M6ehe8odlTo36H+JE7GAyH0 +W7MGXLgjTTnfXk7eZcdk9CaLvl3zZmzMs+9MD8rYI9RGWVb43L3l/56QRB1WL6rP +k9ntQFX4lgnDBukyYBu3H+8byZSwoSspDiybfSBVmU2F7uadTHOQAtP4aMp9y6le +g6Eyc4n7nGRaG8kOJ/pYKcXMDqACT6N6fJaoUx4v+U/6/dyFGVe4cVDVlnGW65Yc +NQT1GrsxzUmKgSnXJyS0YQ/zWrOzt4DUybQ3Gzc0ey1yX79UVD5D6IuvpSbfzFKF +P+8OAtICNXOsPMvYJDDgACnOhs7d9DKeo9gV/ALY+i7CmlcrESG9a4BbIjMhpU8A +HfNP9KHatNzc2ja7RqHvXZL5bqLU8PmUgU79SY0t2eZaTB6ZYd2/F0iVBtPJ40S8 +O6aFtk1hKun1+PD5GBMkhCvGAl+OV3vWA8TJwfr6K3KZgMoDHiZvDe7WTuE9OdGQ +jtkwbpDYmmQilANNli5xiTQdK/B9SDYOTyhYzdNmRg2EF1BqPydl8cMDonGAz5GD +/oDr42nc+KfcIUvWFAC2WqgYn+CABXINY4CuoL1T9a3IDkgyP2OvA+Il7st34GH4 +aBRhYHPU6+d1HtmlWlZ1vdMJ0a9r6dV/0od3lfnn3JjA2saW/3aO3KRLEhdSvkBS +GQAnRaHjOka1maSxungIdLFAUddk++fPPLbnHVV7xSK63YueI0EVfxw42vP9gLxY +9K7QerZyq67vWs/qY+C1P8BylSv6aZgloRX0Kk+X9bV09t5Mo2c7Tr69Yv6hVJti +5bIdc8CiTt6nBIKaz7RYa743r/fAggEIpFAU6uUulnl7iqMOTHsNTcNTZyz1dbbf +NcC0F4TuLFNAaToLDjg3oQRni8LXG8wDMUcnC8ddbKtLTE2mVNg3QOnshZ37BSwy +JbDsJ0wNcS32XhEX7N7fl6vH8jRZJLsEUwyZWev5vB/BcxIN5iAV7Z2WWyWlB1rn +Vg4JqQaXc8jqhRWW3RRfkvblaDTmKgAPsthiJTTc42B8p8IXkZtGw1Io8cEd4w+7 +GirHyzU66+c6mz6/LRIc93OvKEGJDSbhhFsMQsNDwUPddqNKuUaMECIUbtiy3BqM +Z30Ilaqt4hGRXCmo164F/MEQKfZBUNz2JOCEOfsD6fhAmW+g5mTk0lo/79KI8zBk +ny17zbdUBdezdTrN6mOnal5nNeXvLxB+6xypEvz9sLEO8VgO7WxvR7AalBydhNNx +xzelrTnHqm2esJp6/MAg8zT8UG6h115etI3wP/8ptwLD2fPLDlmiufWjJeaxZZXM +b9tY36ehaWgyk+9M1y+5RbKOCW89xlXlY1FOuRUGQbPN/44sZiJV2kJQ27FQPyvN +ykGkVfibL2WBSSzP6UhlL43miPglpMA6CG8ygrez54q3J5p+cFcCwzHbxrv0o30P +2PLzbKlmctsvXaRCd81/tmuACJOUIduzc/NqYgaPP49sovre4yjL/TVD5RuGEZeI +JFWvc3/xnO/srDAqcQ13lCh6zqXtCpllytHMLm6Qrdd12igwd3E92pwJxIrSOmBy +TkIvebk6cbywRpeetgnA6JDeNnWtE+uIMdvvF5t90RXikKdalJK/hY7oh0byXTCu +U1EDAcaHMqyGJaSq7mZdJ6lt02QUDmdHFfUVSRuu1hIxCff7btNY0Ug7hDmswBZK +i8ukwQv3nELoizwhM3ZYIh2BAVHoncNLcorCtz882uJXtZbMSC7RiZ3CgnCjiarz +xjDibUjb68AHOkS8HguEtCB7SAeUFu/wB+5GrQIg1zVquAieHqUzXfWelwFZhO+M +5G19qomUilZ/3NtRvzxpt49oiqmyshyjt/AYLQDIZmMTFxTR/8ydm0E5Bn39JSsa +lXCmK0Lo11i1lhV/lDkVC2bNXo1KQlm4v6A3JTC0Wo0iJD0gnIyZ7YLz340FZV04 +s8rSOQBRuN1AOi0SbmGMU30y8fuWpWF2gqenIeCDUL7KrUDr+/jTRYl3CgogG/wc +rrdSyH3whxj2/fTPvOsy1cHtiy8u5ffv/cMFJuFT3SVPjJRgjnl3zchMMLEpjYkJ +AufAQ5gLmM8s9GGlFOPQs9ha3/j4lepahkAXNkqAAetBpMst7HRfxcW8S2NT+1Fs +rmFSGvpbzMkMkd6aFKUhCirKwJRiYNB+kZBInDUJJSlVBpBwgbGo5jb1oIPyZsY0 +f/laKLgtyP6AkLBPyY3lgTmJBAfC2hEQOLNJlMO//soYPkV64pvb8h9lQXV3Bn8Z +rKkMYk3SiXEmGzoFb5hRXqHJagXf/QfAbP1mhCtl5SMJ+bFxR5OdKNI5Sovl0q1p +NEdLJEElZ3jnIjHAoj4xRmFdwKa3ajFTmtlPw8v71M9z0rKnmtMynvVRfSb5M8xF +q98EsPzl2lCtGnpzchLNKEyuwtiqDJOlRE7SC1ls25BAJhu/LiEfcOsE53bowkp/ +wa9Y6A5HLQ+0+/sBfJy3fB+ufapFKGD9RKesSxJCNUC31v0vaCQHUmLyprr0Ftvd +E3p7drG0Vo1XbfZuFWnFEtAjPSHfpnXOLWjz27IgjlsDt+JxX+IIe/XRp+Iyl+SJ +I51Azn2KhFNnJ15RXSMU+kVAYc+5AUMlcsKgRMrF3CI5QMEekCByodV8RYbLzOjJ +YV5CptYwGdp8/x7zgBDmCOz88T+Zkr5S1iHdeh0PEUMEHOtUscpB/yuC+k77QrBH +YRdWhrM9vLwV8AvyLA0W5Dx+hHx+RDJXCN3RASS/t0H1bgiUzHTB9gg6YwuNQK8S +bY0bIqM6KlJ2CxN22KBWcN6eJM0E5ljmtZo2ZD09SQ4cYnWjp3ckrY6lqpfiIrSB +vIJVCSV0mPUgDlNdid0aOhl33DiJPT+6vmaXxwvADHym/ag4i0T3fnG1SkUWZaQj +auLTkNodiNNOaFYZqz3frCdFbtkCQEY8Nbgt75WSHRO/tI6/VuV8QPZTHzevlyQl +Lq+Smlt0a+JGLKpkViiQwv4xctaywcl+juTsQSfPih8owVCjzMMbOdl6mo74lSHT +cmerKgp5gtDac500g9PgB4hCWYNsdJawFwWnCa2MrSwWa5NOarr0KFSULb0BmBhv +b7e0Dxfc+b+2qRZBq8Pr61j0dt+x4MMMPjXS3HfvWVpqK8MzRc9fcGKOelVNLuAy +Y7eVkuPrygQUqQX6tVYBrBREnbeU5+xSrxvPbW9whAN6kvpM1Z9dpLKnY0XJ6Bu4 +mm9z3PV9ZlZWCjozxpDpc3Cvpod9RE60E6KhshpXPzbam5TiFT+YCBOTA5AvIzip +QRGYcCzvMgjTXajPdm7pHgAvC2PScuj84SvdggxCp37RAwkQ0eOIz9WedeAclkY3 +mC1vt4Px6Xg07i0tIRtzpi9oQM2bTnHcPU846eNh86dEhFu2WwHdCI+8LK8FrbAz +eeq1fIFO0UyAXSGXm0kgkOEo+LUPzpqSwXyvM63uMww7oPUhaNwjH6HVOKKdlX2p +KVm663fahVOLUJ2Oc/ehjj7J18Q9lgT/5S2Z8JMom9WaDXNx4e0OAT95flSbF4IT +lSKRkscQ1G8mnwNbS12r+RH490RQo0aJ/NzLEx4W4rJe49G+jDb2obpGKQ7/ZrWa +b0zu0f/sFpO+xkvRgJ9V80kCVzzSNc8UyktkUp0ZeaNvxVMG++R30bC0//M3RFkR +ky2B7Hw2TRifYV9Uxe46T6Ik/TvtGY8mn0NX1PsK02SG+6GqAlOAqWpbPUaKMMxg +3A95jU+bSiEeBTEeb4/Ydm6tXqUeW5IkT4RMvr+sleMOshmzcY6aINkRN4mApYpG +a4HEqU3+uaClPEyyXEkV00Qb3BK+jVOEX/9qBP/f4zovhaBnMLupBnzhorgyMQWM +vxNcyENv5B8yidXf/qUGDb3cAgOVKaSEE9knl56xSmJ8tH7GPFXHYRhnQYtnQmsa +r099VD4Jgg5075n+/y+rFnt4fHFYh1eF/qg5PkZcSpkc5d8a6fePgad25TjPPbtv +q0QLgzJOPeBJZ18emPQTVlh8SwJ2mcaF3RlqCZ3QxFPPpX9zzyhOSiTIGWoMrT6B +HHNO4oSXFYK9JhQgqRjMfdyyYJAb/ITxBX/juzyqPFiCPvLqLn31XfmvN+L3/iSq +xPB+p6oZwsGxrcZnkemkAOmECo93bu3t7VomHMLaj2RGTtVzB9slgjaTorNm3w0N ++N6P/kgDknJTH6x7UtR0jiqcE8xIi00B2fOwWfmSYnbULQLboqmLH4PDcE6Fev4P +noaWOrDtz197g3D3Hwa8XuMI6ZhKE3HJa0F+B6ctPlS3uhyZWZJwNjm7BYU4cCAR +lwRrv82AG8Tsmv/3kS0jpJfWxiWnupsHB+mLUIVBvP3kNprafQUWrir2t7B5al42 +vymL9UYcWNIJkYgtaPmWgYyKGXzbriJi8RiCKjCTkP7jC1DdENXLAeWtXXqTmMyY +uqnBHv3TZd3ytZT8sUpoGI5JLQQZO6JwyRUSi8J3qp158/x6C7w4wP8IE9LWeHaV +JbhnR5Mhw7kRarAbd//I1JMrZTLOZJI1dkYROyo/LA62aUiKjALlP7EGgdbIoYFJ ++aQHCn1j6Bl54R2J6dcGe5hxIlfx2gnihCH5LZUNPlZojtvxOlcsTT0gK/jRmtm4 +RM7/0maZO1rMiXOjwSPax3BYpw4mvqk9rcY+pDatkHzznTKemvt29fY7EB+MnOSp +lFTzs6oUsyvEDc0f2RRinoYG8IkwiXs1ZJ+OR+auVVFtdPGYWskmeYiIUwYeVtnA +MsTd0jKLc745zpFlT7njRTNobVFBsKfX+zUVStFiR3xMP0xrjbweCE6yZl7FqKOh +JsAUDsjs2oq1OgvHB97y/egW+wMNCDJYuUL6AT3/HMIw8BiZgcZ/yxkTedNxavIu +U0ejzcyBQErrVVYqwQiHVI/EuUcV315ZdAVHrWH1CHoQcnTnrXSoyNm2IFLCtk+T +nP9UT8pypoiAralBGAu/OQ7TKtHjCSJvdmEf55myHS8wwgaFQqWrR1PJjc6OJrGb +8pydkmKHshrsuMk6Ww1HJgjVjA4qSfk8CANNQgV53JaXIc39uxfuzTkxyVyFp/xU +sHWKq5i2mgpIUYnbIdrutbyq0pfBcNUqswEQWa6USsN05fOfLfuJXQTaaPfaDJQF +RUrRyJ1eEYV3Zt4ulhMUlZzYZU9uU3X2Suk1vsUgxCSOJzKKD9Eo1NSSr+gCzdqD +PB8UiKyuc/Q0WQwcWTH0Y6iuylnVQywOvXDWXOiNaznMMnPE5b1Yp4t6+3BlOUr6 +5GombF8w2C4rYl6EiceWXvZ0PHSpxacLG8xgA0R4Cm1gAStlwbgX7+CrFRy6KV/L +CkuD12XZo6iW8HduVA7tk2lTB9RFMDEfomiwqXWOGkJgJLanjz6RNmV+gZp63pB3 +tNDQ5AiIDoGFD/It5E2lhGdTubXXi3bmnUKfcU9zUN5zIIZ8MJTC6KWHeL8JH1Uk +KGhzotqeFH46uzuWOU6LqKoGProk6QxEgewkvgY0wUCFxfc4iSN7hQSFQ5UCBSsk +BbMqFxawc1Q6U9RMGfi1tDXURmsyJW2az2fkdkTKS/uWkUGKgVabbs4RA+lYv9Mi +Q2sfhl8lHKtMMFoBM6H1Ias/Hr+WpCiy+wqzsALdhxfNk/4GMu4g3LO7y5IR8cY0 +2K40cNXC3LA3qXRPvZ3yd0OwXpOpMtryGQqhNtWm2BPe+2o8KeuohXPI7MCeeKiv +CPmbrrhQJJtE/wF8JqqVv5pR9bpISaUq+QxlCA6sRS7VoOm8oGZ4R9AO0Yk+P/cC +1dJZYc/VLqggIOu9DBXb6SMm22ArHCQ+OtryU0pyEpwQcbG/JvhGvsGz+ztY2PZo +PTRaWh+Wj2OJJl0jpBYjqAo7Uy9QSx4oX6q8R4mqXTSeKARLOSMw6ccdRCG2QdCW +FlmmeRq7YWtCCOLXbKJVE2AlaPBKG48E8DmP2ndADOlIYfWnFrJW5yJ2dP6zXQgT +dfOLBiJshxHzECBUvyX7c824atznnc+jiGQL4zThymOCYK4XXT7wDdc0PgIly6hx +R1N8VAH2dor1sSTXexR786sL9BEgr5QLcc11xp16fQu9+HSafsEn+Qupw5nekA+h +rv1dUfuuWQ5cjlfPk0xmw1pQctYfqoVw2/4P6nrzxKj7uZLfXqlTlmUCLr+YEbxI +2iotSilWBmyIHV0MA89yWBSM1cLxiPEuACtLuqeh1r2sya02rCdn27ZJcB7YdcPF +fskVugcZRwQvwfQPSqzKExSqhp/frIgK1Nq7dThGQjQ1KhNoZP+EZRrobPKI60DY +ol5Ihr/FhKSQfhODcX7d+yYXD5V8rGtkBfr78QFsUny2zR9GpSPQIwPInFzO8Vhd +GmDDfGjHDXYektRcuBe9DPRKVUjrpgsOXX/kzju3OEjb+ZfEL9eyDMnBTL1ELCzP +i6QnRRB10igRgkVdK6CVPY8fFOyR6FHI2aIriALZClm6OlMrEcCPxFfolGLrlIR4 +S0nYhAAd2wbT1mEot+LJ5MaKhXgFvvXCW3j1AU0cP7mebvW/LqN+f7VhB/4M9bj3 +Gsxj3CA3UwZMA+/Ufo3NzhHIIgBywDK38An9uqtQcNWwLxaax5FzmKAGBFT9XfnY +zAazI19eqH7FezzF7/nkNA395PS/+Y/gnlavEjst2zma2Wa603vgx72DdfF67y4v +4tI69ahFtNdVoHlFXfFMwMEwgn9AaK1mfiLwE1qz+CaGN7mRjlN4E42v3JEnfz9p +iKJX6cj3sM9VNEflKEbGtA0jFH5MiOXtoq7+yqQJTzLtTeGjSsaJg7DyL+mBI3ZH +Ir9lhxwuklwqrzHTfwv20ORRXB+vqqEztNG0wqh31Wtt0z+R5s8T+h27uEv3Ttoz +iH14MJbRxEZvYHM71NJchr53fyitdM7rRf7nLYz+mNjTyq7y2ziQwqWxkkKZmrK/ +lN7ZV7dPHnYxG1Dh1tgOeen3eKAL2iiz+hOyp8SsX+HAbGPgB/xT8OCqvASIuHA4 +BYVXgUpYWN9xiHhNp4XFfwSxjWEaH8tLkBDMDUl99UD44aw2evQ2k5Oy5HcgCal0 +d1D+sbplj9Au7vxmK8tzX/IsIeC4lXd+pMTloSGosD1GfSbetfIOMvyfON3Hw8Xo +kvGdIEcklKMwdlmv/wJ43WHPk3Z6i1uwQ4D0KOHdF19R4p0gJd7+RIP9L68UGdPj +Kap49lKaoVotry9GPnkIdeGR4YLJ1X9jw4PunsfnjHRC9tTIAKWve546gMFXtZvy +JnjDogx5ZCSyEAkHnTzGG6gDucDwGmRtgHLfeKBFRDqZYpaPGUPneAL79ypbkcBp +bFMeiZuuz41ugekZHSvdgAkiIUksEFAONTjeXmUj8oIhq88lUpKKYSUhE4Q65HiK +0o/wL6NnKZR6YtMgpHC88HMrb/u9c3CP+UuDYaYpuedeoMIJtOrv4t8POWtTRpQL +jk8GwpcRW3Q3hCtmc9ZLpG5TxkwZHr0pBX5P27I5RGEkQwLIQ+QWadUXnIJqrLen +fgBd4XYF+INXQJWMG6hkYMDs1pafHB+4M20l20RoGAqZF8Zd/GXnYdZlVsQ0zBzT +7JV1zUJU/EJetj3rTTBvsqIA9/MtqdPuA95Bv37wjJG9IhrwB++mpV//Zn4dsFm/ +EN0KFKROI5812UGUyLFAEVK4dxExY4L+No3XZuaiuc7eLifwjTK/CIAN/JQd6yd3 +qtSJXzX1ulLpHJsG9SCvwR+qk4m5vRMhPk+3srweiksGlj5QKTHFwWea4vDagMng +IbyJBC9/BADQHbarQm8bNNCsfkm+K2AsJK6mXVrbFtZsnVt/xw38XNgG/weY6wwO +tK65MMAHdt7uTOPbFOhgYeqRabk1q/uVPdFg+XhpVT9TJSXqVEKLPrpp/viaWjAG +AmfTo3NT2YV/t+2cklVZM+RXZato5QA0RrhBGM/ZGK7Q6w8veQ+e33CzMhrO6vue +EbecGhOs6EjZwVWYeLr1iLH1kNZ714WuPgubl1jzU19qGjVvMST9tPsYg622zwPW +MWb1fGCLcol1FrYwKbb2TSsFrpK/64hZswJIhg0w+rjdyH0MOZpXtyA318FpjRIT +kN8bQZfeaqi/fyBmTMjqUKJWhB9sZj9wyvep6sOj+KMYUfRmLliI/VLcWUakAr+L +88QWpNuVGp08mQfzVrd1BwlJYXUda4ijGWpIjAzDaeeNagwGSrGgoVrVBMNdJmZd +/b2Ipuh7BshM6s4Dt8Ni4T4lVEl2dRnEEIFgnhoSspPZ8q2GrVpq++FKqq8RHKbM +py2UBRRNHGL4WOSVDHMmnOmoaVU/N9qehUZ8JrzpMOEf+lMKk/BwRdbWeRA86PX5 +AxpQ1tuwySzUVZdKTlg022tqZEMHBzeXigr8JeiE74eFt27qBvMELY0MZ0+XYR3a +W7s8Vz5bEn7B+r2JeyGYF9J9MQCUPPe8ukHFLbQPp6Zfg3SwDYbXgxN1YVK6dEzf +esaqWeyilBuDx1xDYkuOqiWC17i3NCFfo0Y7oltCciwcyYQBDZe1AKbH2OUkTeCG +UBvDsrkPdvbOQFDSJNGhij3CMApgzVv0cPQ/ElnVRVlqFDxldENpS5TRVx/vzkIN +OkVxcTqIKZ45cZZUj1Lt/uEtXEWbQPkAZjtdBMWUuj08tqnXytaTRVzUxnU2lYIr +rRheU2gjduV9jf7thI81SC9Lje2D47U342h5XO47U4Ao0UmnQXbDNNR9eRa/94Hp +ZMeOhbcI2ukkbPq2O8UjgyRBX5u086RQNa2B+kzvtDO3sZSuHYZqVlKdGgPxcQXv +sXhBFx2MG+xt5lDeuHqPtqfW8yoayshJ38YruYhRW2SKg8zDw3x6v3rWCK5ESvWf +fqVevQscwoaWzW6aTpHSjA/VsajWPmQNfm9CjRt6ncv08XXpSH1o10Dhzq4QwGZb +66dxWR09Sezs7GQunB6Qvl6rOH9SUDVurhsV3BAy4iTllbZR5QCsWKKkPhhuKMuH +TahkgpZDvvOH8gwAvOhcCcIpPRw6fkkBuuZh0PtCVLOcnWmflQMgoWpZKn+g3bTX +/TEa5pyve/4n4+RMJiVlc3jPk4DwpecQkaz/58euzaNzFdQ4eE6twx/+MuTL7Y+M +75PoFCDjpXtQXsDwNh0UNBYMhhiSbJ+JNHmgSGE7Mix6VvlQ8OqxkBATuvRD8W6x +QJopmjVf6lVUrf7Gw0ULqUmnkESN6D1LGdP44GT6iuTGtZjWozF7lp+Edf3GqMKT +DvBxCqX2/ceFtoc6+dD0+PGS1XBw4s0Bu2W2AqmRkqwD7l/CYDNMs2iRCIMbrSlg ++6/VVXmf1xJBSaeIZ0Ure8EIAK8TX7qMD52V1K9O/mG7S7P/94RChKJxRqtyumgf +soXKoLKjR+AS0WtDJKSGKhgK7zZDv9Nvrj+ex6rbFIagXn+kJgKVrNYpYdhegTjj +4MoVjbgS4Q8n1PKaFbwnAfuSBeUF91NUGzemKhnm6jk6r4yhRTlKlNs+v3gJHbYo +5670UO/2I6ea1VAVSDpqeoreo6YlT/N+enlj0Jz8jttLQWvhiTF7pd8bmqkgfCuH +5rYWKvcsdcb9zX2+odFDnTdYgdmTLfGzaNVmhmdLsjOFhstlDnynKmxnNTWrEuel +dSWir+9aRmof50opwpsDb/mVxbNxKbQBdgSgsLy4OItqwDmKQ5vjw2rT2v4sMWhM +cz6aAY3aICNhpyY2q48KolII3jS9vWfWb7GoBv4KQMOD7qMbYAJ7E2mHfEFtxSXQ +rAZ+Zn6yUjvdb2RNkJjj1qtCIMXs1bQLINLYTIyVa3mmIzosnfJB8KDIrDBMSclT +LPzb9Luv3I6zo1fRx6Ny/QOSomZFtkPgckUpjWMB8FYrUjUE7JWjhjq9qWAZX7Ts +EvDamDla6S2P3ocTU4nv8nDa/Yf+xwqBBNVRtZHsDg5SQTxsT+bpnTpG+9nOTfX+ +oXvXjHMMYPhkqZqBNdMbH4updAl8OAaGcnw70HBnS1lmsvpHNj4ct7v8ezGgcgoL ++gsFcQ/fmr0/RvR6vCaQTAjQpLl49e46Q99UbGVEviXvD3fDFO3UPlDbiYGAVsgB +2yBLLc4tRbSmf1zTVSdERXDSMYpp6B0GxWvxyjI8UcGvOzSb28bdYXFdKxvMR+Rd +kNyTXGcU7ZfkhMcjWJeHZj1vcopgZXvPFAAXQtnLhGrYUTXZJL5aCH+VkU3Luk6u +xQ81LyrcV4cTXrzisGk1UsS+ARVAttPJGeMp+fyKKb9vnrWEV7vX7QFABTACrZbJ +CpU4f1iGuUcM2QzYsZpINX4z9wztbKQpDcj0XtHs5GfovCDEfB7fzcRhkgapq99B +mmYffR6mcOzAj/i3eO/1aBRvorcNhLTeAKyxO2Ls7f0VvLEfsriLvfhxLFoA+Bfj +doS/Q822tIN5rnXoCUj4+THoe018V+l7nDrLunRm3zIk/0j6KCFOMCcEWGe7YTju +6XJ2qX70obQEaOMGd2zKyfs3No3EFqLB6qL4b2shX432xMSUpZdJ0QcjUP0gMVrG +35NeKx/maSjwnqbMlEv2irQvjktukjS9bJfkOI0M0FjQ6Bhl1MmbMVgqod9k4aFI +IgcYhO4L7B1QUAo6HEPhWToco3WT7cne8YJ4RrfTPjMbVL1nmAfI9IYOLHR6aKGp +QohZ7oSHJnPfHBm5/cv4PRuB8olcZkQcvoSd2yyFgzBtFx87vYogmQ9jDNFqyZx0 +hfswYoGcjM0uX0wwL8i0kOLuQ5tDa44uZk6VEFIYf/jjU9EdLL9ah0h5C4PYy55V +707GvDNj3wY8XqqVu9Q3lbMelIW7Io/hDqg8OtaFEysfSEQizhBLMl94hU00lfe+ +I7gJz3MomlgA47b7g2QtMBnt45AgmPM2ziCbcXyNVrh0Omc93kyAR2BtHkXhDW3h +A3VWfpP+e9B0l2GLltBEFvQ+SzkWioDD5Wbt99g1y1R8pT1iNuYMnM1YLyMCNb2C +iUSHB0jwGFmmz4hsVeQRoxfdoQLY3RyZ7N6X1pdHkMRMtGzYQ80RD2R3JpBNieXH +Ak+Y4cb8LDgQGd1YOxPvuwHCUJg7+7aoNuF75M3J+Uhonj2TMVpkWssYklI6u/ap +ApaFVs+c7ck56H8S/ohGhNYsoBxoer4gJz980M5qyAYEC9xQg3RjjbBdlFBbrKY5 +IrrVQiKd8q60wo9cPj+NCLQY1O89UEjqUDo1xKyU7MniGzC0TtByWaD1byydYFws +vhD76bIYihX0HRtNA7W0OeqzEanv7TOMiwlJgo6UKsj9QsvsY6TfO5nIRSbw8sFF +fDqimCNuv9snpA9oJ8CbpUmScrpi0lTn8qG27BL1kMc5/z4/1AU+wESQ4s21nRuq +k0tC6A0+xfViPt5bh//jfn0y/TwHZHWwPbtjDmOE0z5JiFoF7/w4eNI8IL96p2IR +fjQldRPZ/VjQKZSpJa5NnNCfdPmc3y4sv904RPld3m72sTQ4Yql+XSf7oNCSU+Jj +iZif0xhZQ1JQq3qfM8Y7Vm++1Jl1wpqSUpehyN4NuBzq4pjtfmLOqyQPbw4GgyPc +J60EBy/JIuV6qtE23BQY6zEf6V3ZKlw4ZOuoYnWaHNMq9mXD/aOEYX2M49vpYiof +oT1aFA7v9XuXJy4nz4WW+FEx/JwbI7VT1o52SQeT5ndTOzC7w/IJeUmhkZ7/Nk0p +fXL+LLpPktIRYtfwrCJLTstiIsP45Q9nwJT+rQZ5ToBEP1zKgUgCmhsOOuxXgTdv +XWIqPZq3VTrSTSM4LD0YkJL+oDDwP4lbF/qmMfDsgvoi+p0WFQ7OlsQkvB/UeLmC +CgPBOQmZSQ290SXxEjYCQ7B6PKv6/ItureWob44JgEuUEpIb0NmQYqNTOcl8xKJ1 +xdMsDjYOOOCmlCBuWxVKK5d9S0vYmdPNnrd3W8UANO2uconxTRTxgxDNlGsmlCP4 +qdOJ+EwbazGO53ntEzgVzRT18dsPHlIWt5CNbLI2UUUM0Msycdlvtik0bZOQS0w/ +LkyF2+Dth2PC+h4/zcpfcl27nPtaHqlkG/WMlPC+BOM5yxo17cQn4ZvMt2kRrzGu +igcQCImwly5LjwDNhmO9kOvbq+mAJMTuWL+RS5hZx6IEI6iOAo+kirQJ/WPwEtmc +wCQnjBAwz+HEskof//eHnFpLmPecFcBwJ3sM93NRkyrRnebaItkwzocwh2s2ayQc +Z2Y+/wgu7t8RbvTekopRUNd0JCweA8QqqURzpbwkArcuX+p2Aw4BB6/LrzmYQPiF +kdznNuwuFUArvB43XVrdb9eCPNRZBUjbNqV9uRBqNvgDeNKgX4JHVgyI7SVb052a +KsrdXBoj8HHb1+XF5Nrw3TndbIFnm9UVDCfnqpRq7pbtHp/i2PH38WekImPSnbk9 +9Tt4g5/4dbpsx7Nv3TuxC0+xcKmocD9arveYWh+Y9MNjzMgMxYumGI1+ft3DLkhl +OEDwD4puesOD6yOr6trtUzi08wSmzTFoN8Q9HpkYc1ToyPkPWhN1OaxhYAKLWKvp +KP/E3Q59MMDLcN9e79czudpsjrH4qvv1rOStdp4lz4807xvXmL+2z3GYIezTIl6o +zD1U7b4+7ZHd+u53G9OLSPqekdWq5ccPuasGbbzGX60tLgmd6HBHK4p+dAOUCR8n +nNCEvFpG4Irg4wvw8TwBluBCnEY325rUYVffLWyFBwOK31LFXaqq76iOz4iYf38f +RheRmg1qdGjhfiAtHFirpVBslSufXept2BRHwgvsJssheh2xk5+sL++4cf0MO4Th +rLOgMCzybmLZOnRE/9740c+TAgG8irFsKrUfdWQAEy7S0WzDkT+CjKaQRfx8gi1z +ypB8Oaj6GjyHFgPBC7uL9QCo5jZ4/pLO9ANmuU1pYyfufzvcxSLX8431ndDFRVUW +iM8yeQEn43LoeA28UvAA2q08KAISzzNq0/EBayFSJydt4eiE/aWX1Ij8qigmbXZn +gTIaiL+p46/NbQkNS/EBL/V5xaFCMFM0qu/2TAiJgepoxhGz30GSU+ZSIE3XN41d +CLEAyqQEEA8JCMbwLqA6bACbeGoZVVuHMhg8HyqQSne0NsDrVbP4wVm/CQRt5u0M +4VXuiQK4BI7FttXr8cNAGcFxVTrcjFPCw/SdjVmkY9fZS0hb7ZAc/rqszOhfYw+Z +ANsKaqzFnArE3bzL4l8LRWmy5xOaFxLVuVEOLk5lsi6Hq2MafkSFqGMWAT3AOlYZ +B4Qgg8xxBGM1XUVGgakEK8vLRvtiOn9/mD4ToONYJ464NpnFDKvdvzAZOhHQY+KB +509U6ifwn1AxN648HWhKz2xibG4HSXbAWGDBKw9Uzo0yQ447jQy0Bd5D/ivmYV7J +Yc6qvzSEI8HCr4DYQvSR59HMITlj0RMuFpeAxe3Ngq91paFAOHhZMAsiZ9zigfdZ +6hoMqOujYGmzm7TbtUorQUok7quUFPhPy3A8+O29lakJ63nNqTj2oVMxh9E2i2ue +oMC4QdVuirxWJJDcHofyimlXqtK6TruqeuUew6XNjX0F7o0HGJ6fXxLSh7OfiTH8 +NvvFggzHxo076UVJNxh/fZH7X9gGmyqeeGOIBSoy+30OFW0CjpuN0R26JuSouht/ +nEO+AopDP5SIImFpfJsXGH3qf1Gk8EMWLcQMT81IXcbZeAG/SWF7HX6KCbluLYqS +J+GhkUVVJCiKWG5oJAaEUitmqCjS6y6CoZrctM0tLIjVwGfa9Tn+ohq45xbZ9tMu +pPdubPp+dqMmioqBU2FRzwiRPOCVSo8H7hYVQk0Eg/anBZlI9Qu07i63REK7WICx +q1JXUpbeEsa/tIB8tcmLydqTH0iUFHwsQFeC5rgwnnzcOlPvGTMD+BxZuL1ghsT2 +8rxcrwFy0/N77jAkv8iwqrzUj4AmGMl7kX3DWkn3VhueVtlNr43FiALwu1hWE6Q6 +w/97JArGodemFPyH13MyY5L2rIB7rBR/2CSyXZkBmOf/hxFAS9/OPTadOk5TjcCs +uliQgWEfqy3RC7HtIoNkRVDZ6neO2D/Zu8ZltAy8m5Iv3ZQmsFbzosuFQ4z4wpLn +gGy9L42pnaFrZTACTk3Yr9MU3eTHaLDmMmuyCdEFXmUm8ReJm3sIAAH5dqUgdSMw +Mr+QtdZsexWm37jKSkNVZ7LvFuBaXLUfDt1x3SPOnLxAi356Wx5rJpZ63WoaNgXJ +vzXoQ2mxWUduAzbbh3t4/4n3bsiA9q/RDuMsHuZgwvzgGGSuIHESUl5fybcKp8eb +WoCSk4Fnvw/OhtOpnTMejHD7Z+g72w1u1WzZkKLYKARAje2x5kmm+v+hw86JxvbJ +lONga18B79yGyNiaosA+62vukWI/eDc0/QYAV1jTONon7IpUpZIUTJqkccXYPR0w +jdl9QNBMLoY= diff --git a/scripts/mount-shr.sh.encrypt b/scripts/mount-shr.sh.encrypt new file mode 100644 index 000000000..bf6f0a02c --- /dev/null +++ b/scripts/mount-shr.sh.encrypt @@ -0,0 +1,71 @@ +U2FsdGVkX1/HExlufrZrqJxtCMZM2UWp8ls9HQt6jqnVlh1yFSm7EoWl1wJJ+hN/ +ejTev2LerCr27Jz/SLcpCscFle3SqKNcR/eioUtlqVMK9rHpTgnQe70rPw6M8x1M +w3ulxtMjNgV4a7LEspcqf6ZByP0sxjSxE08D69XZNMpnzfZukjbjjB5nBst+pxNK +oH5AYeQl65QcMQHZM5oG7DGvZtz331asBqboV9WnRTR2B0ExGKPqyGDm/P1WvRo5 +nAYsCM05dQYs7NiFutfBD6PjCKkAqTakBQbl3UGxdLAfEnoukZwb7fG+2+VsY47I +1eJJsj+TmllNYCtWxujPVURbuEeexOVWUYUIGcwNwLoMF3YIwZfm3TX5xeMxfLUH +lsR+yC8KI305Z3jN68eZivnmjyZO+0nMLUg9/v1wSC4lsNRO9vM/3zUUqNGNuMKM +mc2Oulnfm5q63FH+bZfv7/wzocRzIceiEffr7/VGSZJKFG+TeXM4gZQHNRA8axzq +EIFdsKVjoBrn3tqZGdimsTyeD4IJZMllzIbLXphcUd8h2+xRPA83weETPugFIwPM +cKpRuowZBCbo2UN3MynHJzwCxeZ+Hh5G6LAG4HCEgLwnPt5/HEomjGEQc99HuHIM +Oi2Y6LsWX5TEqyS2Q53LcoiFsdXQAoYp5CJU6boa8NMHz4bkUUMxSTbPePdJk649 +F5YE3wCuldZA5SdO5jpAeh3dsWV9Qn455IGYl9SZKuL2D9XelR5L8PZLLsbVY2PB +P/aPJUWh/H60ntjnzE/l/jyRraiegHqGXTIUXjGopZqjAtng4hC9awswALm2nd1x +TPHHf6DH9pnEQ0Jn9GREnolTinDIYfgKPXrymoMV0fIQxlNg49x3lxnSffmpdxMX +RnohAWzcP0OGjdAkNi13keiTo2akbowTqjgI/+ziC+za4ZUwdxtf2MO9Xg9vowFl +3KeU78sg9ABxihL/W+19aQiIiMeFV1Rp3xSmYB0AfwJlY/gJnrP47PAo/+tBKTmu +gHR5F3JmBd7teWoSigup7dz4Wwo6TbtWuJXEqux6WRxo0wOHe8eRjCt30BjIlLhh +CzdpKmxDgQkTHy/oDxC+pqNgh2QjOXvSqy58+3ywXEZDPIJL1dTNIFzNQuhWbrO/ +inVdUZ33AJsbUgfaMplJ8w/ti3o8uqVM2kezSJcDk7OkTirSw3J41N9VEcoKrkov +kzjDbC9BdnKsJ4wp3zZYzOu8DAmw1N4Io/C6tT23zUOyRxqf/zF3faVgjXMcbWLS +M4ax/JhSvOsl5mCNLC4SHfJwZHxwcMEgIVdpJhM6UgaA2ITVUz8yjXyCyZVDqS73 +HtPAxeywTJes++jQN+6rmjZ9xcmXXyJxnxmHoRSuWtTgtm0A6WonFGFMUyrWlO+/ +XZDGhMivylUlkXE6m/tJnMHvG1FMzi2q1iy8VCf9DihGWhw4EfNb6K8COwyKTnE9 +xyuA4xy+i9IopAr9YYyX8btFjiZGmfvctpSoNVxRXUP5lhtTbztIRJTuALeILJlY +cGgyE3Uu0DhwZ4Ra/ADuIFCZVLJ9tdeXauwvbnjXJHma5BjAiwMJMVpAfhX8V8ov +hgF7jMo5sUIxklCjRnug29MEQ5tPJ1v4LodnbKCdcN4aQgGptpGTa3u+yT8d2NUc +Xr8O4KGoGEqWQAx8jlnwQ23HOKgfRC/LXxdMfQARerydUe4F36aM7d0mSRpyiyIB +UJauYnFBrxvlxTNAj0ZA6LXcoUp0wyvcVLCL5AG61X499UvYXhHC6dDOoJfcDpww +rmWG8/zpq0O9De7lh/4a/NwjS31kHMSaUT807ajd//t9WkCSe/qblWAfnHCMzlhz +nhEasSo2rwEzHWvrTinBhgeun15nBrMsuekoJsGGL5HDV3b3xKVgMQAfMPBRqqjo +Hv/o1UDu52HVqvbnEphvmiObhlzU7Xr2yV4BdXhBrG1TsYejquIbMsOnbsRCrzPq +evuvw3DmFP7kHHcogFR00kqSh2rdM15MYm9V03EydlmijmxlaYv9D/xXOocG+5Qm +xW4qkgb0Ar9kZCwHyK+c8xxbSoe3jlgnXnEev9cKfEzux2clNTp52yVf4YFy3SKf +jQ2D8HK8YasctfSXOQZXfjLU6Kp0TBJgjVECwyrv5IGjxfhQDL4vH1lzIQWthD02 +ocohE6H0mVWToMinuCvppiHnigxp/rsmsJ9x8yGxACOLHmbsI0JpCgcuZUKb3jG7 +j4j56fXxNv3h+rPQ+AqwIdyq6UfhG6pej/PHAAuA7a4oB2eb6r9jCiqGxpxOpZrr +0o96OgVT8QGeqRfRCr7qRr1B5mM1t/GCD6ApNvi3AjO1k6L91gi+KpZy+ZlhOgIJ +eJ3bACGWkrwJjS5bcGXehuzLfzLq6hkOBzUYTcWDsXT7mfglCGqPf6dylF8MbpUf +jUsm0A5fHaNmN7yTJNgWgTZYQPJolfgkj8UmgM72zsrHKaVsYItgd5kcaVAYl51e +OPkRpJg4TpylvXmdg+Bd66gSO2WeeiQ5Oqy3fb8JAl+R0m+rJAR+t37zOEgIdHf2 +IKCE0EZWm/n978dvqD/UjHHUfgt0bT77lm7jmVLl8VSDXLcMqY2gYWcfD2A/EKt/ +LyYoiufX/XjoRBzfIYeBx+eSVbjsDQj2hkwLglFPrIVLe9TOwXkbQMMjCoalUSpa ++KQfg3AgIsKaa0ri082degNr8pT/k21GIL7xXNuaSnFQd6dAR2i+wUWtAb481e7q +OHx10F7DNgl9V6CJ6h3Ttacg8bderWDDF3Uvm7vI2syD9/tcXjuK1jBh6BiGa0hJ +lKdSAp1I53CWRzaR7wZz/ZVxzg9ynfLj0TjD5gkcnGPWa4YW3TcgS0yV9JnDo64r +cj1oJKdhHVTtFGw07VKkhtj+G83NVsBZ2EDRxYvDr9xXHLeX6kk+B5Sy8WnjGpup +4OFl8sZ7bInSBLaqVRiX92+vfU+f/yNPT7l1z2E/mxYOHZYOTDgyxwo0FkD1a2me ++9f3TKuCtjd0yJ8Wg/Jj4PRzEeutIjsOSQbfRI8VrncFn17qDjafcvrWiwrrRk8g +15HI5jHATDyPDDHFLqKLwAKhL6Uo7SOyP2bSiVYCSxPMCL2R2lzc72dfktTKIkI6 +zVwmVmKpmvPGuC+zxqLsc9ypAWzS3VNpxhprtTigkYeVi5p4/mj7ablylAp2cb2q +jXknMLumo7zmPuirB4yJykUimF8oa5QLCBfKgMxkRR2ID1BFD1GGp6n9hiO+SpIR +stsVt504PECmXFjQs9sXuejHuCPz+wJCkzHCxoiUaXmcuLpmFXTiKcebaBlSyGUn +2l7cVVS6aUX6qd5Tvgp+fP72JS1X5OS1OAimqcP/+OTJcE1wlUDpRrUd+oHwU7HY +oWT6Xg9u3Q1wCPd0v3ex78Z8RahhYLUYO2S1m7w6IQ1mq2I82CsTBGGO+SiMVDdz +61zl9atrfNTH47WNP9/8Wg9FzU8OjRkAkKsv8elnVdmBIUsr2FxFLFM3LuuphGFj +MTytcyg6Ff3P/vpObB4vSl+yFI8CPnPI1EKqSakUOOkASnSqHdXwGRrL82SECX0B +uddS532hNR2VRprkg4K4IAcmzA24NP+AjfOnk81sVEGW3+2Qd908l6XP4ystJ5Rs +OEwHo9/kdGlYVLiNvnkScjlDolXuTycCQFp4jHgXTphuGyi9GOhAlCN1Wvc5drbM +QnBSMXpfOyhGLuvv2DP4JabpwYi+6Ub24kS+H0tUrDW/n1EOSoGTW0keJ6fEcTwS +vHtsmUpymRchj9b5DT+37/BMzny/7zhqP+U44A9AGv0HdKxo05u5pkI/QQXSDTDg +S5bjOf2Crl+ITHml+SYDSqGQr9S/ii53mn3/9IjRRIHZJz0PC8vGL6fxDCkX26NY +BHQ9BUeObkCLiKaFrK7ppBtuM7OGNvBNpFF9/yDb0yIsq4vXLDuXY5tHOlVjqhM6 +Gzw2UKqvlqLgZz7bCH3SR18cXdV3GLR8Zi24wZhMaIaQb3z2TdlkaZQb+5G7MSzz +zQ7/eFkAmn3kUcxpz/cB9AG6/0yLitegh8YJBQenLAZMiDtxNs5mOk8NIfAQ2CeD +E4zYQJM31i2h2ELF+pVjcFP0d7RXQZX9z+ni8ID3fkqgBOezjIUbdqipf9HpnRRZ +/7k11CVTlpAa4OZS25fTLFbOOUWF/fLGTVN8ltAmZpYis2a8f2sUz+P91KXP4X0Y +/1S9w9EeDCjTB2CCAm+vk1BmPTMppg9KWYtsOvfnj8n6Z8U9zPeRpPcrhiLpuxIp +P8M2/MX1L9qxXtG9r7BROhBv6vf+LFoXOiujoSRszDT7RoqIw7+trYsVYdV6oH1g +PZJ7hKF37udRyd8dKTiv14JIzYupt8xfleiodcXKy17nm8DHd3qajM67JBXTNGwE +vG423n1n9g8Ml3sS4XBPoDew+xSZqqcXkvWnd+FrjAbU57YsoAjpVbrI05DfQrWz +FONOkPc6DdilSd6zIF+taMRzcfNlgRDp diff --git a/scripts/profile-shr.encrypt b/scripts/profile-shr.encrypt new file mode 100644 index 000000000..8d3ee22b3 --- /dev/null +++ b/scripts/profile-shr.encrypt @@ -0,0 +1,197 @@ +U2FsdGVkX1+gpzxWmnpkysVLsD+byqA7x+5PBcBPWBXAs9WXD/cqZKOGfn6a13+i +hb28oQSKA2kzskuxLcHObk0m8xlaB50LeCULFkqdbNPV++DXAMbflYzTSJNk3oag +VbEyjpjpma/1vHQxE0ImMZFmFXkWJJhBsl+yXlE9TGJuFknYiVAA2yj546OMLceo +bsppfhoEE8QX4WnH/uWUZTRro69ew6jaEXvPiq2HoGtL3IbTbF6OKj8BCWiYz7vh +ymHBtAD9Mn61Y2a/vSYhmOwPL7ckoCSkZRgr92If7hs3i9poCHB2h0AeBnihdzMF +r6jG0L1W/mOTvF7ed2xeGHfs3iocFEzrISXndqo9bmbMjyqlKLSzgEvnT2Zy5OJw +CJ5ehc49bSkjXfanZZv0f38QSjiCE15HvEI0GnjKBHvjU9sP7Lz1ZmFars1ULikU +rOMxgObgS0arXdiWfvBE/Ybdtcrg3JhAy9skZmJ2GL4Qn2PqmtRmJGoIkZTW89Ld +NHNnnLLcyHiBvzC0ANOp3SGLqnJgf2K12mSsRtdoIDwJoS52ylr7ihL5LRCOiniS +ZGbCI92mjkpUFb95w57a6x3y8TlDy+HIu0J2hUuCGU2Lk9xG/xEn742vGvGzBJ2c +f5N1ttL98mfn1w8F6fzxz6V0Ddi5dsYeWwsltq60PVhYbsXxvHyxowgk1ppSo9Rk +KtqP2yU930QkO1Y9e+0ORW2mwuEHRi6/eKByY6vkw7S20n30Jh9VtC8IolXve5T6 +m74SOg1IgFspVcx5z90kq4bgVr55TjwBTI0uCcADaCSVSsU3fvOhutxoBRl3scmZ +/5k70UYPkz8TSoC7IYDF2fXbL+wst7sOHOTRaFGWKREUiF9+j6fb3zE0B0JJKLeo +pngQVqsTrcSL5+le6fG2rPIanZgJjOk63Ty1X/Q1pJuotdIr4+dl94cFBHNEaBNf +3STVzuZGGTcsDMshIHDv+t4D1jjRngqfirgnQyIt7lWP02uT37hHEpt23k5hDx6f +Aumt3sdgWWVuVIZZRIxbjiZ0USaEzF0+goxWBPbGyj+a5IENAnig8x8oNFiZ9x3d +CbRekDkP3lBrirOT+YHFypPa3vEnVsaEls1SWhjxWV3tseM0l6kf7kXfByLLfkiH +ARLdk6o/zg7ECrzYepXez2l37YkFp9bEFRX4yDD2yrE0wz0RchJQau2SL7SaDGau +IBkzYcuSYgvlPvN5Ah96HyYpyBbAY3yHvwEuNcZAF3N7xZBk1EY5ZHjPdzWHz4Je +8Y9F7lr07TzrWuivZEJifQHEEHgjsBC7YMtbqnC/hOeYkU/PpmieqfDE/PLg18pO +7OAh3C8mS2qK6IdR0kmdMz/V+nm3C6D4gZoLpQtwWCheCemVIzdsqpRbqz/nUYd3 +P7x+gy6FUYxav/J6C5ynjr3vhW5d+qbySdOGQSg24R0A546xu+kj/YSYYJPDH+s0 +0pkLpyDOBU1seWuxwzBXVqlhNB+ROZCSiGX+0rdqME3lOVbC2Qv6hg993v/0iNrc +PmCNudPVPU+8AhDFnYbL4io6ivjJsyvc1tU2H/82grN9EDHVuQR8Xfo/k/csxWbj +WhDqdiktfdjLO1p2inUwUeFk4OhpsVP9ahl+9LnhFA0i8Egn17Z7cNHFbRInZeFj +9xMQMnXYfXHx5ZxjFlRZBOdpByxDUHCBDrf67g/14TlO8RAednIrtLandH1s1DvR +ZamBL915dPLsBHQk7IM/LoSw1H0Xbt9O5MazgZzLiL5JA6rOtHvGauM2vztsjDdx +iritlLdELXgVN9+ivJQy8h+LfuFduH4ALGG8BTVx+5eKRewsX85kv2Rjuz61oaD6 +K6O5nW0UnZUOOaayjyBQE4SNsU+WLfFAVZLbRqjEE7txBCvPVDYfWE75WhDmnKGy +HsrV7be5C8uzyHf2aK5W3gNHVgzzKMwNDstGSKN91LlN2kw7Vp9IG9J77p8JK55+ +y7n7ltooIECIqzb7sfSpB3bhzjkejuJ0f8fbHT0WiTAJcDLmnlSUAhkYWVW6ioAY +opHE+3fgajlGkG0STxA/kwW38RF2jPp7jVsGo1iySy63SXgi4m6aYPChejAAYMXT +L1eDkUElDrXWiLnXNjbfIzlrkoyvx1OQGDeST0dBVSJhIPyqoZqZV+WCwFQPaFL0 +VxfaeOdq+qcDVZhYgyal/hc0cwGswrXC880XZlxARR8IbJE8U4N0XfksFzjMFwtU +oq3hAbDt4qh9RRpt74GrIduGBEtn0YEBJUZCLEv+Rjq/dtBqDI0albsWVMt8lfSO +2cBrFLleQJsMo48tu/7aDi/fzWtKJhWY5c16SzTtrsr3/UQY1+IQ3wUui2rntGIE +aTMU8Pgkcmlk0Kt01KCdnzgDUACcDyuZ/xh7KwYzQ7R0rUMjizohoqOeUxX9tVGR +4yztgyovyl9zQfN96BMT//OGoXLkd5ZT/YJ16MLEy1miq8r0W/PPh4RmAyW5aLHO +xcsPaLIO+6P7m+BTnl5swTYbGDzTNAoSkawJRvi0kJYJOga32vIwvGgUnerfQMjK +Nq5afSmI8MrKW99SWlhAbkQlCC1OohUJWs82IULNwtlhLdGM0U2LsjZNQb0w1l1s +lOluXeU6ampj08bFE3c/3aTojVeTym4ALsm8aec7ezHDEfESIAb8fqq6RFQmjeim +JCH6wsDkkjmuX0/5RA4Ke1mWAdfULbzfMZAAV9k0N7YP5TQ94lSeTJ322qLXD5Z9 +93J2Qg6u8q5X8hBX2vnrJt1dMcwPwgRxDQBSUx5ruBrip8CPEzu29k0D36yufdoP +PA9BlSwj0kluxzBFpQ0iLb18qE48gCUCypa4jw6tdpAb9eXR+DcTR3oMEhVQoCld +uRrK05ehQjpjk/oQBNohHheB3zQhinmN026evmlotHufcFvTf7tQbe/8LJJRLWd+ +kphZfBsD4WFbfBu23bZBIZoYnmfGFcEGWjOF74OYhvzlU8It6AMDyuzkMwuk70kd +J1Oj84Y7ei2iq6wxiQlfMYSaEwqRRK/dGvpyYjZ2jqayBwhcRkFAkZ1B6AEAOMMp +moGDzGV7NJqr2j+phwygXXqMs7/ehBoxSGw21AWolNxWTJQEOws0Ld/ZZXVBsDw0 +j37v3xyzecO9UG2vWGACcedqSwEUD1IeOF0gRYuq3F0ddj7RBy88s2tqw07pbEXt +6JceZl4WFW6hk423UC1CTZ8vhqhN4565CHc3W+w32rOgzi73bzf7j2OKNKV0R7Vz +2aXrDZAwH3RmDacxwOnQ3aovpNiUPs459bCB7DieJg0rS1q6PnscJNKxUHA9Vmgy +XRwAr4ShtZy0F2TPwj0Yrku19/BnkFRJwJxEZEX0JNTgkIhOeb7Rp9VDOg+S33/9 +o35A2gTaSooNedI0UU5V9vvTaUvkRZYQR8RkBDEqoTOGWYdek/XBgpf3nDN+f2Pc +Vo4eiGisrXjNkbFB/QLWEV7iukFtDNUxhVpdJ0Q1n0mdWtyg1N2Isc8WyIlXutnG +q9eH1PbeYQ0NKcqRMBGOY+XMVaHShXMDeQFYerVhRNBZujR+cLHKvQKS+Y76Cv4N +61jyJbmf9Sih/IEdBoRQn6hAhkYWSRmyMSvKX3L0fUdAAYMoIoHY24fS24MJ84gC +NoG7vmN0gbtC8iGSFX3o6kIdyE2/gnf3iK1LqQTNLDwwS9x35PosbIGRe1moIDyI +y7FGaTOFXKCUnQ5dcOPKLnHyUDGwy4JUKfOfKxG9LZUhoYWLQr8YrXEDzMjulBjJ ++cUq262+b7o+TtBL1o3hnTaB5Nt4bzk4FGrzSpeEghWYMTuotXDjwpXyBP4dYhnH +3srRrO9tZUn6F/mO13vGTrtAvwdtWommcPPPWAwK7BQfe2Ux2vpB+OhFOiL1PTA0 +KKu/a4bLBoXP5hrFi3vCVesbPcK/IIPPwwbH7XXwreyV8mpCy+C9/s5SSN5hV+7F +u26lZGUe/oXprXufDQ4dAIY+D4ubbTiKHsjRMYZx3AVWW6fkqsKR4BR9tkko93+O +W7otfpJqpKYz+MtrIqSpkng7FyTWc4Lk1CDwLwa2GYP19xd4bxP0T0qt66Gz98C4 +N/lnHtzZ+7yN1IMJV41P4mntNrtqvXpugA9b4r+A1woFGBiLNJDLnWI4A3VnskRT +d9IXq8e4ODWo2mg6WeinVi446WfZxy4a3w+v5AEcOrIE8BkP4ynxK1wzcEZkGUO8 +9TT9EY7llqNLoI+Fi50MokYcPOVTUZ4HLLQwUuKT1KqGFqgbYBpK+sZ5xS4NdHRF +lIphD4fGTiPTjF6ZWbbTCnWwVdr77f+tOlHPJjxFrJtWOthjkWH7lHbPIHIB1qh+ +0hbamNT2VMTocdDr37v8NeLjKGkaRq7gsAacv6OcTPagfSpOmvj7+pebu0/M0sHj +Y7/sqGEonjHXoLh5EOu21qWpTlo15gBomXhw163HHN3AmwiK4AI1WhYJdsZ1vQky +IpgvHb01qapVm7TSmnv7Ja5yC3JRo3+hoI5pI8CUefBWXkoWXD95DUXomZ9EakdU +gBoWqbTMFZR7MZTQyVOoP8yFIzqs3wv5JRgAy9hpRDIso5vj/GLkJGa95ETbTU/t +qOnFe3uGf5+jbq8+8lNffUAJX6FRCMyQTS9uaSot/P+Z7zLHBkCOeALacT9XwAuY +rCHJtajij/Y/mF7OZBlRrqhT08+1yUn+MH4oi9S0oHB+603tRy7dIk1xFZ8ohujb +soqGjZ+lx7nIPjJpe5RkZ3FlmSYCEWuWjNVzWWkkJmxuOP+NZaVjnaBrqJlwUy8M +6Q72Ixhs9CaP1S6ICC8xL8a6ky0+5QrE6eie4nX7mqxjXn6KatjVpB4TJTCPiEpe +k6nkfSfMmgWtd7YEb11nlrlnQGTtfy90ZeSChRLXE65zDrfRu0YPkg4ytvoTISJ8 +HOhEALkN83rXQaBGDGsFthT1i5bOZIZeC/Xu3jtANMjQBMqnVTp+7hBAevvCJDsq +kOd0LAsGYjgIM7jiQx8vEynu2l4ORx4s2t6PWjbc8N0TZkwtKp4aaEt1hxbau9IQ +xgtdICFeoS5og2nV1cmq5Um9eVH6yFr0QVXnRYXtYWw1nanWXcJw8ZknaoFIXo4i +kfJLxOLs4z7gekec91sY6MQbO1wCbp5qAYw2Y12GmJZX1eBkvIbvGoIEkwbdASe7 +00mvbqQT/iYMuaAeMCdLcdqTHEuITF/Rxi5/QRkvXCC31jqOtmnOGwOZ93yrngKi +y6LlZ1t/QPNhs8+wIrMGbeynL4qMTBhxMK6jPWQ4iSAKOLIv9kOUL7sKkbBXN+pp +sWfFnWIx1EkoM3CkBNK46XhHyNZ46zjNcuyJ02Utb3Ls96cnnt9onRuzwRfp+dsh +2PPEk3hcCZ1Kfxl/LBIIaJFLYXmvNK4MHjzFLrYYtu7zAMlBNXJZ0RQfVo0Qsgw1 +vnlB1z0Lqd3Gulq5iPEUlOl4Ii+/vjXrP2WhZmjOiplwmh4cFNsgrwxlWi0K3eI/ +cf8rBGoe3BzRWvpEx5cthEeNJ+YkwiprbriyvBiIiw0UhdrC/BqTXJmy2fQIdMHB +4jiayeB977k/GdnF0HWRszVD9zEHUL47Kgmm4QAzsAK0c864dW0mepf51GaZP9+i +cz8VL21TNLQ2zto52eyrNKrG6sd6XEsybcd056xf98LqoNxxUywqPY6ENgMK1VaO +2Ce83JBpZUAKjjTNz3PZr2lcL/epnzzK+UghNCQFoAbffkYjsnUng5yj5CGwg4ST +3XpXvUio/O9Z2oV8hhZY8sQ5VGiwMkRfAufXKqwY8SGWYQiurUWBsk8VNmvs3JC/ +7o1Lr/6ZEiJxdzSUV0+o/rmS0DpaT9oMLYQ/dm82gPpCsVIvHcqqQfmqIIKBrZA0 +otpXUIqfyGXBB+yU1SPKPwGq/Proif6GQq9UROgN4Y2trIkHvXAu5mZ2Rf09tOGg +DoSj/GLhligvN3eh3lewEO8qQJilc66pCSR4OcwJAOrNnkg5fMQ2/9G/6zYxwX8q +Wo4FJKjTN3z9iWKThnklmgbhHI/Efcg5cFBinoo2EH8gRTda9YvN7MST7dXaU7zO +uQP/eYlKIBdyS18d0mNsssBQpMFs8c0W2YDinzXTnXmXMgpGOiBiH2877I6VTo7l +Mx0SiQ02tsa/rjknBPRTKAwMrktPEoF8iF3dNnzk1Qt2OKYawY7zAECwRnnZhBlh +z8dW67jv4DQmeB8OZLesaWmAQM7/gmw5ux+W8sf0LAonhE4dYSbhLjsehEL6zHW1 +1UazjQK1BycAmxsB09kDP/G4s3v7wrlYT0Z00yzn6nl8OBdSUctjYXESZAcTYMzu +xH4vAUOg01YSLCmMNT1gE8GquzSLHMJHw1xZQdXtucEIm6whVB+35I3nTiyfyL5H +OaCt/zb1TjfZkLNhSS2XOE9C+Tr1NX9obgGH2syljTL+dFs/CkIfy7iyhDL9Tlk9 +evlyvCbvJpFlOCcpS4BKNaLICJucTnHy/eZN83ooLRdqKCZngZUtdXn0QY8qGuJm +tW+AKV95j882dZFEz0PR5lWfpj9FMp3ESmR5asb6ciMZmvBpfEIivc/n5YCZxvDp +W+CmjY8JZfFKmlO+E09YjFK3xSHvwtJx6ZY8Bfrj7LBflwHIOyrVIEPqiOeWxrVu +VTiioMKI1bwcAFg1Qt5BL4Aju1r7cCKHUZZKsoocHfZKkOXMrGmlwFLYlsNoyfD6 +mbtYkwYmQkBkVInIMHUeGsFvJXxwSremq8ZF5fopJofTGR1kUqBWm9eianTYo/vp +cSbykpkD/LYONnIAXMH5kOtA+sRI9EbiMbjgR+NnTUEZgG9fyJAbEA45t0d1lchr +s3+jT+wwRm1PnIDW6cleD2h3W21A620aXL1GbTgnBf42XR3E5GwBEpYPF8ZFjIcO +jbiMrOQ82QcfjDbuxb1j9Bw5yJ95Bc6GfVcfS1LYm6N6s6l2Rt+37uF4j39WO78T +5dcvI5eXA1kuenx25hZMYZ7SLOAWUCJIRSLBtgLZx1B+hm4K/ezmrdbcMQsXbwjP +5sfoWZ45uz5b6I8eYr7HK/M6uiTNJRhPKJzsGlboB4PyMZ//XsW70JZNCYc7By6v +Gtu+HmIbGVVY/jwG/4nOF1nXSpnJZOJHXEgc7cc4hD3y0579maQsYF3ET2m6+iGQ +Pc76eMu/GF5RnK9XGvE07Vej7tAw71sN64bkn8ylVAUhXtHb+Wf7hEh5QDTCjAl5 +pk/OEoRrHikD9IvmYtMFC6oYnpMSPlaIJGC40dlaTjpsfVwQE1NYpM1piIBViKpp +OQRzC5sQx59F+oPtsVMUNyu+mXMTigsWMzmgDWs31uhpL6W09KpdIG+rhlRDnit/ +RU1nUqvTgj1Ix1KRQYGKTQcROFE0+kMx1sFHr/4vvdHK0HyQNlEXEvN4xqAGc09F +X8PTfJGVhuS1csWJqNWEUjJ8DPhIt9IKupX9VbJSxNMSmeYWUBa8jD+P5aQpHrn8 +7AUNZCIDB0VHlKrLuoKCQXSmsZn7VExcZenko3EsvdmEmB14jBEtiGgDaDDAqFR0 +RMh7kqaGwBu70sGqo+0T2wi8DVtb0CXHqj3ja6Ifu9ycrLVYF3L1z/i5wqVYV+Wm +mztDBaPDFHtaeYFsXxyV4FO0ygiwdpY8nKhrcuE6jrGObgtX0+O0ccKCpINRr3Qj +1Q6lGymxQjJVgFourNcopk7Dveq3RVzv6Gmeyz+pza5fy0OWDeX9I3Azi51omod5 +/0qj7PdQtwAwksc3j+bFfNBDtU+YBmGXK/ufAZRwgBLqJKRiHpqk7mtJ/wbls90J +5NTsyTsgIuvSl6CdgGS5+Bxpq0UY2H8gpHSOLeO+tnd+bDLEUlRT7uUhNO8vm+l+ +Db13aStdAa4hFrjA5gmWb5PWyJSLRncZRPs+NAXGSUZ3Iu8GS+fGiwDGtZdui98T ++GAEqd0kPWkqCwjsdlIIXS8m8C7iNB/WGPcx1zrhQdQAEucMM+ZREn6YPwxcdqwD +DIiOJwnv3+iW7DOa5xPVwWNLIg/XAMiG/m6+psStSjuDRWHp78qU/LYA4JuLjnZx +XUGlb4/YCGQ1FeDNtkAlu0Ltlw2qj7NH2j4DBMjcTJkSrf9fyyF68nUnjIGc1HDc +2gzzAru05YD7/3lUrClaNb9VMK16sIpQHYtvRnFvJimZKBIUuo8M5lxmM7S6jnwr +ueIKKxOLlSA7cHx8aM/Ct+6sRu3K+5e6J/5TGDpab+bzPfBERGb1IohRsTZDgO6S +uflOEfq72+y0pxjAUAVyAQVw5zZEDm83OhFYoCnwr3qi3kDPL4+dqUW3/zaSXfGy ++KZAvteWEhsoysiWze6hn8WLkuH5lIfeUm2nY1rJASTb6Ob/tFBPXOFo0IxI8gMu +GRH6D6d3Ff4uWky64LX6FSXS1q2aaG9xRvn6OEUkKUQkeQNuuLcfcwGj7CJurSui +BhKaxFt+xv1TJK43G28mPAouFak4DryeTnrzJIEUqBYamNbzPcv1jpi9qJgkHaI6 +XfDbtSxGZR8EXen1cf1eW4gxFs63uDT4VZ/UbNM7TuqbEz5St4ti6ztY/vbiTE0J +l6Js6IGeeKQXVDin1EkVP5snxkxUO6OfAOT+K2r5aEUPVrY+GgCftkghO6fnh18Z +sb9/IwD398fbnoffswB9inAggImy9DUFEcNwZrpre1ehbH8JNQkeuIN3ssknMu+k +h40PcURHBIyIq/pbznaZtWgT/rPTa77+Y0C9nIqyBjZHuuZ7p9XScl6RvvpW2rRS +MQATFuI2uPyF86A+Mir0qf+ukxMj5229vZBi7BB9MHyHjB+9C2GwulyD8tCiKY4X +NLTPjJgI2MTc0rnbyfzT4sJNVe3LZj7mTgz5MEFW5pIo1h4Vh86HLT7rS6kpvcuX +EpK/Nygsq3KaouzizP1xtPl7hIizFZnyaSuR8Z6hS1GRhytlSJnO7NdA+tHmjpxl +mvyjyQruQZatqMl8dov4KhcBbmmdA2twr3nqCo4/J7evu4hou46USYZyrlwdECY7 +nIrLC+23FnVrKCLLPwRccRAdyLuG2qNonGCu+LtD+OvhXj9HfmC4h918yzxnWYwn +UFWTZ0DgeNb+vL1xHyzvB/ii5qpqKfFeeHSMSIr9+y0JS1s4DOCpKdxsYoukVlt/ +Qz5aZX1+zb6vP6nLX9LOLn3ePtOz6So2FcWumraqyg9K55dxwONdjqwvTvhCSlbE +BH5XjmthhrxJP8b6XAzDUaQGsBovVIP+fBLhjw16pt9zYd6AzpJV64cDW2rniG36 +AveUDPvH2DVFkDwXhOEDC7tZLm9mNfkCRhYrZNSYSpXh7MvpTsR/N672xlSzMqaT +XbHG6Rg8BF1s3YZo+vLevfsaQSSoJyy1m8fSGvWxA850czks85/h9jgpOlo7HcbL +PtYyX+BwAHTQ/seXT7edUDBES+NvzoBgkV34Us1MO2p6AmBqgujLl6cB0weyuE9y +xeyRjLiSWoZ+1dSbwbN5fu6tMrtiHXApC3RMP3EnoNq2dL2OOvF4LaCPb4DEloKn +hIZAWA6DFDHzI9vuHSdTmJMz7mBdaWS8Vp8taPgxXF3+c03/jMnYvIRs2DEyysvk +vzmuj2oWPCc9lww048APplUUPyWs7llhdQwkJQ9N0PKUTRhFqT/v1+efMOzYqBOY +XsM7pqLietEoGDN0TT9U09BZYQp8HDBGiddtTwUPXISTwTpWRYneZnql2mEGKyYX +0ZbyCHlNxvA1tkzdi3KQDlVffHOQb3KolhYvUQxx5H+erTlPntPQ/EWljxpxpubG +wAhwzzeZcKbkbOfhmM3dubjvDsmM3Uwna2uwU+kZTV4Gz10dOdPQYvcFVyTVrHAw +W09ULjNUgeGxOFBEYejqsez9l3dwFDL5L/FJ0dshU3QKV3qjDkyH679Gj2wZ5U+h +MhtgUF7P8qSkJhExNjSAEk2TRLZDr3aTp08gBAbcS9oPHnxxk9bFqs99HHXq3+fM +DS/xTf892IBsLpU/2n76hm2YtjQHh+ZKpap4hQFFLqTFxboKiEgyQVSy+RB8aPwc +nUmbV01EKE+vOnjx7hRWk5SuSvRVq6UG2bRzqzHuj8Uo8diuWaBxuUEE9ez2GBUR +2NcgD+hEn3VeZPRmEO3IiLGnHVhqyAXm5EEAe76PuzMW0Z+h0syJv4oVnMFdTNTK +0H1Ch9pYxuWUNF89/lbpVcVyh+w2damDLkWdUhm22Q37oes4pVYp4A7recogEiqc +mDLjOW0LlbSPZWPmi0GlKpRWde5qP7uzQY85ZenqemgFK2eVLBfNjh9vT4paPC/+ +t2QgZHqlqEHoGmiELPJdOkJpT7Prsz1IkDFqBZ4dUdnWZ+xKSDX+/s1ZZDUeXsC0 +kNI22ZAVJQApiBbPM9cVtk644nKuz3GT9uqx/zoIA0BK9aKu2LdV/ke348xsAYM0 +DPenNqS5xCrMbi6NJOyXIc7v/Ch3J/ZAyo8Qq49m2GLlTW0pX5e3ZeHmGzBNgMpD +T6fldc19wGEOYQws8GU2dw9g6iUC7lwfv254LGeaLuQ6SrTpoXNgICY/ZaJCeZ1E +TKjR4oCzPogIqH1LgJ9RyGxKQ6+jNUOGWf2+JcDjCpGF7Ndr5aA6NiBNuwqsDLyc +tER6AZuFI7HkbxCZtKpiGtv8LL5eNSelH1sUol9jBSSqTtiM0rcdz/ZseIdTrNer +ii0KwQ7ZODbfeEuXk0l+3FiCv/ijYFy94XA2q3I54TOEdAI4au47/koJn8Dm6l7z +7VwxhIEXBrRpnHzwB/7m6lh5l/WctO0sjhKCDZa50ro6jvtmn+MUvcv57nmE9oWw +snlKO60Jgqm4kqnxQ07rUB088ig0sywdliHy2n4p2Dbpq7QaBDA0n4HPTIr5Fsxh +Om3BDMGFnrj9yRbHV/UP2cTUOoqrxhwBNZigpkl757LGUF16WIj4FDYzIOPElupr +I3ae3uu+bv2Qks4JSvpGyiBluXxfnStmGJZVjScL0K5bvANqVzcGTZX11wzY1ZPE +3pZgWWsR19lmKGZmx5cDv3UfovSwVp4Lo68lmJFCxttqaU2v3qCiiyxjvjzBIH7M +zbF1foDmll99LnZQa46KxFlAvv/hzaZeqhsLIETUGxoHRWMEG3l/qHlBCSrNJq3Y +KXRhYlf6vQYIrx1//FPeCuGce4nyHnmt+lpRMQWut9EAWDjPRJDbYb5u6iKdCdXk +cgYUtHMN/ltJ+tFidiok0FTTvFuy3dZxDySMIEOhcQdFqkuzf9nTn/pEIXg5OjmA +a8Y603QM2EiIjoXH6O2bXJXtTu7DUVeezpX4wfXz7l7j3qSLFvbI6WNfIwg6B9p0 +LJlawxhlEZbbqZJMO+8OmwP7+sHFGIv47oBEUNlHszLFN4z2r/PBeoprGIo8cpsf +pYDf38JeYo0hh2uoWOR2+19KuFNLRkOgD2nMlQUJKsHvHm4UH4IXFjZccHZVJZ/K +tKdhAeJ9CFLTfAIIn4jzAvrkMdV6tbFk+e1hY4MKribfTp/uoiAuukb9C75xL5uB +qmeCOLge1s7sEGWlgx/eGtiXRsja7cyehyFyBajrtUN8crRjN4D8FMH0meQR7Xgw +rKBEWNlsc/KJBBnJHKnzLeNDkTW/XwKDX7RWkLmTbgwJtVaFZZGxVYif4LZzLzxf +fsJTza8ZugP07TFnivnsjUfwfpc5Cnv4atcs/uoc/3oW2Z/icAUHq6tfY1I/DwGW +QgWrqZq5WD79fSjjgSQsLyEgDIo12o8jlISxlcAl4x2v34spN7ZNhyLiHFZT+iNX +GzkdXRmCQTW5wUFbdaZW5ZdCfGqsJSkp6OhHd9Sk5gXfBNk++nK5VhQqCeijFmi6 +P8HeZO2F+mm1ZBS5yFXLfE3a0Tgut+fey5PIbH9Hja3Fi5sdqephsDtNn5GOYIqI +0MAnpTxST1X+tMuS63sRZ7WqsUvaCSNL2XKLDD7yMNZ3O2UZ+AJg1c4gk8X4XcaC +12w0eQbn7EUJkasi8DcPnhmp65+rLUOVtm/vhzVTfkS/oZHJIptfECPEdVy+0VYL +C0getEsr+wKvKFLTZzBuCXOCZvUSSIqTnWKzFCaB9vHTJr7Q4d1HOQQY4xMHEbxI +h2ibA+v3Y63Rpp93w/uYtJAfecbD1EgbFfvjDySVorJqag7AFe7/gYp+xWL7glNo +GI6YR229uticisXy8JmsKlXWzvegyllYOjVJ6JkIyciOKfyr0TNO/X/Y4qa6Hb8B +SbfndxuvvNbpwQ5Lgo2K5bPFTQwyIglIe8CFoFVzzedHTI6RlaZJsy7c/0Nzk3B0 +65/8k4eThGELoeLS/mIwWBT9EY1Je6grVx5xinJFOKhBQbPlWw/aCpyQCzhR2FXB +Dr/5tlM7jrUGjoHrulyUTNVvmPqOf75dsMQwWcX3oaWkounXhZ3PRAd0l2DMNoAY ++7mw7IqYT0ccaoz7+e9YiGDRpNfhfyERHm3JuvDkZedeLtW3HFybTX+1ll1AGjy5 +oyfr0ANI3yQ8KxcvBjG96zB7x9IP3iGN+KpP2kFaEQztaXbynZjtYoN7R47Hb5Ee +525wPDIalX+d+IK3t80fi7lDPBnYAFslt1GhsCsEody0vCkxvuvC6Q== From 2b14281e1bb6db9fe03992af8a89c96a222a70bd Mon Sep 17 00:00:00 2001 From: Demetri Date: Mon, 5 Dec 2022 18:32:51 -0800 Subject: [PATCH 010/495] Fix a bug that failed to copy the cert to the managed node (#577) Signed-off-by: ddimatos Signed-off-by: ddimatos --- Makefile | 84 ++++---- make.env.encrypt | 542 ++++++++++++++++++++++++----------------------- 2 files changed, 315 insertions(+), 311 deletions(-) diff --git a/Makefile b/Makefile index da868e7b9..f0f6cd9d5 100644 --- a/Makefile +++ b/Makefile @@ -58,43 +58,24 @@ divider="====================================================================" encrypt: @# -------------------------------------------------------------------------- @# Check to see if there is an unencrypted file(s) to encrypt, you would not - @# want to delete the encrypted version if the original unecrypted is not - @# present as there would be no recovery process then. + @# want to delete the encrypted version if the unecrypted is not present as + @# there would be no recovery process. Then check to see if there an + @# encrypted version of the file, if so delete it. @# -------------------------------------------------------------------------- - @if test ! -e make.env; then \ - echo "File 'make.env' could not be found in $(CURR_DIR)"; \ - exit 1; \ - fi - - @if test ! -e scripts/mount-shr.sh; then \ - echo "File 'mount-shr.sh' could not be found in $(CURR_DIR)/scripts. "; \ - exit 1; \ - fi - - @if test ! -e scripts/profile-shr; then \ - echo "File 'profile-shr' could not found in $(CURR_DIR)/scripts. "; \ - exit 1; \ - fi - - @# -------------------------------------------------------------------------- - @# Check to see if there an encrypted version of the file, if so delete it - @# so it can be encrypted. - @# -------------------------------------------------------------------------- - - @if test -e make.env.encrypt; then \ + @if [ -e make.env ] && [ -e make.env.encrypt ]; then \ echo "Removing encrypted file 'make.env.encrypt' in $(CURR_DIR)."; \ rm -rf make.env.encrypt; \ fi - @if test -e scripts/mount-shr.sh.encrypt; then \ + @if [ -e scripts/mount-shr.sh ] && [ -e scripts/mount-shr.sh.encrypt ]; then \ echo "Remvoing encrypted file 'scripts/mount-shr.sh.encrypt' in $(CURR_DIR)/scripts."; \ rm -rf scripts/mount-shr.sh.encrypt; \ fi - @if test -e scripts/profile-shr.encrypt; then \ + @if [ -e scripts/profile-shr ] && [ -e scripts/profile-shr.encrypt ]; then \ echo "Remvoing encrypted file 'scripts/profile-shr.encrypt' in $(CURR_DIR)/scripts."; \ rm -rf scripts/profile-shr.encrypt; \ - fi + fi @# -------------------------------------------------------------------------- @# Encrypt the files since we have verified the uncrypted versions exist @@ -102,30 +83,45 @@ encrypt: @# -------------------------------------------------------------------------- ifdef password - @echo "${password}" | openssl bf -a -in scripts/mount-shr.sh -out scripts/mount-shr.sh.encrypt -pass stdin - # @openssl bf -a -in scripts/mount-shr.sh > scripts/mount-shr.sh.encrypt - @rm -f scripts/mount-shr.sh - @echo "${password}" | openssl bf -a -in scripts/profile-shr -out scripts/profile-shr.encrypt -pass stdin - # @openssl bf -a -in scripts/profile-shr > scripts/profile-shr.encrypt - @rm -f scripts/profile-shr + ifneq ("$(wildcard scripts/mount-shr.sh)","") + @echo "${password}" | openssl bf -a -in scripts/mount-shr.sh -out scripts/mount-shr.sh.encrypt -pass stdin + # @openssl bf -a -in scripts/mount-shr.sh > scripts/mount-shr.sh.encrypt + @rm -f scripts/mount-shr.sh + endif + + ifneq ("$(wildcard scripts/profile-shr)","") + @echo "${password}" | openssl bf -a -in scripts/profile-shr -out scripts/profile-shr.encrypt -pass stdin + # @openssl bf -a -in scripts/profile-shr > scripts/profile-shr.encrypt + @rm -f scripts/profile-shr + endif + + ifneq ("$(wildcard make.env)","") + @echo "${password}" | openssl bf -a -in make.env -out make.env.encrypt -pass stdin + # @openssl bf -a -in make.env > make.env.encrypt + @rm -f make.env + endif - @echo "${password}" | openssl bf -a -in make.env -out make.env.encrypt -pass stdin - # @openssl bf -a -in make.env > make.env.encrypt - @rm -f make.env else - @openssl bf -a -in scripts/mount-shr.sh -out scripts/mount-shr.sh.encrypt - # @openssl bf -a -in scripts/mount-shr.sh > scripts/mount-shr.sh.encrypt - @rm -f scripts/mount-shr.sh + ifneq ("$(wildcard scripts/mount-shr.sh)","") + @openssl bf -a -in scripts/mount-shr.sh -out scripts/mount-shr.sh.encrypt + # @openssl bf -a -in scripts/mount-shr.sh > scripts/mount-shr.sh.encrypt + @rm -f scripts/mount-shr.sh + endif - @openssl bf -a -in scripts/profile-shr -out scripts/profile-shr.encrypt - # @openssl bf -a -in scripts/profile-shr > scripts/profile-shr.encrypt - @rm -f scripts/profile-shr + ifneq ("$(wildcard scripts/profile-shr)","") + @openssl bf -a -in scripts/profile-shr -out scripts/profile-shr.encrypt + # @openssl bf -a -in scripts/profile-shr > scripts/profile-shr.encrypt + @rm -f scripts/profile-shr + endif - @openssl bf -a -in make.env -out make.env.encrypt - # @openssl bf -a -in make.env > make.env.encrypt - @rm -f make.env + ifneq ("$(wildcard make.env)","") + @openssl bf -a -in make.env -out make.env.encrypt + # @openssl bf -a -in make.env > make.env.encrypt + @rm -f make.env + endif endif + ## Decrypt all scripts used with this Makefile using the user specified password ## Files include: ["mount-shr.sh", "profile-shr", "make.env"] ## If no password is provided, you will be prompted to enter a password for each diff --git a/make.env.encrypt b/make.env.encrypt index ad7ae2396..84560ca7f 100644 --- a/make.env.encrypt +++ b/make.env.encrypt @@ -1,267 +1,275 @@ -U2FsdGVkX18+W4d9i/Pv7SPHW1HcDa1oC4SgUC3uWVpK7kaXkcdk8lV8odYGtLxf -QoKF/usOm0CkUndeJRTcHdPL4wXDkOp6edyjXo7+7DjopKio+g6YH6q/HwTeOIqR -EUajgflZHSDQ3BwBFNnamxthpBUKmLuOqSd/OMD6FeWsKMW7Tm5lJNBH0sfYA7Nr -sNcWUHFgGJeurhKK3KIHqeMo+Yi2NbYXJ/s3FQGdgUyE+C/+DiRiEUsF3Ej8Vz9N -2fQOy9Z7LgPx6pVVzBzxJxPWAnz6W20rKIDD7Cu75Vh5zJ+TbTNNIFbvFg6caCUS -1/ueAvDy0NSJJle3SyXgO5ApV+JNbwRPTkE2zcoFP5EyLG59t/Tks7U/OxjN8vwV -Tl8FJcfc5tLAQnpm0hxVDph2++o0evh81vXQoUH3atNzPaldrCIrt7x4gzUa579j -cfTMiivakat5dgT9ZE30ogjk0Xvk/phox2fkp3t3GFPrk/H1yd9uC9mUjHww+q+C -Qm1H/5ovWpNNp/qX5npZQOJmb543XUqf6Y6gLrfm4rdL+2GYdrR0fGzaapSx+LZd -owZx19Vnj4NRfk8nHRF8NupffBWBQeDhZcXoFESL5D/pvSoChLUwISv/fw9o6W3w -L4LASrZAJ5ltcG0wdDy+D7si1lDOXREMv3j7tymG2Xtuc7MnVjidFgy4bHz4xLAH -zWBDFXVMIq+yRhL5bYAm8ffVvqWBXQz3Qq3LhKVf4x3ET/bl3jNjYVLMdGBskakI -tIDliOgssk1lAKiSzAF3J6tFN30yEHMfeTaHFl+LUGIfcNMtiVspTy0eRsgRaYv/ -Yr7kktcjhDZlv1Dgcd2Bk7VZwC3wV9DL6uMVwLTNwixsmS+FdVVmoo39j+NMMGDe -wD2PcyQdqHmmnGzhT4sIiezFdatMtWtntpguPjst1imr+58Ujd3D2w+LzE5ajx61 -/9+8ewlb1c3ScuzWS/9bFztK7jjL7ar6aI2ce+eTn70OnurJP3Dp9VBHQlz7RsaH -cqK2dyFcfI+Z3UOoTe504i33Tw6jBvdLl+o2VGPJa3CXhvDjPfCo3kdDMl6g+CLh -Bi1FKuAPkK0y5AZdC6ZbVIpv9EpPovkFrkxngNVmDbrAELMtNVIBxLIm89SghSPT -w+oBNBNvlzdkIARsa0JoYjst3YGDBo6NkLoZOcrUO6Ct8OUtXrXC14FBLpct0yUa -5FDX/iPrhKqsl4HSJ+/FuzMfX/TKGaDLcfL5+x1o5r1liFbM/VruPu+4AsFfiSv+ -Rfrb7YCTgyxS7/sTl4iVvVC0jmpA37mgO+g2hEdCv1n49aNQ93jp/rf26A/fGAdH -P+eGXOOJJyRNsr1knSkmpdOpIlo5L0np7AUpAjI7pC3iSDGBu8JXoK3ciuIY13NV -tvitHB7rsX61GHr23ph360Pqkb2PGkkBILUNF8ZOFfYoLskUXAQQitZ7MkGyDXJM -e2tBJ84BBl+xk9SXzOf+RR7oYL/caWZksUD44bn7o6O5rKlyHeaifb4pRLjyZawg -EYvslYtAgsSFEFFDnAObkA4VlpVpfWzPlK7XTUsiMEpLMPv64adYGsvT9JXlPTKw -v1JOWdVz5hqSAqWMVmXKDvwwVnetVhLsSS4bGBDaIKXFgvCiBbNraURnN6/nR6Ve -bIJvsAzcTJ/ln7u+D8LRiD3gjbWLT10wufofi50tyLMI7/0d7dUrtKgDw1XC7+/x -pNnsKRreRCVqQ/dt+fXMcFBhYJ/1e/SIxN7F4bYJKPfs7yuSqdmnO5mdj/hBZMW0 -YoDVjsBTm8EHkGWf2CoAWYJlusIuiF0YjkLbBxFTdlyZS0EMm2HFrioBYSHspWH/ -sA6x/1pxOoIxHO0EEJdaoU9syOwnUS5D3pnfZyVtxIm1gquAx3BXPIyuXXgBFhdZ -GSnOLzLlQMmCRKdlP284GIRg4rJ3s2kAmGS1L0JXhDzNupysiO+X++ztyXPtYjnf -Xmxu2KqDN1a7JS2bKRGwruPdXqtxmLik+alPQ5UDpuG2QR6FFuW4nOfC3R8gNolh -ZsMnr6dtX2vsM5nfb4p5DpE3ZOIMz/PS30pXC3179pw+NYpTuFqoZMyINAVddqWK -VAVkvH49dL++OFERPXOTp6Wa1t5Mo1sHySMx2kftWv5EHWzw2Zs2c5W9yfT1hsLm -uvpdu38IM3niYKueLpGoavUNYDmNivo6C1eca8I9HxZ8TDhk/jqQD0j9lkTg0Wp6 -CG1LNc3QEerwX8kE9yrNefq0d+1MCTsZ3Hb79pLpVYHZBSemGz8BP4OzgA5rrq97 -t5RUPz7E9vkzGLCdb4p9ln/JYY4M2sXpK5lal/L1enFfPb+2Mk5sRmHEwTnO+VSv -PTSf3DH1lIfWnbU6WeQZSohHQFasqsKRIUYWUaucQVEGMEm56bnsrciumwLJhkNd -JSGtWv+zh8H0Qrg61ehxBUM32t923ZX+TKLkO1dHoofvVpmKQYYMCtjQfuqqlIVF -Uuc5RQzJgs/cWYbBseZMMKuGC7KCwlqwPnwHqoT31LTvAtbhxDHAXKukpzfpPaBu -BWKuZwUCuo+KzaInaaABOelUjdZQJt6zVN5/OjeKG/fEqJgAEvSLjOLzAmDtA3tx -6nNTgfewTqUaL3iWhsz2C0Kkg2pHK75djwA19sSv5M6ehe8odlTo36H+JE7GAyH0 -W7MGXLgjTTnfXk7eZcdk9CaLvl3zZmzMs+9MD8rYI9RGWVb43L3l/56QRB1WL6rP -k9ntQFX4lgnDBukyYBu3H+8byZSwoSspDiybfSBVmU2F7uadTHOQAtP4aMp9y6le -g6Eyc4n7nGRaG8kOJ/pYKcXMDqACT6N6fJaoUx4v+U/6/dyFGVe4cVDVlnGW65Yc -NQT1GrsxzUmKgSnXJyS0YQ/zWrOzt4DUybQ3Gzc0ey1yX79UVD5D6IuvpSbfzFKF -P+8OAtICNXOsPMvYJDDgACnOhs7d9DKeo9gV/ALY+i7CmlcrESG9a4BbIjMhpU8A -HfNP9KHatNzc2ja7RqHvXZL5bqLU8PmUgU79SY0t2eZaTB6ZYd2/F0iVBtPJ40S8 -O6aFtk1hKun1+PD5GBMkhCvGAl+OV3vWA8TJwfr6K3KZgMoDHiZvDe7WTuE9OdGQ -jtkwbpDYmmQilANNli5xiTQdK/B9SDYOTyhYzdNmRg2EF1BqPydl8cMDonGAz5GD -/oDr42nc+KfcIUvWFAC2WqgYn+CABXINY4CuoL1T9a3IDkgyP2OvA+Il7st34GH4 -aBRhYHPU6+d1HtmlWlZ1vdMJ0a9r6dV/0od3lfnn3JjA2saW/3aO3KRLEhdSvkBS -GQAnRaHjOka1maSxungIdLFAUddk++fPPLbnHVV7xSK63YueI0EVfxw42vP9gLxY -9K7QerZyq67vWs/qY+C1P8BylSv6aZgloRX0Kk+X9bV09t5Mo2c7Tr69Yv6hVJti -5bIdc8CiTt6nBIKaz7RYa743r/fAggEIpFAU6uUulnl7iqMOTHsNTcNTZyz1dbbf -NcC0F4TuLFNAaToLDjg3oQRni8LXG8wDMUcnC8ddbKtLTE2mVNg3QOnshZ37BSwy -JbDsJ0wNcS32XhEX7N7fl6vH8jRZJLsEUwyZWev5vB/BcxIN5iAV7Z2WWyWlB1rn -Vg4JqQaXc8jqhRWW3RRfkvblaDTmKgAPsthiJTTc42B8p8IXkZtGw1Io8cEd4w+7 -GirHyzU66+c6mz6/LRIc93OvKEGJDSbhhFsMQsNDwUPddqNKuUaMECIUbtiy3BqM -Z30Ilaqt4hGRXCmo164F/MEQKfZBUNz2JOCEOfsD6fhAmW+g5mTk0lo/79KI8zBk -ny17zbdUBdezdTrN6mOnal5nNeXvLxB+6xypEvz9sLEO8VgO7WxvR7AalBydhNNx -xzelrTnHqm2esJp6/MAg8zT8UG6h115etI3wP/8ptwLD2fPLDlmiufWjJeaxZZXM -b9tY36ehaWgyk+9M1y+5RbKOCW89xlXlY1FOuRUGQbPN/44sZiJV2kJQ27FQPyvN -ykGkVfibL2WBSSzP6UhlL43miPglpMA6CG8ygrez54q3J5p+cFcCwzHbxrv0o30P -2PLzbKlmctsvXaRCd81/tmuACJOUIduzc/NqYgaPP49sovre4yjL/TVD5RuGEZeI -JFWvc3/xnO/srDAqcQ13lCh6zqXtCpllytHMLm6Qrdd12igwd3E92pwJxIrSOmBy -TkIvebk6cbywRpeetgnA6JDeNnWtE+uIMdvvF5t90RXikKdalJK/hY7oh0byXTCu -U1EDAcaHMqyGJaSq7mZdJ6lt02QUDmdHFfUVSRuu1hIxCff7btNY0Ug7hDmswBZK -i8ukwQv3nELoizwhM3ZYIh2BAVHoncNLcorCtz882uJXtZbMSC7RiZ3CgnCjiarz -xjDibUjb68AHOkS8HguEtCB7SAeUFu/wB+5GrQIg1zVquAieHqUzXfWelwFZhO+M -5G19qomUilZ/3NtRvzxpt49oiqmyshyjt/AYLQDIZmMTFxTR/8ydm0E5Bn39JSsa -lXCmK0Lo11i1lhV/lDkVC2bNXo1KQlm4v6A3JTC0Wo0iJD0gnIyZ7YLz340FZV04 -s8rSOQBRuN1AOi0SbmGMU30y8fuWpWF2gqenIeCDUL7KrUDr+/jTRYl3CgogG/wc -rrdSyH3whxj2/fTPvOsy1cHtiy8u5ffv/cMFJuFT3SVPjJRgjnl3zchMMLEpjYkJ -AufAQ5gLmM8s9GGlFOPQs9ha3/j4lepahkAXNkqAAetBpMst7HRfxcW8S2NT+1Fs -rmFSGvpbzMkMkd6aFKUhCirKwJRiYNB+kZBInDUJJSlVBpBwgbGo5jb1oIPyZsY0 -f/laKLgtyP6AkLBPyY3lgTmJBAfC2hEQOLNJlMO//soYPkV64pvb8h9lQXV3Bn8Z -rKkMYk3SiXEmGzoFb5hRXqHJagXf/QfAbP1mhCtl5SMJ+bFxR5OdKNI5Sovl0q1p -NEdLJEElZ3jnIjHAoj4xRmFdwKa3ajFTmtlPw8v71M9z0rKnmtMynvVRfSb5M8xF -q98EsPzl2lCtGnpzchLNKEyuwtiqDJOlRE7SC1ls25BAJhu/LiEfcOsE53bowkp/ -wa9Y6A5HLQ+0+/sBfJy3fB+ufapFKGD9RKesSxJCNUC31v0vaCQHUmLyprr0Ftvd -E3p7drG0Vo1XbfZuFWnFEtAjPSHfpnXOLWjz27IgjlsDt+JxX+IIe/XRp+Iyl+SJ -I51Azn2KhFNnJ15RXSMU+kVAYc+5AUMlcsKgRMrF3CI5QMEekCByodV8RYbLzOjJ -YV5CptYwGdp8/x7zgBDmCOz88T+Zkr5S1iHdeh0PEUMEHOtUscpB/yuC+k77QrBH -YRdWhrM9vLwV8AvyLA0W5Dx+hHx+RDJXCN3RASS/t0H1bgiUzHTB9gg6YwuNQK8S -bY0bIqM6KlJ2CxN22KBWcN6eJM0E5ljmtZo2ZD09SQ4cYnWjp3ckrY6lqpfiIrSB -vIJVCSV0mPUgDlNdid0aOhl33DiJPT+6vmaXxwvADHym/ag4i0T3fnG1SkUWZaQj -auLTkNodiNNOaFYZqz3frCdFbtkCQEY8Nbgt75WSHRO/tI6/VuV8QPZTHzevlyQl -Lq+Smlt0a+JGLKpkViiQwv4xctaywcl+juTsQSfPih8owVCjzMMbOdl6mo74lSHT -cmerKgp5gtDac500g9PgB4hCWYNsdJawFwWnCa2MrSwWa5NOarr0KFSULb0BmBhv -b7e0Dxfc+b+2qRZBq8Pr61j0dt+x4MMMPjXS3HfvWVpqK8MzRc9fcGKOelVNLuAy -Y7eVkuPrygQUqQX6tVYBrBREnbeU5+xSrxvPbW9whAN6kvpM1Z9dpLKnY0XJ6Bu4 -mm9z3PV9ZlZWCjozxpDpc3Cvpod9RE60E6KhshpXPzbam5TiFT+YCBOTA5AvIzip -QRGYcCzvMgjTXajPdm7pHgAvC2PScuj84SvdggxCp37RAwkQ0eOIz9WedeAclkY3 -mC1vt4Px6Xg07i0tIRtzpi9oQM2bTnHcPU846eNh86dEhFu2WwHdCI+8LK8FrbAz -eeq1fIFO0UyAXSGXm0kgkOEo+LUPzpqSwXyvM63uMww7oPUhaNwjH6HVOKKdlX2p -KVm663fahVOLUJ2Oc/ehjj7J18Q9lgT/5S2Z8JMom9WaDXNx4e0OAT95flSbF4IT -lSKRkscQ1G8mnwNbS12r+RH490RQo0aJ/NzLEx4W4rJe49G+jDb2obpGKQ7/ZrWa -b0zu0f/sFpO+xkvRgJ9V80kCVzzSNc8UyktkUp0ZeaNvxVMG++R30bC0//M3RFkR -ky2B7Hw2TRifYV9Uxe46T6Ik/TvtGY8mn0NX1PsK02SG+6GqAlOAqWpbPUaKMMxg -3A95jU+bSiEeBTEeb4/Ydm6tXqUeW5IkT4RMvr+sleMOshmzcY6aINkRN4mApYpG -a4HEqU3+uaClPEyyXEkV00Qb3BK+jVOEX/9qBP/f4zovhaBnMLupBnzhorgyMQWM -vxNcyENv5B8yidXf/qUGDb3cAgOVKaSEE9knl56xSmJ8tH7GPFXHYRhnQYtnQmsa -r099VD4Jgg5075n+/y+rFnt4fHFYh1eF/qg5PkZcSpkc5d8a6fePgad25TjPPbtv -q0QLgzJOPeBJZ18emPQTVlh8SwJ2mcaF3RlqCZ3QxFPPpX9zzyhOSiTIGWoMrT6B -HHNO4oSXFYK9JhQgqRjMfdyyYJAb/ITxBX/juzyqPFiCPvLqLn31XfmvN+L3/iSq -xPB+p6oZwsGxrcZnkemkAOmECo93bu3t7VomHMLaj2RGTtVzB9slgjaTorNm3w0N -+N6P/kgDknJTH6x7UtR0jiqcE8xIi00B2fOwWfmSYnbULQLboqmLH4PDcE6Fev4P -noaWOrDtz197g3D3Hwa8XuMI6ZhKE3HJa0F+B6ctPlS3uhyZWZJwNjm7BYU4cCAR -lwRrv82AG8Tsmv/3kS0jpJfWxiWnupsHB+mLUIVBvP3kNprafQUWrir2t7B5al42 -vymL9UYcWNIJkYgtaPmWgYyKGXzbriJi8RiCKjCTkP7jC1DdENXLAeWtXXqTmMyY -uqnBHv3TZd3ytZT8sUpoGI5JLQQZO6JwyRUSi8J3qp158/x6C7w4wP8IE9LWeHaV -JbhnR5Mhw7kRarAbd//I1JMrZTLOZJI1dkYROyo/LA62aUiKjALlP7EGgdbIoYFJ -+aQHCn1j6Bl54R2J6dcGe5hxIlfx2gnihCH5LZUNPlZojtvxOlcsTT0gK/jRmtm4 -RM7/0maZO1rMiXOjwSPax3BYpw4mvqk9rcY+pDatkHzznTKemvt29fY7EB+MnOSp -lFTzs6oUsyvEDc0f2RRinoYG8IkwiXs1ZJ+OR+auVVFtdPGYWskmeYiIUwYeVtnA -MsTd0jKLc745zpFlT7njRTNobVFBsKfX+zUVStFiR3xMP0xrjbweCE6yZl7FqKOh -JsAUDsjs2oq1OgvHB97y/egW+wMNCDJYuUL6AT3/HMIw8BiZgcZ/yxkTedNxavIu -U0ejzcyBQErrVVYqwQiHVI/EuUcV315ZdAVHrWH1CHoQcnTnrXSoyNm2IFLCtk+T -nP9UT8pypoiAralBGAu/OQ7TKtHjCSJvdmEf55myHS8wwgaFQqWrR1PJjc6OJrGb -8pydkmKHshrsuMk6Ww1HJgjVjA4qSfk8CANNQgV53JaXIc39uxfuzTkxyVyFp/xU -sHWKq5i2mgpIUYnbIdrutbyq0pfBcNUqswEQWa6USsN05fOfLfuJXQTaaPfaDJQF -RUrRyJ1eEYV3Zt4ulhMUlZzYZU9uU3X2Suk1vsUgxCSOJzKKD9Eo1NSSr+gCzdqD -PB8UiKyuc/Q0WQwcWTH0Y6iuylnVQywOvXDWXOiNaznMMnPE5b1Yp4t6+3BlOUr6 -5GombF8w2C4rYl6EiceWXvZ0PHSpxacLG8xgA0R4Cm1gAStlwbgX7+CrFRy6KV/L -CkuD12XZo6iW8HduVA7tk2lTB9RFMDEfomiwqXWOGkJgJLanjz6RNmV+gZp63pB3 -tNDQ5AiIDoGFD/It5E2lhGdTubXXi3bmnUKfcU9zUN5zIIZ8MJTC6KWHeL8JH1Uk -KGhzotqeFH46uzuWOU6LqKoGProk6QxEgewkvgY0wUCFxfc4iSN7hQSFQ5UCBSsk -BbMqFxawc1Q6U9RMGfi1tDXURmsyJW2az2fkdkTKS/uWkUGKgVabbs4RA+lYv9Mi -Q2sfhl8lHKtMMFoBM6H1Ias/Hr+WpCiy+wqzsALdhxfNk/4GMu4g3LO7y5IR8cY0 -2K40cNXC3LA3qXRPvZ3yd0OwXpOpMtryGQqhNtWm2BPe+2o8KeuohXPI7MCeeKiv -CPmbrrhQJJtE/wF8JqqVv5pR9bpISaUq+QxlCA6sRS7VoOm8oGZ4R9AO0Yk+P/cC -1dJZYc/VLqggIOu9DBXb6SMm22ArHCQ+OtryU0pyEpwQcbG/JvhGvsGz+ztY2PZo -PTRaWh+Wj2OJJl0jpBYjqAo7Uy9QSx4oX6q8R4mqXTSeKARLOSMw6ccdRCG2QdCW -FlmmeRq7YWtCCOLXbKJVE2AlaPBKG48E8DmP2ndADOlIYfWnFrJW5yJ2dP6zXQgT -dfOLBiJshxHzECBUvyX7c824atznnc+jiGQL4zThymOCYK4XXT7wDdc0PgIly6hx -R1N8VAH2dor1sSTXexR786sL9BEgr5QLcc11xp16fQu9+HSafsEn+Qupw5nekA+h -rv1dUfuuWQ5cjlfPk0xmw1pQctYfqoVw2/4P6nrzxKj7uZLfXqlTlmUCLr+YEbxI -2iotSilWBmyIHV0MA89yWBSM1cLxiPEuACtLuqeh1r2sya02rCdn27ZJcB7YdcPF -fskVugcZRwQvwfQPSqzKExSqhp/frIgK1Nq7dThGQjQ1KhNoZP+EZRrobPKI60DY -ol5Ihr/FhKSQfhODcX7d+yYXD5V8rGtkBfr78QFsUny2zR9GpSPQIwPInFzO8Vhd -GmDDfGjHDXYektRcuBe9DPRKVUjrpgsOXX/kzju3OEjb+ZfEL9eyDMnBTL1ELCzP -i6QnRRB10igRgkVdK6CVPY8fFOyR6FHI2aIriALZClm6OlMrEcCPxFfolGLrlIR4 -S0nYhAAd2wbT1mEot+LJ5MaKhXgFvvXCW3j1AU0cP7mebvW/LqN+f7VhB/4M9bj3 -Gsxj3CA3UwZMA+/Ufo3NzhHIIgBywDK38An9uqtQcNWwLxaax5FzmKAGBFT9XfnY -zAazI19eqH7FezzF7/nkNA395PS/+Y/gnlavEjst2zma2Wa603vgx72DdfF67y4v -4tI69ahFtNdVoHlFXfFMwMEwgn9AaK1mfiLwE1qz+CaGN7mRjlN4E42v3JEnfz9p -iKJX6cj3sM9VNEflKEbGtA0jFH5MiOXtoq7+yqQJTzLtTeGjSsaJg7DyL+mBI3ZH -Ir9lhxwuklwqrzHTfwv20ORRXB+vqqEztNG0wqh31Wtt0z+R5s8T+h27uEv3Ttoz -iH14MJbRxEZvYHM71NJchr53fyitdM7rRf7nLYz+mNjTyq7y2ziQwqWxkkKZmrK/ -lN7ZV7dPHnYxG1Dh1tgOeen3eKAL2iiz+hOyp8SsX+HAbGPgB/xT8OCqvASIuHA4 -BYVXgUpYWN9xiHhNp4XFfwSxjWEaH8tLkBDMDUl99UD44aw2evQ2k5Oy5HcgCal0 -d1D+sbplj9Au7vxmK8tzX/IsIeC4lXd+pMTloSGosD1GfSbetfIOMvyfON3Hw8Xo -kvGdIEcklKMwdlmv/wJ43WHPk3Z6i1uwQ4D0KOHdF19R4p0gJd7+RIP9L68UGdPj -Kap49lKaoVotry9GPnkIdeGR4YLJ1X9jw4PunsfnjHRC9tTIAKWve546gMFXtZvy -JnjDogx5ZCSyEAkHnTzGG6gDucDwGmRtgHLfeKBFRDqZYpaPGUPneAL79ypbkcBp -bFMeiZuuz41ugekZHSvdgAkiIUksEFAONTjeXmUj8oIhq88lUpKKYSUhE4Q65HiK -0o/wL6NnKZR6YtMgpHC88HMrb/u9c3CP+UuDYaYpuedeoMIJtOrv4t8POWtTRpQL -jk8GwpcRW3Q3hCtmc9ZLpG5TxkwZHr0pBX5P27I5RGEkQwLIQ+QWadUXnIJqrLen -fgBd4XYF+INXQJWMG6hkYMDs1pafHB+4M20l20RoGAqZF8Zd/GXnYdZlVsQ0zBzT -7JV1zUJU/EJetj3rTTBvsqIA9/MtqdPuA95Bv37wjJG9IhrwB++mpV//Zn4dsFm/ -EN0KFKROI5812UGUyLFAEVK4dxExY4L+No3XZuaiuc7eLifwjTK/CIAN/JQd6yd3 -qtSJXzX1ulLpHJsG9SCvwR+qk4m5vRMhPk+3srweiksGlj5QKTHFwWea4vDagMng -IbyJBC9/BADQHbarQm8bNNCsfkm+K2AsJK6mXVrbFtZsnVt/xw38XNgG/weY6wwO -tK65MMAHdt7uTOPbFOhgYeqRabk1q/uVPdFg+XhpVT9TJSXqVEKLPrpp/viaWjAG -AmfTo3NT2YV/t+2cklVZM+RXZato5QA0RrhBGM/ZGK7Q6w8veQ+e33CzMhrO6vue -EbecGhOs6EjZwVWYeLr1iLH1kNZ714WuPgubl1jzU19qGjVvMST9tPsYg622zwPW -MWb1fGCLcol1FrYwKbb2TSsFrpK/64hZswJIhg0w+rjdyH0MOZpXtyA318FpjRIT -kN8bQZfeaqi/fyBmTMjqUKJWhB9sZj9wyvep6sOj+KMYUfRmLliI/VLcWUakAr+L -88QWpNuVGp08mQfzVrd1BwlJYXUda4ijGWpIjAzDaeeNagwGSrGgoVrVBMNdJmZd -/b2Ipuh7BshM6s4Dt8Ni4T4lVEl2dRnEEIFgnhoSspPZ8q2GrVpq++FKqq8RHKbM -py2UBRRNHGL4WOSVDHMmnOmoaVU/N9qehUZ8JrzpMOEf+lMKk/BwRdbWeRA86PX5 -AxpQ1tuwySzUVZdKTlg022tqZEMHBzeXigr8JeiE74eFt27qBvMELY0MZ0+XYR3a -W7s8Vz5bEn7B+r2JeyGYF9J9MQCUPPe8ukHFLbQPp6Zfg3SwDYbXgxN1YVK6dEzf -esaqWeyilBuDx1xDYkuOqiWC17i3NCFfo0Y7oltCciwcyYQBDZe1AKbH2OUkTeCG -UBvDsrkPdvbOQFDSJNGhij3CMApgzVv0cPQ/ElnVRVlqFDxldENpS5TRVx/vzkIN -OkVxcTqIKZ45cZZUj1Lt/uEtXEWbQPkAZjtdBMWUuj08tqnXytaTRVzUxnU2lYIr -rRheU2gjduV9jf7thI81SC9Lje2D47U342h5XO47U4Ao0UmnQXbDNNR9eRa/94Hp -ZMeOhbcI2ukkbPq2O8UjgyRBX5u086RQNa2B+kzvtDO3sZSuHYZqVlKdGgPxcQXv -sXhBFx2MG+xt5lDeuHqPtqfW8yoayshJ38YruYhRW2SKg8zDw3x6v3rWCK5ESvWf -fqVevQscwoaWzW6aTpHSjA/VsajWPmQNfm9CjRt6ncv08XXpSH1o10Dhzq4QwGZb -66dxWR09Sezs7GQunB6Qvl6rOH9SUDVurhsV3BAy4iTllbZR5QCsWKKkPhhuKMuH -TahkgpZDvvOH8gwAvOhcCcIpPRw6fkkBuuZh0PtCVLOcnWmflQMgoWpZKn+g3bTX -/TEa5pyve/4n4+RMJiVlc3jPk4DwpecQkaz/58euzaNzFdQ4eE6twx/+MuTL7Y+M -75PoFCDjpXtQXsDwNh0UNBYMhhiSbJ+JNHmgSGE7Mix6VvlQ8OqxkBATuvRD8W6x -QJopmjVf6lVUrf7Gw0ULqUmnkESN6D1LGdP44GT6iuTGtZjWozF7lp+Edf3GqMKT -DvBxCqX2/ceFtoc6+dD0+PGS1XBw4s0Bu2W2AqmRkqwD7l/CYDNMs2iRCIMbrSlg -+6/VVXmf1xJBSaeIZ0Ure8EIAK8TX7qMD52V1K9O/mG7S7P/94RChKJxRqtyumgf -soXKoLKjR+AS0WtDJKSGKhgK7zZDv9Nvrj+ex6rbFIagXn+kJgKVrNYpYdhegTjj -4MoVjbgS4Q8n1PKaFbwnAfuSBeUF91NUGzemKhnm6jk6r4yhRTlKlNs+v3gJHbYo -5670UO/2I6ea1VAVSDpqeoreo6YlT/N+enlj0Jz8jttLQWvhiTF7pd8bmqkgfCuH -5rYWKvcsdcb9zX2+odFDnTdYgdmTLfGzaNVmhmdLsjOFhstlDnynKmxnNTWrEuel -dSWir+9aRmof50opwpsDb/mVxbNxKbQBdgSgsLy4OItqwDmKQ5vjw2rT2v4sMWhM -cz6aAY3aICNhpyY2q48KolII3jS9vWfWb7GoBv4KQMOD7qMbYAJ7E2mHfEFtxSXQ -rAZ+Zn6yUjvdb2RNkJjj1qtCIMXs1bQLINLYTIyVa3mmIzosnfJB8KDIrDBMSclT -LPzb9Luv3I6zo1fRx6Ny/QOSomZFtkPgckUpjWMB8FYrUjUE7JWjhjq9qWAZX7Ts -EvDamDla6S2P3ocTU4nv8nDa/Yf+xwqBBNVRtZHsDg5SQTxsT+bpnTpG+9nOTfX+ -oXvXjHMMYPhkqZqBNdMbH4updAl8OAaGcnw70HBnS1lmsvpHNj4ct7v8ezGgcgoL -+gsFcQ/fmr0/RvR6vCaQTAjQpLl49e46Q99UbGVEviXvD3fDFO3UPlDbiYGAVsgB -2yBLLc4tRbSmf1zTVSdERXDSMYpp6B0GxWvxyjI8UcGvOzSb28bdYXFdKxvMR+Rd -kNyTXGcU7ZfkhMcjWJeHZj1vcopgZXvPFAAXQtnLhGrYUTXZJL5aCH+VkU3Luk6u -xQ81LyrcV4cTXrzisGk1UsS+ARVAttPJGeMp+fyKKb9vnrWEV7vX7QFABTACrZbJ -CpU4f1iGuUcM2QzYsZpINX4z9wztbKQpDcj0XtHs5GfovCDEfB7fzcRhkgapq99B -mmYffR6mcOzAj/i3eO/1aBRvorcNhLTeAKyxO2Ls7f0VvLEfsriLvfhxLFoA+Bfj -doS/Q822tIN5rnXoCUj4+THoe018V+l7nDrLunRm3zIk/0j6KCFOMCcEWGe7YTju -6XJ2qX70obQEaOMGd2zKyfs3No3EFqLB6qL4b2shX432xMSUpZdJ0QcjUP0gMVrG -35NeKx/maSjwnqbMlEv2irQvjktukjS9bJfkOI0M0FjQ6Bhl1MmbMVgqod9k4aFI -IgcYhO4L7B1QUAo6HEPhWToco3WT7cne8YJ4RrfTPjMbVL1nmAfI9IYOLHR6aKGp -QohZ7oSHJnPfHBm5/cv4PRuB8olcZkQcvoSd2yyFgzBtFx87vYogmQ9jDNFqyZx0 -hfswYoGcjM0uX0wwL8i0kOLuQ5tDa44uZk6VEFIYf/jjU9EdLL9ah0h5C4PYy55V -707GvDNj3wY8XqqVu9Q3lbMelIW7Io/hDqg8OtaFEysfSEQizhBLMl94hU00lfe+ -I7gJz3MomlgA47b7g2QtMBnt45AgmPM2ziCbcXyNVrh0Omc93kyAR2BtHkXhDW3h -A3VWfpP+e9B0l2GLltBEFvQ+SzkWioDD5Wbt99g1y1R8pT1iNuYMnM1YLyMCNb2C -iUSHB0jwGFmmz4hsVeQRoxfdoQLY3RyZ7N6X1pdHkMRMtGzYQ80RD2R3JpBNieXH -Ak+Y4cb8LDgQGd1YOxPvuwHCUJg7+7aoNuF75M3J+Uhonj2TMVpkWssYklI6u/ap -ApaFVs+c7ck56H8S/ohGhNYsoBxoer4gJz980M5qyAYEC9xQg3RjjbBdlFBbrKY5 -IrrVQiKd8q60wo9cPj+NCLQY1O89UEjqUDo1xKyU7MniGzC0TtByWaD1byydYFws -vhD76bIYihX0HRtNA7W0OeqzEanv7TOMiwlJgo6UKsj9QsvsY6TfO5nIRSbw8sFF -fDqimCNuv9snpA9oJ8CbpUmScrpi0lTn8qG27BL1kMc5/z4/1AU+wESQ4s21nRuq -k0tC6A0+xfViPt5bh//jfn0y/TwHZHWwPbtjDmOE0z5JiFoF7/w4eNI8IL96p2IR -fjQldRPZ/VjQKZSpJa5NnNCfdPmc3y4sv904RPld3m72sTQ4Yql+XSf7oNCSU+Jj -iZif0xhZQ1JQq3qfM8Y7Vm++1Jl1wpqSUpehyN4NuBzq4pjtfmLOqyQPbw4GgyPc -J60EBy/JIuV6qtE23BQY6zEf6V3ZKlw4ZOuoYnWaHNMq9mXD/aOEYX2M49vpYiof -oT1aFA7v9XuXJy4nz4WW+FEx/JwbI7VT1o52SQeT5ndTOzC7w/IJeUmhkZ7/Nk0p -fXL+LLpPktIRYtfwrCJLTstiIsP45Q9nwJT+rQZ5ToBEP1zKgUgCmhsOOuxXgTdv -XWIqPZq3VTrSTSM4LD0YkJL+oDDwP4lbF/qmMfDsgvoi+p0WFQ7OlsQkvB/UeLmC -CgPBOQmZSQ290SXxEjYCQ7B6PKv6/ItureWob44JgEuUEpIb0NmQYqNTOcl8xKJ1 -xdMsDjYOOOCmlCBuWxVKK5d9S0vYmdPNnrd3W8UANO2uconxTRTxgxDNlGsmlCP4 -qdOJ+EwbazGO53ntEzgVzRT18dsPHlIWt5CNbLI2UUUM0Msycdlvtik0bZOQS0w/ -LkyF2+Dth2PC+h4/zcpfcl27nPtaHqlkG/WMlPC+BOM5yxo17cQn4ZvMt2kRrzGu -igcQCImwly5LjwDNhmO9kOvbq+mAJMTuWL+RS5hZx6IEI6iOAo+kirQJ/WPwEtmc -wCQnjBAwz+HEskof//eHnFpLmPecFcBwJ3sM93NRkyrRnebaItkwzocwh2s2ayQc -Z2Y+/wgu7t8RbvTekopRUNd0JCweA8QqqURzpbwkArcuX+p2Aw4BB6/LrzmYQPiF -kdznNuwuFUArvB43XVrdb9eCPNRZBUjbNqV9uRBqNvgDeNKgX4JHVgyI7SVb052a -KsrdXBoj8HHb1+XF5Nrw3TndbIFnm9UVDCfnqpRq7pbtHp/i2PH38WekImPSnbk9 -9Tt4g5/4dbpsx7Nv3TuxC0+xcKmocD9arveYWh+Y9MNjzMgMxYumGI1+ft3DLkhl -OEDwD4puesOD6yOr6trtUzi08wSmzTFoN8Q9HpkYc1ToyPkPWhN1OaxhYAKLWKvp -KP/E3Q59MMDLcN9e79czudpsjrH4qvv1rOStdp4lz4807xvXmL+2z3GYIezTIl6o -zD1U7b4+7ZHd+u53G9OLSPqekdWq5ccPuasGbbzGX60tLgmd6HBHK4p+dAOUCR8n -nNCEvFpG4Irg4wvw8TwBluBCnEY325rUYVffLWyFBwOK31LFXaqq76iOz4iYf38f -RheRmg1qdGjhfiAtHFirpVBslSufXept2BRHwgvsJssheh2xk5+sL++4cf0MO4Th -rLOgMCzybmLZOnRE/9740c+TAgG8irFsKrUfdWQAEy7S0WzDkT+CjKaQRfx8gi1z -ypB8Oaj6GjyHFgPBC7uL9QCo5jZ4/pLO9ANmuU1pYyfufzvcxSLX8431ndDFRVUW -iM8yeQEn43LoeA28UvAA2q08KAISzzNq0/EBayFSJydt4eiE/aWX1Ij8qigmbXZn -gTIaiL+p46/NbQkNS/EBL/V5xaFCMFM0qu/2TAiJgepoxhGz30GSU+ZSIE3XN41d -CLEAyqQEEA8JCMbwLqA6bACbeGoZVVuHMhg8HyqQSne0NsDrVbP4wVm/CQRt5u0M -4VXuiQK4BI7FttXr8cNAGcFxVTrcjFPCw/SdjVmkY9fZS0hb7ZAc/rqszOhfYw+Z -ANsKaqzFnArE3bzL4l8LRWmy5xOaFxLVuVEOLk5lsi6Hq2MafkSFqGMWAT3AOlYZ -B4Qgg8xxBGM1XUVGgakEK8vLRvtiOn9/mD4ToONYJ464NpnFDKvdvzAZOhHQY+KB -509U6ifwn1AxN648HWhKz2xibG4HSXbAWGDBKw9Uzo0yQ447jQy0Bd5D/ivmYV7J -Yc6qvzSEI8HCr4DYQvSR59HMITlj0RMuFpeAxe3Ngq91paFAOHhZMAsiZ9zigfdZ -6hoMqOujYGmzm7TbtUorQUok7quUFPhPy3A8+O29lakJ63nNqTj2oVMxh9E2i2ue -oMC4QdVuirxWJJDcHofyimlXqtK6TruqeuUew6XNjX0F7o0HGJ6fXxLSh7OfiTH8 -NvvFggzHxo076UVJNxh/fZH7X9gGmyqeeGOIBSoy+30OFW0CjpuN0R26JuSouht/ -nEO+AopDP5SIImFpfJsXGH3qf1Gk8EMWLcQMT81IXcbZeAG/SWF7HX6KCbluLYqS -J+GhkUVVJCiKWG5oJAaEUitmqCjS6y6CoZrctM0tLIjVwGfa9Tn+ohq45xbZ9tMu -pPdubPp+dqMmioqBU2FRzwiRPOCVSo8H7hYVQk0Eg/anBZlI9Qu07i63REK7WICx -q1JXUpbeEsa/tIB8tcmLydqTH0iUFHwsQFeC5rgwnnzcOlPvGTMD+BxZuL1ghsT2 -8rxcrwFy0/N77jAkv8iwqrzUj4AmGMl7kX3DWkn3VhueVtlNr43FiALwu1hWE6Q6 -w/97JArGodemFPyH13MyY5L2rIB7rBR/2CSyXZkBmOf/hxFAS9/OPTadOk5TjcCs -uliQgWEfqy3RC7HtIoNkRVDZ6neO2D/Zu8ZltAy8m5Iv3ZQmsFbzosuFQ4z4wpLn -gGy9L42pnaFrZTACTk3Yr9MU3eTHaLDmMmuyCdEFXmUm8ReJm3sIAAH5dqUgdSMw -Mr+QtdZsexWm37jKSkNVZ7LvFuBaXLUfDt1x3SPOnLxAi356Wx5rJpZ63WoaNgXJ -vzXoQ2mxWUduAzbbh3t4/4n3bsiA9q/RDuMsHuZgwvzgGGSuIHESUl5fybcKp8eb -WoCSk4Fnvw/OhtOpnTMejHD7Z+g72w1u1WzZkKLYKARAje2x5kmm+v+hw86JxvbJ -lONga18B79yGyNiaosA+62vukWI/eDc0/QYAV1jTONon7IpUpZIUTJqkccXYPR0w -jdl9QNBMLoY= +U2FsdGVkX1/dQmXuudOK9uJaqWJeTP647AagRJezWwFRQK4EkCJEBSRNfmQQgjKc +DFfnSa9YpJ4zSe+ecwfWZaqZUryyHC50YoGjDfqcRNmJ7Jw7vKLGqsM/KidA7IPx +6Kt1r14wwKAbZ3VadYWLnKk+CwGEMHq65wLSrP7GMVSI59+02/WpJuEjjfAXFb29 +t78+d5GTXzNDK6VgjAB908YIMfgmSeVfAP4IeqH9PaqoP0ExBYSgV/TiQa3L3nQ+ +Js0EwUsfroEk6/t9CfbJl6ZGLNPPZJlIyAAHmd7B+MKgb6b4YsBlc1R1GgghjZp3 +d8HBAahrGwSwup4f8nJMGZkDHUFvOSfQ0lp147zhpG6DkxtOzluwJZTI9hQarwR1 +o4V//OvnYxBbXybrbh3fyDt3/r1x0f+RMjQ6nUyDIQgwNWhoqsvPoQutCes63OG7 +AwRVYbjKq7pMKPWn7KOttgBJH1Bqka5TPZ9sEecSkB+wz7GvQgUrbpBYCDPcR+TS +a2WBArd+OzSYJHCj451kobTzATKPwLS2Tw0N93YG9zZ0738dydcIV7WQxQC/vtQ/ +OGQ6/ttLptEffWkICNkWpRLUsdB2Ih8HW7i9q35ynYmxDollbi7dRzulJiqbGre8 +yUL/a7qLgSD0cwIR3tZeV67YxCKeMP03fWEhrqkQwDH3tK+fYe/YeSskbqvlW4C6 +450ekOKq/9XrtenW5UZSesulndB3eqai1RdfDEr42MtWrYAV4cG7aPpy1E8JQkbs +xo3urfYeUDl7hpe+i507fLkoio1T24E1LpL1ubnw9YVicqR6kqGvuf6PfoeOHX3I +mlc7L4X5wfOKIxY8TTPjGUiCgAWQd/AfYDyo+X4KqST2EMUgevWbbwdCwxyenRcS ++8QKwCEvkmXMyeH8NrOVlji4RJAi/NUPmdtgSgKLi0o97VClp5B/yAm4WB6mJ2DT +GZs/FN8N2z7Xw7dODphsAcKwbZjyXALRJ989+Xeh+q46UNumNjePtrpzvYvS2hYA +qk917ml6hRyPiSufCbvtDGeOI0rK4T1Rur1cSx7VknePudzfnG9wbKuJ5Q38ZlKX +dOBRCpTmtthdBG1iCJ9o1av+QTud3+/r9/4L+3y9H/HA64iaU14EUs/rAma3Vyp+ +BHfnCOuZRkohEl/BCpVB6MwqK+H6g2Axh7Z8+HgsNokBgWub2/wmdeRb15Mxt77c +iGizz55NkdJ/Qv6QSzxVbKDG1aH/zhSMo0hIZzYJZ1VSIJ/YfFzmHDpRKAc9UnKK +RoPYzJBjyJ3vDanvgvhTtnTpmvxgwUodA4sEexHw2UTP3EtClj8Pku4zJnNB41Ah +Pkf5v7ondEwaBdzKb0z9MWC01CdnnJlNfjfYvwiQJ+R/2ziyhybibvm0Lwb8zTy6 +Lk8aF84G+t2zyLVhikQWfjlQ1rIZyT9WHGYGC2DtuM7fextilmTFx9ZAEtVZPQLk +PY+xVTL6oUpu9xF22NDAdkiDT2OBErzRB8nzjSBp79fsPFnmgBTdsCUYaNRc3VK5 +4dkEkEsWSDH/g9C9gdHrzUer25LPPhHNRS31z/DT1lZaZX3zcmBIXY9Qx1clwmqt +CA1gGd8sOnrM45JNzPuL45ZT1qfhjHpW08y/n4e2g3Kc1A2u2Yxq9YYjpdvXzAhR +OHeR+Upht1NN0paX3T71upRlZoIIhY0/81DPippQg5NjNr9R8M5TQ/93oZ9DFH4+ +36qdcuYkqfUwAc8dT9bMFMG3pPKdvqNs+3IQ/390P/DJtnBYeRkIw8VSiGTV61fr +F8nwihZD/LKWitOSJny1MRHBe+UOgS8q9vk0NiPoPMEKZ3YJefCpOwh0/mnfDb6E +EQjec9XOQeu6iY/s53XccJFF7UQIL/zo5EPcLWkabOQPXSf5w4HeCJi24I/wv/wZ +Hd2rxa6hAAmSMbdwFjdAEd4Pqzd+mjztEaXLmxz6m6IOc5aElLAYd8Cz/5/oArPk +6R2CntX0Kd3HfPLDAuE0E1t9jV/3KgT6BV+x+bqIzIo5TCx8vE4Wy8ryxyRomk00 +FF86CZSzBKwNE2sPrE1fIVZtifn5xc7SCpxaopaN6FK4t+R1Shcio6tpMDG8/t0F +TY/oIDuxxtYIvs6C/eGs2TxbxvjOls0VdZOb00C0oPkIM58TOi815GcdK+EhxuNk +sM5YSh1zHjR868A+dHEinYLfKHPFyKTr2l97rxThl5T2i+jnbxiF7jS3p/XNCFoG +yyqJFUXTws3xPH5EaS4PEhURu7uxrJteie6bGPB0qDb1QxZLyRNaw4zdj6Uf3Olq +FAgnCbbHm/MMUEWWzZ4K7O1A3IZPwy81FfylKjs2rdZW/Nho0fxPmo6oI1VeQOB6 +M8b9anBNtBEW537L+3noJecitGdRreUijS54qrrQOAV/1C2tpIQlsF9V+JbbFV9P +9KDnYcDi7HmPydehAXHITMjhaArmuNfJ3WGBEV9T8Cjan1R9NQtmUHMEuxfCHf2M +RXfb/Rn22iNsdYmBELJJ6hF5PdKp+Wi7R2tY4auWYCpAlqp2qjy6x7miTCXrf4QV +Rz1gHWn71P1FyfTgap1M4ICqdmZKFPUIrLJqV/on9SPT/KPM5q+W9sLiXo+K7K51 +nbNvatChQ2bTe+tSFZ2d7NUTCSDXH1nHK4DtlWQ+FEVCNZrqAxbqhFCSPvAlTuz9 +eK+HkTvvm+JXGBJLdT8VHvffNBLfakkLf6OqTqLu/b5O7gB7A17mYxX0ifqFNvrB +51x4dPmhu1vs63Y8fo1vaGEOx4BrS08HZCFjP0jPWjemaffSCqaO3kDgi2AmdCxt +5M5JEbp9U05aBoD+q5KsgoTR64sHadhZx6zA+hqNMzsRCG4eKTKFchdbgpjENrWw +KzKNuv7CXDtdGLIcZW6upozo4z0WbcsTHQ7G1kH6YFXZnrxHXFHrHUMnNaBGf8MX +hvcnZb2w7m1s+gN6frHnHGwJq+l9dHXVOWwHGbiqbiWqXzZh6SrfRVFfbd6ed4aS +9E1VzfWCG4xdhRlEPAqUBvkDbDsxoaj1vXxwPwwqaKRBbrbCJ8NPuXO9b6TC0yEG +74h/DMtumEIVLXTwFWJa1au4ZPMUXaCFW5nUJnrE1hYjWdi5/ro1Cdq9eIDkWVTQ +0xZBzuXKsnNi6ysQlzTZ4GyOgiF4Tkd49nuGwgqjCPJKauCyVVBQzRxfpWYWM1tB +r6A5JWz9EMRJraXwFkcKwP+olG5/CsO6eF+A/0mwjdQlFlQX7x7meLhdpTT8wV8W +/yWqmzhSR5s9su/Gd/q4hdWBYbbiEQYUWrQiUUSLUHwyuF4bMSTHZOcL7YOjTRjt +SQO9/th8EWrs+X8qV4ElDlEEXWjLcmZ8juPfpCApkiGxmc+31W8IvvrODH/KLuJ6 +29PYVfgugaGCEgIU15xNHLgSWO7IvHHZhXb2C93ykVlabee393TeoNGpRZH7TFRA +3mZPz6OlQoApvkyrmrlSWajCLfXFlWAspy2NQAValZsaQhzRNM54DB9XV/RWoHob +26lrCL5iqTs3POXZdPbZHVzC8scugXCGUdVcVKEG/fnAuUl5HE5AVpxt4m/wPj1l +CMo4G7U7DrQo/+161VlnY6zELsJL/8MVeMIzoEfWe+Uw6uroNFNg04ZCdS6IacYc +rKwzPZJjCIQq0n/NLlJzLgQFrzf1Hy9NyHfRFcguUegAIcOv5rTBlO4P6OBSdtwl +3JgU4enrHHKkans9S2Vkt5CZ5smdjv1DUSG02QTEpGmYhuJGxTnUwymGpmPePDtD +o8xLwLrb3zi0ht34dGWenzBFX9QeEAFEAyA9SMrwPTRw6/xL5zIkgRnC1yhZSWz9 +eQYnQ83yQIQQ/mrOrTOBp+YW3AL54Kj3VgxsXRIyqjRMX9Z9FrnNB3b+eHbRbiEo +5al4U1wEgRfCu5mSqq3wDROE1Kgsv/0/Ju4NIwatfSxV4S7l2NfLhY4gbPcx+qMU +CKNnLldJBgl7HappKb62MMWN1xx32ORwfQ6Dnwe5utZiidfn27lNbGjZLKXGBoWj +C+/jyoDuxh3vvNuW6Zfca2kYhKcf78xG5sMdRBB56a5yDBdvjmM+TLhNRdfL0bch +varecctAk3wKeO56Hstg3xjmjF3ItdoCiQs3JgJPUcoACGsvnWILZKKCJoa7/Z/U +XweFl9703N9qk11Siq9NFiIuzHsauNkPprAQ8uamC1JV3dm7L1fNVOBbCvTkkZ0K +NOwIrx//OAXPyqlLR9SJ30LkgzvmbtftyYvpMIPqMVgq7qjmebe3YdonIzoAJPwq +ggfaub3yZNLucKwZgiOqdW280b6k8SEuDDtGYsj9v5NgwKrpaeQ2z7oZPiTUMeCJ +yNC/MQqhXC37IuKbLywRKipHrcNKpz2v3bLpmPKIztWTVGGy52+HeR8NR98LMucd +qnZsaGJyi/7ShBzPI+VR2KQU6ibIBsnjCF7hqV1wZVX+D3zSCRXsimlEncHLjDWg +9xvWNuhuyYBqSZYyFVz1N9MjY0QGH5SQWC9T359LTyyHR+E02ClCBITfBduHEqcV +yMpNUE0dXDLU3lxx5BiiswFfEnFtqzKaXNbrFWCy/Dn3QiXw/2nbpXG7ZvyVR3CW +FKEF/xtIn25zhgTL0o8eTA8sv5E25WHhP1+UKtiP3j3rWy+6alRpa+OAcDWWzEd9 +9spR9Hy7iYuWNHget5nsxodxttKduPaA0MH/aM2N1b/dU1/gC3bTX7ConfDWNTlX +Oc2ed4frD8vrkmeQ+12nkWPR5wOm9ZRoK6tJEbVBBcdZhVhzZxXC7RX+oU8Vpv3o +KlXGgMBL1VzXNJpTSzT6QK1gDNx3wt1Jh8wPsBvfkyV7/pTIRys2JVFkRN8AdG2D +lTD++e05b8Y5dRNUdC5fZFqqi1CnYXq1rRME6Tscqce/dBVLl0TGhVcdAhTlmvgO +l92H5adIPVav6hsGMtlkEj9zhp9O+OcZj2Va51ypvJoDMr8Ks1QEr/gTieRNcgtc +yDB4g+i1d0jb4JPdLuyEkj49fQgJyOgFbHS0dInMqH8b8zq2kFbt6Woj7IEktkI1 +T7JhHovq+aL22kelfiqDaLmGz8Ah5uT/TUL9eGJxfMf8xCxLt1mERbuMNjjUQpjm +jNRxzPhZwT5zrFllxw0+hN7rex6f4r1xhdWvxMVZs1pbbQMU48UFN8i7ANYx8qC/ +oHbJ//N528Q8VhF56Kr8W51bg5rhPIUcGWSHmUKhWeuXXrBSPC7+7/sXRyxfxXml +kqotKcGVd+Vmbhd1jt3oVzu1FqL0udJeEkJuWdRlPSJgb+f3nVScLDw4wVLLeM5X +2159lwvblqqtB7BiNiEDEeI2Jy4Fs8YxY2YNp2/ldpJc7jKB6VQ+H9SRn5wpQifF +TOoLO8C/ubwoNYSEO/UePuYVLDFX4KsPnZ2hY89N31RBBdhrOgi9oLNxQfOzWzpP +RMpjX9a6aXRGfqGGQtvDi2sWgfLvN8iQf/slB+z2mckdbamtbxOry1riBOVjdeaO +ksXhh6fq3wa6VFhRYkyM67zo/gdwAFDAwmcHX/UEeFqlhDqT6z2UTDCr3YBTBeca +QddvuY0tGI1ql+TQ5GCyBAXUkUR2fZEQ3aVYbOHs+YXyMH6iLfgLLo0Kuv0cgAOS +huJ62BtfhkXzBD5KlYMNBS6MBkKFwcwOQ2corgG+ViRS2IreIQlKMIAt4sn9zs1g +RsJVJ13Z4KOP5b29C/NhMQRNzSOFHFewc2uNvuNuI74IQpZFAVRwDm7kxIdOzDNu +8Mb1jzbLLsKGfqRpCxEZPV+AuIRnMdog2+sCnkMDvbdIgulHT/S4WHx290dO2HrV +UOf41Tc5QcWpDwnG2cKLMkweIdh6HeoW7y69FRDqFho4dLTHTPvc/a37t6SVFF3K +0O1vyDLK5F39GTdW6ROdWjd4IdjQ4R2qtLoxUM+gwyz9J782/AlXcSuc+8p0XnLA +nxHCwf6AmNgoDmI7a+WQiaqWlYwtVfscwnjREqvAXCtqqTkEg8wWOhVFIjWqcPwp +1fmzo+XsSDY+uoAXMtdSIyAFjhsmP5XZXASuvDR9htX/1iKI2imJGj+KilYMmbDu +wuqfLtLTytV7WOjI5W6qry5xYlYmcdM586WXEro4p4A/6droqsw58czGxPtpgS1p +k3cWM8qNQk4DflOR3deaq6nu3wUujnt1QA3jmTQMGSnVbVV/W7kWcLeglsYfr3Lq +9lILrj7TTaeGMsJlCN2TDN74eHCnwdjsJDkl8A+Te0TZm+HctwAIXTurvM2O9CY/ +bZc8NfQyVyVBqkzFViXrsMQ7/s5fMliIJRx8VlXAwl0Td5GgwQKP0hLILZG0wpDA +rS/u47IY0LYwXMXY254dQGXUdWtPUnkjb2EpdMbHsfX64NyBff3N7kv4wJENpktz +4XFtT8F7CCldM1DS8RcejeM4KuJx3FzQyabocKdTbJ9m6g9AUclXDz49HoeCw5+9 +nQlkzf0nlvYJNaMzsxCVpG9nZVfejFLqNilVH9TEA5wSHyRqY4WXBpI7ypUlDrQS +plFGaGHP2cX+d+36ZfQ2ZU0nizTFL0aLB5uDhKDa2BJj4Pp8aPKfhb4Ea8+dnx6S +QnSduizbVtfEQXoyLu0ICQcE/p3u+9FazKF7QEgQzkeI3MahoyZIfDK9A/5gDv1e +WEvttwvk+ax7EnWRx/W0GUiCHE/L7qV98+zErMeklcbSWwhI5CuqUfwq/tAyRcv9 +faNKX1+o+IuF7tCApQmiZ5xTqvnatM0VAi/HuIoVqtF3V/NAEKQ79rrQcXW30l48 +tGTWNo8+BGXhFYxs6O6B/RBGaMEnWXGX4Eel9k3+IngoDK0BP+fDn8Zwt2bYo9qF +Q/aFFPKItfWSXQ8t/ITtMaNRkEPgwP2O98NTh14bToOaIkwQqKIA1484ajya5dVG +HFJ1LbauzJiEKXv9xIPXxGFqcnkxPdmnnlfpitvvJyptbOvfUIXid8SpOF8za28g +HrXhw9At7t7uysI6t7KC03Q1pMuT7vH4VHyqQHPbTLnHaWhSPG6lv1XgOpax2Tsj +y8zoj80GlOEyJmCN3+aEFBXzmsadz8u3/2doqO0WdxTZS4+HIk4qOUaUk7LDxBOq +qEM8KioAAE61lDc8wTtjB4r4iSCkwOu4/zoaYoTbBRPptO1fbVDUUI9JIo8v7F4B ++i4fAZYBd4De6TUnaSVptHanh8IRI+QZh2mKFtQVI4dNg0kUFmCXR/dQD6UflZ7s +uzOzjKOLwM9AmsuFJh2k6Q7oe1dy4wsggfjlOJz+JwbItiKCx9c0Tr3hGf4M8Xay +EXVPWdGvRZDFOM/aVmYDrMErJNPxUtnVQBAzNUgnMqxdEFHhg9SkUHMai6hBBACH +NU1YO7tj4FJRLywfu+YmNJAV3rUecmytfkXbQLzXqIEE01bQliQ3e5n58CyBoPMm +bL4p0BRhEiA6hw2jex4c4u30sl8CaGnj6VRVpyTcMyNO8Ge72C58RUFSbm85vHpH +QBFlPs2eGbyCw3qODZgkXUte3CGWu1EBK9ri3h6PFgfsOz7euKrw0+pc5K4jfkgy +IGkOODvNT2rx39YPR8T2IEw3tyWKxJkuxydo9QOV3ElUPwkvhoh6My10LKQNkXNM +q+ExXJJ+iHLVHOk9e9JNI5fL1rU8I7jCL4V9RQGc6wuAwqp3BzUk/W8Pa+COTzgN +qD5am21mQa0qpb9XfNqehs8aINRhi2sTpm+g5xkZfDJxQGyI8qaMWCwLoMPe0nZU +dOOimc2QRZ215O5J4WIaAjrOmrBQqzcHvrISmJ4nL6Gfph+C0cWk7mx2QEWD3zYr +pvU92KSK5rt+bS5g+MAdrI7NNI5M7dPooFd1xSZ0ZI/UEuerH7bZjM87Oho3GKtd +fySGNv6YnGDx9/EKYRZtpxX0FhHkXOdPtDenO7ROdbqomAaQ0zVZIPXfwPYgOKrQ +vIGdscOkcChjdCQSigPYieiR9Z9Nawg5auUI20seu2/J5yRNgKsDANNV26H2lHNt +X8O9CL/trV4BELP7Qh6/2bPJ5V5cyzk6sj3quzxOvvenclfXy3KOqIiDmeR7qbWv +1uK7wgtgl0WUHwN01PSPCaUEb6jrYPJUuusm6ZDheXelfit6KtdXWUTsQxKDCIXL +LPpFtYPCVH08ds5+YvZ6bOKPbpSVH63ixVFlMpQl494OzjcvbGnOx5IkBXKKD2pG +ONnSnr3FeBkrI5jd3uqFVZ5gQ0I7VRfhX8y/9TmAjy1wunw8VdSwWZ/ds8ZNNUfM +TfEwr6mwU6vGquduyGDW/ExJKIXWBhN2g7kmxbp9m72FxTs9NEP+cWETkxL3L74P +bQMRKo7hLpMRgibuHJYu7/6ixoylIp9LOZcMqVLdnRAzEC94HGOSogmWVfxScmUJ +z/9iot0ikASs1V9HXf3GMtuoQ/LEFEHpr/a6BkRyt28tuC8wqtAPlotln5K9rmLL +wTpPK2cfFh/5LKWFBrw+2TUGAejpw5BRCWy4E6WguFQcKT9MD4Nbk/SKLrgUKOJt +RqxTqDF20bJOePOlVuvqjJois7OZrvyvPf7fY8enCA+ETYnH8hDAfvbz7TNo8AgB +GR5FtwV7mZPGd2391KkOdr3s/WX6qHVG91bgHqsPe45gSLgpW+u4d5Wc0zVRU+a1 +HNUl/djNi4XmGAptqsar0lYnEsHgjCJx5iz3+wuqnVuVJVzpgPeMsmFW7Rr/5arO +tSBLzcGMPBHzfA8/l0+MyG4ddZj+lciQ44vDnEGiUU3xyEoDi5e/IpmjHm+e2eSo +DI1WTPmGZ/PFEFp9DTvDt+SXmI3YjJjnGHtSOOvNMgD3a0hR8OwmtSAAerSkYq0b +NRCAoTMPQP4LezSuwsNn092nTVgGr/MFNfKwiJcIQU/ZFa3kvIa9T4aUICw6Rd9+ +p6pUz1rP+S8xx1Ipb/fHcIp4+XYSEIto17V2azovu+0tzlc6MAXetJ2Vwq0O+pZp +S9fj8mO085WuxVR3TEGQS2dKseSFNCn5FsQRiA5iAfxH6zxbTK4oLazFpT9ovO5I +3sE2KAIkZzfg5Yt9erWmI5dN4nqXj/jQuJ0HcjdNZOY+NIxeGnBC3TxbgspA87Sy +nUqIUshyNd7qTLJ6oLLNFYm5LdvYwXgLpKYq/fCIGSu1zUlLqq6YsVTgQoZ1PF+k +PfPBOTU0AMCbuUD2LUOq6Yp1H1USBkUPeKuBnyYx3flF6TNlz2fK1Bl4cbUt4WZt +9kd1i6Itvp0mBQni1Tx29KQvtdzy6ffX9aqkEjh7N6A82OwSWI1naJi++rMiW4Dq +BOeeJV1i0f1JEZndVSxHpr7GVuEWK5XDAJAeDfwp1xFPf1Ct+/rdJISiToQR2agM +SDkluezwjUb3UgELiGiXxhTNvDaMQ9sdix2hRXg0PJOrtC86HuX4PNnClIzagJEv +Vzb0pLO6jU0zkI2fcVT7ooldy03on1+3S1Enm3OHV2QApQJgcKtk4+v3qQBu80jq +kNPuUDJUrER1nxT3Ehf3cOZ3Z5p8HqTtKKC4Br8jV4Qs/kbhXKvedtjadSU7U4je +fsp7LimVozF6YA1bpitff0ejWCITA9c17A9Yv3IYQU49XLNxooJ7IlFTPmmjGdkU +V2ubbmwkAoHyy19FySqsuDUwCQ6S4Lb3Lso0kxprNP0w8QP6PVbUT3qFDdvn2hg7 +GSQBafoIQDhYG2/tOKWpayLSro/iFHSlHfoKYei2UBi5ym4qFQ0Bgc/A3i9eYXsY +3nAbi8xla5OEhoLOmxTN6DAqGAWVlKDPpYrWR6DTKjG2yONG3OY00xUhy8jAmRpO +rgxyqygHPFcBpUjTwqQE9LvIj8af2BIlI8iRcqo3vqwPRbuwTwNkKFVgyWhXoTEV +JUGcdDXHkYC1NUHBOs9lYLR20MuHaa8ue9iO7b7eI4sRxwJT8byUG8ZfoUTP6har +QMCddHAVbNKcQMbKDOb2C4UpX+cMGxuPHRCupGxX4yL+lWyuEYvl3XwE5Xoo9yjL +ZrVosHwHNRZ28/xLK1ZwFo1crsDO28s6E/hiw2wORGALvrMEpE9872RFuhUmSKG+ +/pTnHEEmGKY+8Y/ZqSf8KfNE5lbvH2wXon32NdX9M8lheXfYAOYXgi2Z4x00Ep9D +vRzqxJOZrIqwmwZ2ILot94oqQhki5Y2+THfUCBc7IG0IL/pxm1GaZhvJxjC5n/xn +P9eS2W1CEIJUeQtSJCjumTNFIWwvJST1AVWt5GVjqFlVT1qqciQM63Rp2OU+7DMz +/CfV9egGgMW5q1dLcLVkyhbDBrg0lqzTHs7wOot1P2DPwZK9wi2LW13q8ZGWsISB +16LjJDeQlRnUkT5tVrse8UnKBsujm9Dy+NOnkP8rK97FD4mHgGDu2+vDz2g6kiYg +oMEuUvxNCnlUM2oa/mclef7ULysMfznu8/ANkWikuEi1BDIhj5ww/8R2N0nwb8Bs +rpRBK/ZojohyGfc1K5iLN81ACHEIy7BcGpYyutkSFDVlUhLKz1+bgB8ZV7VJtFYT +diJu+5yB4PcHNQsF+9RJOQuq5qYEfVVP4JF5k6degKNPUvHqJSgDGL8WC/eiCCkq +MbR9ueZwkHW0LCZ2WGd8Xb920PAAJic8tpvDFMyogZ0OmdwZhPlI8kUfV/STqmmx +N22irC+BB4E0oShiWoUK4sqE8vloqEOLcjB4ijaCNhkBhEtA72jLCMngidMChcqq ++92tHWKKgNccDBF2yaydlreEwhB/bx7yzxA+e/kCr6tPxb3JeuOLvbwvG34D5x/5 +MjRs51RmVO/JE/vsob67PosrylzJW/NR94rDP/knxEzb183sRWsAEUffBShWFd9s +ee7X+4bHBraNz62P1CinP9I89j5+QFXHWAtf6I4cP5RLyhYnI6yFlvU8i8AM1yap +visnzlywV4zf0K9SpxqpX0a48kWUDLwdK+k/or15jziQDQ9lXTDhjuGvK8eDc39m +ZZw56RcKeu9EeD9DnAzisKEucmKMrkbMLUi+huQMuPRKlE4vIUR+/L57BR3D2fN8 +BTp2tGIidDzvYCndK58KfitmjFHaU9rI6m1UqfkZBQ2wt9JIbFBQ9LWsSKTaEs0h +usPT48tVzcxpz0u5ED9opj5XRa76YkvE8pZjHGQvYytx68M6VmjZXPJgVVByBWEc +xoXBSFBYr/OrreZludUEPbSbjWVtKpwm0V/Hv/7xO/TrWpxZR3Baz55EteTrP+ia +LPxKf2DC/p92jor0ZC3qKvXf549W4rtwU8jfX2WnN0WzH7ETjqzitjl+RFH01Ocy +Ji0xN7W+3OSKebR9xkXCnyHvnwRBmvJtaAh/8uW9KNX9FiUkABrmFzt7T6luri6B +cUnyD3hjwTQy00dEZd8XDqPE/AP2QkNtwl+FE4sAhGosrk4OEqvnv7adhvftHAY1 +2FoRlF3UGLe8WLEbvx1Gd72o9wUXPjhNf//Xd6tgB5nyzzjLOkylJdxyJip7Qu3b +8dkwO5aWtPMk/fJBd05+KAK1RjkWWvGrfm3TU17/e8OqS4NCwa44AmiR9hTOnit5 +0jTAvSidad32EYzrwlsG7bHuOgNT0+Y2OyJI7CMx5j9zdp17h3cBCzJ2eXaEWD7F +28ahJCryBSPODec2Gq1vkC/OQPvUIltfdz4P4RpksUbnZPYRq+yIa1Fnx0Gd9qea +9upeTFiQmqX7FgybQPdeGQMgLO+AaDNtgDuMmwG2tRqtxoVBA8X9xK8zvx0Nczbf +fB9CmqiD/vx8s9fI2HOVULzbpOFgZdGK4vY1G2IEvE/9hS08uoyH6uuUWDC/5fuM +lrMVUju2iWgtz74XiPRmU7h2CPsKJtPD16qKlscBEvd6Jm1F/cAbkO1MR3kZJLVO +Z3bFSbchpdedqAM4pYsLSKHVG30Hi9U9pDz3P5L0Z+wlecj8alXT9sFxwGJX9oNG +TbEzxy+9ftrTVb1C5IWFYRoi3QBrF9idebLQA69W6u/xtjLfDcP/GhElKagdyr6H +wvCzFQMyqDMa2soy6u7z5hjxohSYIlMDvx9SYm7WExUMzCu+gl+ZbgwQDi1ezKVf +dZ2FYTEs8L0CNIuptxVFV3IRPK8DD45thxodBsBv6+QYBKisYHzo+Cd8Yb/t0saE +ikDoLIqNRgDBjdT3M3lvFYaPEzUtKyRTC64FjY4xuSIlGFUJUA9JMGTGZWjV7oWQ +7W86VEtoPVa4WJ0dYN0pxbpseBqybMTMuHfAayv2B3R4MXMoiTmKq6uxdMKMdIvU +JUXlHmFpttAJRRG3/KiRzl/KCeaMQBxeNk2iPdCSM2ZQJiLUgYmvcqblqrOYtdmW +sqkYLBrJKf4wXxqEDVYvW6/1cfOxLuKSUU9zwXmMpyY4NZmNPicNt7VmAN1n21he +S30GZoo69B8sK0pQJVL4G16yn/H+I/eqqkMkmqITpaUmMY90EaxhN7172EheTbkt +OJdUVqEsHnd1qMPvMral4ybsnBiVJCJX9D17AHOKuAu2LYBy6WgH+akZEWY7KBy+ +yhwtlqBrHAfRw83LXCKavpUZ+U+KfXcmdiewdzLDlYryRKamCMVbuoFNLLX/EnDU +eEz3fDocdTT30ckju+/5LS2O4Puqqf27aE2+aWqfpfdueLCfS5TiGYlyYzMsaZim +X14b45BZd9JnPlVbs1DWrQvw2Z0L9VjGPRlbKoyb+m+uso5SUf12YVV4+foGKn+k +m2l1PkVPCpvFG+CAWLq1S5rV6SgyWa7li4oLlJrAYPB2266ZtNz+zK4pvlIcMNGo +YqWsDe91NNeOik90V4W3b17/SbA3kEBFSLgyTmyW/IIoXtyDSsXKWRFt/3DwlMwb +8yKkGLelowUoCi18q9TabGq6AF4M7+daBWkJ270ikxmwTLLEESm7p7T5ngSa9lXG +pqXqZribQOL5vcY4gVyPnk2m5cI1/7PlqrT2Kn4R9g1M2A+ONrAYmx5ztz7csckb +5nZvcDJF0XFgVkOAi4c64delaeKm4Ks04rjU4ZfOzNIRoa7wMXeO0tRMni6Zr7VM +zVjWDfkUnV76MAnVNcQg00nvSsDsiTE06OIk5ksHc8GRV9Pz9Rngw/YhdbVz/bp9 +1PfDhknB56HELIRmiwWuJf1DVGO5lMg3cx3LISEE1Jz34b8dV1jEaOGYQgSYqv+Z +QuxxVftBRjwbF+dMc7uEVPWyBiWE+xoyioOXeFsAkFNrnGADvyki1qopQPMOll5I +fqbjR6UwDLntTWTvU162VdtrI1dEZPsBpySFuO7o9bmPgF+sQGYQpvrzqdhr8APM +CFZVyUqsmp6PyPRmLxB3nCVk7vIv19i+Bjt3iwLwH/5dzc9AaLnleIVwWbKvz1ch +FHca5OqqSU6hux9nPtl1ES/4Ysex4BKZ868pgFdzHRnssTKvkWEpbH+6FlOYS7i4 +CHEwyDJ5yUpwMmCEWI4V4PvLkCZZZfWau1lTgF4gKlUVELhKjUJV//wunR9fa6B7 +2Ujpcpx4WzoV6IacU/A/POc5K3LAFL8TNq2NMmLp7NNZWNt1ldREMLuJK4bTF5+p +AJu+gnNUWMGfSQNr1cURMvQFBZqcmwgkqz3/x1nwh1FNiiUCMQgsPxXmFFY/j9rn +/KpvLw5CgxBohPT/H7ESvTJqUg0ojbKBT1wJhwseSH0XfC27SScnNe5wLglxiIQC +Nqz/erFNTQSn5i0/1MqUViANl/0Tz/BVrDYu0rkr+BmLkrBhM7ehoJ2rk9nI+VKi +7FEigsjcUmY+AoD3Bnmfwc1Y4ts6W7vI9Acz8Gu8uREUNUrkzhqT23zuZFK6ny8e +GR/X+zsonScEgj39kXavhTfFZIFDqhsR4NlBEQDDT/NizQuLDb5KGVqWLeqbW/Nh +TNrdv82e2SJN0W2Kt9KX9u0rr0UIFrpJ4zSM2ZDRNpKv8S5t7gTsU5z4QVad0Ef7 +G0FP2mp5Eu193XyAlLSuapscqBGuScMjqdUMyeePlAxkC/P3APsddAGNKA0TxU7H +/pm0+XvQvl1LPqyovkSP5k4e0K1wblKsRIye6ev+Riey8ZqQh+x8BwuP4E8e98yn +4y3Nh09GQ28pj/bTGgktHOqoB6TWNuAtICIG26yLL9GpJpRKK4uNo83JhIpsKYCk +6KRA61MfsJP/72kW0acvNdFxyt6GLoUllPmQN7GKsomGG/wZGrUoKZKnUNCU9PlT +b3oKPjwntp3Ula8bm/qD4+FKlDRtfcrbHERJo5uBTvMmtocpgoYDS6ICVPr26cWE +CjiVH7hFGXG3tX9tPbbYArg+Rn+YrA8DH4NQdBFvs+0T7tWD5XNNcW8WyBbxxs4h +ifDDz1OeT7qy9+u94/faPjlDt8kiRx9dxWpDJ6l+Y/+QzA/l4PSTfOpdrkwgwg4l +0fFxI9yeoP1q6gLlNdixcvUwK5mY6saqSkeiPl1FDV/j3YRVq6q8MWYuMUzeLIdL +VOTuaOYHAhm6e9J93ZC7Nx+kQA6y6CRvM0DUFUI02kQ9rNAozSru9mRiGmkAOE2E +CXcKkllAWVHSxRt5EF8Qp84IpHGXhctImwwK2KAzIrURw/HuoqHDhtjgTX56Cexq +d+QceF2f83a2THJyG+FSy8YdMRXmJcBcJk22mHKbN2GoxnswSlvHXAlDyvylrgBe +KU+qsOzf9CV403K1X72DJNqhHvUbI/oV5SHKKuzolk6jlw93vkbhG9ELyr/Akc2I +jpd8rTpcOo5ZsAAW1Zjey2LSljqT37vtHxevlXpwuMiKmcTeG8GixP11FYaO//cn +moJuZkNNTkBtjTXFei7VTrokqtzY5y+PxEhXMtYX5DNxfYkVGZUzobTNRYZm4jhk +9Ow1aI204A5xaEIgBmQEjae09IKXTJFlbM+iDeuQwqLcif4RyFe37uS5kl7ROLrs +24YcJC7wPpgu+RZ7clTGBZwDkLi5Ch7l6czpAquYtZVdnffpeXJl9l3P4fzKs3T1 +5vHStL86MbL9Uzd6+FuSMR8JS+b6aF9qzu1K5jSmod7uvwYPNRzJUPgZ53GbDNv7 +k5Clb8orien4/0qW57A9oCPz0dunySgIK1SQ2h//Q020zWQ86S3m0/xcpK5hCOYf +uuYtrPZ8TNNAlzUcXe+8kI/PtxB0T00TCNtksXBDKkqKp9stNJ9SyCGXrOz9rsDJ +9xfwVfLoTbjKvQxGNcg08QBijtUmSAaIPdYLm427ptflUo2Qda7g0H2mG0T8TYrS +8yb53ajDUv6oFb2X7hs8dmvu24l8XOqejK3ZvVobn8wppXzRGm3t5S2FYLKXu8Bm +R6FKg7XXxME1KfrMsKfeGglv/XrK8Hwa2VQt4BqsEnhI8cOsw7pYPhx2PUbJ3Aqk +JlxWGeYFWlLc8Nim51O7h3ymLIBTlMHPrQYEL0dBWJ+Rgg4Y9NmgkZwYGhto7fk2 +0MUeinUUYTCelCxIxmESdvYqA4IybFnm0Nr0VLEtnBT37+rty+haGKCxDzPFyNdK +xV4Py/VO0LR0e4qp/yn5naLtsMqL96wTtC/CUYlu6fp9MczwXFWVy/9c2cmIJYwm +/8YMzKwJIBypZMy8tChnScZLTeotoRrg836pghgXmn3SKvPBbnoaRQxzkyGC/7Vw +pY0NpAw3+Qw/0+dxTdTWS+fIKfTdpyQfP1VU6qfshkvJRn/5fanveFZvhi73Lkw7 +Khh2XMzL4hHZNKQ3Msy8daJTiyRVB1BjwCOmSYEuWfsdj9F8Tg1KYWvcEUV52CV5 +B+BsP91tiM/0Slkl3CuXKioOL5W9qXbLSF3ERX180ayla/tG69KwmLnc5dHF/4yU +3tlAg3tun50qjd5eFgmVChloTL0P5XaV7UVzyJbxe3yvZK9sujROy5X2bduunrQ/ +w1a6v6Rc8kTZnXzOLJKcaIWPFLoXVbgA7GCZ++Wxozw//LvqMTuijVcL82jH1Slg +u/TeQDOnr84qS5rhwyzczfsYFNUqYLfcPkzrqykaWxPC6viUygqPgKIS9BL2uX3K +nXkK+dHTZSr/8zn+tirg04YJjgAMpNsJr0EINQ/nGF+xtVmmhmEMvz+AOaTETqyp +d2QehC6Wzc2h2uvKGAA7ZEArTXM7JahbH7rEmvf+O/EmJAjbpsAW7kg59xbobB6H +nUTo76mde2Av0gyujejHehqAf/+e+/xFej5suHO41Z4baUZ3uTk9d0f+AqNJV2FK +XJD1HtOdecyzulN5z5bw1BSkgzhwHbZZhW52nWX58dEDE72iV0qpau5kQDw0MJcV +d/xQ1Uarm/JVv0WYfF5ZE+wXw4Yzp5E13aYVdMzaQapi6IAzdmp7kArcIB+fPvW6 +Z8uDs/zo6aqJmsyoLWDc04Q26b9FrLGJvIaIv878WhIrwjIld82JlUoGix73gz0+ +xBMynpdDej6cPEzTh8FHIXDaU7dwddZhk+Ulxl48YOukUHN/0sEYfXCwxxvH9AAx +w6ymgw/6tByOjItiry1HZLkCmhIunNY/d7IE9wgFteaMbEdwxVZchqsPX1yHw+Uh +YxjBHL15F3pocXUTC4p+xL1gygoPvU8gLI89eWgIkR38qXnz5NQQPNv5+2/Bmt16 +JZlNI2YItNrS/NAcIu/ay+i6O9iltc3T8u0LVgqg/naMoT3QjLo2PFQZp9jpZ+AB +aRSMV4wnipz+Xum5uRP1yy2MiCcezhIsoPn9Ig3A2j2FBuJKqTVeSngR9GLTsZeL +EWaWSNvOHxHkg1jkKiNpyI4A2e+lWj63cyJQFwta9CEVNC9HJ+52Y8UrKIpD5U+C +pWHYSDRNnVEhIXLdgiFs0QHUNuannULPmYNlw3GnOAK6XQcJ8lrVJ34KOKmKWanf +CRh2q/g/9qLBlTazvFbJ0btMjfEMHr6OF07bCSGWaqIrY+4+Swf2JvXFvJMSRdaP +NXbLLYKdjqWwvp3eX4xx9qRZ3u6kW3/WbEmdnjEEStErx0I0Oki/a16rE+2jQfLG +E/SqcwP4nJcvS+umH3CHncSH9AdKTybc2FmU7232b1OkYTHg9c1VUY3Fv5DA3YOs +xwARpaQxVhUxkcg5dXiC0V8A2g80jajKQZSr61kTUzKtZVOR38fDwC1jkLbU4HpM +lKHEikoiYjn/CYAdHonsWmeQEyiv/jrN236PGDvhQRzi78YykrvwKlM3Mg52vW9o +HA0qBHiMiImxldw3M5hXpddDRn2Vh+FbWxw8wwLLA6XsZifjcCWhEpROLJI1odQI +uEh6oZVbEhFcyZiLled+2kt+Y6QchwwoQQf3e9TE2nH2MD4qwjFcZivnJ487ejjP +uX1cQsSv7+YxRejFtkVz5zELL00ylveAzbgXEC1+84j9rbskjqfvceBFwy+4iG1W +XYxBznYdoLZwixneZ/JWFaePJkqd1RSKjAxskR/2DXiUmjI5eH820u1oMPWzDXsx +kpN8AmmccoftaLw/t1yHViwM5jgMG8i9c0Hl2qKSkfJHvy12FkiHyP6hqwYynib0 +zf6Jp3pfQq/yaF6YzjGX1vm+RnS9Wtyg6aw9G4ZrtefKlg5t43/vMUlXbTUxTqlt +2QHeHFWtYD5LYqVY3h0uaMQnV+Lk1TyiPeh60fTA8tfNbOwjzWbZ4Q0y2CnnDGWN +uFW3Vy/zhTh6j2AkmAurXaAYQKXPsQG98jgagVEKPrlXUC37lKtQ3Q== From 8cd9733cc4cb63332d845938423c7d71e85d56d3 Mon Sep 17 00:00:00 2001 From: Oscar Fernando Flores Garcia Date: Tue, 10 Jan 2023 15:59:41 -0600 Subject: [PATCH 011/495] Added apropiate error message in zos_lineinfile when src is not found --- plugins/modules/zos_lineinfile.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/plugins/modules/zos_lineinfile.py b/plugins/modules/zos_lineinfile.py index 89e080a07..42733ed96 100644 --- a/plugins/modules/zos_lineinfile.py +++ b/plugins/modules/zos_lineinfile.py @@ -421,6 +421,11 @@ def main(): # analysis the file type ds_utils = data_set.DataSetUtils(src) + + # Check if dest/src exists + if not ds_utils.exists(): + module.fail_json(msg=f"src {src} does not exist.") + file_type = ds_utils.ds_type() if file_type == 'USS': file_type = 1 From e755609a3f03e09183fc7a216a5db434f33b419f Mon Sep 17 00:00:00 2001 From: Oscar Fernando Flores Garcia Date: Tue, 10 Jan 2023 16:07:33 -0600 Subject: [PATCH 012/495] Better error message --- plugins/modules/zos_lineinfile.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/plugins/modules/zos_lineinfile.py b/plugins/modules/zos_lineinfile.py index 42733ed96..566b13608 100644 --- a/plugins/modules/zos_lineinfile.py +++ b/plugins/modules/zos_lineinfile.py @@ -424,7 +424,7 @@ def main(): # Check if dest/src exists if not ds_utils.exists(): - module.fail_json(msg=f"src {src} does not exist.") + module.fail_json(msg=f"{src} does not exist") file_type = ds_utils.ds_type() if file_type == 'USS': From 483ee63050efad722247f2f031f003d936bcbc0a Mon Sep 17 00:00:00 2001 From: Oscar Fernando Flores Garcia Date: Tue, 10 Jan 2023 16:40:36 -0600 Subject: [PATCH 013/495] Added fragment for changelog --- changelogs/fragments/584-zos_lineinfile-error-message.yml | 2 ++ 1 file changed, 2 insertions(+) create mode 100644 changelogs/fragments/584-zos_lineinfile-error-message.yml diff --git a/changelogs/fragments/584-zos_lineinfile-error-message.yml b/changelogs/fragments/584-zos_lineinfile-error-message.yml new file mode 100644 index 000000000..f32bdfc3c --- /dev/null +++ b/changelogs/fragments/584-zos_lineinfile-error-message.yml @@ -0,0 +1,2 @@ +bugfixes: +- Fixed wrong error message when USS is not found, aligning with a similar error message from zos_blockinfile "{src} does not exist". \ No newline at end of file From 7a5bb929646bb130a70970edec82f0e882870861 Mon Sep 17 00:00:00 2001 From: Demetri Date: Tue, 10 Jan 2023 15:05:38 -0800 Subject: [PATCH 014/495] Update git issue slection field (#593) * Update git issue slection field * Added some additional future versions --- .github/ISSUE_TEMPLATE/bug_issue.yml | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/.github/ISSUE_TEMPLATE/bug_issue.yml b/.github/ISSUE_TEMPLATE/bug_issue.yml index 5ad715b99..85743b84b 100644 --- a/.github/ISSUE_TEMPLATE/bug_issue.yml +++ b/.github/ISSUE_TEMPLATE/bug_issue.yml @@ -84,6 +84,21 @@ body: - v1.3.6 - v1.4.0-beta.1 - v1.4.0-beta.2 + - v1.4.0 + - v1.5.0-beta.1 + - v1.5.0 + - v1.6.0-beta.1 + - v1.6.0 + - v1.7.0-beta.1 + - v1.7.0 + - v1.8.0-beta.1 + - v1.8.0 + - v1.9.0-beta.1 + - v1.9.0 + - v1.10.0-beta.1 + - v1.10.0 + - v1.11.0-beta.1 + - v1.11.0 validations: required: true - type: dropdown @@ -94,8 +109,12 @@ body: multiple: true options: - v1.0.3 + - v1.1.0 - v1.1.1 - v1.2.0 + - v1.2.1 + - v1.2.1.1 + - v1.2.2 validations: required: true - type: input From e0cbf2a9ffa3bfc6b7e5f1f784bd85f34038da7c Mon Sep 17 00:00:00 2001 From: Oscar Fernando Flores Garcia Date: Wed, 11 Jan 2023 11:56:35 -0600 Subject: [PATCH 015/495] Added rule to ignore python 2.7 compile not supporting f strings --- tests/sanity/ignore-2.11.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/sanity/ignore-2.11.txt b/tests/sanity/ignore-2.11.txt index c362873c0..d4eed7091 100644 --- a/tests/sanity/ignore-2.11.txt +++ b/tests/sanity/ignore-2.11.txt @@ -61,6 +61,7 @@ plugins/modules/zos_job_submit.py validate-modules:undocumented-parameter # Pass plugins/modules/zos_lineinfile.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_lineinfile.py compile-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_lineinfile.py import-2.6!skip # Python 2.6 is unsupported +plugins/modules/zos_lineinfile.py import-2.7!skip # Python 2.7 f string is not supported plugins/modules/zos_mount.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_mount.py compile-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_mount.py import-2.6!skip # Python 2.6 is unsupported From 0c4757d99e390144ee0eb92ce6d5d3722e1bb452 Mon Sep 17 00:00:00 2001 From: Oscar Fernando Flores Garcia Date: Wed, 11 Jan 2023 12:01:45 -0600 Subject: [PATCH 016/495] Corrected rule in ignore file --- tests/sanity/ignore-2.11.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/sanity/ignore-2.11.txt b/tests/sanity/ignore-2.11.txt index d4eed7091..7b82b5e80 100644 --- a/tests/sanity/ignore-2.11.txt +++ b/tests/sanity/ignore-2.11.txt @@ -61,7 +61,7 @@ plugins/modules/zos_job_submit.py validate-modules:undocumented-parameter # Pass plugins/modules/zos_lineinfile.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_lineinfile.py compile-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_lineinfile.py import-2.6!skip # Python 2.6 is unsupported -plugins/modules/zos_lineinfile.py import-2.7!skip # Python 2.7 f string is not supported +plugins/modules/zos_lineinfile.py compile-2.7!skip # Python 2.7 f string is not supported plugins/modules/zos_mount.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_mount.py compile-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_mount.py import-2.6!skip # Python 2.6 is unsupported From bf022520a9a10d1d256d7f67cfe807ed16fafb44 Mon Sep 17 00:00:00 2001 From: Fernando Flores Date: Thu, 12 Jan 2023 09:33:09 -0600 Subject: [PATCH 017/495] Update 584-zos_lineinfile-error-message.yml --- changelogs/fragments/584-zos_lineinfile-error-message.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/changelogs/fragments/584-zos_lineinfile-error-message.yml b/changelogs/fragments/584-zos_lineinfile-error-message.yml index f32bdfc3c..fad485765 100644 --- a/changelogs/fragments/584-zos_lineinfile-error-message.yml +++ b/changelogs/fragments/584-zos_lineinfile-error-message.yml @@ -1,2 +1,2 @@ bugfixes: -- Fixed wrong error message when USS is not found, aligning with a similar error message from zos_blockinfile "{src} does not exist". \ No newline at end of file +- Fixed wrong error message when a USS source is not found, aligning with a similar error message from zos_blockinfile "{src} does not exist". From edfae9a2ca661d9f52764c1c634dba1a031ae19c Mon Sep 17 00:00:00 2001 From: Ivan Moreno Date: Fri, 13 Jan 2023 10:27:28 -0700 Subject: [PATCH 018/495] Added missing fragments for issues 309 and 408 --- changelogs/fragments/309-replace-text-zos-encode.yml | 4 ++++ changelogs/fragments/408-restore-members-on-failure.yml | 4 ++++ 2 files changed, 8 insertions(+) create mode 100644 changelogs/fragments/309-replace-text-zos-encode.yml create mode 100644 changelogs/fragments/408-restore-members-on-failure.yml diff --git a/changelogs/fragments/309-replace-text-zos-encode.yml b/changelogs/fragments/309-replace-text-zos-encode.yml new file mode 100644 index 000000000..b4ba2b53d --- /dev/null +++ b/changelogs/fragments/309-replace-text-zos-encode.yml @@ -0,0 +1,4 @@ +bugfixes: +- zos_encode - fixes a bug where converted files were not tagged afterwards + with the new code set. + (https://github.com/ansible-collections/ibm_zos_core/pull/534) \ No newline at end of file diff --git a/changelogs/fragments/408-restore-members-on-failure.yml b/changelogs/fragments/408-restore-members-on-failure.yml new file mode 100644 index 000000000..3e6c50d12 --- /dev/null +++ b/changelogs/fragments/408-restore-members-on-failure.yml @@ -0,0 +1,4 @@ +minor_changes: +- zos_copy - was enhanced to keep track of modified members in a destination + dataset, restoring them to their previous state in case of a failure. + (https://github.com/ansible-collections/ibm_zos_core/pull/551) \ No newline at end of file From 1aa0b7a6114553b3faa5af209d0cbfd072f8ee5c Mon Sep 17 00:00:00 2001 From: Demetri Date: Tue, 17 Jan 2023 20:06:06 -0800 Subject: [PATCH 019/495] update make file and related artifacts with recent zVM changes (#598) Signed-off-by: ddimatos Signed-off-by: ddimatos --- Makefile | 4 +- make.env.encrypt | 550 +++++++++++++++++------------------ scripts/mount-shr.sh.encrypt | 142 ++++----- scripts/profile-shr.encrypt | 394 ++++++++++++------------- 4 files changed, 545 insertions(+), 545 deletions(-) diff --git a/Makefile b/Makefile index f0f6cd9d5..428f5d602 100644 --- a/Makefile +++ b/Makefile @@ -68,12 +68,12 @@ encrypt: fi @if [ -e scripts/mount-shr.sh ] && [ -e scripts/mount-shr.sh.encrypt ]; then \ - echo "Remvoing encrypted file 'scripts/mount-shr.sh.encrypt' in $(CURR_DIR)/scripts."; \ + echo "Removing encrypted file 'scripts/mount-shr.sh.encrypt' in $(CURR_DIR)/scripts."; \ rm -rf scripts/mount-shr.sh.encrypt; \ fi @if [ -e scripts/profile-shr ] && [ -e scripts/profile-shr.encrypt ]; then \ - echo "Remvoing encrypted file 'scripts/profile-shr.encrypt' in $(CURR_DIR)/scripts."; \ + echo "Removing encrypted file 'scripts/profile-shr.encrypt' in $(CURR_DIR)/scripts."; \ rm -rf scripts/profile-shr.encrypt; \ fi diff --git a/make.env.encrypt b/make.env.encrypt index 84560ca7f..f1b9636a2 100644 --- a/make.env.encrypt +++ b/make.env.encrypt @@ -1,275 +1,275 @@ -U2FsdGVkX1/dQmXuudOK9uJaqWJeTP647AagRJezWwFRQK4EkCJEBSRNfmQQgjKc -DFfnSa9YpJ4zSe+ecwfWZaqZUryyHC50YoGjDfqcRNmJ7Jw7vKLGqsM/KidA7IPx -6Kt1r14wwKAbZ3VadYWLnKk+CwGEMHq65wLSrP7GMVSI59+02/WpJuEjjfAXFb29 -t78+d5GTXzNDK6VgjAB908YIMfgmSeVfAP4IeqH9PaqoP0ExBYSgV/TiQa3L3nQ+ -Js0EwUsfroEk6/t9CfbJl6ZGLNPPZJlIyAAHmd7B+MKgb6b4YsBlc1R1GgghjZp3 -d8HBAahrGwSwup4f8nJMGZkDHUFvOSfQ0lp147zhpG6DkxtOzluwJZTI9hQarwR1 -o4V//OvnYxBbXybrbh3fyDt3/r1x0f+RMjQ6nUyDIQgwNWhoqsvPoQutCes63OG7 -AwRVYbjKq7pMKPWn7KOttgBJH1Bqka5TPZ9sEecSkB+wz7GvQgUrbpBYCDPcR+TS -a2WBArd+OzSYJHCj451kobTzATKPwLS2Tw0N93YG9zZ0738dydcIV7WQxQC/vtQ/ -OGQ6/ttLptEffWkICNkWpRLUsdB2Ih8HW7i9q35ynYmxDollbi7dRzulJiqbGre8 -yUL/a7qLgSD0cwIR3tZeV67YxCKeMP03fWEhrqkQwDH3tK+fYe/YeSskbqvlW4C6 -450ekOKq/9XrtenW5UZSesulndB3eqai1RdfDEr42MtWrYAV4cG7aPpy1E8JQkbs -xo3urfYeUDl7hpe+i507fLkoio1T24E1LpL1ubnw9YVicqR6kqGvuf6PfoeOHX3I -mlc7L4X5wfOKIxY8TTPjGUiCgAWQd/AfYDyo+X4KqST2EMUgevWbbwdCwxyenRcS -+8QKwCEvkmXMyeH8NrOVlji4RJAi/NUPmdtgSgKLi0o97VClp5B/yAm4WB6mJ2DT -GZs/FN8N2z7Xw7dODphsAcKwbZjyXALRJ989+Xeh+q46UNumNjePtrpzvYvS2hYA -qk917ml6hRyPiSufCbvtDGeOI0rK4T1Rur1cSx7VknePudzfnG9wbKuJ5Q38ZlKX -dOBRCpTmtthdBG1iCJ9o1av+QTud3+/r9/4L+3y9H/HA64iaU14EUs/rAma3Vyp+ -BHfnCOuZRkohEl/BCpVB6MwqK+H6g2Axh7Z8+HgsNokBgWub2/wmdeRb15Mxt77c -iGizz55NkdJ/Qv6QSzxVbKDG1aH/zhSMo0hIZzYJZ1VSIJ/YfFzmHDpRKAc9UnKK -RoPYzJBjyJ3vDanvgvhTtnTpmvxgwUodA4sEexHw2UTP3EtClj8Pku4zJnNB41Ah -Pkf5v7ondEwaBdzKb0z9MWC01CdnnJlNfjfYvwiQJ+R/2ziyhybibvm0Lwb8zTy6 -Lk8aF84G+t2zyLVhikQWfjlQ1rIZyT9WHGYGC2DtuM7fextilmTFx9ZAEtVZPQLk -PY+xVTL6oUpu9xF22NDAdkiDT2OBErzRB8nzjSBp79fsPFnmgBTdsCUYaNRc3VK5 -4dkEkEsWSDH/g9C9gdHrzUer25LPPhHNRS31z/DT1lZaZX3zcmBIXY9Qx1clwmqt -CA1gGd8sOnrM45JNzPuL45ZT1qfhjHpW08y/n4e2g3Kc1A2u2Yxq9YYjpdvXzAhR -OHeR+Upht1NN0paX3T71upRlZoIIhY0/81DPippQg5NjNr9R8M5TQ/93oZ9DFH4+ -36qdcuYkqfUwAc8dT9bMFMG3pPKdvqNs+3IQ/390P/DJtnBYeRkIw8VSiGTV61fr -F8nwihZD/LKWitOSJny1MRHBe+UOgS8q9vk0NiPoPMEKZ3YJefCpOwh0/mnfDb6E -EQjec9XOQeu6iY/s53XccJFF7UQIL/zo5EPcLWkabOQPXSf5w4HeCJi24I/wv/wZ -Hd2rxa6hAAmSMbdwFjdAEd4Pqzd+mjztEaXLmxz6m6IOc5aElLAYd8Cz/5/oArPk -6R2CntX0Kd3HfPLDAuE0E1t9jV/3KgT6BV+x+bqIzIo5TCx8vE4Wy8ryxyRomk00 -FF86CZSzBKwNE2sPrE1fIVZtifn5xc7SCpxaopaN6FK4t+R1Shcio6tpMDG8/t0F -TY/oIDuxxtYIvs6C/eGs2TxbxvjOls0VdZOb00C0oPkIM58TOi815GcdK+EhxuNk -sM5YSh1zHjR868A+dHEinYLfKHPFyKTr2l97rxThl5T2i+jnbxiF7jS3p/XNCFoG -yyqJFUXTws3xPH5EaS4PEhURu7uxrJteie6bGPB0qDb1QxZLyRNaw4zdj6Uf3Olq -FAgnCbbHm/MMUEWWzZ4K7O1A3IZPwy81FfylKjs2rdZW/Nho0fxPmo6oI1VeQOB6 -M8b9anBNtBEW537L+3noJecitGdRreUijS54qrrQOAV/1C2tpIQlsF9V+JbbFV9P -9KDnYcDi7HmPydehAXHITMjhaArmuNfJ3WGBEV9T8Cjan1R9NQtmUHMEuxfCHf2M -RXfb/Rn22iNsdYmBELJJ6hF5PdKp+Wi7R2tY4auWYCpAlqp2qjy6x7miTCXrf4QV -Rz1gHWn71P1FyfTgap1M4ICqdmZKFPUIrLJqV/on9SPT/KPM5q+W9sLiXo+K7K51 -nbNvatChQ2bTe+tSFZ2d7NUTCSDXH1nHK4DtlWQ+FEVCNZrqAxbqhFCSPvAlTuz9 -eK+HkTvvm+JXGBJLdT8VHvffNBLfakkLf6OqTqLu/b5O7gB7A17mYxX0ifqFNvrB -51x4dPmhu1vs63Y8fo1vaGEOx4BrS08HZCFjP0jPWjemaffSCqaO3kDgi2AmdCxt -5M5JEbp9U05aBoD+q5KsgoTR64sHadhZx6zA+hqNMzsRCG4eKTKFchdbgpjENrWw -KzKNuv7CXDtdGLIcZW6upozo4z0WbcsTHQ7G1kH6YFXZnrxHXFHrHUMnNaBGf8MX -hvcnZb2w7m1s+gN6frHnHGwJq+l9dHXVOWwHGbiqbiWqXzZh6SrfRVFfbd6ed4aS -9E1VzfWCG4xdhRlEPAqUBvkDbDsxoaj1vXxwPwwqaKRBbrbCJ8NPuXO9b6TC0yEG -74h/DMtumEIVLXTwFWJa1au4ZPMUXaCFW5nUJnrE1hYjWdi5/ro1Cdq9eIDkWVTQ -0xZBzuXKsnNi6ysQlzTZ4GyOgiF4Tkd49nuGwgqjCPJKauCyVVBQzRxfpWYWM1tB -r6A5JWz9EMRJraXwFkcKwP+olG5/CsO6eF+A/0mwjdQlFlQX7x7meLhdpTT8wV8W -/yWqmzhSR5s9su/Gd/q4hdWBYbbiEQYUWrQiUUSLUHwyuF4bMSTHZOcL7YOjTRjt -SQO9/th8EWrs+X8qV4ElDlEEXWjLcmZ8juPfpCApkiGxmc+31W8IvvrODH/KLuJ6 -29PYVfgugaGCEgIU15xNHLgSWO7IvHHZhXb2C93ykVlabee393TeoNGpRZH7TFRA -3mZPz6OlQoApvkyrmrlSWajCLfXFlWAspy2NQAValZsaQhzRNM54DB9XV/RWoHob -26lrCL5iqTs3POXZdPbZHVzC8scugXCGUdVcVKEG/fnAuUl5HE5AVpxt4m/wPj1l -CMo4G7U7DrQo/+161VlnY6zELsJL/8MVeMIzoEfWe+Uw6uroNFNg04ZCdS6IacYc -rKwzPZJjCIQq0n/NLlJzLgQFrzf1Hy9NyHfRFcguUegAIcOv5rTBlO4P6OBSdtwl -3JgU4enrHHKkans9S2Vkt5CZ5smdjv1DUSG02QTEpGmYhuJGxTnUwymGpmPePDtD -o8xLwLrb3zi0ht34dGWenzBFX9QeEAFEAyA9SMrwPTRw6/xL5zIkgRnC1yhZSWz9 -eQYnQ83yQIQQ/mrOrTOBp+YW3AL54Kj3VgxsXRIyqjRMX9Z9FrnNB3b+eHbRbiEo -5al4U1wEgRfCu5mSqq3wDROE1Kgsv/0/Ju4NIwatfSxV4S7l2NfLhY4gbPcx+qMU -CKNnLldJBgl7HappKb62MMWN1xx32ORwfQ6Dnwe5utZiidfn27lNbGjZLKXGBoWj -C+/jyoDuxh3vvNuW6Zfca2kYhKcf78xG5sMdRBB56a5yDBdvjmM+TLhNRdfL0bch -varecctAk3wKeO56Hstg3xjmjF3ItdoCiQs3JgJPUcoACGsvnWILZKKCJoa7/Z/U -XweFl9703N9qk11Siq9NFiIuzHsauNkPprAQ8uamC1JV3dm7L1fNVOBbCvTkkZ0K -NOwIrx//OAXPyqlLR9SJ30LkgzvmbtftyYvpMIPqMVgq7qjmebe3YdonIzoAJPwq -ggfaub3yZNLucKwZgiOqdW280b6k8SEuDDtGYsj9v5NgwKrpaeQ2z7oZPiTUMeCJ -yNC/MQqhXC37IuKbLywRKipHrcNKpz2v3bLpmPKIztWTVGGy52+HeR8NR98LMucd -qnZsaGJyi/7ShBzPI+VR2KQU6ibIBsnjCF7hqV1wZVX+D3zSCRXsimlEncHLjDWg -9xvWNuhuyYBqSZYyFVz1N9MjY0QGH5SQWC9T359LTyyHR+E02ClCBITfBduHEqcV -yMpNUE0dXDLU3lxx5BiiswFfEnFtqzKaXNbrFWCy/Dn3QiXw/2nbpXG7ZvyVR3CW -FKEF/xtIn25zhgTL0o8eTA8sv5E25WHhP1+UKtiP3j3rWy+6alRpa+OAcDWWzEd9 -9spR9Hy7iYuWNHget5nsxodxttKduPaA0MH/aM2N1b/dU1/gC3bTX7ConfDWNTlX -Oc2ed4frD8vrkmeQ+12nkWPR5wOm9ZRoK6tJEbVBBcdZhVhzZxXC7RX+oU8Vpv3o -KlXGgMBL1VzXNJpTSzT6QK1gDNx3wt1Jh8wPsBvfkyV7/pTIRys2JVFkRN8AdG2D -lTD++e05b8Y5dRNUdC5fZFqqi1CnYXq1rRME6Tscqce/dBVLl0TGhVcdAhTlmvgO -l92H5adIPVav6hsGMtlkEj9zhp9O+OcZj2Va51ypvJoDMr8Ks1QEr/gTieRNcgtc -yDB4g+i1d0jb4JPdLuyEkj49fQgJyOgFbHS0dInMqH8b8zq2kFbt6Woj7IEktkI1 -T7JhHovq+aL22kelfiqDaLmGz8Ah5uT/TUL9eGJxfMf8xCxLt1mERbuMNjjUQpjm -jNRxzPhZwT5zrFllxw0+hN7rex6f4r1xhdWvxMVZs1pbbQMU48UFN8i7ANYx8qC/ -oHbJ//N528Q8VhF56Kr8W51bg5rhPIUcGWSHmUKhWeuXXrBSPC7+7/sXRyxfxXml -kqotKcGVd+Vmbhd1jt3oVzu1FqL0udJeEkJuWdRlPSJgb+f3nVScLDw4wVLLeM5X -2159lwvblqqtB7BiNiEDEeI2Jy4Fs8YxY2YNp2/ldpJc7jKB6VQ+H9SRn5wpQifF -TOoLO8C/ubwoNYSEO/UePuYVLDFX4KsPnZ2hY89N31RBBdhrOgi9oLNxQfOzWzpP -RMpjX9a6aXRGfqGGQtvDi2sWgfLvN8iQf/slB+z2mckdbamtbxOry1riBOVjdeaO -ksXhh6fq3wa6VFhRYkyM67zo/gdwAFDAwmcHX/UEeFqlhDqT6z2UTDCr3YBTBeca -QddvuY0tGI1ql+TQ5GCyBAXUkUR2fZEQ3aVYbOHs+YXyMH6iLfgLLo0Kuv0cgAOS -huJ62BtfhkXzBD5KlYMNBS6MBkKFwcwOQ2corgG+ViRS2IreIQlKMIAt4sn9zs1g -RsJVJ13Z4KOP5b29C/NhMQRNzSOFHFewc2uNvuNuI74IQpZFAVRwDm7kxIdOzDNu -8Mb1jzbLLsKGfqRpCxEZPV+AuIRnMdog2+sCnkMDvbdIgulHT/S4WHx290dO2HrV -UOf41Tc5QcWpDwnG2cKLMkweIdh6HeoW7y69FRDqFho4dLTHTPvc/a37t6SVFF3K -0O1vyDLK5F39GTdW6ROdWjd4IdjQ4R2qtLoxUM+gwyz9J782/AlXcSuc+8p0XnLA -nxHCwf6AmNgoDmI7a+WQiaqWlYwtVfscwnjREqvAXCtqqTkEg8wWOhVFIjWqcPwp -1fmzo+XsSDY+uoAXMtdSIyAFjhsmP5XZXASuvDR9htX/1iKI2imJGj+KilYMmbDu -wuqfLtLTytV7WOjI5W6qry5xYlYmcdM586WXEro4p4A/6droqsw58czGxPtpgS1p -k3cWM8qNQk4DflOR3deaq6nu3wUujnt1QA3jmTQMGSnVbVV/W7kWcLeglsYfr3Lq -9lILrj7TTaeGMsJlCN2TDN74eHCnwdjsJDkl8A+Te0TZm+HctwAIXTurvM2O9CY/ -bZc8NfQyVyVBqkzFViXrsMQ7/s5fMliIJRx8VlXAwl0Td5GgwQKP0hLILZG0wpDA -rS/u47IY0LYwXMXY254dQGXUdWtPUnkjb2EpdMbHsfX64NyBff3N7kv4wJENpktz -4XFtT8F7CCldM1DS8RcejeM4KuJx3FzQyabocKdTbJ9m6g9AUclXDz49HoeCw5+9 -nQlkzf0nlvYJNaMzsxCVpG9nZVfejFLqNilVH9TEA5wSHyRqY4WXBpI7ypUlDrQS -plFGaGHP2cX+d+36ZfQ2ZU0nizTFL0aLB5uDhKDa2BJj4Pp8aPKfhb4Ea8+dnx6S -QnSduizbVtfEQXoyLu0ICQcE/p3u+9FazKF7QEgQzkeI3MahoyZIfDK9A/5gDv1e -WEvttwvk+ax7EnWRx/W0GUiCHE/L7qV98+zErMeklcbSWwhI5CuqUfwq/tAyRcv9 -faNKX1+o+IuF7tCApQmiZ5xTqvnatM0VAi/HuIoVqtF3V/NAEKQ79rrQcXW30l48 -tGTWNo8+BGXhFYxs6O6B/RBGaMEnWXGX4Eel9k3+IngoDK0BP+fDn8Zwt2bYo9qF -Q/aFFPKItfWSXQ8t/ITtMaNRkEPgwP2O98NTh14bToOaIkwQqKIA1484ajya5dVG -HFJ1LbauzJiEKXv9xIPXxGFqcnkxPdmnnlfpitvvJyptbOvfUIXid8SpOF8za28g -HrXhw9At7t7uysI6t7KC03Q1pMuT7vH4VHyqQHPbTLnHaWhSPG6lv1XgOpax2Tsj -y8zoj80GlOEyJmCN3+aEFBXzmsadz8u3/2doqO0WdxTZS4+HIk4qOUaUk7LDxBOq -qEM8KioAAE61lDc8wTtjB4r4iSCkwOu4/zoaYoTbBRPptO1fbVDUUI9JIo8v7F4B -+i4fAZYBd4De6TUnaSVptHanh8IRI+QZh2mKFtQVI4dNg0kUFmCXR/dQD6UflZ7s -uzOzjKOLwM9AmsuFJh2k6Q7oe1dy4wsggfjlOJz+JwbItiKCx9c0Tr3hGf4M8Xay -EXVPWdGvRZDFOM/aVmYDrMErJNPxUtnVQBAzNUgnMqxdEFHhg9SkUHMai6hBBACH -NU1YO7tj4FJRLywfu+YmNJAV3rUecmytfkXbQLzXqIEE01bQliQ3e5n58CyBoPMm -bL4p0BRhEiA6hw2jex4c4u30sl8CaGnj6VRVpyTcMyNO8Ge72C58RUFSbm85vHpH -QBFlPs2eGbyCw3qODZgkXUte3CGWu1EBK9ri3h6PFgfsOz7euKrw0+pc5K4jfkgy -IGkOODvNT2rx39YPR8T2IEw3tyWKxJkuxydo9QOV3ElUPwkvhoh6My10LKQNkXNM -q+ExXJJ+iHLVHOk9e9JNI5fL1rU8I7jCL4V9RQGc6wuAwqp3BzUk/W8Pa+COTzgN -qD5am21mQa0qpb9XfNqehs8aINRhi2sTpm+g5xkZfDJxQGyI8qaMWCwLoMPe0nZU -dOOimc2QRZ215O5J4WIaAjrOmrBQqzcHvrISmJ4nL6Gfph+C0cWk7mx2QEWD3zYr -pvU92KSK5rt+bS5g+MAdrI7NNI5M7dPooFd1xSZ0ZI/UEuerH7bZjM87Oho3GKtd -fySGNv6YnGDx9/EKYRZtpxX0FhHkXOdPtDenO7ROdbqomAaQ0zVZIPXfwPYgOKrQ -vIGdscOkcChjdCQSigPYieiR9Z9Nawg5auUI20seu2/J5yRNgKsDANNV26H2lHNt -X8O9CL/trV4BELP7Qh6/2bPJ5V5cyzk6sj3quzxOvvenclfXy3KOqIiDmeR7qbWv -1uK7wgtgl0WUHwN01PSPCaUEb6jrYPJUuusm6ZDheXelfit6KtdXWUTsQxKDCIXL -LPpFtYPCVH08ds5+YvZ6bOKPbpSVH63ixVFlMpQl494OzjcvbGnOx5IkBXKKD2pG -ONnSnr3FeBkrI5jd3uqFVZ5gQ0I7VRfhX8y/9TmAjy1wunw8VdSwWZ/ds8ZNNUfM -TfEwr6mwU6vGquduyGDW/ExJKIXWBhN2g7kmxbp9m72FxTs9NEP+cWETkxL3L74P -bQMRKo7hLpMRgibuHJYu7/6ixoylIp9LOZcMqVLdnRAzEC94HGOSogmWVfxScmUJ -z/9iot0ikASs1V9HXf3GMtuoQ/LEFEHpr/a6BkRyt28tuC8wqtAPlotln5K9rmLL -wTpPK2cfFh/5LKWFBrw+2TUGAejpw5BRCWy4E6WguFQcKT9MD4Nbk/SKLrgUKOJt -RqxTqDF20bJOePOlVuvqjJois7OZrvyvPf7fY8enCA+ETYnH8hDAfvbz7TNo8AgB -GR5FtwV7mZPGd2391KkOdr3s/WX6qHVG91bgHqsPe45gSLgpW+u4d5Wc0zVRU+a1 -HNUl/djNi4XmGAptqsar0lYnEsHgjCJx5iz3+wuqnVuVJVzpgPeMsmFW7Rr/5arO -tSBLzcGMPBHzfA8/l0+MyG4ddZj+lciQ44vDnEGiUU3xyEoDi5e/IpmjHm+e2eSo -DI1WTPmGZ/PFEFp9DTvDt+SXmI3YjJjnGHtSOOvNMgD3a0hR8OwmtSAAerSkYq0b -NRCAoTMPQP4LezSuwsNn092nTVgGr/MFNfKwiJcIQU/ZFa3kvIa9T4aUICw6Rd9+ -p6pUz1rP+S8xx1Ipb/fHcIp4+XYSEIto17V2azovu+0tzlc6MAXetJ2Vwq0O+pZp -S9fj8mO085WuxVR3TEGQS2dKseSFNCn5FsQRiA5iAfxH6zxbTK4oLazFpT9ovO5I -3sE2KAIkZzfg5Yt9erWmI5dN4nqXj/jQuJ0HcjdNZOY+NIxeGnBC3TxbgspA87Sy -nUqIUshyNd7qTLJ6oLLNFYm5LdvYwXgLpKYq/fCIGSu1zUlLqq6YsVTgQoZ1PF+k -PfPBOTU0AMCbuUD2LUOq6Yp1H1USBkUPeKuBnyYx3flF6TNlz2fK1Bl4cbUt4WZt -9kd1i6Itvp0mBQni1Tx29KQvtdzy6ffX9aqkEjh7N6A82OwSWI1naJi++rMiW4Dq -BOeeJV1i0f1JEZndVSxHpr7GVuEWK5XDAJAeDfwp1xFPf1Ct+/rdJISiToQR2agM -SDkluezwjUb3UgELiGiXxhTNvDaMQ9sdix2hRXg0PJOrtC86HuX4PNnClIzagJEv -Vzb0pLO6jU0zkI2fcVT7ooldy03on1+3S1Enm3OHV2QApQJgcKtk4+v3qQBu80jq -kNPuUDJUrER1nxT3Ehf3cOZ3Z5p8HqTtKKC4Br8jV4Qs/kbhXKvedtjadSU7U4je -fsp7LimVozF6YA1bpitff0ejWCITA9c17A9Yv3IYQU49XLNxooJ7IlFTPmmjGdkU -V2ubbmwkAoHyy19FySqsuDUwCQ6S4Lb3Lso0kxprNP0w8QP6PVbUT3qFDdvn2hg7 -GSQBafoIQDhYG2/tOKWpayLSro/iFHSlHfoKYei2UBi5ym4qFQ0Bgc/A3i9eYXsY -3nAbi8xla5OEhoLOmxTN6DAqGAWVlKDPpYrWR6DTKjG2yONG3OY00xUhy8jAmRpO -rgxyqygHPFcBpUjTwqQE9LvIj8af2BIlI8iRcqo3vqwPRbuwTwNkKFVgyWhXoTEV -JUGcdDXHkYC1NUHBOs9lYLR20MuHaa8ue9iO7b7eI4sRxwJT8byUG8ZfoUTP6har -QMCddHAVbNKcQMbKDOb2C4UpX+cMGxuPHRCupGxX4yL+lWyuEYvl3XwE5Xoo9yjL -ZrVosHwHNRZ28/xLK1ZwFo1crsDO28s6E/hiw2wORGALvrMEpE9872RFuhUmSKG+ -/pTnHEEmGKY+8Y/ZqSf8KfNE5lbvH2wXon32NdX9M8lheXfYAOYXgi2Z4x00Ep9D -vRzqxJOZrIqwmwZ2ILot94oqQhki5Y2+THfUCBc7IG0IL/pxm1GaZhvJxjC5n/xn -P9eS2W1CEIJUeQtSJCjumTNFIWwvJST1AVWt5GVjqFlVT1qqciQM63Rp2OU+7DMz -/CfV9egGgMW5q1dLcLVkyhbDBrg0lqzTHs7wOot1P2DPwZK9wi2LW13q8ZGWsISB -16LjJDeQlRnUkT5tVrse8UnKBsujm9Dy+NOnkP8rK97FD4mHgGDu2+vDz2g6kiYg -oMEuUvxNCnlUM2oa/mclef7ULysMfznu8/ANkWikuEi1BDIhj5ww/8R2N0nwb8Bs -rpRBK/ZojohyGfc1K5iLN81ACHEIy7BcGpYyutkSFDVlUhLKz1+bgB8ZV7VJtFYT -diJu+5yB4PcHNQsF+9RJOQuq5qYEfVVP4JF5k6degKNPUvHqJSgDGL8WC/eiCCkq -MbR9ueZwkHW0LCZ2WGd8Xb920PAAJic8tpvDFMyogZ0OmdwZhPlI8kUfV/STqmmx -N22irC+BB4E0oShiWoUK4sqE8vloqEOLcjB4ijaCNhkBhEtA72jLCMngidMChcqq -+92tHWKKgNccDBF2yaydlreEwhB/bx7yzxA+e/kCr6tPxb3JeuOLvbwvG34D5x/5 -MjRs51RmVO/JE/vsob67PosrylzJW/NR94rDP/knxEzb183sRWsAEUffBShWFd9s -ee7X+4bHBraNz62P1CinP9I89j5+QFXHWAtf6I4cP5RLyhYnI6yFlvU8i8AM1yap -visnzlywV4zf0K9SpxqpX0a48kWUDLwdK+k/or15jziQDQ9lXTDhjuGvK8eDc39m -ZZw56RcKeu9EeD9DnAzisKEucmKMrkbMLUi+huQMuPRKlE4vIUR+/L57BR3D2fN8 -BTp2tGIidDzvYCndK58KfitmjFHaU9rI6m1UqfkZBQ2wt9JIbFBQ9LWsSKTaEs0h -usPT48tVzcxpz0u5ED9opj5XRa76YkvE8pZjHGQvYytx68M6VmjZXPJgVVByBWEc -xoXBSFBYr/OrreZludUEPbSbjWVtKpwm0V/Hv/7xO/TrWpxZR3Baz55EteTrP+ia -LPxKf2DC/p92jor0ZC3qKvXf549W4rtwU8jfX2WnN0WzH7ETjqzitjl+RFH01Ocy -Ji0xN7W+3OSKebR9xkXCnyHvnwRBmvJtaAh/8uW9KNX9FiUkABrmFzt7T6luri6B -cUnyD3hjwTQy00dEZd8XDqPE/AP2QkNtwl+FE4sAhGosrk4OEqvnv7adhvftHAY1 -2FoRlF3UGLe8WLEbvx1Gd72o9wUXPjhNf//Xd6tgB5nyzzjLOkylJdxyJip7Qu3b -8dkwO5aWtPMk/fJBd05+KAK1RjkWWvGrfm3TU17/e8OqS4NCwa44AmiR9hTOnit5 -0jTAvSidad32EYzrwlsG7bHuOgNT0+Y2OyJI7CMx5j9zdp17h3cBCzJ2eXaEWD7F -28ahJCryBSPODec2Gq1vkC/OQPvUIltfdz4P4RpksUbnZPYRq+yIa1Fnx0Gd9qea -9upeTFiQmqX7FgybQPdeGQMgLO+AaDNtgDuMmwG2tRqtxoVBA8X9xK8zvx0Nczbf -fB9CmqiD/vx8s9fI2HOVULzbpOFgZdGK4vY1G2IEvE/9hS08uoyH6uuUWDC/5fuM -lrMVUju2iWgtz74XiPRmU7h2CPsKJtPD16qKlscBEvd6Jm1F/cAbkO1MR3kZJLVO -Z3bFSbchpdedqAM4pYsLSKHVG30Hi9U9pDz3P5L0Z+wlecj8alXT9sFxwGJX9oNG -TbEzxy+9ftrTVb1C5IWFYRoi3QBrF9idebLQA69W6u/xtjLfDcP/GhElKagdyr6H -wvCzFQMyqDMa2soy6u7z5hjxohSYIlMDvx9SYm7WExUMzCu+gl+ZbgwQDi1ezKVf -dZ2FYTEs8L0CNIuptxVFV3IRPK8DD45thxodBsBv6+QYBKisYHzo+Cd8Yb/t0saE -ikDoLIqNRgDBjdT3M3lvFYaPEzUtKyRTC64FjY4xuSIlGFUJUA9JMGTGZWjV7oWQ -7W86VEtoPVa4WJ0dYN0pxbpseBqybMTMuHfAayv2B3R4MXMoiTmKq6uxdMKMdIvU -JUXlHmFpttAJRRG3/KiRzl/KCeaMQBxeNk2iPdCSM2ZQJiLUgYmvcqblqrOYtdmW -sqkYLBrJKf4wXxqEDVYvW6/1cfOxLuKSUU9zwXmMpyY4NZmNPicNt7VmAN1n21he -S30GZoo69B8sK0pQJVL4G16yn/H+I/eqqkMkmqITpaUmMY90EaxhN7172EheTbkt -OJdUVqEsHnd1qMPvMral4ybsnBiVJCJX9D17AHOKuAu2LYBy6WgH+akZEWY7KBy+ -yhwtlqBrHAfRw83LXCKavpUZ+U+KfXcmdiewdzLDlYryRKamCMVbuoFNLLX/EnDU -eEz3fDocdTT30ckju+/5LS2O4Puqqf27aE2+aWqfpfdueLCfS5TiGYlyYzMsaZim -X14b45BZd9JnPlVbs1DWrQvw2Z0L9VjGPRlbKoyb+m+uso5SUf12YVV4+foGKn+k -m2l1PkVPCpvFG+CAWLq1S5rV6SgyWa7li4oLlJrAYPB2266ZtNz+zK4pvlIcMNGo -YqWsDe91NNeOik90V4W3b17/SbA3kEBFSLgyTmyW/IIoXtyDSsXKWRFt/3DwlMwb -8yKkGLelowUoCi18q9TabGq6AF4M7+daBWkJ270ikxmwTLLEESm7p7T5ngSa9lXG -pqXqZribQOL5vcY4gVyPnk2m5cI1/7PlqrT2Kn4R9g1M2A+ONrAYmx5ztz7csckb -5nZvcDJF0XFgVkOAi4c64delaeKm4Ks04rjU4ZfOzNIRoa7wMXeO0tRMni6Zr7VM -zVjWDfkUnV76MAnVNcQg00nvSsDsiTE06OIk5ksHc8GRV9Pz9Rngw/YhdbVz/bp9 -1PfDhknB56HELIRmiwWuJf1DVGO5lMg3cx3LISEE1Jz34b8dV1jEaOGYQgSYqv+Z -QuxxVftBRjwbF+dMc7uEVPWyBiWE+xoyioOXeFsAkFNrnGADvyki1qopQPMOll5I -fqbjR6UwDLntTWTvU162VdtrI1dEZPsBpySFuO7o9bmPgF+sQGYQpvrzqdhr8APM -CFZVyUqsmp6PyPRmLxB3nCVk7vIv19i+Bjt3iwLwH/5dzc9AaLnleIVwWbKvz1ch -FHca5OqqSU6hux9nPtl1ES/4Ysex4BKZ868pgFdzHRnssTKvkWEpbH+6FlOYS7i4 -CHEwyDJ5yUpwMmCEWI4V4PvLkCZZZfWau1lTgF4gKlUVELhKjUJV//wunR9fa6B7 -2Ujpcpx4WzoV6IacU/A/POc5K3LAFL8TNq2NMmLp7NNZWNt1ldREMLuJK4bTF5+p -AJu+gnNUWMGfSQNr1cURMvQFBZqcmwgkqz3/x1nwh1FNiiUCMQgsPxXmFFY/j9rn -/KpvLw5CgxBohPT/H7ESvTJqUg0ojbKBT1wJhwseSH0XfC27SScnNe5wLglxiIQC -Nqz/erFNTQSn5i0/1MqUViANl/0Tz/BVrDYu0rkr+BmLkrBhM7ehoJ2rk9nI+VKi -7FEigsjcUmY+AoD3Bnmfwc1Y4ts6W7vI9Acz8Gu8uREUNUrkzhqT23zuZFK6ny8e -GR/X+zsonScEgj39kXavhTfFZIFDqhsR4NlBEQDDT/NizQuLDb5KGVqWLeqbW/Nh -TNrdv82e2SJN0W2Kt9KX9u0rr0UIFrpJ4zSM2ZDRNpKv8S5t7gTsU5z4QVad0Ef7 -G0FP2mp5Eu193XyAlLSuapscqBGuScMjqdUMyeePlAxkC/P3APsddAGNKA0TxU7H -/pm0+XvQvl1LPqyovkSP5k4e0K1wblKsRIye6ev+Riey8ZqQh+x8BwuP4E8e98yn -4y3Nh09GQ28pj/bTGgktHOqoB6TWNuAtICIG26yLL9GpJpRKK4uNo83JhIpsKYCk -6KRA61MfsJP/72kW0acvNdFxyt6GLoUllPmQN7GKsomGG/wZGrUoKZKnUNCU9PlT -b3oKPjwntp3Ula8bm/qD4+FKlDRtfcrbHERJo5uBTvMmtocpgoYDS6ICVPr26cWE -CjiVH7hFGXG3tX9tPbbYArg+Rn+YrA8DH4NQdBFvs+0T7tWD5XNNcW8WyBbxxs4h -ifDDz1OeT7qy9+u94/faPjlDt8kiRx9dxWpDJ6l+Y/+QzA/l4PSTfOpdrkwgwg4l -0fFxI9yeoP1q6gLlNdixcvUwK5mY6saqSkeiPl1FDV/j3YRVq6q8MWYuMUzeLIdL -VOTuaOYHAhm6e9J93ZC7Nx+kQA6y6CRvM0DUFUI02kQ9rNAozSru9mRiGmkAOE2E -CXcKkllAWVHSxRt5EF8Qp84IpHGXhctImwwK2KAzIrURw/HuoqHDhtjgTX56Cexq -d+QceF2f83a2THJyG+FSy8YdMRXmJcBcJk22mHKbN2GoxnswSlvHXAlDyvylrgBe -KU+qsOzf9CV403K1X72DJNqhHvUbI/oV5SHKKuzolk6jlw93vkbhG9ELyr/Akc2I -jpd8rTpcOo5ZsAAW1Zjey2LSljqT37vtHxevlXpwuMiKmcTeG8GixP11FYaO//cn -moJuZkNNTkBtjTXFei7VTrokqtzY5y+PxEhXMtYX5DNxfYkVGZUzobTNRYZm4jhk -9Ow1aI204A5xaEIgBmQEjae09IKXTJFlbM+iDeuQwqLcif4RyFe37uS5kl7ROLrs -24YcJC7wPpgu+RZ7clTGBZwDkLi5Ch7l6czpAquYtZVdnffpeXJl9l3P4fzKs3T1 -5vHStL86MbL9Uzd6+FuSMR8JS+b6aF9qzu1K5jSmod7uvwYPNRzJUPgZ53GbDNv7 -k5Clb8orien4/0qW57A9oCPz0dunySgIK1SQ2h//Q020zWQ86S3m0/xcpK5hCOYf -uuYtrPZ8TNNAlzUcXe+8kI/PtxB0T00TCNtksXBDKkqKp9stNJ9SyCGXrOz9rsDJ -9xfwVfLoTbjKvQxGNcg08QBijtUmSAaIPdYLm427ptflUo2Qda7g0H2mG0T8TYrS -8yb53ajDUv6oFb2X7hs8dmvu24l8XOqejK3ZvVobn8wppXzRGm3t5S2FYLKXu8Bm -R6FKg7XXxME1KfrMsKfeGglv/XrK8Hwa2VQt4BqsEnhI8cOsw7pYPhx2PUbJ3Aqk -JlxWGeYFWlLc8Nim51O7h3ymLIBTlMHPrQYEL0dBWJ+Rgg4Y9NmgkZwYGhto7fk2 -0MUeinUUYTCelCxIxmESdvYqA4IybFnm0Nr0VLEtnBT37+rty+haGKCxDzPFyNdK -xV4Py/VO0LR0e4qp/yn5naLtsMqL96wTtC/CUYlu6fp9MczwXFWVy/9c2cmIJYwm -/8YMzKwJIBypZMy8tChnScZLTeotoRrg836pghgXmn3SKvPBbnoaRQxzkyGC/7Vw -pY0NpAw3+Qw/0+dxTdTWS+fIKfTdpyQfP1VU6qfshkvJRn/5fanveFZvhi73Lkw7 -Khh2XMzL4hHZNKQ3Msy8daJTiyRVB1BjwCOmSYEuWfsdj9F8Tg1KYWvcEUV52CV5 -B+BsP91tiM/0Slkl3CuXKioOL5W9qXbLSF3ERX180ayla/tG69KwmLnc5dHF/4yU -3tlAg3tun50qjd5eFgmVChloTL0P5XaV7UVzyJbxe3yvZK9sujROy5X2bduunrQ/ -w1a6v6Rc8kTZnXzOLJKcaIWPFLoXVbgA7GCZ++Wxozw//LvqMTuijVcL82jH1Slg -u/TeQDOnr84qS5rhwyzczfsYFNUqYLfcPkzrqykaWxPC6viUygqPgKIS9BL2uX3K -nXkK+dHTZSr/8zn+tirg04YJjgAMpNsJr0EINQ/nGF+xtVmmhmEMvz+AOaTETqyp -d2QehC6Wzc2h2uvKGAA7ZEArTXM7JahbH7rEmvf+O/EmJAjbpsAW7kg59xbobB6H -nUTo76mde2Av0gyujejHehqAf/+e+/xFej5suHO41Z4baUZ3uTk9d0f+AqNJV2FK -XJD1HtOdecyzulN5z5bw1BSkgzhwHbZZhW52nWX58dEDE72iV0qpau5kQDw0MJcV -d/xQ1Uarm/JVv0WYfF5ZE+wXw4Yzp5E13aYVdMzaQapi6IAzdmp7kArcIB+fPvW6 -Z8uDs/zo6aqJmsyoLWDc04Q26b9FrLGJvIaIv878WhIrwjIld82JlUoGix73gz0+ -xBMynpdDej6cPEzTh8FHIXDaU7dwddZhk+Ulxl48YOukUHN/0sEYfXCwxxvH9AAx -w6ymgw/6tByOjItiry1HZLkCmhIunNY/d7IE9wgFteaMbEdwxVZchqsPX1yHw+Uh -YxjBHL15F3pocXUTC4p+xL1gygoPvU8gLI89eWgIkR38qXnz5NQQPNv5+2/Bmt16 -JZlNI2YItNrS/NAcIu/ay+i6O9iltc3T8u0LVgqg/naMoT3QjLo2PFQZp9jpZ+AB -aRSMV4wnipz+Xum5uRP1yy2MiCcezhIsoPn9Ig3A2j2FBuJKqTVeSngR9GLTsZeL -EWaWSNvOHxHkg1jkKiNpyI4A2e+lWj63cyJQFwta9CEVNC9HJ+52Y8UrKIpD5U+C -pWHYSDRNnVEhIXLdgiFs0QHUNuannULPmYNlw3GnOAK6XQcJ8lrVJ34KOKmKWanf -CRh2q/g/9qLBlTazvFbJ0btMjfEMHr6OF07bCSGWaqIrY+4+Swf2JvXFvJMSRdaP -NXbLLYKdjqWwvp3eX4xx9qRZ3u6kW3/WbEmdnjEEStErx0I0Oki/a16rE+2jQfLG -E/SqcwP4nJcvS+umH3CHncSH9AdKTybc2FmU7232b1OkYTHg9c1VUY3Fv5DA3YOs -xwARpaQxVhUxkcg5dXiC0V8A2g80jajKQZSr61kTUzKtZVOR38fDwC1jkLbU4HpM -lKHEikoiYjn/CYAdHonsWmeQEyiv/jrN236PGDvhQRzi78YykrvwKlM3Mg52vW9o -HA0qBHiMiImxldw3M5hXpddDRn2Vh+FbWxw8wwLLA6XsZifjcCWhEpROLJI1odQI -uEh6oZVbEhFcyZiLled+2kt+Y6QchwwoQQf3e9TE2nH2MD4qwjFcZivnJ487ejjP -uX1cQsSv7+YxRejFtkVz5zELL00ylveAzbgXEC1+84j9rbskjqfvceBFwy+4iG1W -XYxBznYdoLZwixneZ/JWFaePJkqd1RSKjAxskR/2DXiUmjI5eH820u1oMPWzDXsx -kpN8AmmccoftaLw/t1yHViwM5jgMG8i9c0Hl2qKSkfJHvy12FkiHyP6hqwYynib0 -zf6Jp3pfQq/yaF6YzjGX1vm+RnS9Wtyg6aw9G4ZrtefKlg5t43/vMUlXbTUxTqlt -2QHeHFWtYD5LYqVY3h0uaMQnV+Lk1TyiPeh60fTA8tfNbOwjzWbZ4Q0y2CnnDGWN -uFW3Vy/zhTh6j2AkmAurXaAYQKXPsQG98jgagVEKPrlXUC37lKtQ3Q== +U2FsdGVkX1986PbRMb2EokSrLE9lJ2+nW9OfuyA0vNn39kfHerFqT6axJCldzuZS +6cIbHi/WZtTpwjxUUKChgjSrLtZ9o4IlDPBn5qxMMxtVLhfJmVDwOvUy9NvAtHJz +tfIOOZvvZzDTJ7ewkywkyxk94JseMdn5/GhiHinpOin29cNYZ6cuxeYE5Lihz9tt +3vP/S211Oi1LGYjd5kfuoSrfr/7Pkdhd/nkyiL/r7yOtPtmSeU++D5VQsbLzYfRz +Umo983TNZJ25w3FMsrbZWR/1EPXd5Dv4+S+FAmZES8YSi4lwmnFbHnnJhB9XBV8R +K/+puWCgOH8UiAdymvaaAlG2rRzu8jQtitrOnSbONOCJn0+Fh/wW53JVsGdLjUS6 +a2ZKP+g8G3KR1aJkPG2NdjG+4IzoCaa5G9/YPP+tZI+6rKrPm3piabtxxerp+N4d +fzwWrmz8CL1ICnU/0ySzIGdKYDnF6oB57vRVWanUYknAFq9s+tofGnt0c/T3X9V5 +aOKuX+7XbLEapuvsR4ghd7uYhi6eh6s8vmc9gFYJo39tcU92M2w+8bz51CskwXDM +WEcYLnue9/yUK3fdE6CMTbFtIhlXDw2IAA82rracXoCEPZFtDSvROG0W5WLMb52J +xLE6DJ4nPAPYAfgcj7xzRzir7WRgclrWrnDXCCXlDcxXO38BgZNJ9Cu0+f6Ys3dI +1yyAagxixTJw1u6Etk9ictbr/QQYWhQwqXPkPSrelPbJ5chQrdoxN2MrPuRUm4Ui +QNa0ug6eV0bsolaqlwCbbxoqmZlf9Aga1ePoFhMicj3Jzy/8A8NIx1LhiZuLnqiV +QRhnlIVUL1fD3HojXxqVyQVM8pqAb22uzdS881gomH6BEK7B+v7gcYKGcglCMaFW +fUqx0EyHIzBTGtwldrymbshmLgcYlfuYl40eYCF9l0PDN9/azw4xstv91VF85ZR9 +5lZ88Q6/3rMBfDS8ZxwEXDIoJ78giMqjHReaQgUtVUzEgXJyTkXCH8GS5S9Ct7YB +09Gf/e4IU5EYEWO+Y6vCXyIpY23cE/mzBLTDichT4L64chc4qUX7ogr7YvEvU9LS +Ga1OOaS4mJHqmZUahGa3aDsCx5Aozs3R1Js5Em83Rr6lK0fQVevCqVagbcIrVUls +vOnugf+0wAo3YaOeJypT5JkH4JlwPO5Gfm9YJ6rvvQTugkwp6BfxnRt91oJzPoOU +LgzbLYcYnersdpXoQIvnUPsF7cPxdY5+rS/cllSx+dnoHzqaNrqOkfhn7PScIerT +fGOWrPd5gH4uOKshc/bTybp4lSgbBEQGjD3HUjuFxyfbt63MsqxB58BV25tzmabG +VJq3Z2HbD8xlKWhoplFR/QW7RpQ7yyuzyRbF9a2M4dwSP51XkNMzA30OjvqzWrUu +6s0vDppVM8iTT9XE6SyGSnKEEOIm9XzXEsVD1ZlE38QJxYo1kl6DMPRiDq+okaWl +Kbv0693AZulL2hHXtQvMufxFNNAfoO5jk8Jr6rRVXMpsRRneYn53cyAzMuk17SQD +X4LfN78mOTc/6qadfv3t7ZNCBeT9pEYWfqhed2hk3CzvfdqceX2dimTNySxcNMDu +ukrG6vJfaO16HuVnXDT/V+WDBmEDhaeadrjDS8u0AcBGbtGxjXIHqoMJf8sxEqDV +cRQuAKaRwQcAVbUoF8pwWLvEDBpw47kZCVm6202FwI8DQngWVHlJDCvDoU86n9ks +9WUSuHJSWXoYurLdXU+0HQPGcwEbjvn9GXK8UyPlF3CZmz9RJT5Wr4Wu4p1ZACqD +4QMW8vvST4yuT3mZEndqrSNCvWf1M24jhap4HY6eKSTdHaEAEhIpnbcaR+pBIiH8 +QiK/hTRsTqV5cFQYN3x3hIQQHGIFXgutSmjuIWQQ8kPVize0qE5qdgzWmCIQtqp+ +OVZ1sux46edT2sAN2cwuL4b2sc8G3yMrEA4L6imf8Ea4mRQEQvf4RD5A9Eq36z0K +SJzvva9HHZK7NIMsY4yRt1GWTyNyzApJ4dywzDc6cLvf42O/NSlHJ0NOANDUQ4eS +mbQT+oZHwCWA13W4XAlaLesakfc2E8KFMyIv95j8BeySp8gnrGz51wGMIeLeuDRP +g2CfvXZzPfea7jagkqfMJ1+q7pI/ItrQ7ccrIwfg4gQL7gEE4trGiOvw9RjTC+Lc +wm7Wj6bEVXzONR2izvFnF+PAmdPfV6xMFrGpHaQzUcvqQ5bHIgNLl11IIas2tZH5 +RK0c2K2COaX5ZmRflPUK32vCkRgCJ0x5b61X66q3J6XNxOb91RAU7BKhft9Ud+tn +gT7RWFZNQ11nfui7kplDaLEmQac4dIcYw69n3QobaQgGEzJyNqdKHFW9dJYRRyy5 +cKuHQUl4xvY/AA+/bL1HqXJFAobLQ0O/eginE0lG0qpqOuERizIUsV5ZDYLv4nWi +6qpxpIzkTiipNfyU2jNTZnSebSKlDJTFwsXM4RDHcecqFTRYwtnGQJYzaUnrRGPW +zkUy39VyT7BepcpPVbmi3PSW3LeB0FmIJ3dMihgiAHAa9fuAFItX16VnNuXuTvYf +ylHa04LNjl1iaqSo7vjejLeNrbUplSKAuhvlFVi1PolGjglX95qMyh4KiI/UYCE8 +7YngRWqYWEjlUK94UzYrCBI/snlwXUoMLfGXzFbbPnccPR0q6AbSf6bjNGns6EH7 +5349eOHjB9kSQsNU2viDxX6TIGi4T6X3Yu9NeE4mbIaU/nC2NjkYMfKDzer2G/aK +xQhC5P1k9LVKvtj4VTQqe+/sfCMvy5hJGvmKJfmT6MzQ2wRtqO+A4xDguNcPc/ti +uduNCcxhs0d4UmbchoSmE03C+oQ+Ql1Hj8OAOMoPeCisBuVhaFNQ7g68t6xVuS0d +YLlqtSVbE12W5mzZ3CRslKDOLzRWFW6vp7AY+eO1vXPg19B0BVLwuSG8t45AIzNd +GkkYiNABfZc7oj4OLi4ONxPzrPvNGuN8tytFtXbL9mAa4v0DagUu2O+4fdnGsShY +FrriLGry1GCd0wWECuZ4TeFB6+qYJs41Ksqe2aK5w4njZABKv4IrqMX3qdzGpzu9 +xS+ob7gzunScYlkV4epfAJIZXZYtf1qiHkK1V3ButA3rlQT0vK5c5UNLJRr+0ebc +DJtlHUaCixQ582mm4rjbu7yOeYek+Cu5Y/MVbHAcGJ6QC6wm5FQSE05pcD54JdZa +9LF6raw+APkNanBW8hJNSFf/ZN17Lf9bkACeq8TKlF6feH6mHOKokxCbQMTeJVLU +/4Z2eRhUajN0mXePITbxiDAvCWImsx0qD8BIX5CStGI0LGK1eAEbQWs3PKrctXm2 +l3RxNMhskAa7KX4NZr7tsd45oa5znMwCKxsLCqLEY8G13fAt6PfFeMWTC2AkrbPo +tRVVFcy7/VONbBl9+OGZjD2ZeOYKy8rqNLxKjwpxjzi0cQWx20NUHPjc+E6m5eiz +pSJoxvydLCyNwgoL5RkjZWHURfIaurb8dQx/08nQeiEtHC8RRxNcrE2nF8u3redw +14LTQNZkx1XLgWxFt/KoCjd/GEDxN9Z9sB9HNYIKxq4RA/bx398SPRYMe7NBJw4j +vshpelYHXb2Mq8jQQBehGEV0cTtr8yHekP2og4EevSXN8bcGZ6+kxADjaixjvg1W +uQL0omvE2rBGxkC5zADmAn4QAbbVhwu3xHU7/1fKFTTjCEumFzY7rvaWz2/Unvb+ +xd+FpwNH+rAzyXB93hJ5ZjmQkkzdOm0YbD/xS0wrPBeaziG4JXAIORSGGUL4QwRx +O0ae7N+cPzQbZAGty7YMo7twPY5IzeE0cF+7MDPirEI6oLfQAyqA84jLoFasQ0nE +aOKE63P92nQY5dxmsWP1YYTiGdRW/vUyC+6lhoufu/KB5gXO+n/HV0Iot6p7dX0X +FfCjBoT8Gco1zFgoa7OuLW5Z0ZaNLBTeYp2j519T0CTKFlXBN2Fd1Cj9hufELgXJ +mMa3Ykey0VO1N/Yv2CeVUR9KlKBT49Ax2EcJmNizIpdcdMJ0oXRSoBjHOPxHwMEG +BCyCh/BhSVOjp4mqkvhVmXHLJu0OV+QeF2A7pKQx3eVCQx1eEkO7mB0JUHFQkxUy +I10dWM77g/MBvOqFNvk8EI3ifeC11l+BXfq7FrX/Ne/MupgJPTllQBEVMGv0+LUE +UXshO4iCaR4UqGz4IN4TLSmVWo+FGGFvfFTd9CocwAf36OGf4p0lMYmceXCL0Ojm +Zr3JMa4XEblDwFQcfjyXxuFkzqGaRjCNn8hXvgabyyyywCBTKL5PLuGYNFsbjE8r +sCdQ3ggh8hdcJAHWMUKvj5PoPOlrHg0ZJLaYEXunpl/VpmJH0gGwP87KG/3MbDX9 +pijHjT0ba58uE2mFDkAz8ZDykdWKwpfoO3wVhZkvsHffxGkTM7hcRdPU9H+aTkdu +wgs3oVAI1MvsvyhWycNXG/Hl+KwgwhJDRbH58kKzjJApfkR72nPmHPeZJd0Ovt7L +FxgOGB5K4MewoJCMSz9uXALStvv182kyj2izsTCH9EcQjJMhYHcXSRra6x6HNWtS +KSqajU19bnWJb1kduxLY0HycVXhema9nvUVt2exmyL0q+0loDqh7MCZqtI0oMD11 +jlcYl/Krr2dSzun5rlNh1Q4ufCvFttwUiQvPakTqYvrGK6pU20LHTFm+AxrDZI2M +SIYT2vGLj1hZEU5pWpw+hiFFDsgWQ0ui/Gu9tNzzwmprtBsw//qmBTmVnXWCa1Sn +60bj9/8zGVTHN3iBFr190W5PsNh867kgX8D+sspSb//JCSCm9H87GsC94zjyGL2G +jlvM+Flmwm8DhWfa1tH8KEoKz9c1YPj0N07NNRy/XlyHP3t3srrdpiizTj3HCmxp +0mxyhaa8zsoMmWN+FabVGHeyv5j3faGPgmSpqc6q7hSl0CivAmUyPm51kfyjsAxZ +oDuP0ijIZtTUTQcW6V2yaMthc9lQbpwX/DfjK0VAwCeG2sQs8fPtUfFgbPa9NQAJ +lCB0r6s7B+ZEtlY7bkg4iOav82/RoXlPLDAKVeLRPYR2/v1hvLlFn5BEuJgnxBGB +b9yrIMb/qu4a45l3gXRfaPWSZqQo9/FKpD5DBWTVUsoayvMelA4KwYltFwinsD3b +SO0towg3JVQLMLCs3xKqVAj9A9Dfnvlia2draldJggb9gAQ+YA+2kxCYl9MeJWwL +sxSXdwO+3zfKnwaoGt1MadXUIldPS+ocWNoQq7Yk2aHZufxawpsF/5TfXAPb5J+r +5pgxLx83gqIFbrOlrJjrh7BRKEbEYaVUO6S0HdGAbkCQXGOi7XAP+EWze0Jht5As +sopeONoBxE5wk6pj/glcr46q0SJ72uqSsf/+nEvD2QmT6o+/fJKqrwSYo5X3K3pS +OCgeunKkqKvWWpXSH5pcy3cVVVjui5dgyBfYsDJtGvLnsk+gCudmkpJGuBiAr0Mp +amowujtzaYjYqsqjdAUj3L3ib55EUqHhCMo6JnykBJyiwXT4u5GSVVDWw7sb5cV6 +B9xUFXsrnT52+WdHaujJYNlFo+eV1dm0EGhWh65tVgwfKHl40nFjYY77Mk2/aTBi +RqAssGcn+ODSuDZuO4Le0HcH5VcdaLQt9Y6uV02fPs2D9Kjj5SszkCnjUAqYanio +L1naFHEer40zQ5hCyKio+Z1AdIWEYRxYHQKHy8ED2zTAjWjk2/eldUvCGC688fF8 +n0GmwJHZ34a5buSZ9Z0rYQnqdTxIsKCWvlmaonvC5QpiJveIhH/WKNQ2Hhjyt7I1 +VaQjnwKOC7qFjJyW+kRNF/gTjw+AiojWeaYsb2AVmKJJtepd1XvznRSdeuV0VAvF +oN365AHY2NYjosMBZIUbom1tma6HLb945PC1WE5SN2VJcp6kMBoRCMkparz8g0aP +Rgm64ecXkTnf4QuMIWsTB1PEeS1ZymfngxUPgDj7ltEpuJ1lU/kTxI6o78w+0JqC +ww/UnY+3c2ZjpFCKdIOB4b/SvpwVrO8vYh8i/75DS5J6Ouva/ea8HGx/I2dDDJbo +DqHDtA9ggJIBz3Z/T15ySBFVeosDWELVNwfF5hSI2J78b9j+4xDy3htkGtFBKhFg +mdJIz9x+N+1UdTjodc1o84fNi3BLGYgUQvK95UrEMeU+rDuDhoWH1kWQvjXqfcFO +DVNxTmtnKVG8/l/LDyarDLGmW/mBmf0pUYfC2+C3qX/5fxH2CLMhSGbrNsrfyugf +MhCwthOI8NX473fHvIc24WqdxK2yYl5NYfR0TGablw42JdUsnmf/30lOm9jIZfqU +EzA4kVni0RkfTzrttGnhcpbpud+a0JMeLT0eLi6hlL6CEO9c6xpvjp+nDj5kE9tU +Yc/Qyw5CvLhsuFWc3uRd2XBF3S0XYsPcQCRi+jyp7S444vr5aOOFwQ89QkT9Wxzw +AB9qH+oZ695AXhLnQV93v46LwxopmYJ2krF/YHqst9lT9DepvOa+Oh9CVpMmOwAv +u4XYrPSOnCq4pCOrd2ZfBofdpYl9jvVgEjXB+53TraThfNXTBjphv4Z9o9+hBVnH +CmBL43t0e6FKYZV2BbZUR0uBpTE2ri/Tw17ZXJnY6s0seDXKXPImiQiSoo1fQFEB +8VQMDSXFcMg9r0Ru8unF9C79gITfa7l17cIx1G2bYWCPEOggn5srcU8xB2tD6ywp +Cfcx1ztwbxMrpCaXbg4yTyd1rzha/LkcuoSU2Y/FbfjKgqafWTHsHAYyeazJTKC6 +JXl8mXT6jAO7+AXQ68dhyUjWs4pKMZ/rSUV+c3FHfDOzgDUHy3K9rDQUfILVv1n9 +HDNYoTgAjFA8OIF4OQIReHqudgGltj0M+V3EtU9yeaWbfiCRTcma08sAhCvTU7G+ +yR11hcgkOpgj0OupYQ34iRWabvDSXcqG1pzB0kz+MWJOSG69t8k1RjBdneTW9i6H +TkF7tMRil2Lftyx7ZckscC53ICCbrrNJkBzZw7SGnoDJNwKfXgnJ8l1gCJBNNncs +SWI6ke27Jr8EDk99vXBsZ1Vql9TWeFAz7PqlmUoWwcWQVtlUZO0kCCGXBFsz0oeY +m8kWpHNnh6GXG8+Q5vOwV+pLPbdaB+/qufiFLKdCDsUmErb9bz8bhVb0foStESFq +HpaH1B0y6fHbOwrTPt//4uaEIKQBvcKnl1EYHscPWHwZ5LB4QDIfaiCSP3GVAlcO +WPNMdCuucmcYv2vxKmgrGzrUnjYFc9pYuShCmZCjkb0VfDROjx1l/j4dDOsCGPpv +tUJx0HsFpIK+2dl3DCN0JNXBJ6PPreHqqXnq2kYkwYWZjPYnCnUKAq9A4eTuxmvz +bQWGLASSdXrNjNbTAQi8dQvPLDrSK+Ao3c3Ji7UT4sBH3CfawZhgM0HzNum0T3NB +5OHZ3YBvyYY8PNCrDipquhiiH2T21X26FKTGvh9lSBFvF2QOSgYHJ+uEo9X69BQ4 +jpqady1CIycSR6nd+ux2RJZVoQ7m/r8jQ+gfawd08zZdTcI1GMpoh2/gRwIJ6815 +GEduBBlu1gYLkM5XGmGvgCLnP0iNiFQ+UC9E3gnPpMx/NWV2eiVnI6M3DaRsNEt9 +rIhR3ll719lj+IdXybaVInxv6KwT7VWTVYmXf/DOoGplsC3sUQcQ9mxczGIEFKXj +eompVnbLjjLrB4XRRMQ/N+fCrdp+yplBE35RUUEQuYXPelAniO7zNRF6h988PK1L +QNJJxSzCveKxnYkUZbAAUJk0wXliCW38JlGvRsT/3LBZkd8BN8Cd2Hp457Gg1Xsp +YNBV6FLiSMJtxaJSojYlYkV+KvjwaAGMw80WQ7klAyA77DmBRlyDLorug64QVWDC +ooyJjkRcdZYe1Y+oUY1yREYWVA94F7zeoPCgWJlF1oZ5W6Ampc6gBx77yTZPz+WU +BX8GFYOP0347Dh2+OLEIALq2itQsw1/Wgb+tUa30xcUMjWdvadT2YYVGkTm5ffwS +bpYMTzhed+Slh4pJd+I86HBSWIqzi8qpN9G/G/X1iMA5ZLAFoK666lmInaFvsi1x +Bd0hoa0VXLsXKITJRXoKgom5E0A/0UXslreA4EdqAoS7ce4dcF0GbbmNC/abY4AZ +cMWde9XqUhW4qnl+A1CAUX11lwXiJjY/wfmosIneHGGIcKOYqdjVQuF/sZgsirM4 +HneU5zGVrxW9nlvceUJjEwJFvXVsIUBds9LVY/GjD5eBrd+waGO5KziAn5oAGbjv +cRVo/bqS72JSpx1vdVGDghoFJpLhfy62hInWE0ST1Ggmv4EYaxplp+KkT092F+BK +d2q4KMJ+dgIZ/zBCtB4OsXbpVLZLUO1cB4N6J4w+8gbKLG5RhUNIRycXJF3p0Mf0 +FjVmI1lCDi4m0I2+BfkUkrFgHoDo/DUsgSffzlvspq+keYLuzlUeZGbfU6QQuKFU +8yC7HdDEc1Y/TylbtqdCWslZisHGxsDaOWE/qubLA/Fb01V3OWk5f7ROfIenyfgF +HWA5wcRtUhhPyu1gENQDI1tXAN0quOgbrO0gjApPL42CR5VSUXmBmsNACtm8qkeV +Whvjem4ei8bbW+tUkNGidJMkArBBgISudFFLaIOymX+RV5JVQXSN+1O332VGT9RM +YPfRziBSfMgO+7OXEWN3t6p93KOZtZxoX098PYLFxNwSKAXMHU8Msscz1AkJbRTC +uZw9lAbWQd2CdJW8Us8gegujVftF2HCyj5XYeWzZvQUDklb0GhURzh/Thx9HbuWs +p14rat/v3NoHaE2WOhJkj4BuU3H4JizEMq1wMFksNKbtsQa9ms0UKH8jmMnZod0M +xTIOoelcOObeEoEztCX8lHVXYIkmaVPqOU8EPgfLk/O5HZKtDHuieR2K/2PDcLyK +uMM7hoPTZI8LTXw09bhML9dgJJY0xKycJbWcBsI29VDvwaEk7pztk1RcVFU4m3nz +SWnhinsCRkLZyy3FXWtNRaLUAJWHfCZdjrTM1OuJio1Jk1jkm/aGAESOt9F7++Qv +316WOZZ1cdLq5HqNlnmvZswM5xyM3tDt0Wjnjx93G4m8aDtG1f8+5+Q8tzeegmAz +9ksO7mKenJtX+9vCIkyit/6SWa+EZmwNVfpY/4n/xexsN74bSuZIwDEDYfTcCqbd +1iTiAMX1Kt02XhrLCW+MoZWWy2FXyTrR6JEp4pwvwzeDBNZpZrvM0Lti0M+cpVeF +7jK1EAUXskVL+wGTsc19O9Yg0VVK+o1h7GVRpogfHcPHPa+3558U7yb1S87p3Huy +0g8FX0O7AlUN8AQ4slZm+eXPOUmW7DAZ09RZwrUKih6tozpI+i8cImPo1WTQxmPX +WbLu1SbX4cX7FRLWci2puYaBsZifaasY0J6K6rkqzbMc1onB81QLH8T32VrdEdrT +a0lQXPbGMI3MLIAMxAyLHmkP/el60ZAQyqOHK89D3+fBdJL49cuPq66qdRmYkdqK +wJBqtJBjFMAh+WNMMzezV+fCI3+fSpZgsBlLqdUq/COz/8PdHpFFimkcjM+V5nE4 +BR/t3eioEfU1XjUZua2xrVKw3B4q7UpnKvbFQVKnzKOT9mEta01I0HjhLAApuAPP +G8ytLhf/SEwm0hrxP1XFN2+e2WXSOvXmnvVAjIaOxpw88yZAbKRhq4Vhdx179G01 +aXec3xWjrVzVqwKGbqCsyOukqmj/K2zu8R/eDeRDauCv/6J/JzNoFlBOp953va9B +PoAADmGLkUQJlWWtj4KKJTZxQIMhnsFs2FuZCY+7WGuZAVWxkIc0P3+SBlwIOKBI +Ob6KxQAI6K2NgDoLnZhJ5DsD5rDM15u7C3uWm7igjUVf1IOkCT5m9GZY3ZLpbLbB +3YMkOvCAR56FntmL8BtnRUUJ9cDZSvtaJhjq9UX5KCf5S3zN0+GhzLRRRuaTWT0Y +zF8DX8VFU8RybIssW2gM5DWz+k8UyhFmfpXObwxjNEm0ssoj3IHG4J1j2ssnPiso +WFO2W/U/dC4oc85mo4mvEcbAV2QxlYsSLpIdYzRfiQQGt7BmFOaE0saIqIxSD3lJ +6FHJsK2PA33uxRRzwgP13IAziOLdE6jAf2RGzjg7SrGc/kId90Kfn3C+oDAqOAkB +SikAA6SudIwEgwQiIA6XEgFEZNf6yFj7MbABgpD6pzJZiRx0b2AL1UzJQMnV6qPU +Y2WduJZIKmnwMYkEhVEVjUVQPbvdfoepHVoW1U+MrrgxyCFmQpT+GBDOJ5S4rzQ9 +kL6h9NJBWs+IRFbQDhWj3fXwhgBDxJggmL2tHerhEl2i+MVpEmgsLkj0ODGwxz43 +uBI9mrGbSf38rKIXQgInaqe3qQRV+gydpoatuOH6JOREURHUJSv7vt+glm7vUTOX +55JxxUorIEqDPD/AQeUxFNp68V6eoY3yirVMwOVG8VGW01t5KpFZulnlNzKKjGJY +v7Mj+Du3gheEpM8/cdEGML0SB7yhFiQfEXWWZNuyDaqLJTVqgX0bm3LB+Ir0rGxG +YTwtiOAZRCrBdXWTllTEf0XOiWLEIwbXSOicLvKRXNVE1NBLSjTVZBjN6IaXyIKb +vYB+NaAJEPGs4BqzpVrQFtQpoYho21k2s0WuJGtHo9leofy1LMwtkDjbtifIWXE3 +devuZ1CtpXKfit1lrX3g8dy0lxKJHJyBhMA9yS6aN++kecb8FdgXQIaWJsxP3ZsN +VXs7fc6w3nLIkYozGHpCB6GELolyQMfDDSt/yDSTcD2oRwtiR9MfjX902MrgVIdk +lVdcm7VvPqX5/Mvmouh/KEu9oAbZDxNbkCOJs487qJQ6p8ZzXx0vh7Co8eu3XAy4 +gFf2L4DI7O0q3iy1ObWZB+3vVvDzUZ8Jx8H0pRRHKSf+xito9XMuN2DpDrCzugEn +9x58sVbwnfdp+m2KjA24iLYLOQ7usb37jaju22Fr/nunb3wEFfBmsJ/cJzk0b6nD +Byilc46rD435al0fUAPeOZ5RqNzAefJth6jiFiPe09hi1bwUJrMNI6yekvsMQI2C ++VtW4n96Q1DVMCX91A9IYuXYpGCOBp+FACXMO8nEmCnGcm3z0HPJ7hIFV04/dBnP +RvCqNZGxjyT2o60Hq/rx0Vjdd9YRXEuT3ETlqIGgGag0wx7xWxPsCWEPxc0POZEw +I1Jq31EBXjzqRMsNoqxYkiu5yyLiFFrJ07l43qWDnHVEQiXSXlF1carexMrKvrr5 +HkCve9glK5Dbyd4RljTxinMIkXhC/IiU4g3SKOqDjYP/E0GOCqRx67S8bZ/iZ+Pr +yRaaPTVrcb72Wf8PxWDNIxl9DvM55vU0mL+/GJKu4+xZga8Gyr381fQ6nrqoRA4m +Ke9KE6bK+2N6RclMenfPHWeunGzHTVqYK8G+nXxPqEjDuKFMZmLdH8VSvb5ZrWnw ++Jxx726BOegl0F4E/9/qY0rqZQm2cMaBdrkkaIWjVe5BdZJfKyEk3thDkt8Dpzz7 +mWX/j+8KtIy61yAhevg7EMQ5LKqpUgbIqumtiyWYqkzm2gGiSM932velx1etu1C9 +6w0LzBibNuwNljpK8Jw1GWHTlAGm3hk32Zvpn4/wYEj8CERiNWV61aqZWZvUk8e+ +/oXOoL6c07EokMaBuCWuxo+/tMTEwP2P7Pj95SAN68lvu5kxkngVI6Pc8nlLT0Ld +X/dGtqpLzRvnd+yKP+XZmjNcKqzIMEN223h9HP3obrTSKGeHrsnNJCk4jwmfL0oE +LMK8Y16UN6wZmA5be6yA+aDmYV9RC5VxkgmpqJOz4mgxWwMCbZ3aKMf6+XRZgxS0 +IDmhNL930KuaNbdH1QSmc7qNNWMUz5Gvg2FKnsdBqw7RGxdtzBonQS26kxEmzNfJ +ghrEhC/plvsU/94zygto8whgSKoiQM3IS7U1FN5PPUjOsYqlpQxKmRtmgTajL+2s +MmhB0g/glCqxeK7g4OZcevyiUZ6JijCP48KRvQOnF12TE2aR1k8yeJyqk5nYixpu +9ff+cagN6bungJXymZK7kzccAF/hE/haV/dekliZjS/BBMSVdjapjGbQoQnn9LyI +30kXV5FCuiB4xwr5JG/ZAg+NaE0Te5zXbvzeKSsw4VAvuW8GIzmKGCtIyfGMuzCZ +MTxB7a99BsD2dJ5wbSS6j8JyYf24Jr7GzFoFadSlVvQIW/UR08/iekanboMu5lk+ +o/b+Pad8JCbXzHnItMbq/m8HI0ev9ZKE6EuZ/0Xne6tf6VtwZdpVdExTkFkVx41F +nWPCg6axY7ukmcPihvtZ8sDXAfL/1saVMHfaR0gfckFqqClLNruDn4b5hTRgfwgI +CT5phFswKva9uEHXJTWrXbjeujcFbgRTW7/qQkdmkODZr4E+UzJVxupax7CCurvP +4KLBUdOJeFWXtnKSlEhpEnK1zwTiS44yex7teLtOP04Yn2PlkeGw0F9MpovRjd16 +di4odTWjE2ht4QhjKyuOWQ0Kn9qRNQIQG6xAMLuSpuD6HilZ3k5ReoX1s3p0uPCT +6rc+yQNNKVLIfgwbeJuGBM3M45GJgdqOCOEQCAAyW67cWK9F7hPDzhbJxYshAuU2 +aFW3ZiWDV1qgc76k2RcQnjN1N/2hax8ZUOJ7IIQCHp2goyGYRWuaQcSg+wMvJdmP +r6+2gsB/0thTP3zqDBVXlx2xQoPxUAliUwZVH4Qv9VPHe98XTRhvuZ9lRflEn6C6 +iDG/Y4jTI7df5IM4hJkp60+S9S4HhVhIZk/vxdN8HHZ0/Skoz3/yMLMM+8UHQQ7V +uCOfNlnF0HMm1nL0sXBnmlgcPpYZ6gKV88pvS02J1nAYAC6KJuhSJudQPcQvUgvh +PmFiFnx0CuaK1EUhOhpuESAk99Yxeik+iPJlEdKv7AHMFGsF/vmmpd5C8uVZuitz +LVpXpo0Fr6LiLfYjV/Hmv8EEsBORBFIEpHw4DR0lAuqspbsTR42w1mYx61G9RKC7 +ANQPA9MB2C/4ZP0qmQBHCf5mD/9LHhicrDatmUaIu/a5/aMOjT4kkZE4NFPKqJ+9 +KWFYyRGFlDOtE4PCA1ydtCaIP4SAKdnZIW59f9MPyXS2XxEv+DkXBrz3P6HNuSEl +ZY7Mrn84t4sovlKgWNlWoKr+B4iQ/aZWkICuK6tLaBE0A0n2SNtL0drb3tIlzNlu +t9oi1lp+U6aha0gbi3HjHArCJGa5SBxIGIjRd2RiKh1vRMJcX2UgUdYLa5w7GSa+ +3nvYjZwb4bb0NeGV4xDtmh5u4FpX7d746+9B+ZIJdgxGOo4MESGUjfnHP5UQpcOo +id5Kv3VR8d1w5tOJQ+6S0z88eMMkJbekhaR0ITqxiD2LUSpMoJS6UjVppz7y24U6 +u0krSJgIpVjQdB65lJ9xxkrW/w11m4w1ngK5nETSSKXiw8jlrLHbm9N6D2+zVx7d +VGIwl+npyIZi3gXklpp/NMIZJjDBkWRxyRo4kSbvc8HHq8m1zHdjUGs2Ngt71De4 +oAyN8Aibx5MAFRpHQNLZTJDLRB4Ro0QiMXEFniXxO9Fl26KjZHyigzoyH8hfCQms +LRCFVarkPAhK7aGyvoqOnExIIqwROa82DGdWzyUbenwRVJkmFFJSe3NYO7KboGNr +iXJs7EgnIp00nWz8Pl0jOBGdFP8e9Pjt2Cjxphg2WbWLxwC11eea1nimiqTIDUA1 +35lzIFI+qvdQ7R0cGPJDflLeDuXBNuqmKLpr9RFXeFpYDkGwB+RIB7tOUzDskKqb +iCCJdagossRAHsfpOFU5kcmPXgi2R3IxkKT7D+nE1wVHbafuIOiHuF9MhihPPf/w +E0jSzpzsCh70Z76sZeOXwKMIYeqB+Zr56dawA+widA14x+CQ/SHfiWMtlJy7yUbq +du9uezQwO6Gg8H880OLAqYbLjSDRyL5YyWM1QmQ9btprxBWkmZK2Vyo+kot7+d8B +T3jeyeFLkYeZ/W0cwmf3WEf56NFyEKLG2p4Zw+uqLL1K9LcJycSXU/hDB02shh+c +w4H1rJwZL+CYka0ojJIWimvlHsT+EnUE+y6nwEAgXDcrH3cxHQzIBTBk4zKcMK8F +hZtRLI1l0M69dJGxmjSRR0N4n4lTGsHiESIXM+AjjmPOca/UP2OUx6XBE3Bq+xBu +UUpqax0ea1lzzoht6FNdbETzWtPaMd9FeSVwipuLuZJHPb72KBVThraCMLyZ3zGq +piakpfy1qRzvpDJJj1OOzp4p2FlIRefl3oisxFCYymunHWndTQADi+1cbMYo8d2H +CvsRGsPNl+CNRM3Bv5ZakhJb0edS6g7VeLe+Ow/dMsAYs5MC3+6WcRDcaDMo7rQr +yJkSzpqByFm36l++RlDtOfbdbGHY4L0uShmEZ27awpeB8uufgqIPqXEaLQVAJYQN +or9aIEA1d50m/MXlCLsdUnpDGCoVhUTGPDf4VAAlkdkaWq6pslIpzQVyDyA/qvT7 +oo5MHzK/a7I7pprryPjF/CcUUR+BivIEn6viUzhKfBUjbY02AZxFp6hJaXrJ7Omh +J2Clxbqa5U0ZduBV7XeJTeacWAZ+GuA2su+9fVXG+vo1WDuIbOcLTj7cFlfwh8Ed +HC6RoJ6TbRlH2achWLWOny9xGXsYzticMLR9EW4lAatXiggxmAA3o+LFmnOR3vR7 +U4MoGYmF5JazD9EQwU/rqOXaDawYY/uJORQ4b0RjVysYUQTGTVePG90mz++KrC99 +L5zARP+hJX3EwWO5Uam9cAFk4TQWnF3+2c8IIdmE4bx7v8PR+iD17KPvizzre8W/ +NbTbPm44fQtRjWIaes5wAthCpvN5XO8rEzWk+9/O67nt0dne2LtzwOboJBTakGHk +2Hq49Wbd673gj5F34RhiwfujFAShyYhAzJyyiwStnOR+sa4yyP7hPDg+2KQAH6rX +ShXsHFz1v9Ng3SgSi5sS+pE/KqzFNn7M4Pd9UnNxByR+uIGHbnd+AfIgOlhKBBoc +DSodFPx8nz+VGLNduY/dnR8G21xbuULrq9aIPbsGis1PoNJcA3cg3AMrIXhyxi3T +RS+azmw0dltctVEMDpg7pnkMCNS64y15evpjJgdjTj5QChU/Liwbix1iY6phnMFe +mf4b6TbKajmPoKZnC9eZc1Ik+wa2lLx8wlhfzjNMNhqHJpM3pQH/EG4znLKSXhOG +e55sdzwPxLSutKCj6SL66578mgotLieN717LmvPZuYJmSmENECfqgv9uktaxyMma +uLvpmQrALvM2hdt1jh7vxDHBpJzJXRIPkWOchE9DttfiG0n7tD6xAGWM6eCQ906w +cHFlprLy/xNtkepQFByTHrmDUCvBMcDy8Rbu9G5/NqtXbalXmCHtEEPKlPx1Vs0a +Aw8dmY1HNLfkOS/Vwkzrf2WQAtwvbkMxa/Ja3AF5sS83wYOjQYL04YYz9z1d+GRN +Kmydy0cuLuoa3+u2sjh/WxORG2ZD46wX1QTLJCom61aKB8gbvYFAKT63O82G0Yx9 +5Gm0SQb8Q3DLRpuvmah4UeLZNXy3LuNW2KTuKm9pO/a+ogHglW1q0jWW9id3X0vQ +f2uFkyD0LD2Y+st0YPaXFaBVSshVJAdpHVVAnXioEKM4hc9RAJ+8I0HxXP4Bsdit +gh3g7QaE9oRhluG0sA2ryQj5Mct0639KrrbEmic9uzVSif+vEkLTcjwKrrN6wH1I +oUbRCy06vj65u92OiU33OJxFpQNnFKAT9CYi1P/AGX8urnItEPH6PDOpqkPbVOj8 +sGFXyBk2gbVc4hGiRnh708ABSp6djx6WvHJiuc0n5cPmcRObCswx4IK6H0aVN/lc +fc/Nn/DELgii5KZ5JtLjIY7+kMZEq0Ga30l5W+RrNHhw+wrRYafeabdHSjL/3DXt +wEN51ZgFRoSw2OlLVz1iXcWPKLc0tqs5ln5ucYTj1hmff1tqGF691Mbg2DgDtKqr +tgWd3TgP9eF6HUL2Ly0BNMOhrqmVffFwUcZb5aU0jlafUCec6wQmQXsMj28gRAv0 +1N1zllXI+/InxaS6p2ZqGS7HhpWguJ8maXZpNjdwOWe+4tKT7bHsNgi90Fz8IJJY +Pv5ELHGgiHdrh0NOwKCiEa7ycQ2gSuvrFB2zPymznDOW38h+kEz7Lkt4N1NMWIBR ++Lf2GMxS2Wqr2Swoc+TtyNgDNgNQvOT/lQg6/f21+8BxU3P2duyKQ5IPsG5hkaDr +T6F3+baDjXVgfiAc6Z3pJyKG3hNzO97izTMbaY2dFZlNTDAIH7NG0gINZzEc6VNE +3ccqMDRvXkHVJwUschMhvW/GHuZf0ry9hcOpsHBd+sLkbY5ya1JV6AJ59wX/86P/ +GcXUXXBbJPxnN4wjTSvX94ApfjSJmMfPAkeLNktrpqvf49kSjmSmSzVeGHvO9yjI +k9dT6eu6pioHz+S5D5PI1zG1VhmMAvnOyefMPEFlkr1u9l7c++y9lQ4Jixsek1bC +kQoz6ikEN/tYsZe1GT3eTrZKXgpSX99uRFrdwIcsCRC9IMEfRo8y/Zy/DkGRiYkB +B5mgFgvfmWqjUVRI/vNCIhgabfx7yvMgcCc6U1lxwLcsZZKkj7zaajGukHhIbKy+ +EZAKJwTcLE/jkvuuyvGbGvuvqEI3+P7wXudFgMLc8vI6u6Lg07xW0Yyk3LKSglYn +N8D9gzVoOqglGHBqypgiNOtns+xPsm4p3xin5UFmysb3cIkHYcoDiP4Rj5S+rrfn +7Mq25ljKn93xKtQJzOLSF0dwWN+7NGAKP7yFBfyTXVp6uzJNcjHUfhcarfS/rvmT +pwEEzM7S5+Sv8+veOgudcSvF917baai/3niqqSc6tHEQm1nQNg5Mw9Iy2qIxiPkB +RKwCTKHBd9VG+H8ezqVjV2hEn9qo0u4ll/MT1wAoB4YIdWQou4VSAwrx81rf7QK9 +uppxed5GxyaLjnfOpxan1623gpyk+HFw8ezF2FpVXLU0hb3FBlUjTAbEBiM/yEfk +mJa1rZL7VBA4v5e1uUwO77P6ibaFMkd3wgfJrq93jTP1QEtXBnMpuGrQXSEmEy9Z +rjX7zGPrC32wYCTTmbfAwEetd2XtcrgupfbWm7fRR0ZXKKh7DK1Q4HpYZyd8f8ev +I29GLssQjiyTlj8q2Z4/MbvOq/nlgda0w3rrbHikxUlY63eVuuG2wKuRo1JVF+BI +BDwhlk6awAvK+OyEUyw7UTlDu10IEzaaN3oB6MbDnSD72e6Gd9vv4kja31KLm4hC +JckyLk1ZEsnzlP3GE01PtxMdjt/HXUGRLwEwaLxL550d252G/hzkObnrg+VhnV7O +qz1d5lU/pnDqBernB9Kwbzi1Tj2zXkort2i1q1t6Lf3ZX5M8htM1osAzbOIllMYa +/1LCED8yM2ZwOQYsY1R5fAzn0vk06c+FEvwxbBOywjpJaQSys2b9sxYMUEPi+xmX +xlDPPI3cDCeBh4GrDORPQwzlecd0pEG4+drBJsm2/sQfpz1xI/HekRpmidxCmrpc +9Zo7nNV6taI5Ob35P+Yez1rMw5lHcm0AZJRD3bk95ApLX0g3/n9FzVNWa9x66uGq +vQP9YH3kpvOEOpsKRCxIngaBUvH545XiQW5TAGTS5RnEdEL5jktVI25v635fA+Ae +Jy571OspVpxiJFiV2kux+b2aCrGQFMpGRDHY9Jrf7eNz3NMtzAtkwjUI0VYl1zlV +lDjdScpuYM8l94m9sUzipjzUktVQzqhO9Thjp5UQUDe30bTkH3YZ4A== diff --git a/scripts/mount-shr.sh.encrypt b/scripts/mount-shr.sh.encrypt index bf6f0a02c..c5ed09837 100644 --- a/scripts/mount-shr.sh.encrypt +++ b/scripts/mount-shr.sh.encrypt @@ -1,71 +1,71 @@ -U2FsdGVkX1/HExlufrZrqJxtCMZM2UWp8ls9HQt6jqnVlh1yFSm7EoWl1wJJ+hN/ -ejTev2LerCr27Jz/SLcpCscFle3SqKNcR/eioUtlqVMK9rHpTgnQe70rPw6M8x1M -w3ulxtMjNgV4a7LEspcqf6ZByP0sxjSxE08D69XZNMpnzfZukjbjjB5nBst+pxNK -oH5AYeQl65QcMQHZM5oG7DGvZtz331asBqboV9WnRTR2B0ExGKPqyGDm/P1WvRo5 -nAYsCM05dQYs7NiFutfBD6PjCKkAqTakBQbl3UGxdLAfEnoukZwb7fG+2+VsY47I -1eJJsj+TmllNYCtWxujPVURbuEeexOVWUYUIGcwNwLoMF3YIwZfm3TX5xeMxfLUH -lsR+yC8KI305Z3jN68eZivnmjyZO+0nMLUg9/v1wSC4lsNRO9vM/3zUUqNGNuMKM -mc2Oulnfm5q63FH+bZfv7/wzocRzIceiEffr7/VGSZJKFG+TeXM4gZQHNRA8axzq -EIFdsKVjoBrn3tqZGdimsTyeD4IJZMllzIbLXphcUd8h2+xRPA83weETPugFIwPM -cKpRuowZBCbo2UN3MynHJzwCxeZ+Hh5G6LAG4HCEgLwnPt5/HEomjGEQc99HuHIM -Oi2Y6LsWX5TEqyS2Q53LcoiFsdXQAoYp5CJU6boa8NMHz4bkUUMxSTbPePdJk649 -F5YE3wCuldZA5SdO5jpAeh3dsWV9Qn455IGYl9SZKuL2D9XelR5L8PZLLsbVY2PB -P/aPJUWh/H60ntjnzE/l/jyRraiegHqGXTIUXjGopZqjAtng4hC9awswALm2nd1x -TPHHf6DH9pnEQ0Jn9GREnolTinDIYfgKPXrymoMV0fIQxlNg49x3lxnSffmpdxMX -RnohAWzcP0OGjdAkNi13keiTo2akbowTqjgI/+ziC+za4ZUwdxtf2MO9Xg9vowFl -3KeU78sg9ABxihL/W+19aQiIiMeFV1Rp3xSmYB0AfwJlY/gJnrP47PAo/+tBKTmu -gHR5F3JmBd7teWoSigup7dz4Wwo6TbtWuJXEqux6WRxo0wOHe8eRjCt30BjIlLhh -CzdpKmxDgQkTHy/oDxC+pqNgh2QjOXvSqy58+3ywXEZDPIJL1dTNIFzNQuhWbrO/ -inVdUZ33AJsbUgfaMplJ8w/ti3o8uqVM2kezSJcDk7OkTirSw3J41N9VEcoKrkov -kzjDbC9BdnKsJ4wp3zZYzOu8DAmw1N4Io/C6tT23zUOyRxqf/zF3faVgjXMcbWLS -M4ax/JhSvOsl5mCNLC4SHfJwZHxwcMEgIVdpJhM6UgaA2ITVUz8yjXyCyZVDqS73 -HtPAxeywTJes++jQN+6rmjZ9xcmXXyJxnxmHoRSuWtTgtm0A6WonFGFMUyrWlO+/ -XZDGhMivylUlkXE6m/tJnMHvG1FMzi2q1iy8VCf9DihGWhw4EfNb6K8COwyKTnE9 -xyuA4xy+i9IopAr9YYyX8btFjiZGmfvctpSoNVxRXUP5lhtTbztIRJTuALeILJlY -cGgyE3Uu0DhwZ4Ra/ADuIFCZVLJ9tdeXauwvbnjXJHma5BjAiwMJMVpAfhX8V8ov -hgF7jMo5sUIxklCjRnug29MEQ5tPJ1v4LodnbKCdcN4aQgGptpGTa3u+yT8d2NUc -Xr8O4KGoGEqWQAx8jlnwQ23HOKgfRC/LXxdMfQARerydUe4F36aM7d0mSRpyiyIB -UJauYnFBrxvlxTNAj0ZA6LXcoUp0wyvcVLCL5AG61X499UvYXhHC6dDOoJfcDpww -rmWG8/zpq0O9De7lh/4a/NwjS31kHMSaUT807ajd//t9WkCSe/qblWAfnHCMzlhz -nhEasSo2rwEzHWvrTinBhgeun15nBrMsuekoJsGGL5HDV3b3xKVgMQAfMPBRqqjo -Hv/o1UDu52HVqvbnEphvmiObhlzU7Xr2yV4BdXhBrG1TsYejquIbMsOnbsRCrzPq -evuvw3DmFP7kHHcogFR00kqSh2rdM15MYm9V03EydlmijmxlaYv9D/xXOocG+5Qm -xW4qkgb0Ar9kZCwHyK+c8xxbSoe3jlgnXnEev9cKfEzux2clNTp52yVf4YFy3SKf -jQ2D8HK8YasctfSXOQZXfjLU6Kp0TBJgjVECwyrv5IGjxfhQDL4vH1lzIQWthD02 -ocohE6H0mVWToMinuCvppiHnigxp/rsmsJ9x8yGxACOLHmbsI0JpCgcuZUKb3jG7 -j4j56fXxNv3h+rPQ+AqwIdyq6UfhG6pej/PHAAuA7a4oB2eb6r9jCiqGxpxOpZrr -0o96OgVT8QGeqRfRCr7qRr1B5mM1t/GCD6ApNvi3AjO1k6L91gi+KpZy+ZlhOgIJ -eJ3bACGWkrwJjS5bcGXehuzLfzLq6hkOBzUYTcWDsXT7mfglCGqPf6dylF8MbpUf -jUsm0A5fHaNmN7yTJNgWgTZYQPJolfgkj8UmgM72zsrHKaVsYItgd5kcaVAYl51e -OPkRpJg4TpylvXmdg+Bd66gSO2WeeiQ5Oqy3fb8JAl+R0m+rJAR+t37zOEgIdHf2 -IKCE0EZWm/n978dvqD/UjHHUfgt0bT77lm7jmVLl8VSDXLcMqY2gYWcfD2A/EKt/ -LyYoiufX/XjoRBzfIYeBx+eSVbjsDQj2hkwLglFPrIVLe9TOwXkbQMMjCoalUSpa -+KQfg3AgIsKaa0ri082degNr8pT/k21GIL7xXNuaSnFQd6dAR2i+wUWtAb481e7q -OHx10F7DNgl9V6CJ6h3Ttacg8bderWDDF3Uvm7vI2syD9/tcXjuK1jBh6BiGa0hJ -lKdSAp1I53CWRzaR7wZz/ZVxzg9ynfLj0TjD5gkcnGPWa4YW3TcgS0yV9JnDo64r -cj1oJKdhHVTtFGw07VKkhtj+G83NVsBZ2EDRxYvDr9xXHLeX6kk+B5Sy8WnjGpup -4OFl8sZ7bInSBLaqVRiX92+vfU+f/yNPT7l1z2E/mxYOHZYOTDgyxwo0FkD1a2me -+9f3TKuCtjd0yJ8Wg/Jj4PRzEeutIjsOSQbfRI8VrncFn17qDjafcvrWiwrrRk8g -15HI5jHATDyPDDHFLqKLwAKhL6Uo7SOyP2bSiVYCSxPMCL2R2lzc72dfktTKIkI6 -zVwmVmKpmvPGuC+zxqLsc9ypAWzS3VNpxhprtTigkYeVi5p4/mj7ablylAp2cb2q -jXknMLumo7zmPuirB4yJykUimF8oa5QLCBfKgMxkRR2ID1BFD1GGp6n9hiO+SpIR -stsVt504PECmXFjQs9sXuejHuCPz+wJCkzHCxoiUaXmcuLpmFXTiKcebaBlSyGUn -2l7cVVS6aUX6qd5Tvgp+fP72JS1X5OS1OAimqcP/+OTJcE1wlUDpRrUd+oHwU7HY -oWT6Xg9u3Q1wCPd0v3ex78Z8RahhYLUYO2S1m7w6IQ1mq2I82CsTBGGO+SiMVDdz -61zl9atrfNTH47WNP9/8Wg9FzU8OjRkAkKsv8elnVdmBIUsr2FxFLFM3LuuphGFj -MTytcyg6Ff3P/vpObB4vSl+yFI8CPnPI1EKqSakUOOkASnSqHdXwGRrL82SECX0B -uddS532hNR2VRprkg4K4IAcmzA24NP+AjfOnk81sVEGW3+2Qd908l6XP4ystJ5Rs -OEwHo9/kdGlYVLiNvnkScjlDolXuTycCQFp4jHgXTphuGyi9GOhAlCN1Wvc5drbM -QnBSMXpfOyhGLuvv2DP4JabpwYi+6Ub24kS+H0tUrDW/n1EOSoGTW0keJ6fEcTwS -vHtsmUpymRchj9b5DT+37/BMzny/7zhqP+U44A9AGv0HdKxo05u5pkI/QQXSDTDg -S5bjOf2Crl+ITHml+SYDSqGQr9S/ii53mn3/9IjRRIHZJz0PC8vGL6fxDCkX26NY -BHQ9BUeObkCLiKaFrK7ppBtuM7OGNvBNpFF9/yDb0yIsq4vXLDuXY5tHOlVjqhM6 -Gzw2UKqvlqLgZz7bCH3SR18cXdV3GLR8Zi24wZhMaIaQb3z2TdlkaZQb+5G7MSzz -zQ7/eFkAmn3kUcxpz/cB9AG6/0yLitegh8YJBQenLAZMiDtxNs5mOk8NIfAQ2CeD -E4zYQJM31i2h2ELF+pVjcFP0d7RXQZX9z+ni8ID3fkqgBOezjIUbdqipf9HpnRRZ -/7k11CVTlpAa4OZS25fTLFbOOUWF/fLGTVN8ltAmZpYis2a8f2sUz+P91KXP4X0Y -/1S9w9EeDCjTB2CCAm+vk1BmPTMppg9KWYtsOvfnj8n6Z8U9zPeRpPcrhiLpuxIp -P8M2/MX1L9qxXtG9r7BROhBv6vf+LFoXOiujoSRszDT7RoqIw7+trYsVYdV6oH1g -PZJ7hKF37udRyd8dKTiv14JIzYupt8xfleiodcXKy17nm8DHd3qajM67JBXTNGwE -vG423n1n9g8Ml3sS4XBPoDew+xSZqqcXkvWnd+FrjAbU57YsoAjpVbrI05DfQrWz -FONOkPc6DdilSd6zIF+taMRzcfNlgRDp +U2FsdGVkX1/97joDHr2PfbtM8603L6QRNH7YOTwyx6+WEU5Emp9YwRLWVFdZPxrn +GfDQXpdmqZ0shGjAPjfhA7ZMVz3qmWZHMr2eCMZhFkoBT4PjBYf/GVfW751LYT29 +7EbpJZYIkXE45VWEw+79zetmPr8xxQ0A7Or1prUAqzUcjSkPvO4iUSvPpn7GcubI +3Pcmls6W+Jd3KrT3POaIPFQsFKmp1lBMQGVEk3/4GGP3+aDhcDqjuV5BmqFs55Ue +YhE7Rcc4nGeCPbKTYe0ouU5VR3WVNJX8+UIvzL0mH+01Ctjn3PcRODzklTtqOeHu +VBGHwAH+NRo36iMw9wYZJZkT/ay8UlVySvKrAQ+13lxpKA64Eo/p20rv3ht84QI6 +lc8JgNzJ3RKo9YCK+iz3UPfXqPXen1q3vto36mCdvP0pv/Epz6PtQaGxXO4gEFlL +jmP6IfGurMa7xiluPxjXFhhb/9I0VDTFk2dEsalu3qSsvabRjJtL0bSW0jWFMTAp ++Q7JH6WjzSJQ29qCR/NmiPjFbl3AYIcQjtMJdziVOksZl43IRM1v6XOVfvo4jwOt +C1iWfqIIcLzsW7ccFqDAoGFwDWBdqenL0KFhi0I5Goi/HJb1pJyYylD5PChOv8rL +OnXH4jDvKEeNnYv06rmtR8rEzngDLt6x0wKQx+EXmEfXKiYbl7Dfp2tFLbVdSNcz +j6uXgIFaRqOHzYD8S0J6FpRz4dTMK5hvKxDfhzg3x91alQ0zQr8+kzU6kOWq5FF2 +2WfEhFYKZTGnL+E3wgkeN4an6gd1mp2AGJm/aTDBIZaHe75uUpfg1Uqm1e/GhNNs +wsJlNZDEB5FjAi+11mrGUUuRTYSVLBVmkYvMbT2nG2QiUuC1tAsH/Cv7X9aEXAsT +oYNn4pAlXmHllws12e1RWaOuxkaX8R0rzfG+dA1DVXzzDNZkNBS06Oddrlp8X7Op +Beez9+PpvMzL3X2vuMiTJYLQi2kk/wATh3DHMP5W+9vHxGYcx7O9G1foFiQZsy0y +sDaH53ge6KNFnS9wBACE0R9vEps7oruTCvxAmvFXv8bXtJx+JmDoFcpg8aN8dqvM +B0hZJjtzpdPz0y/f5cXlkhgGXRORwZir/okdg8ffs5WqLFHZO/MWYdWH5/ws+DFs +eMVlxafcmTvgtMZjRNO0Z1s0TMCwDCBy+mv3YbyFLzBcRZFBuStHfdc3QvOQNJer +J09UpweP5N7aQnivgbAstAMBIHR6WfRBmASkDaN/YBKF4PiPWgOokEywiVXAF4Iz +ph2Z23q6b+fTC3tBYRdPqm55cx/IkLMLJmmOcGPCBY4fv7V9QpPuYZvPu75esCC3 +vzPNxKKmn4r6dwwwP/R9xiwAIPS8ZE0rtD1xzi7KECsp2AMGFtg8zu+Ve4xjJUxZ +HHCcydRxlb76rz6UNHMalw1ywtx7afJdzoAwQzef0tmnfAI1w5QIi5bB9fo7VeZg +SVWVemnTamFm0SZcgZmow7k+FiFsZAxwe1JE0FxKblgpdBqWEUYkfosYsmll6OsD +HOj64MsW4+X25ygJdBkBjFsY3xa56cnzAyIkuZ7j0ScuNimQzNpdt3PT/khY6CY4 +vSkIja4TapNAoTwlTwLYBVLzS0pT78VS8JsX0cRFNCsISLatGD42APeCcGDzXB+5 +fo0+nJsjI+hLTMG1gpkGpZugChtoHMmRBMcHsblPh1QPsyD3RqStpzNQQcM42Xw1 ++ZrVxoeePj2boMmu5R2l3Rx4O7bBDPbD4SvPSbxX6s7dXeu7Hlox18LMA6lhMaU2 +8DmtcdUWRVTNsi7KifAh168qH13M9xBRUXW5n3K65W+v1Fp87mqp7sbXFRXRK7Gw +OBTScWf99JRVzJiu77DsxJqilLo72KCSbVuZkYQtDMnagHukPWplomgmGkaQf1QH +bAMAlcFMm8Oy6EDQjsAjtxj4hoFMYilN6svuau4UwImfy2Dw7glRppYJOtsa/lgY +89K5JntejJbBxObvOhatH2Lybi6xOZIBDueYzrefXJxb9l2Ul7U3AUhFlcjCv0uc +fTwJrRzVpzOqkzjWOZpS3KSSu/+5LCN4+75Y7/2A2OicpuD8dAVGeSPBxyiMziVN +aBt00GKdLe6WnyofDTnEmEQ+LLdS9fyqPjBquHTlZyF9BRwU1xM4hd5IrNlmusq5 +57MqDaL9RRMgB8+bx9TTSUQQRAeSdumk/ekUykMFG/aItjFNR8PEth340Cz0x/ZZ +n7xgo6mam6yTnYVcA0lBfhRd6X5OzZEkj+y9zQmhKUBe8NKI+bdxEuy1McdCeRb+ +XRI2SBiW/k4PiTZLDwrP2UmeR58fO4OHguyO0oPfnTmbYunqKNg27TxuN6R/MqBn +YDV/gS1RYO+lWSfll2cQO6cWSBCgJVw3Z+XAlGGRxiK5pMHictO8Tq2O0w8AFWzm +85CT7Xut9fJafdom6h6lK299UKmVJXzqesw5keYUv4yI4wAgdjJsQ1450jMT4CkK +WknpIHbwcloCM20OL9hVhTWtUbUKQGWwnauRabLDPSoh582JjCAAxi4keSeWyah+ +z4TGFi/ft+ZAFxhmspSjILUye7GtWJgXg/GFK0G2YoB01bXcINduXVbLfT6FvWOn +NzXgzEQF9/BfEkBQYKeAbUyOP5izaV7fJ+UPlub1Cl5/v96zstDfKie+OHHKELWA +3WZbxcRQdJKere+ELXCFRxCEqnPXY34ZhwAiPYswKRBPIkBbTrtdRYaiEO4XuDtu +g6hPhgQRvG66usxHnpGxPONtDLN54uQbkUMnLYV1mNQXCtaFekBH4J/qZGI9rpMt +7Q8bE8PLCX6aHs4+v2IqMaEj8LhxvyXu0Pewo9TrGAnL6TBg2rA6mB6UyccR3fh4 +pYaLq+P8oUCKm/ZzI9k6zFHZIsLsBtIFx+KTZMYbpX4TUGa+0Xd9DgEJ7wH83dTS +hdoJzL3dEXwfnGeqVVhEYLUi9NXhEEEMBEyPoH3XLRLXXHZEPdhDSg3kB4md/Q/a +Xf151A6UsETsRdZaJBNK4RsjdcIkheCsE3rAuRiErm8rAASThxoAJ0VWfq96lMar +D/OkvVXgEYVDRc04DyH4W2DQTqvi+37hjxxOoxUVK4sQYR4wkJHYhgvyYCzi6ju9 +1+ZBexKErbnfuOgDpBeUsyzLKrSuCFMz36RpslgBGe7GeKRfUjL9ehaBIhsDntbJ +z7aIzsnUls/yCA2O7Cc8tTvteZy98HW9krgqmD/QR/EKNq09kw5EiKlq/dUpSgsm +Ql4x0N0nFirqnocRKwHWJ6eQokD120/OjB6m0unYp4l4fRll+fjOeZOGkS1NM9nN +OSXe9vM9IGR7ZPrdeIUJjFKQVYzDJNe5FhbHqfaMCzbo30haEfXqqKY7EhiUQ80V +5Jbqu47byjFmJ7mtDoQKvpMXCH8edxaZqZU8sR1zROjLUo56UJa3g/lAmXrle9mD +b5lphZBOuYuzFIWov6DEUoTmUuyvvfMJLMtPBdo4bHsZl/XfQzFyDmpZqyM1OHGU +TgUGfmBDbCXYsXWCE6i/l3eTrn21LXRHwavaeuex0NNrT6S6fQdSbh0kTGiXpgG8 +oOPXO37BZkogXLGgHnPlpBaTtIRbKiq1fkgdNNec3aNhsI4oUCQycrL0tjiynwEd +GRCHsNdMpzLBN10xc9zOmn+Jfth+qw12COSCL+3OtduKOkFOTr9GtPU3KtPww8Xw +/8XZ7vQsyArdluRD9SHDcv2M4EIZPI/LqRSxvGFLzsu55W0/byW72dygv7dRSvd5 +geZXPO0WYDbYdJX3Ix5oCIdjWXDoU3MY6/GFEFQULdRgvGmPN2gl33IkBCcHOJxQ +JVglnmqb4W8v+veUbXgYOVCrWcRSMC7xhzMkxmt92sxg56K+C0JnLBx3848CzFLx +O3WlizRLYwV5sYx/fHvPiNgzq9BpCKozkbuh01wKo7n7Hs8TOBftzxtT97lQqSWJ +NdzZTGw4M3Vx5Qh4n7rLO0rG7XCJ6QcNf3rzY4Jsb40cMC530+4sZc8Cnoa34Q3Z +3wRAcZy5pjqOp67siYT5n1/c0TraiT065pdjwAO1GRQ7lBq1R3pc8rRx56tuHI6S +vUFptTIBTiYTfGUTNNV+plbuHc/Ke3YNWWI6IGRl38CmacOi57YbWKQfodjABgQn +B/y7EIGQmmcLyotiAR+aPenJxcODWGk8pzyiPhOkdnkZahQibxRha9ozv7kbx9H2 +eJPzW42Klv2y1EHjl6goF6ZOb4J83WwejTeBH51PudryI5ALm4t52dhDLE0gjhvG +s9OtZyIu7LaJ33BJHrZGqZOhhugbVqep99kbK9lV9Yapq+f26nzuQB9PENtizZlB +jF1zJedzfWmCtzzD+WyNR0UFaivew9dqopZUtxTEdEmrxyBQjBq/58qS7TF1U0th +Eh6KGOdwDf48IEgfteQKqRodkUjxdk7P2iymYB3HuBnB64DrM2j0GrPw3fW4/eBn +5vA/1aUyAQg5x1eFb9XMbf9umaAyulCB diff --git a/scripts/profile-shr.encrypt b/scripts/profile-shr.encrypt index 8d3ee22b3..195e4f62c 100644 --- a/scripts/profile-shr.encrypt +++ b/scripts/profile-shr.encrypt @@ -1,197 +1,197 @@ -U2FsdGVkX1+gpzxWmnpkysVLsD+byqA7x+5PBcBPWBXAs9WXD/cqZKOGfn6a13+i -hb28oQSKA2kzskuxLcHObk0m8xlaB50LeCULFkqdbNPV++DXAMbflYzTSJNk3oag -VbEyjpjpma/1vHQxE0ImMZFmFXkWJJhBsl+yXlE9TGJuFknYiVAA2yj546OMLceo -bsppfhoEE8QX4WnH/uWUZTRro69ew6jaEXvPiq2HoGtL3IbTbF6OKj8BCWiYz7vh -ymHBtAD9Mn61Y2a/vSYhmOwPL7ckoCSkZRgr92If7hs3i9poCHB2h0AeBnihdzMF -r6jG0L1W/mOTvF7ed2xeGHfs3iocFEzrISXndqo9bmbMjyqlKLSzgEvnT2Zy5OJw -CJ5ehc49bSkjXfanZZv0f38QSjiCE15HvEI0GnjKBHvjU9sP7Lz1ZmFars1ULikU -rOMxgObgS0arXdiWfvBE/Ybdtcrg3JhAy9skZmJ2GL4Qn2PqmtRmJGoIkZTW89Ld -NHNnnLLcyHiBvzC0ANOp3SGLqnJgf2K12mSsRtdoIDwJoS52ylr7ihL5LRCOiniS -ZGbCI92mjkpUFb95w57a6x3y8TlDy+HIu0J2hUuCGU2Lk9xG/xEn742vGvGzBJ2c -f5N1ttL98mfn1w8F6fzxz6V0Ddi5dsYeWwsltq60PVhYbsXxvHyxowgk1ppSo9Rk -KtqP2yU930QkO1Y9e+0ORW2mwuEHRi6/eKByY6vkw7S20n30Jh9VtC8IolXve5T6 -m74SOg1IgFspVcx5z90kq4bgVr55TjwBTI0uCcADaCSVSsU3fvOhutxoBRl3scmZ -/5k70UYPkz8TSoC7IYDF2fXbL+wst7sOHOTRaFGWKREUiF9+j6fb3zE0B0JJKLeo -pngQVqsTrcSL5+le6fG2rPIanZgJjOk63Ty1X/Q1pJuotdIr4+dl94cFBHNEaBNf -3STVzuZGGTcsDMshIHDv+t4D1jjRngqfirgnQyIt7lWP02uT37hHEpt23k5hDx6f -Aumt3sdgWWVuVIZZRIxbjiZ0USaEzF0+goxWBPbGyj+a5IENAnig8x8oNFiZ9x3d -CbRekDkP3lBrirOT+YHFypPa3vEnVsaEls1SWhjxWV3tseM0l6kf7kXfByLLfkiH -ARLdk6o/zg7ECrzYepXez2l37YkFp9bEFRX4yDD2yrE0wz0RchJQau2SL7SaDGau -IBkzYcuSYgvlPvN5Ah96HyYpyBbAY3yHvwEuNcZAF3N7xZBk1EY5ZHjPdzWHz4Je -8Y9F7lr07TzrWuivZEJifQHEEHgjsBC7YMtbqnC/hOeYkU/PpmieqfDE/PLg18pO -7OAh3C8mS2qK6IdR0kmdMz/V+nm3C6D4gZoLpQtwWCheCemVIzdsqpRbqz/nUYd3 -P7x+gy6FUYxav/J6C5ynjr3vhW5d+qbySdOGQSg24R0A546xu+kj/YSYYJPDH+s0 -0pkLpyDOBU1seWuxwzBXVqlhNB+ROZCSiGX+0rdqME3lOVbC2Qv6hg993v/0iNrc -PmCNudPVPU+8AhDFnYbL4io6ivjJsyvc1tU2H/82grN9EDHVuQR8Xfo/k/csxWbj -WhDqdiktfdjLO1p2inUwUeFk4OhpsVP9ahl+9LnhFA0i8Egn17Z7cNHFbRInZeFj -9xMQMnXYfXHx5ZxjFlRZBOdpByxDUHCBDrf67g/14TlO8RAednIrtLandH1s1DvR -ZamBL915dPLsBHQk7IM/LoSw1H0Xbt9O5MazgZzLiL5JA6rOtHvGauM2vztsjDdx -iritlLdELXgVN9+ivJQy8h+LfuFduH4ALGG8BTVx+5eKRewsX85kv2Rjuz61oaD6 -K6O5nW0UnZUOOaayjyBQE4SNsU+WLfFAVZLbRqjEE7txBCvPVDYfWE75WhDmnKGy -HsrV7be5C8uzyHf2aK5W3gNHVgzzKMwNDstGSKN91LlN2kw7Vp9IG9J77p8JK55+ -y7n7ltooIECIqzb7sfSpB3bhzjkejuJ0f8fbHT0WiTAJcDLmnlSUAhkYWVW6ioAY -opHE+3fgajlGkG0STxA/kwW38RF2jPp7jVsGo1iySy63SXgi4m6aYPChejAAYMXT -L1eDkUElDrXWiLnXNjbfIzlrkoyvx1OQGDeST0dBVSJhIPyqoZqZV+WCwFQPaFL0 -VxfaeOdq+qcDVZhYgyal/hc0cwGswrXC880XZlxARR8IbJE8U4N0XfksFzjMFwtU -oq3hAbDt4qh9RRpt74GrIduGBEtn0YEBJUZCLEv+Rjq/dtBqDI0albsWVMt8lfSO -2cBrFLleQJsMo48tu/7aDi/fzWtKJhWY5c16SzTtrsr3/UQY1+IQ3wUui2rntGIE -aTMU8Pgkcmlk0Kt01KCdnzgDUACcDyuZ/xh7KwYzQ7R0rUMjizohoqOeUxX9tVGR -4yztgyovyl9zQfN96BMT//OGoXLkd5ZT/YJ16MLEy1miq8r0W/PPh4RmAyW5aLHO -xcsPaLIO+6P7m+BTnl5swTYbGDzTNAoSkawJRvi0kJYJOga32vIwvGgUnerfQMjK -Nq5afSmI8MrKW99SWlhAbkQlCC1OohUJWs82IULNwtlhLdGM0U2LsjZNQb0w1l1s -lOluXeU6ampj08bFE3c/3aTojVeTym4ALsm8aec7ezHDEfESIAb8fqq6RFQmjeim -JCH6wsDkkjmuX0/5RA4Ke1mWAdfULbzfMZAAV9k0N7YP5TQ94lSeTJ322qLXD5Z9 -93J2Qg6u8q5X8hBX2vnrJt1dMcwPwgRxDQBSUx5ruBrip8CPEzu29k0D36yufdoP -PA9BlSwj0kluxzBFpQ0iLb18qE48gCUCypa4jw6tdpAb9eXR+DcTR3oMEhVQoCld -uRrK05ehQjpjk/oQBNohHheB3zQhinmN026evmlotHufcFvTf7tQbe/8LJJRLWd+ -kphZfBsD4WFbfBu23bZBIZoYnmfGFcEGWjOF74OYhvzlU8It6AMDyuzkMwuk70kd -J1Oj84Y7ei2iq6wxiQlfMYSaEwqRRK/dGvpyYjZ2jqayBwhcRkFAkZ1B6AEAOMMp -moGDzGV7NJqr2j+phwygXXqMs7/ehBoxSGw21AWolNxWTJQEOws0Ld/ZZXVBsDw0 -j37v3xyzecO9UG2vWGACcedqSwEUD1IeOF0gRYuq3F0ddj7RBy88s2tqw07pbEXt -6JceZl4WFW6hk423UC1CTZ8vhqhN4565CHc3W+w32rOgzi73bzf7j2OKNKV0R7Vz -2aXrDZAwH3RmDacxwOnQ3aovpNiUPs459bCB7DieJg0rS1q6PnscJNKxUHA9Vmgy -XRwAr4ShtZy0F2TPwj0Yrku19/BnkFRJwJxEZEX0JNTgkIhOeb7Rp9VDOg+S33/9 -o35A2gTaSooNedI0UU5V9vvTaUvkRZYQR8RkBDEqoTOGWYdek/XBgpf3nDN+f2Pc -Vo4eiGisrXjNkbFB/QLWEV7iukFtDNUxhVpdJ0Q1n0mdWtyg1N2Isc8WyIlXutnG -q9eH1PbeYQ0NKcqRMBGOY+XMVaHShXMDeQFYerVhRNBZujR+cLHKvQKS+Y76Cv4N -61jyJbmf9Sih/IEdBoRQn6hAhkYWSRmyMSvKX3L0fUdAAYMoIoHY24fS24MJ84gC -NoG7vmN0gbtC8iGSFX3o6kIdyE2/gnf3iK1LqQTNLDwwS9x35PosbIGRe1moIDyI -y7FGaTOFXKCUnQ5dcOPKLnHyUDGwy4JUKfOfKxG9LZUhoYWLQr8YrXEDzMjulBjJ -+cUq262+b7o+TtBL1o3hnTaB5Nt4bzk4FGrzSpeEghWYMTuotXDjwpXyBP4dYhnH -3srRrO9tZUn6F/mO13vGTrtAvwdtWommcPPPWAwK7BQfe2Ux2vpB+OhFOiL1PTA0 -KKu/a4bLBoXP5hrFi3vCVesbPcK/IIPPwwbH7XXwreyV8mpCy+C9/s5SSN5hV+7F -u26lZGUe/oXprXufDQ4dAIY+D4ubbTiKHsjRMYZx3AVWW6fkqsKR4BR9tkko93+O -W7otfpJqpKYz+MtrIqSpkng7FyTWc4Lk1CDwLwa2GYP19xd4bxP0T0qt66Gz98C4 -N/lnHtzZ+7yN1IMJV41P4mntNrtqvXpugA9b4r+A1woFGBiLNJDLnWI4A3VnskRT -d9IXq8e4ODWo2mg6WeinVi446WfZxy4a3w+v5AEcOrIE8BkP4ynxK1wzcEZkGUO8 -9TT9EY7llqNLoI+Fi50MokYcPOVTUZ4HLLQwUuKT1KqGFqgbYBpK+sZ5xS4NdHRF -lIphD4fGTiPTjF6ZWbbTCnWwVdr77f+tOlHPJjxFrJtWOthjkWH7lHbPIHIB1qh+ -0hbamNT2VMTocdDr37v8NeLjKGkaRq7gsAacv6OcTPagfSpOmvj7+pebu0/M0sHj -Y7/sqGEonjHXoLh5EOu21qWpTlo15gBomXhw163HHN3AmwiK4AI1WhYJdsZ1vQky -IpgvHb01qapVm7TSmnv7Ja5yC3JRo3+hoI5pI8CUefBWXkoWXD95DUXomZ9EakdU -gBoWqbTMFZR7MZTQyVOoP8yFIzqs3wv5JRgAy9hpRDIso5vj/GLkJGa95ETbTU/t -qOnFe3uGf5+jbq8+8lNffUAJX6FRCMyQTS9uaSot/P+Z7zLHBkCOeALacT9XwAuY -rCHJtajij/Y/mF7OZBlRrqhT08+1yUn+MH4oi9S0oHB+603tRy7dIk1xFZ8ohujb -soqGjZ+lx7nIPjJpe5RkZ3FlmSYCEWuWjNVzWWkkJmxuOP+NZaVjnaBrqJlwUy8M -6Q72Ixhs9CaP1S6ICC8xL8a6ky0+5QrE6eie4nX7mqxjXn6KatjVpB4TJTCPiEpe -k6nkfSfMmgWtd7YEb11nlrlnQGTtfy90ZeSChRLXE65zDrfRu0YPkg4ytvoTISJ8 -HOhEALkN83rXQaBGDGsFthT1i5bOZIZeC/Xu3jtANMjQBMqnVTp+7hBAevvCJDsq -kOd0LAsGYjgIM7jiQx8vEynu2l4ORx4s2t6PWjbc8N0TZkwtKp4aaEt1hxbau9IQ -xgtdICFeoS5og2nV1cmq5Um9eVH6yFr0QVXnRYXtYWw1nanWXcJw8ZknaoFIXo4i -kfJLxOLs4z7gekec91sY6MQbO1wCbp5qAYw2Y12GmJZX1eBkvIbvGoIEkwbdASe7 -00mvbqQT/iYMuaAeMCdLcdqTHEuITF/Rxi5/QRkvXCC31jqOtmnOGwOZ93yrngKi -y6LlZ1t/QPNhs8+wIrMGbeynL4qMTBhxMK6jPWQ4iSAKOLIv9kOUL7sKkbBXN+pp -sWfFnWIx1EkoM3CkBNK46XhHyNZ46zjNcuyJ02Utb3Ls96cnnt9onRuzwRfp+dsh -2PPEk3hcCZ1Kfxl/LBIIaJFLYXmvNK4MHjzFLrYYtu7zAMlBNXJZ0RQfVo0Qsgw1 -vnlB1z0Lqd3Gulq5iPEUlOl4Ii+/vjXrP2WhZmjOiplwmh4cFNsgrwxlWi0K3eI/ -cf8rBGoe3BzRWvpEx5cthEeNJ+YkwiprbriyvBiIiw0UhdrC/BqTXJmy2fQIdMHB -4jiayeB977k/GdnF0HWRszVD9zEHUL47Kgmm4QAzsAK0c864dW0mepf51GaZP9+i -cz8VL21TNLQ2zto52eyrNKrG6sd6XEsybcd056xf98LqoNxxUywqPY6ENgMK1VaO -2Ce83JBpZUAKjjTNz3PZr2lcL/epnzzK+UghNCQFoAbffkYjsnUng5yj5CGwg4ST -3XpXvUio/O9Z2oV8hhZY8sQ5VGiwMkRfAufXKqwY8SGWYQiurUWBsk8VNmvs3JC/ -7o1Lr/6ZEiJxdzSUV0+o/rmS0DpaT9oMLYQ/dm82gPpCsVIvHcqqQfmqIIKBrZA0 -otpXUIqfyGXBB+yU1SPKPwGq/Proif6GQq9UROgN4Y2trIkHvXAu5mZ2Rf09tOGg -DoSj/GLhligvN3eh3lewEO8qQJilc66pCSR4OcwJAOrNnkg5fMQ2/9G/6zYxwX8q -Wo4FJKjTN3z9iWKThnklmgbhHI/Efcg5cFBinoo2EH8gRTda9YvN7MST7dXaU7zO -uQP/eYlKIBdyS18d0mNsssBQpMFs8c0W2YDinzXTnXmXMgpGOiBiH2877I6VTo7l -Mx0SiQ02tsa/rjknBPRTKAwMrktPEoF8iF3dNnzk1Qt2OKYawY7zAECwRnnZhBlh -z8dW67jv4DQmeB8OZLesaWmAQM7/gmw5ux+W8sf0LAonhE4dYSbhLjsehEL6zHW1 -1UazjQK1BycAmxsB09kDP/G4s3v7wrlYT0Z00yzn6nl8OBdSUctjYXESZAcTYMzu -xH4vAUOg01YSLCmMNT1gE8GquzSLHMJHw1xZQdXtucEIm6whVB+35I3nTiyfyL5H -OaCt/zb1TjfZkLNhSS2XOE9C+Tr1NX9obgGH2syljTL+dFs/CkIfy7iyhDL9Tlk9 -evlyvCbvJpFlOCcpS4BKNaLICJucTnHy/eZN83ooLRdqKCZngZUtdXn0QY8qGuJm -tW+AKV95j882dZFEz0PR5lWfpj9FMp3ESmR5asb6ciMZmvBpfEIivc/n5YCZxvDp -W+CmjY8JZfFKmlO+E09YjFK3xSHvwtJx6ZY8Bfrj7LBflwHIOyrVIEPqiOeWxrVu -VTiioMKI1bwcAFg1Qt5BL4Aju1r7cCKHUZZKsoocHfZKkOXMrGmlwFLYlsNoyfD6 -mbtYkwYmQkBkVInIMHUeGsFvJXxwSremq8ZF5fopJofTGR1kUqBWm9eianTYo/vp -cSbykpkD/LYONnIAXMH5kOtA+sRI9EbiMbjgR+NnTUEZgG9fyJAbEA45t0d1lchr -s3+jT+wwRm1PnIDW6cleD2h3W21A620aXL1GbTgnBf42XR3E5GwBEpYPF8ZFjIcO -jbiMrOQ82QcfjDbuxb1j9Bw5yJ95Bc6GfVcfS1LYm6N6s6l2Rt+37uF4j39WO78T -5dcvI5eXA1kuenx25hZMYZ7SLOAWUCJIRSLBtgLZx1B+hm4K/ezmrdbcMQsXbwjP -5sfoWZ45uz5b6I8eYr7HK/M6uiTNJRhPKJzsGlboB4PyMZ//XsW70JZNCYc7By6v -Gtu+HmIbGVVY/jwG/4nOF1nXSpnJZOJHXEgc7cc4hD3y0579maQsYF3ET2m6+iGQ -Pc76eMu/GF5RnK9XGvE07Vej7tAw71sN64bkn8ylVAUhXtHb+Wf7hEh5QDTCjAl5 -pk/OEoRrHikD9IvmYtMFC6oYnpMSPlaIJGC40dlaTjpsfVwQE1NYpM1piIBViKpp -OQRzC5sQx59F+oPtsVMUNyu+mXMTigsWMzmgDWs31uhpL6W09KpdIG+rhlRDnit/ -RU1nUqvTgj1Ix1KRQYGKTQcROFE0+kMx1sFHr/4vvdHK0HyQNlEXEvN4xqAGc09F -X8PTfJGVhuS1csWJqNWEUjJ8DPhIt9IKupX9VbJSxNMSmeYWUBa8jD+P5aQpHrn8 -7AUNZCIDB0VHlKrLuoKCQXSmsZn7VExcZenko3EsvdmEmB14jBEtiGgDaDDAqFR0 -RMh7kqaGwBu70sGqo+0T2wi8DVtb0CXHqj3ja6Ifu9ycrLVYF3L1z/i5wqVYV+Wm -mztDBaPDFHtaeYFsXxyV4FO0ygiwdpY8nKhrcuE6jrGObgtX0+O0ccKCpINRr3Qj -1Q6lGymxQjJVgFourNcopk7Dveq3RVzv6Gmeyz+pza5fy0OWDeX9I3Azi51omod5 -/0qj7PdQtwAwksc3j+bFfNBDtU+YBmGXK/ufAZRwgBLqJKRiHpqk7mtJ/wbls90J -5NTsyTsgIuvSl6CdgGS5+Bxpq0UY2H8gpHSOLeO+tnd+bDLEUlRT7uUhNO8vm+l+ -Db13aStdAa4hFrjA5gmWb5PWyJSLRncZRPs+NAXGSUZ3Iu8GS+fGiwDGtZdui98T -+GAEqd0kPWkqCwjsdlIIXS8m8C7iNB/WGPcx1zrhQdQAEucMM+ZREn6YPwxcdqwD -DIiOJwnv3+iW7DOa5xPVwWNLIg/XAMiG/m6+psStSjuDRWHp78qU/LYA4JuLjnZx -XUGlb4/YCGQ1FeDNtkAlu0Ltlw2qj7NH2j4DBMjcTJkSrf9fyyF68nUnjIGc1HDc -2gzzAru05YD7/3lUrClaNb9VMK16sIpQHYtvRnFvJimZKBIUuo8M5lxmM7S6jnwr -ueIKKxOLlSA7cHx8aM/Ct+6sRu3K+5e6J/5TGDpab+bzPfBERGb1IohRsTZDgO6S -uflOEfq72+y0pxjAUAVyAQVw5zZEDm83OhFYoCnwr3qi3kDPL4+dqUW3/zaSXfGy -+KZAvteWEhsoysiWze6hn8WLkuH5lIfeUm2nY1rJASTb6Ob/tFBPXOFo0IxI8gMu -GRH6D6d3Ff4uWky64LX6FSXS1q2aaG9xRvn6OEUkKUQkeQNuuLcfcwGj7CJurSui -BhKaxFt+xv1TJK43G28mPAouFak4DryeTnrzJIEUqBYamNbzPcv1jpi9qJgkHaI6 -XfDbtSxGZR8EXen1cf1eW4gxFs63uDT4VZ/UbNM7TuqbEz5St4ti6ztY/vbiTE0J -l6Js6IGeeKQXVDin1EkVP5snxkxUO6OfAOT+K2r5aEUPVrY+GgCftkghO6fnh18Z -sb9/IwD398fbnoffswB9inAggImy9DUFEcNwZrpre1ehbH8JNQkeuIN3ssknMu+k -h40PcURHBIyIq/pbznaZtWgT/rPTa77+Y0C9nIqyBjZHuuZ7p9XScl6RvvpW2rRS -MQATFuI2uPyF86A+Mir0qf+ukxMj5229vZBi7BB9MHyHjB+9C2GwulyD8tCiKY4X -NLTPjJgI2MTc0rnbyfzT4sJNVe3LZj7mTgz5MEFW5pIo1h4Vh86HLT7rS6kpvcuX -EpK/Nygsq3KaouzizP1xtPl7hIizFZnyaSuR8Z6hS1GRhytlSJnO7NdA+tHmjpxl -mvyjyQruQZatqMl8dov4KhcBbmmdA2twr3nqCo4/J7evu4hou46USYZyrlwdECY7 -nIrLC+23FnVrKCLLPwRccRAdyLuG2qNonGCu+LtD+OvhXj9HfmC4h918yzxnWYwn -UFWTZ0DgeNb+vL1xHyzvB/ii5qpqKfFeeHSMSIr9+y0JS1s4DOCpKdxsYoukVlt/ -Qz5aZX1+zb6vP6nLX9LOLn3ePtOz6So2FcWumraqyg9K55dxwONdjqwvTvhCSlbE -BH5XjmthhrxJP8b6XAzDUaQGsBovVIP+fBLhjw16pt9zYd6AzpJV64cDW2rniG36 -AveUDPvH2DVFkDwXhOEDC7tZLm9mNfkCRhYrZNSYSpXh7MvpTsR/N672xlSzMqaT -XbHG6Rg8BF1s3YZo+vLevfsaQSSoJyy1m8fSGvWxA850czks85/h9jgpOlo7HcbL -PtYyX+BwAHTQ/seXT7edUDBES+NvzoBgkV34Us1MO2p6AmBqgujLl6cB0weyuE9y -xeyRjLiSWoZ+1dSbwbN5fu6tMrtiHXApC3RMP3EnoNq2dL2OOvF4LaCPb4DEloKn -hIZAWA6DFDHzI9vuHSdTmJMz7mBdaWS8Vp8taPgxXF3+c03/jMnYvIRs2DEyysvk -vzmuj2oWPCc9lww048APplUUPyWs7llhdQwkJQ9N0PKUTRhFqT/v1+efMOzYqBOY -XsM7pqLietEoGDN0TT9U09BZYQp8HDBGiddtTwUPXISTwTpWRYneZnql2mEGKyYX -0ZbyCHlNxvA1tkzdi3KQDlVffHOQb3KolhYvUQxx5H+erTlPntPQ/EWljxpxpubG -wAhwzzeZcKbkbOfhmM3dubjvDsmM3Uwna2uwU+kZTV4Gz10dOdPQYvcFVyTVrHAw -W09ULjNUgeGxOFBEYejqsez9l3dwFDL5L/FJ0dshU3QKV3qjDkyH679Gj2wZ5U+h -MhtgUF7P8qSkJhExNjSAEk2TRLZDr3aTp08gBAbcS9oPHnxxk9bFqs99HHXq3+fM -DS/xTf892IBsLpU/2n76hm2YtjQHh+ZKpap4hQFFLqTFxboKiEgyQVSy+RB8aPwc -nUmbV01EKE+vOnjx7hRWk5SuSvRVq6UG2bRzqzHuj8Uo8diuWaBxuUEE9ez2GBUR -2NcgD+hEn3VeZPRmEO3IiLGnHVhqyAXm5EEAe76PuzMW0Z+h0syJv4oVnMFdTNTK -0H1Ch9pYxuWUNF89/lbpVcVyh+w2damDLkWdUhm22Q37oes4pVYp4A7recogEiqc -mDLjOW0LlbSPZWPmi0GlKpRWde5qP7uzQY85ZenqemgFK2eVLBfNjh9vT4paPC/+ -t2QgZHqlqEHoGmiELPJdOkJpT7Prsz1IkDFqBZ4dUdnWZ+xKSDX+/s1ZZDUeXsC0 -kNI22ZAVJQApiBbPM9cVtk644nKuz3GT9uqx/zoIA0BK9aKu2LdV/ke348xsAYM0 -DPenNqS5xCrMbi6NJOyXIc7v/Ch3J/ZAyo8Qq49m2GLlTW0pX5e3ZeHmGzBNgMpD -T6fldc19wGEOYQws8GU2dw9g6iUC7lwfv254LGeaLuQ6SrTpoXNgICY/ZaJCeZ1E -TKjR4oCzPogIqH1LgJ9RyGxKQ6+jNUOGWf2+JcDjCpGF7Ndr5aA6NiBNuwqsDLyc -tER6AZuFI7HkbxCZtKpiGtv8LL5eNSelH1sUol9jBSSqTtiM0rcdz/ZseIdTrNer -ii0KwQ7ZODbfeEuXk0l+3FiCv/ijYFy94XA2q3I54TOEdAI4au47/koJn8Dm6l7z -7VwxhIEXBrRpnHzwB/7m6lh5l/WctO0sjhKCDZa50ro6jvtmn+MUvcv57nmE9oWw -snlKO60Jgqm4kqnxQ07rUB088ig0sywdliHy2n4p2Dbpq7QaBDA0n4HPTIr5Fsxh -Om3BDMGFnrj9yRbHV/UP2cTUOoqrxhwBNZigpkl757LGUF16WIj4FDYzIOPElupr -I3ae3uu+bv2Qks4JSvpGyiBluXxfnStmGJZVjScL0K5bvANqVzcGTZX11wzY1ZPE -3pZgWWsR19lmKGZmx5cDv3UfovSwVp4Lo68lmJFCxttqaU2v3qCiiyxjvjzBIH7M -zbF1foDmll99LnZQa46KxFlAvv/hzaZeqhsLIETUGxoHRWMEG3l/qHlBCSrNJq3Y -KXRhYlf6vQYIrx1//FPeCuGce4nyHnmt+lpRMQWut9EAWDjPRJDbYb5u6iKdCdXk -cgYUtHMN/ltJ+tFidiok0FTTvFuy3dZxDySMIEOhcQdFqkuzf9nTn/pEIXg5OjmA -a8Y603QM2EiIjoXH6O2bXJXtTu7DUVeezpX4wfXz7l7j3qSLFvbI6WNfIwg6B9p0 -LJlawxhlEZbbqZJMO+8OmwP7+sHFGIv47oBEUNlHszLFN4z2r/PBeoprGIo8cpsf -pYDf38JeYo0hh2uoWOR2+19KuFNLRkOgD2nMlQUJKsHvHm4UH4IXFjZccHZVJZ/K -tKdhAeJ9CFLTfAIIn4jzAvrkMdV6tbFk+e1hY4MKribfTp/uoiAuukb9C75xL5uB -qmeCOLge1s7sEGWlgx/eGtiXRsja7cyehyFyBajrtUN8crRjN4D8FMH0meQR7Xgw -rKBEWNlsc/KJBBnJHKnzLeNDkTW/XwKDX7RWkLmTbgwJtVaFZZGxVYif4LZzLzxf -fsJTza8ZugP07TFnivnsjUfwfpc5Cnv4atcs/uoc/3oW2Z/icAUHq6tfY1I/DwGW -QgWrqZq5WD79fSjjgSQsLyEgDIo12o8jlISxlcAl4x2v34spN7ZNhyLiHFZT+iNX -GzkdXRmCQTW5wUFbdaZW5ZdCfGqsJSkp6OhHd9Sk5gXfBNk++nK5VhQqCeijFmi6 -P8HeZO2F+mm1ZBS5yFXLfE3a0Tgut+fey5PIbH9Hja3Fi5sdqephsDtNn5GOYIqI -0MAnpTxST1X+tMuS63sRZ7WqsUvaCSNL2XKLDD7yMNZ3O2UZ+AJg1c4gk8X4XcaC -12w0eQbn7EUJkasi8DcPnhmp65+rLUOVtm/vhzVTfkS/oZHJIptfECPEdVy+0VYL -C0getEsr+wKvKFLTZzBuCXOCZvUSSIqTnWKzFCaB9vHTJr7Q4d1HOQQY4xMHEbxI -h2ibA+v3Y63Rpp93w/uYtJAfecbD1EgbFfvjDySVorJqag7AFe7/gYp+xWL7glNo -GI6YR229uticisXy8JmsKlXWzvegyllYOjVJ6JkIyciOKfyr0TNO/X/Y4qa6Hb8B -SbfndxuvvNbpwQ5Lgo2K5bPFTQwyIglIe8CFoFVzzedHTI6RlaZJsy7c/0Nzk3B0 -65/8k4eThGELoeLS/mIwWBT9EY1Je6grVx5xinJFOKhBQbPlWw/aCpyQCzhR2FXB -Dr/5tlM7jrUGjoHrulyUTNVvmPqOf75dsMQwWcX3oaWkounXhZ3PRAd0l2DMNoAY -+7mw7IqYT0ccaoz7+e9YiGDRpNfhfyERHm3JuvDkZedeLtW3HFybTX+1ll1AGjy5 -oyfr0ANI3yQ8KxcvBjG96zB7x9IP3iGN+KpP2kFaEQztaXbynZjtYoN7R47Hb5Ee -525wPDIalX+d+IK3t80fi7lDPBnYAFslt1GhsCsEody0vCkxvuvC6Q== +U2FsdGVkX1+Ro2OkwERnqQxRgqE22raIG0nuZNU+s4yMx5bMTOFAwynkvI/D4vy2 +rb+wSWs/HErbUngILXkAWf8Slq8hjumbVIBoST1PyBWIaw61cIBHVRZymyfwv62o +NPNkcegh24mT16s8zGjHItZ65hxbgfWo7mKZ0hO8sC/fCzeTPQEvc0fuz9KacTez +sqtaz/ZgTpUG+Oq+a+rhhabVfBwpZQ/U0Rm/U4MnY3Ixhzeo6xWIETMWO8oSxc9N +GUVcNk5soo6rwOvuL/s2tHm8rIyHsxSxDtoCfLy5euIwEOriPgvjJ9fLV1KVqmQR +WHAxVXuBKOu+vA2mojPuPYkf9ltYW3yLDkZJ41YSl9KznjlEEYhFgQQK3Rkiqq9h +C1MjNwP2DalMLE628BXT8AUc6HODuHUuSq/fB9UiWqrsHal16quyOTekb+Q96J8j +V1JzfNGS90qhQTWUlAQ36NdD+60bchtf3+A8enaMqCequyaY3PLDDi+OYFgVRwcM +pXfcLfDeqmb3/Q1Jd8RBoz7kZZ19os0ad07HOJEPQHkFSNUfSEGwQ1vbk7R77zj5 +u/gyK3/RIYLSIMpgbdweAx78tYmnABXJ/8yBace/MpyHmUo2jxL+hweLH/aT6xzD +MyISNtK/Xw2aButSh3RsVegNDz0gkid/c4dVb2lhZ+i/rDaY2kZ+8vYr4A1nEgXP +n8A4IhIx0u3ssEOnLoTvBMBurxi0aiqyknKIm8GDveA9l9d7J8x41s/SXWM9mI9j +2/rcXbTQRn8nCmFJazdhsnY23b1O1cdHhw2MMkzQ76yE1iTMHLHS+nDJiR9tf1Sd +da1MXgdOPV+rtCvMJqFPizBR+lRWHEj0w3zJW0dPIbZXySUgZRuFa1L6sQ6DmHDf +Noo46fdezm+usBoJHwvc/K23yhYDrYAPPlsk5eK9mQEvlgFLnPZtduKMwys9NXUt +a10rlLQvMC2NeP93dZCabYmCYnFdIAl/kTqpjs+uC8kFDsdY5W5sS3Z863DBX/pa +mB4aR0B97mfWaGHRqxXYwqehvYmy6Dhq11aO4syKOERqrzQ+xw9NrOJ66SmxZdMJ +ckPx6yoIRew/3fdkGIdgtr7Jd0vDkoKSouXAOR1sdq42ZrLse/Ee7tSiSuy6kwvX +BBzPxKoNyKaNXTg28W0N61aJGn3BrEG6Eutx37VgC7wSk4awjWhxS0ld8DjIbxFk +hQK3+BJqW5BWQQh4FkHfvsWgKSDqzLWbXeMc0WacQJ5tSaMJs+Wc6GTbhs4zcEdm +XyBYsl4eagtr7KMNtLJGzFd06QQzb7T/wbE4Z7V4A35LyGcz/KAr8jPaFT+Z5bp3 +FgukvBkjCaN+BHOakmXPph7zeF93akIySrNm/igFauuhQ2DgqMTtMOCg2/XewoKO +ZbOWcxDUPy2yqlSg0srKYHX+8IKLX96zmmryQA1PpLYJmVZQ/t2muyCoMV137DrD +F9gy8Vul+pxkmXQ9ezWeeaBLjGU+7JFCOMJBkFGbUt4Qsjm539FrGZ3Tne3YZ2cu +PlFJlpYjZAsBnhIKXY7AX9vkT5L2rjuh4VZrLNbcuNDxM2OnH414r5uWoyieFFG8 +W5xhHJ6i6icYjab9PeWkl08hLLEL160ib1oPJFdNiFVL9H1OBhpO5krYLMG3/N/Y +B0u+u3ujhxP5xZkCI0mvMZWO4zDlYUp8w7ci+iD9NzOK4DUJgXmxTswtLEa49mOw +hVD9ZuZByfwZChPCJ+7i27CVrZWir0O9ffjOD4AZ/bpNQ6/TAyS6W+W5fmiT+Rfa +49YbVmuPFS+uEzeDJ/rfpbO32bJ9fGgLICyp90dCss8lBoS6BHGurkf7XimmSkT1 +3onAZ+rs6T+pYrcAqBhscjNQpxjX2Tyc+6M/lxqsNFfG3Dd2SE1W9Box/lo3NS9/ +EOvNOevjjpBlK95tv0EiShPrBROcBvNxbxlteCtfVsMXldM8Oz12MtOZiT3nQPSV +TVbH7v/QNN4E/2Szb4sAO86Rwv6fGXTKsmOM6k1yiqi0y6j9WivwuKn1IiTpAajW +Cf8uQrXFJq0nzklq0b+W9xGHlCQJIHGCUFRHFqLIf1WHfi/jqYsjyUsAGMtcmNoN +1Ggzh2tX8vEWH4Qy0g+bEixlfn0eP22UWOyxmeWpOkDtQuBFBau0Dtns6I917X4Z +INRls60WtPoxWwUJqWfw0PnvkZef6e7qzzT66QSmuyMtIseTq3DYHVd46Rgq9c0d +ZjO6+irreIfmzoZuzHJnNuzj1mZeladcbzimGjrB9mJggspNBrM50q7YcuMv+y1o +mQuAIPb6L5u401Ti3BLMC/+fsgj1gT90r+cWwckXmtZZVaYB6xP8K3qlAzcf1lP6 +TXmHRC3wywS3aDFLHU446KU0NwjQ98Jnc/f1ZNYNTtnlx0Cn8JMihXVFTDn71fJl +papY+tk1LGp+ViZzkLoQ7jrjrOmkQ/uKnTxqITpBRlZrv0W1upz5nqdET4bd4D8n ++5r5ctsMCY/0Aa8VksgRxsE1fp14XItlH94aI0UFv+eZ1gJwsal5qyTP6phtP2FB +i2mZP9tSjggkB15YRzlQ5q9NHG8uBktTVVC9E4blDTVaYXfxe7RiQ4eFNIKGbkET +4a07zb+wMIOXSxhlj0+qK6LJubZsDYQYSSjaaNL4hN4dn5Zfh20WtuF1lD6+Ujcb +Rqqa3CEVe763CSolSijrmHOd/GbfKQneeo1nYD8TvwSiGiI7iIcF1Y6LcMBS1fIx +FIfiwCjbwlS5C6lfW4ZsqMutbH33WjLlgsibrPG5t9kIRm+AeGrdpu79Dr4VYBjc +FPZ11LbN/aKaokjCXomK9grUpIgT4Xxra4yLVprUjgzfhDBdWVO3011SO77OgCtQ +xJYC3V+nmh26VoAPHUEw1Ep+Nr3PA59TdI87tNqDq1fG17Nc9cOOeUbgu/84oP2J +KEGfZnNsryTF5rhIagFqUICWvJLn+IuOF8HQbtYnvQxoJLy30YoFij3QX6K3f/42 +BokjnHXI5RELnVWL/Bqfh90x+cqIaRKUWzrK0P9dirTDdpiiI3wIXcUhysjvoBTw +Gd2OQTAWyQH9K5bkiaeloJaemPWcnbB5ELJTNnAtu1uNIP/khHI+rGO9NNfNTMJZ +MC3R7B/VCa5MhTt24l3dHeiEKbBjG5Va8D9zEVj17DQJWS8/TuqeV5CTCoBM+Z0n +pae0ldCJEG8yuoLEmy3qcTZC97RwSqHoJ04HxFIzAKon79o6TaNKCeuSlWP14X6/ +uuNvFvEj5MCkJKH+P0lZuAtcheZQqA4V85tWg/a2K912CtAdmaCA+kz8DzV5kMNT +CNZT2mnGP8C4Zzi2ntX/+NuitZIeVZYdxyLUj0dIv/5HL8lRxjRonI37Bvs3zu9o +edN85602I9fkI2mYCgI2IeFRb5zulrDIurrlrupl4r81h1M3uaFfFWtZOqtWr5qp +Sr61dCr7Mm4+/o/ex20tl7YFT3UcdvLZYV2oX3UVIqnhCQfEDDFmQ1kX+0sime1w +9yXWVlMhERrlA0VxABj540SVtA/gpGIP6/Vk6qcyW1k/QlKldtqHpMDpnX6JF11j +EbXFf49NExU7COmWb3TJZqTa+P8mYaMpVI6pe331gyIcuFuVKB46o+LtbVYhwfpA +g8O1xb1qsFZI8D+6sccq+4C62EkFDxWyyXf8BpmgWR1+asiJOquNlZS+//IXyCx8 +xLcePsMS6CfXY9uZcc+JYDdKDDNDYokaXt8rJeo4AFsgyzp2yZ8KiVbian7k7uc2 +5JpxMSB9wop1ZR33TT4T19cuHeedvnSFvJugu13TT52qoE3Ho2IOQ9kGUpjSBXXP +K38dQvKT+NlTmhCspbrzdfvAQEQDijPhITXR5GvqmxkwZgDGgygvjDxRMiwdRQXw +ZL3ifs6XE71pmZmK6MpmT1Bec75mVoiX2bNqBZccWJC98jMUCMwSdA0RAr6PCk0E +VB2bRxes0dnuztnV4zuJFGOG/oH6r7QMwKp61KGwIeZhwvjeLIvYWWzO/oM4a5Nu +cm1fKzp4c2J+FU+ogJWSF5Ek76oYdo0E6RSB42LBAPxa/HjIBDPAR9nqJ9j5mhvT +4ZYb/1PuoBc4uwCG3DqbxZTjwT6TvIdLBkHMGkYw27TtCO7KksepUKzez5jwOXZv +oQCXjop3J2LbQ4NkASStidjv+zCJIO9Cl/G1izcgAWSyijb1lEtGnD1lguR7rraP +co27WJZ7aI/OtSCGM0ezSfOodT6Am6Bg6AnzsZ7OCHadbsmp7GubZLCyE0dQf3BF +c8cHqk44h18oCT4ieiuYBQ5H1MQWDwgCAj4Ji5DoiPhg8FTr36PIeaIARl5L/cWa +lRbv5IW1+LRvnLamOBJ0EncQvdt9ohIigZHpVrSV+f10WulM9pFvRNOKADl0qKbB +ylln62Qg/gNSt+VQOBH5AwBDm/PTyKkSzn9N0Rs4mnSiRyjjOPi3sle/d6zJNnUf +RILRFHcfMGeKS7m9GTOVRCyhDTP5wnBVYr6YXn+o96Cx85Uakp08nqCpQqUerGss +XW8o0aaWSjv3M1HVf4ceMgtAqWDCcpUuCrO+l3USFztsy/Y+yoP/kN0a4UCFn0Lf +ccrvQdHxJo9qfqGFSa1W7dlDYJKGDq2oSyREa5J00lmkroexhJUPIeY8PTKE/SC8 +jaaoFBG4PPiOVZhEgO6KtjiDqukrBoEgvu891wU3i1vhVxnFmnkpEKlZlnxnIHsJ +eHXSwhHwzXrBVp7osbgV9SkuIasxGL8WEgsjQviBuopvNXMUBjP711K4EJ/T8NZl +u7tAW8SuqTPdlk3XdyarE9gnZceVtyj8j3RmLV4tW8jfskYouXHDsiosURbT4TDV +xXenZXNvKLOL3SjYwZeae1kRACLq31dcIbdF/l/W84c1vapME6CEBIyhGdhQG5HS +ftnJqB+0bTBbguKCnnFuNv/thnXqF1SmyKJ4TXg5FSOumkIPmqhaQIueZMDz3/kX +X+IBh0T4x7C8TdbPOa3i0JI047jr6ML8yXUt4vG0aGPFAOeuyunvbiFU+ARVaTJ4 +W33TcIoTXW+nphl89cPcum4pdslO6qO+kcBrsR1hI/7aPia3NdujIctFunil4Ryz +xl8o2tJIpYSoehnJly2/ZCN0sk7cxmQVrY/KizsQX6cGHhFDLhRV4lvJpYoCfnxo +EP3rmLWQvjuzxr4GG/TG++8eo2WSZ/r+d/qbNvK1lRHqKhWlgHWbw9xlQc8wg4zd +K9pp2vIU9j2/cE3v/v1VjuTzomNGOl3wZa6soI1qi0q1kGqr7UOitqrfMccrZhdX +neogbgnIIYXjJQQHW5uvLJDTg3BhCmaJCNyi73+8QL3jASJMGpQIY8pEd91IuDVs +QiSU1sxV/BTdULUR3Zy4h/nwC+wHUxhZbunGCanPE+a8ZM+KM1j0xSR7qUafGwF8 +V7OeUhKowoNpJJMCt2r32cFJLLGqicjGr8Ir8U/8VR/g4XiNHOgUlIVca8OmME/T +6T0LJ86f78uQ/cApB4IoW0XOP6bZ60aRRuN/Laeu7dLglXqiP2PNyMq1kpKoKOPi +GZOnwlPer0uRVL2f9EpAH+L6qKM4/h9FD/6bs1xdADJR3PaDndm3SjwRJuzTL7Nx +vSKV6EuzHc5JDGil9QA9DtmzhxlIYoleIE+5VJsn01Sv9FAlFSPo3r5+GTtAoBrg +6xrg0irx7n4eTY52L6cK0Ml8HcS5ePCGNBbKF1w3bBpK8GoOrBevtW1mfTKgyaHU +7FLVf2z2ei5fFhlYdMyX1iUoOv97m6WBN9HMmYXmmKkFatN3xfMXFNYW24dNe8lT +oUojB+mJDrOFD6NJqR+LcHIRB25XTY4oEUC8navhEafcEQXaodEdfp4tjxBMVV/8 +UUuTjkIJQc9wB4ndIErP9jwGzfzX7NZaWfKYN5oLD0E52U16l9qlSKTJU0JP8XYD +nLxKZoLUP3KJE4a8mJOp9INOVWqdJNNfToD/gBMtcCg0HwFk5VIqgy2Q5QsaNE/k +vaCa+MORH+jPFJSRseKg7qvVGlieinsxT+ilNwtbmPjp+3uzoD0BIad4TJ1kKcab +5eXW2K7tb8IjRgq6l0A3GGlu3aIXE6IQiM5OdDZwiUPPp/X77mfK8KxcDkZTU+W/ +zwMwKisnVaG5jPC9TMAHnZapj9U8y5lsXaOgH7fY6Ov1VBwelyiL8qvVeNOo5HR6 +ag8l5qP7LOGRkoPESNftCfiJxBS17qN4nTCepp4Ku2w0VLrz9N9mEz2Ul2am4LoA +X5EjmqZnxwiLRtOKhEjnf382VXmb71QRLB59jKhmbBmOq/DZ5EEJyOZRIHDZ6f50 +JvTm1NJZNQ66ZE2elUrFbgYqArCzimKvj+INmOz7CICotSL+6xPPLwq+69w+mw5Y +jfNJ0T74W6q8yZcAooSlybxwOjCd3RmaV8Qo4eBc/ew3UYPP1Kd96C9qxsqTPSvi +iNZucGvTOQV70p3vSSzCv0Doyi+mrTcrE9UITD01urx5zWPrvn9a8hhvtYtLYkOs +5kWGqgWLu/pvT+dD5jKU0yh6az90j/b/g7fjk1vVO2XHaTYs7gO60hSUWfPKC6W6 +cqeywNPcnv+bYZtoUwpdOLI4S7UwLTOrFxF3qjr0CV8vSzk0PuBIrTr0Xpwx1Q9z +NyJNInrZAEXiabxqruFHziylACSXsNXuCFo4FtpZkpmx4sABfNn1BvwF6lemy6iV +sH0I9mXtsao7NodERGrHSGjyDaohFlgmUXrIzypDcnGc32XoE7+AvYAiYgAOdzX/ +7rRzTDTlTv5ss9fHs2jtKSPERNx124Mj2K9VX7E6KGuy193cZyzhAEmrYKOaKOY1 +5jyX2hiQleB1xjOWg4cNhZTdEoGP4DTcQ13Egx6khUBALTTJhcJU/FX0JQJb7kCp +7/A83zaNAkcHzc4FdnhF08VECQRn/ZUvU2e342AsmxZv9opI7CSD9UjfCXYvfXnN +od7wrLGsBUChrVHLDGst22GM3vIFwC6gAKvCYrf3WL8wFO3KJGXT8AyGHijV5OyG +X7OeFHJCWoKU63G24FGHm4rKRW9S0mn1sxYVfhf5THjJXs8RpsUsH3+G6d3paSb7 +v58VAEkW2p2dyLA2cjqHLaaGJBgYuqyb/hP2kVu+lUMnzcyabncrcq0g0anvrA4F +R+WdNtV6KvWYToiOBQcpl0176f/11dGU/sWtGYkbYF8JXhf0GUVadM4VAC6BCWtG +fC4TTN0FuL1ZJjRVf3mLGOTQfPzag8Y0b2JxvdQo5dJ5BSZfB42LFj2VEQhKcAM6 +rj9/rJj0TcsG2B8wSrJn8KpvyIXTk8CluxMRSZszYOF44EitBv/gldotIS2uTWNN +8t1dQvHGoV/71U1QfzQW1ovg5B4HGIg3z21ju45bbQYvR2Ay8//vjHQmfvyE/PKN +a2JnX9tf6ElaAkr6/mrvskafHTktE+ttKQwBkwgfFUu5hX74wcKY1/JCq9wOujA5 +HP9xsGpKB/U+M2MHWuCqhCx7hBYeb/9/iJuFLFAPJfr/X3cJq7LRqPNmAJRQd5nf +yZod1fQ4AS0VM2bWeN1+pqDuZlGu5bOJ3S+9/B1AvE+QgAXWWNf6dwS2KvbiHdih +y0EYjldzN+fY1UMt+z7djTq51sdBg/1dS4xxsu8Bxdud1yf0su1TsENfkDI3SsP+ +V8QG0/FjQcA8ajCJSLNo72SJ/vH6QxtQ5fcIbb26vdAPY9ar5/Db2nypeGABpZnF +BNyBhqj95iO3JZf1fbsBdqFPK56NNDVuWqLMSFi9nYBuEeetnY82FNTvlae/Otos +qx5LN0rKoD4q/nLS01lrCOJRv43g3vdPZA3DvQaGO5rt4bkFibWf8bZ2yetHhIIa +dXKfi9tRkTXzYFZlT44DSf439CrVrOf6B8wuy4K2xHWft1jIMVnRuXBjY9fynVUd +ulMMPTi9WGvrwSPgGRlql4i7wXQv/efCrZNgQJnxmNOHofvs6ju8VKRh9jiTBOrd +ei3bOspnIS/kuCylxQUgJe8u+JeRJu+YahVShoCrW2MtiDFP/tYxOZmKVYf1/k4j +aHpulKoukPlr1QlnY8CDaEDXk9S+ZnPjxLjbvgBQaIVrxJj1OQteJK6S04fOjF2j +epyEJJmoFqL1FVVhOKhVBh0M/hq18CN9rCicpVfUVjIRe8zMf97HcfVeSUQ9G9RA +2TVl5qCL16B2SS4LD+41+6kHiKkuiHqXRKekUqDQCFOp1J10thiZWF15EA6ms+39 +ab54P+ZmfzNERyvzTTc8bgEhiepB1y0YrgG6U6MuaYpp+Jq9TPwkAVqkaJbe3vtO +21gUdQFW3mzUlGhWnCDJShKiCPwtF6zYfwcG4yERxoNUYCm+JYnRgRIpMQXyARyk +inbmp1mc+DJY1CrfQu02mrcvRDopfndyxruIt/RKdTpFqh8VFp8H0fCQWJdfy4Gr +zSx8rRZBxL7I9w7tWXXWuRcaZGAUEAFZYM4PN1aAcpwXt3rePli5Wr9Lg/5dFLyk +fbhaswP2i/x3HZlOZB5xb25mW+c7PeSGbx8A0zrGS+/oRy7Zy4ONZ2cNy2PEg0x+ +CimyDQqdrnkA8r4EYndlprsKhq9r5VSuzgfZwTYHqAzPkWLDGbBH1NC2vBM0Jwz7 +5y4dctP39I7s2K+E31+xVTOlpY7xhhvXjERmjIOKQ+dCNP87ay3fxd4pmM73i7L5 +qFkvNdgDe3bIfOsoiXFpM462cAyYiX9otH+eyvE3O5CTbYO3jLA7DhnYttGB2XRz +Povj8ODHY6F8P8HNVPU0wl18f/cqvdkhrndOfAocOdPKTUgmw1OW6r8gw/AeAoY9 +q49plnYTPKRNyk0u+kKmcudzMpzvzbE+F4t4iTYWI3NzdUNdljPcpKz6PegYw9yU +DtXYS85a/PSLfaAuoWHh7HkLL34Oles2a/7afyKRaLdefoMlTfMGAJn6smy3wVTK +APr5Rl2PZkhVDEeslB/vAd7We7oYo7JVyrpT2NOT9kYxCmvONnFu1jDJ3x3vg12I +idfOnfNTmhxab1Zsil7aIoqWgE4UQh5CsbChzGDcMZCGFbnP3hxRVPxFR6GCfSVP +Ja6W0ZWZCK1cXkayz9PywlQ5JdJKmjdH1qHci8arBPp89OTL9CL9k+bOPTvp5UvD +3KMEaxoumwsrCw92OO3nptHLXv2mcaFGC/Y1YEvvv0Bpb4lD56Kv/6Uizv5tud2E +0WWY63fJ+31C9pweMAZVMEHZwSI1iq5fgNfFGMnS1h8dxJLolozEgHZY8lTaL4fx +BazgRGbsEX/Qb93Ld91ZXXXLgMcBNN5shsa7K/IsmMUGWtiUzlUi4tBIit5QmOvj +ICQ7UxkzLE/LGat/8tk8823NSrLHMTetjhpHY8PTQf497E+rAnIvcHlZok3HTiHX +YMxGSJxOJI2ff0x3byiwZPf/dgZwK3/LDn5ck4LZx1qXdY6/3Dg6vFxCP4avJdnj +rpJk5BstSwca7l8d7PwfAVz9tKYxzhSBDcnk06o728KNfdYALy6h05jSbxq3d6Gm +0+Mh+ydr9E9uRT913TOVcsyXVwRrIHqBOJfxlyG/0HYKpN36WfXMDPeDZAzZ6Xbq +7SPjjVhEyPCK/gCOHDriBppWHDn/GhGwS8Jv+fWiSyLmqSSXtgO1oxrGo19ugV1S +K3or6AbePviDUkk/1NRDokMyHeE7TWZmzllmiZ2d2rwI+/+l43zWitStRW7xH9Nd +YFo343oWHkX/se1jB3EKHmSmDmLa4etmKXN1oIxzl7lRedRMfIb8RtqZ2lX0pcw8 +UwRcVAe6tMqRvUNMArZO55AUJ/3PyFL0m/OaxUqDt8JzsM2i4V4qfnoELo8vGmzq +UWYyDlcpPBznvEPRGa6mDq5359VIFAQGlySpXmFIwoTH3EkirY3j7DnmFC1d5kvM ++JggJElqkY/QUM0BrXrmUEzKH2AxjfffUKJNBrufTnkzyJLxasUbBO5f5wiIMLXU +IlEZaepep5tcgIkQsL5Kff9p5vIL2IrikhmdYCX9Npy8g3Ks+18mPXGb5wG2rgts +ZzDg7y0tyjMeLM7YZzHDjMD4qqIOmirEgfHxEGgLYn+fW4N1JiftzmU8dH9Z/Edr +xnNT+uxlNboT3e/QMD00/xDsI4vl8Wknk3YQEGvD6a89sBUBkEg4sAUMA+WZfaUP +Z/D8M9vhDsR1V70LtOWA6fld07az6JBer+A8M1Wq55hjVrR47DNa4xy3CeTxqLji +Yk3Dp4KAWJMAR14i2aLWTSshwkdHg8Sy1adCvrR/NdwOiBQq3uUAuA8jiCvSxQTu +LVU/atAAtOFRIGWitiDDn5l2Zyqqeo4dgHlB51cNvv0xRc0csi/ijddguEX+Ok10 +xbhylmCKDh3h7Lh1fA7kTdfW6joTSnIvJHJR4BRU7+41bLkxWZfLpkRivvFGKFm/ +lBx2HPZo9DAa5owqkNb0NzINWwSlgR3NQ8wNGmjHsrKiHGzi3vgNlYvlX653X71l +2n/5iDmmPR0uxJJm2nux2BhzYHr8rT09FLJOElSJLv5yDKkrgkNm9HNdFB6ZDsEw +as3rz31RjCnjp6SRUcw/+cjVrPklN/CFH3t44OAUnF+zGCrMAZ2Gvu7pStnYEf4q +cpLyw/KxCJSwOrwp4+Oe08y+6YH8ja+HXzUlsIrtJRaBdKnXbzUNX43Hz/0Ku2ov +D71ID06Oj26FWpE0EzfAKzPXxwKOMOK0I588v1EzBxMPMAkX5vKNv1ibs0V1whYl +ZdQd4Cyudq0NPSzr9TGRlCd6i4YsNqfkvXqPjwuXTNxzLZTxhUFIrkOwFKZ3UzeD +dLaksE4Aknpe699BHwfbDDk5Sb5tXl0VHZvSoCSckLHryULXysINJKEWbPDiap1S +nOxpJUTd5FI6bOVM1UzHtg4E+M7n1+zCIjKRi+1JFKTzvzKJEWyhIwaSdzyCScI3 +Pl7DsleNI9cnAObdHSg/kZpqJyO1NUgHm3X1KXoI3A/NtmCOfIcg1vceur5G0ZiY +SWEYvKgqNqZ+FpxgQuTt1hXkyaLyvFs1k54MTurPE3ht3oZP/FvDP0YS9W7U3yIB +5CrRWMOKq8j91ollzBwCPuGQ7+TSSHVVJafkYsAQdVe5y5rdMcWfHLN3U9hDQnlJ +jSii/4+AtuUR95BVojtIaw/FcfH+LS1Wnersy3SGNJ2j0wMSwy9oqFAMdWYFGjZU +iMNMt3BFxAaQwLOz+WWAFh6PMissdM5B5OLzYxZ7gQ+0ohYp7pO+snwfQIQHjzJM +CC33CqOjiB1bJP0tCnPUCidXwuqHn78o8hzesexx9HRbtScdZehj5R0ccyq5yY4C +qAMj6mTvrQ2/EKalfMnFS+UmGyD9W+ZkgMF62HIh+0x2Kce4e32mkWji9MRFBtL9 +Yhn6qMciA8noDdaQb/lnDgI/kBMNUSFsQcCynkHffvRWumaSm2+e55Mga8LpLpMb +47JcYBhcNn02S8znhP15z3b96SkRh7xHAGc5ALpIy/k8GNjr/b/bWACy2npliLQz +GFqwMhRSNmiKZ4v1CPaGE+2/Dy5DWpq/7sobQFNCOnnDO/BYX5/vZYzLZF37chSd +FaPZa/0pXiGr3z3O7xZGx9rLon2TgITzoHIxw/vSNt5saI+/iyeGfmN7EnfTzNfd +Iy9DTS5FqVddAdFtPc407zb8AsuRVW5Hw38Fri4B1Vu/JflJPfIGr2HIgll4wsFN +JKtzYcx34/Xldak4wNfPIlj/UoQ3zFjj5Ov/01MDO3cafvoL5l313W/g2v9k+J5k +iLA8gu693kAqH6zL3Zn0jCS7aIoTdN8P49GHs5xGAMR3n65Kw/Ow+9v8KrQ6JRk3 +ugMqqg6wsC07SiJ+zJOsN2HnYCX4xhI8RnDixzdYxWx7kmtMbgLedzhafgMoHx+v +E3vWnDegioTGMuoIjFGpxu/3PpL+tkHypx8AWz0PLumYsj+8KlV4haNBu6v6w1Q0 +rtkZ3NE6ywu82mVrMiD+wUOx5F0cqBp8IUOzMdUmJmz+NQxepBSXLJySFqHOBUGh +yRvVLtVefdg3UqyW2oiL3jNRUksZmZDEcM9djhWNJE7wmbIAoE/gH5fWulavMvp+ +3MVdS0KXmQGXiXqK4VF4yspoSdPsmG8VnZO3YRH+FEJPzjV8oN0LaAMAHNrrw+YN +4j/V4pJkVSdEYVyMJq1w+rICEds1KG6XGngryyh6OlR6kdQDzcUDm29IY8Ml603K +/LeG1roUjniL62u1UeZngyZilY4bi1FETg7+ckCwfmAwLyH8SJFEvmpPK/H2NrF9 +w/AE+QHTL8BNDbM4NBgqmqfrKggFFf/eFE7AxrUceMZZXBfG9N8DfhOiyt70JjSj +9+UjBeRLuZ/JhjrKo01bFPjtbQFoy/2yo2IqYPYCo8G6VN2y/qQKHLs7IQ/zyShQ +XNhPezFO3P0wpw4QpDfkQJVyrCEzoEohmlCoiSelFgMhHywvFowLA3xHNM519O7i +ZROF16uDE3qcOcIPQA4Me4g5ZCM8aouWwRbV45zpRMV4gnMoCBp4VUIrnkXQmfHv +hlV5uZZE9PB5Ms6Xb9GPRbpFkTbFXaan2PoetESI+cfw3HtjSdUv2w== From 81a8d29c5bd4f2e406a78402f48aa3d9d6397a3e Mon Sep 17 00:00:00 2001 From: Demetri Date: Wed, 18 Jan 2023 23:10:41 -0800 Subject: [PATCH 020/495] Expand what is ingored for venv to all venv's (#613) Signed-off-by: ddimatos Signed-off-by: ddimatos --- galaxy.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/galaxy.yml b/galaxy.yml index 2ce8af29e..b8c9397ee 100644 --- a/galaxy.yml +++ b/galaxy.yml @@ -81,7 +81,7 @@ build_ignore: - tests/requirements.txt - test_config.yml - changelogs - - venv + - venv* - make.env.encrypt - Makefile - make.env From 239c04737364502fc49b5929c3f973d1544f9c17 Mon Sep 17 00:00:00 2001 From: Demetri Date: Thu, 19 Jan 2023 09:14:34 -0800 Subject: [PATCH 021/495] Ansible-core versions with updated ignore file pass (#615) * Validated that ansible-core versions with updated ignore file pass locally Signed-off-by: ddimatos * remove redundant entry on last line Signed-off-by: ddimatos * remove redundant entry on last line Signed-off-by: ddimatos Signed-off-by: ddimatos --- meta/runtime.yml | 2 +- tests/sanity/ignore-2.10.txt | 3 ++- tests/sanity/ignore-2.11.txt | 1 + tests/sanity/ignore-2.12.txt | 1 + tests/sanity/ignore-2.13.txt | 1 + tests/sanity/ignore-2.14.txt | 1 + tests/sanity/ignore-2.9.txt | 3 ++- 7 files changed, 9 insertions(+), 3 deletions(-) diff --git a/meta/runtime.yml b/meta/runtime.yml index 0798808bc..dbba1c7ce 100644 --- a/meta/runtime.yml +++ b/meta/runtime.yml @@ -1,2 +1,2 @@ --- -requires_ansible: '>=2.9,<2.12' +requires_ansible: '>=2.9,<2.15' diff --git a/tests/sanity/ignore-2.10.txt b/tests/sanity/ignore-2.10.txt index c362873c0..51e13b014 100644 --- a/tests/sanity/ignore-2.10.txt +++ b/tests/sanity/ignore-2.10.txt @@ -58,6 +58,7 @@ plugins/modules/zos_job_submit.py compile-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_job_submit.py import-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_job_submit.py validate-modules:parameter-type-not-in-doc # Passing args from action plugin plugins/modules/zos_job_submit.py validate-modules:undocumented-parameter # Passing args from action plugin +plugins/modules/zos_job_submit.py pylint:catching-non-exception # False positive, Exception is inherited plugins/modules/zos_lineinfile.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_lineinfile.py compile-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_lineinfile.py import-2.6!skip # Python 2.6 is unsupported @@ -81,4 +82,4 @@ plugins/modules/zos_tso_command.py compile-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_tso_command.py import-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_gather_facts.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_gather_facts.py compile-2.6!skip # Python 2.6 is unsupported -plugins/modules/zos_gather_facts.py import-2.6!skip # Python 2.6 is unsupported +plugins/modules/zos_gather_facts.py import-2.6!skip # Python 2.6 is unsupported \ No newline at end of file diff --git a/tests/sanity/ignore-2.11.txt b/tests/sanity/ignore-2.11.txt index c362873c0..c40f3b41e 100644 --- a/tests/sanity/ignore-2.11.txt +++ b/tests/sanity/ignore-2.11.txt @@ -58,6 +58,7 @@ plugins/modules/zos_job_submit.py compile-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_job_submit.py import-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_job_submit.py validate-modules:parameter-type-not-in-doc # Passing args from action plugin plugins/modules/zos_job_submit.py validate-modules:undocumented-parameter # Passing args from action plugin +plugins/modules/zos_job_submit.py pylint:catching-non-exception # False positive, Exception is inherited plugins/modules/zos_lineinfile.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_lineinfile.py compile-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_lineinfile.py import-2.6!skip # Python 2.6 is unsupported diff --git a/tests/sanity/ignore-2.12.txt b/tests/sanity/ignore-2.12.txt index c362873c0..c40f3b41e 100644 --- a/tests/sanity/ignore-2.12.txt +++ b/tests/sanity/ignore-2.12.txt @@ -58,6 +58,7 @@ plugins/modules/zos_job_submit.py compile-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_job_submit.py import-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_job_submit.py validate-modules:parameter-type-not-in-doc # Passing args from action plugin plugins/modules/zos_job_submit.py validate-modules:undocumented-parameter # Passing args from action plugin +plugins/modules/zos_job_submit.py pylint:catching-non-exception # False positive, Exception is inherited plugins/modules/zos_lineinfile.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_lineinfile.py compile-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_lineinfile.py import-2.6!skip # Python 2.6 is unsupported diff --git a/tests/sanity/ignore-2.13.txt b/tests/sanity/ignore-2.13.txt index c362873c0..c40f3b41e 100644 --- a/tests/sanity/ignore-2.13.txt +++ b/tests/sanity/ignore-2.13.txt @@ -58,6 +58,7 @@ plugins/modules/zos_job_submit.py compile-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_job_submit.py import-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_job_submit.py validate-modules:parameter-type-not-in-doc # Passing args from action plugin plugins/modules/zos_job_submit.py validate-modules:undocumented-parameter # Passing args from action plugin +plugins/modules/zos_job_submit.py pylint:catching-non-exception # False positive, Exception is inherited plugins/modules/zos_lineinfile.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_lineinfile.py compile-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_lineinfile.py import-2.6!skip # Python 2.6 is unsupported diff --git a/tests/sanity/ignore-2.14.txt b/tests/sanity/ignore-2.14.txt index c362873c0..c40f3b41e 100644 --- a/tests/sanity/ignore-2.14.txt +++ b/tests/sanity/ignore-2.14.txt @@ -58,6 +58,7 @@ plugins/modules/zos_job_submit.py compile-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_job_submit.py import-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_job_submit.py validate-modules:parameter-type-not-in-doc # Passing args from action plugin plugins/modules/zos_job_submit.py validate-modules:undocumented-parameter # Passing args from action plugin +plugins/modules/zos_job_submit.py pylint:catching-non-exception # False positive, Exception is inherited plugins/modules/zos_lineinfile.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_lineinfile.py compile-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_lineinfile.py import-2.6!skip # Python 2.6 is unsupported diff --git a/tests/sanity/ignore-2.9.txt b/tests/sanity/ignore-2.9.txt index c362873c0..51e13b014 100644 --- a/tests/sanity/ignore-2.9.txt +++ b/tests/sanity/ignore-2.9.txt @@ -58,6 +58,7 @@ plugins/modules/zos_job_submit.py compile-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_job_submit.py import-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_job_submit.py validate-modules:parameter-type-not-in-doc # Passing args from action plugin plugins/modules/zos_job_submit.py validate-modules:undocumented-parameter # Passing args from action plugin +plugins/modules/zos_job_submit.py pylint:catching-non-exception # False positive, Exception is inherited plugins/modules/zos_lineinfile.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_lineinfile.py compile-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_lineinfile.py import-2.6!skip # Python 2.6 is unsupported @@ -81,4 +82,4 @@ plugins/modules/zos_tso_command.py compile-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_tso_command.py import-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_gather_facts.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_gather_facts.py compile-2.6!skip # Python 2.6 is unsupported -plugins/modules/zos_gather_facts.py import-2.6!skip # Python 2.6 is unsupported +plugins/modules/zos_gather_facts.py import-2.6!skip # Python 2.6 is unsupported \ No newline at end of file From 20bdea300d13ff67b83563b065cb74ba24203dcd Mon Sep 17 00:00:00 2001 From: Demetri Date: Fri, 20 Jan 2023 11:42:24 -0800 Subject: [PATCH 022/495] Update make file to read exported var VENV else default to 'venv' (#614) * Update make file to read exported var VENV else default to 'venv' Signed-off-by: ddimatos * Correct the default from defaultValue to 'venv' Signed-off-by: ddimatos --- Makefile | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/Makefile b/Makefile index 428f5d602..66c00acda 100644 --- a/Makefile +++ b/Makefile @@ -37,8 +37,11 @@ CURR_DIR := $(shell pwd) WHO := $(shell whoami) HOST_PYTHON = python3 -VENV = venv +# VENV = venv +# VENV := $(shell echo $$VENV) +VENV := $(shell echo "$${VENV:-venv}") VENV_BIN=$(VENV)/bin + ZOS_PYTHON_DEFAULT=3.8 ZOAU_DEFAULT=1.1.1 # Test if docker is running @@ -712,4 +715,4 @@ help: # If you have formatting issues; try `cat -e -t -v Makefile`. # ^I represent tabs and $'s represent end of the line. # -# If you need to debug your makefile command, use `-nd`, eg `make -nd vstop` \ No newline at end of file +# If you need to debug your makefile command, use `-nd`, eg `make -nd vstop` From 6b9cb62e1b520b04fcb22f5d9d19cad89b270d51 Mon Sep 17 00:00:00 2001 From: Fernando Flores Date: Fri, 3 Feb 2023 00:12:04 -0600 Subject: [PATCH 023/495] Bugfix/558/zos copy backup dev (#609) * Create emergency backup only if force is set to False and dest exists * Added Changelog fragment --- changelogs/fragments/588-zos_copy-emergency-backup.yml | 5 +++++ plugins/modules/zos_copy.py | 8 ++++---- 2 files changed, 9 insertions(+), 4 deletions(-) create mode 100644 changelogs/fragments/588-zos_copy-emergency-backup.yml diff --git a/changelogs/fragments/588-zos_copy-emergency-backup.yml b/changelogs/fragments/588-zos_copy-emergency-backup.yml new file mode 100644 index 000000000..393a0f50d --- /dev/null +++ b/changelogs/fragments/588-zos_copy-emergency-backup.yml @@ -0,0 +1,5 @@ +bugfixes: + - zos_copy - fixed wrongful creation of destination backups when module option + `force` is true, creating emergency backups meant to restore the system + to its initial state in case of a module failure only when force is false. + (https://github.com/ansible-collections/ibm_zos_core/pull/590) diff --git a/plugins/modules/zos_copy.py b/plugins/modules/zos_copy.py index f984e9195..9e3c7ad09 100644 --- a/plugins/modules/zos_copy.py +++ b/plugins/modules/zos_copy.py @@ -2233,7 +2233,7 @@ def run_module(module, arg_def): # Creating an emergency backup or an empty data set to use as a model to # be able to restore the destination in case the copy fails. - if dest_exists: + if dest_exists and not force: if is_uss or not data_set.DataSet.is_empty(dest_name): use_backup = True if is_uss: @@ -2261,7 +2261,7 @@ def run_module(module, arg_def): volume=volume ) except Exception as err: - if dest_exists: + if dest_exists and not force: restore_backup(dest_name, emergency_backup, dest_ds_type, use_backup) erase_backup(emergency_backup, dest_ds_type) module.fail_json( @@ -2370,7 +2370,7 @@ def run_module(module, arg_def): res_args["changed"] = True except CopyOperationError as err: - if dest_exists: + if dest_exists and not force: restore_backup( dest_name, emergency_backup, @@ -2382,7 +2382,7 @@ def run_module(module, arg_def): err.json_args["dest_exists"] = dest_exists raise err finally: - if dest_exists: + if dest_exists and not force: erase_backup(emergency_backup, dest_ds_type) res_args.update( From 959555d881d45cd93f363bb33235cad974a8aed5 Mon Sep 17 00:00:00 2001 From: Fernando Flores Date: Fri, 3 Feb 2023 00:17:59 -0600 Subject: [PATCH 024/495] Updated imports ansible.module_utils._text to ansible.module_utils.common.text.converters (#602) * Updated import in zos_fetch * Updated import in zos_job_submit * Updated import in module_utils/data_set * Updated import in module_utils/System * Updated import in zos_copy * Updated import in zos_fetch * Added changelog fragment * Update enhancement-518-text-converter-import.yml --- changelogs/fragments/enhancement-518-text-converter-import.yml | 3 +++ plugins/action/zos_fetch.py | 3 ++- plugins/action/zos_job_submit.py | 3 ++- plugins/module_utils/data_set.py | 3 ++- plugins/module_utils/system.py | 3 ++- plugins/modules/zos_copy.py | 3 ++- plugins/modules/zos_fetch.py | 3 ++- 7 files changed, 15 insertions(+), 6 deletions(-) create mode 100644 changelogs/fragments/enhancement-518-text-converter-import.yml diff --git a/changelogs/fragments/enhancement-518-text-converter-import.yml b/changelogs/fragments/enhancement-518-text-converter-import.yml new file mode 100644 index 000000000..691a57273 --- /dev/null +++ b/changelogs/fragments/enhancement-518-text-converter-import.yml @@ -0,0 +1,3 @@ +minor_changes: + - Updated the text converter import from "from ansible.module_utils._text" to "from ansible.module_utils.common.text.converters" to remove warning ".. warn:: Use ansible.module_utils.common.text.converters instead.". + diff --git a/plugins/action/zos_fetch.py b/plugins/action/zos_fetch.py index 4423a2985..dd2172fc8 100644 --- a/plugins/action/zos_fetch.py +++ b/plugins/action/zos_fetch.py @@ -17,7 +17,8 @@ import re from hashlib import sha256 -from ansible.module_utils._text import to_bytes, to_text +# from ansible.module_utils._text import to_bytes, to_text +from ansible.module_utils.common.text.converters import to_bytes, to_text from ansible.module_utils.six import string_types from ansible.module_utils.parsing.convert_bool import boolean from ansible.plugins.action import ActionBase diff --git a/plugins/action/zos_job_submit.py b/plugins/action/zos_job_submit.py index 7e7c9833f..6dcadad05 100644 --- a/plugins/action/zos_job_submit.py +++ b/plugins/action/zos_job_submit.py @@ -15,7 +15,8 @@ from ansible.plugins.action import ActionBase from ansible.errors import AnsibleError, AnsibleFileNotFound -from ansible.module_utils._text import to_bytes, to_text +# from ansible.module_utils._text import to_bytes, to_text +from ansible.module_utils.common.text.converters import to_bytes, to_text import os diff --git a/plugins/module_utils/data_set.py b/plugins/module_utils/data_set.py index 24d6d5500..8cd7199f8 100644 --- a/plugins/module_utils/data_set.py +++ b/plugins/module_utils/data_set.py @@ -18,7 +18,8 @@ from os import path, walk from string import ascii_uppercase, digits from random import randint -from ansible.module_utils._text import to_bytes +# from ansible.module_utils._text import to_bytes +from ansible.module_utils.common.text.converters import to_bytes from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.ansible_module import ( AnsibleModuleHelper, ) diff --git a/plugins/module_utils/system.py b/plugins/module_utils/system.py index 5bb5fed20..90b9d1013 100644 --- a/plugins/module_utils/system.py +++ b/plugins/module_utils/system.py @@ -17,7 +17,8 @@ from sys import platform as SYS_PLATFORM from subprocess import Popen, PIPE from ansible.module_utils.six import binary_type, text_type, PY2, PY3 -from ansible.module_utils._text import to_text, to_bytes +# from ansible.module_utils._text import to_text, to_bytes +from ansible.module_utils.common.text.converters import to_bytes, to_text from shlex import split diff --git a/plugins/modules/zos_copy.py b/plugins/modules/zos_copy.py index 9e3c7ad09..7a32b1bd4 100644 --- a/plugins/modules/zos_copy.py +++ b/plugins/modules/zos_copy.py @@ -664,7 +664,8 @@ from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.ansible_module import ( AnsibleModuleHelper, ) -from ansible.module_utils._text import to_bytes +# from ansible.module_utils._text import to_bytes +from ansible.module_utils.common.text.converters import to_bytes from ansible.module_utils.basic import AnsibleModule from ansible.module_utils.six import PY3 from re import IGNORECASE diff --git a/plugins/modules/zos_fetch.py b/plugins/modules/zos_fetch.py index a930e3458..dd43310b9 100644 --- a/plugins/modules/zos_fetch.py +++ b/plugins/modules/zos_fetch.py @@ -275,7 +275,8 @@ from math import ceil from shutil import rmtree from ansible.module_utils.basic import AnsibleModule -from ansible.module_utils._text import to_bytes +# from ansible.module_utils._text import to_bytes +from ansible.module_utils.common.text.converters import to_bytes from ansible.module_utils.parsing.convert_bool import boolean from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import ( better_arg_parser, From 6ef9a97a72f399b1ebaa257c112910a5ae07523a Mon Sep 17 00:00:00 2001 From: Demetri Date: Mon, 6 Feb 2023 15:33:13 -0800 Subject: [PATCH 025/495] Updates the makefile and related scripts for the latest mounts (#628) * Update makefile coments and use --ignore for pytest Signed-off-by: ddimatos * lexicographical order targets Signed-off-by: ddimatos * Update makefile and artifacts to support new mounts Signed-off-by: ddimatos * Removing encrypted versions of the scripts, no longer senstive content Signed-off-by: ddimatos --------- Signed-off-by: ddimatos --- Makefile | 992 ++++++++++++++++++----------------- make.env.encrypt | 562 ++++++++++---------- scripts/mount-shr.sh | 92 ++++ scripts/mount-shr.sh.encrypt | 71 --- scripts/profile-shr | 230 ++++++++ scripts/profile-shr.encrypt | 197 ------- 6 files changed, 1118 insertions(+), 1026 deletions(-) create mode 100755 scripts/mount-shr.sh delete mode 100644 scripts/mount-shr.sh.encrypt create mode 100755 scripts/profile-shr delete mode 100644 scripts/profile-shr.encrypt diff --git a/Makefile b/Makefile index 66c00acda..4f1f6f58e 100644 --- a/Makefile +++ b/Makefile @@ -10,24 +10,26 @@ # See the License for the specific language governing permissions and # limitations under the License. # -# Makefile is used to assist with development tasks that can be a bit tedious to -# create and often recreate. This provides a simple repeatable means to perform -# regular development actions and encourages better practices by simplifying -# tasks -# This makefile relies heavily on a paired shell script `make.env` which should -# not be renamed. The contents of the `make.env` are encrypted to adhere to -# coporate operational requiements. The format will be published should you wish -# to edit or create your own version of `make.env`. If you need to edit the -# `make.env` be sure to use this makefile to manage it by: +# Makefile is used to assist with development tasks such as running tests cases +# or setting up a python virtual environment. +# This makefile relies on shell script `make.env` which should not be renamed. +# The contents of the `make.env` are encrypted to adhere to coporate operational +# requiements. If you need to edit the `make.env` be sure to use this makefile +# to access the script: # (1) make decrypt -# (2) vi/edit the contents as needed -# (3) make encrypt +# While of some of the targets work without a venv, it's higly recommended you +# instruct make to create you a venv where it will perform operations: # (1) make vsetup -# (2) make build -# (3) make bandit sev=ll -# (4) make sanity version=3.8 -# (5) make test host= python= zoau= name= debug=true +# Optionally you can override the makefile's env var VENV to instruct it to +# create a `venv` based on your requiements.txt, you can do this by: +# (1) export VENV=venv-2.11 +# (2) make vsetup req=requirements-ac-2.11.12.txt +# Now all make targets will use the venv you assigned to the exported variable +# and also a directory `venv-2.11` will be created and populated with files used +# by make. You may consider pyvenv so that you can change your python versions +# to meet the needs of the various ansible-core versions. # ============================================================================== # ============================================================================== @@ -46,85 +48,170 @@ ZOS_PYTHON_DEFAULT=3.8 ZOAU_DEFAULT=1.1.1 # Test if docker is running DOCKER_INFO := $(shell docker info> /dev/null 2>&1;echo $$?) + +# Unit test to skip +SKIP = tests/functional/modules/test_module_security.py divider="====================================================================" .PHONY: help Makefile +# ============================================================================== +# Makefile +# ============================================================================== -## Encrypt the configuration files with a `.encrypt` suffix for files -## [make.env, mount-shr.sh, profile-shr] with user specified password. -## If no password is provided, you will be prompted to enter a password for each -## file being encrypted. +# ============================================================================== +# Run a bandit security scan on the plugin directory +# ============================================================================== +## Run a bandit security scan on the plugins directory, set the severity level. +## Options: +## level - choose from 'l', 'll', 'lll' +## - l all low, medium, high severity +## - ll all medium, high severity +## - lll all hight severity ## Example: -## $ make encrypt password= -## $ make encrypt -## Note: This is not a common operation, unless you tend to edit the configuration, avoid using this feature. -encrypt: - @# -------------------------------------------------------------------------- - @# Check to see if there is an unencrypted file(s) to encrypt, you would not - @# want to delete the encrypted version if the unecrypted is not present as - @# there would be no recovery process. Then check to see if there an - @# encrypted version of the file, if so delete it. - @# -------------------------------------------------------------------------- - @if [ -e make.env ] && [ -e make.env.encrypt ]; then \ - echo "Removing encrypted file 'make.env.encrypt' in $(CURR_DIR)."; \ - rm -rf make.env.encrypt; \ - fi +## $ make bandit sev=ll +## $ make bandit sev=l +bandit: + ifdef sev + @echo $(divider); + @echo "Running Bandit scan with sev=${sev}"; + @echo $(divider); + @. $(VENV_BIN)/activate && bandit -r plugins/* -${sev} + else + @echo "No bandit sev (severity) has been set." + endif - @if [ -e scripts/mount-shr.sh ] && [ -e scripts/mount-shr.sh.encrypt ]; then \ - echo "Removing encrypted file 'scripts/mount-shr.sh.encrypt' in $(CURR_DIR)/scripts."; \ - rm -rf scripts/mount-shr.sh.encrypt; \ - fi - @if [ -e scripts/profile-shr ] && [ -e scripts/profile-shr.encrypt ]; then \ - echo "Removing encrypted file 'scripts/profile-shr.encrypt' in $(CURR_DIR)/scripts."; \ - rm -rf scripts/profile-shr.encrypt; \ - fi +# ============================================================================== +# Build the current collection based on the git branch local to the computer. +# Currently, venv's only manage python packages, colleciton installation is managed +# with paths, if we wwanted to install it in the venv to not dirty the host, we +# could try building a similar command to pythons venv: +# ansible-galaxy -vv collection install --force -p venv/lib/python3.8/site-packages/ansible_collections +# ============================================================================== +## Build and installa collection of the current branch checked out +## Example: +## $ make build +build: + @echo $(divider) + @echo "Building Ansible collection based on local branch and installing." + @echo $(divider) - @# -------------------------------------------------------------------------- - @# Encrypt the files since we have verified the uncrypted versions exist - @# Note: we should move make.env to scripts as well - @# -------------------------------------------------------------------------- + @. $(VENV_BIN)/activate && rm -rf ibm-ibm_zos_core-*.tar.gz && \ + ansible-galaxy collection build && \ + ansible-galaxy collection install -f ibm-ibm_zos_core-* - ifdef password - ifneq ("$(wildcard scripts/mount-shr.sh)","") - @echo "${password}" | openssl bf -a -in scripts/mount-shr.sh -out scripts/mount-shr.sh.encrypt -pass stdin - # @openssl bf -a -in scripts/mount-shr.sh > scripts/mount-shr.sh.encrypt - @rm -f scripts/mount-shr.sh - endif +## Build the changelog, this should be a release activity otherwise the generated +## files should not be checked in. +## Example: +## $ make buildChglog +buildChglog: + @. $(VENV_BIN)/activate && antsibull-changelog release + + +## Update the documentation for the collection after module doc changes have been +## made. This simply calls the make file in the docs directory, see the make file +## there for additional options. +## Example: +## $ make buildDoc +buildDoc: + @. $(VENV_BIN)/activate && make -C docs clean + @. $(VENV_BIN)/activate && make -C docs module-doc + @. $(VENV_BIN)/activate && make -C docs html + @. $(VENV_BIN)/activate && make -C docs view-html + + +# ============================================================================== +# Cleanup and teardown based on user selection +# ============================================================================== +## Cleanup and teardown the environment based on the level selected. +## Options: +## level - choose from 'min', 'all' +## - 'all' will remove the venv, restore any temporarily located files +## and ensure config is encrypted +## - 'min' will restore any temporarily located files +## and ensure config is encrypted +## Example: +## $ make clean level=all +## $ make clean level=min +clean: + ifdef level + ifeq ($(level),all) + @echo $(divider) + @echo "Complete teardown selected." + @echo $(divider) - ifneq ("$(wildcard scripts/profile-shr)","") - @echo "${password}" | openssl bf -a -in scripts/profile-shr -out scripts/profile-shr.encrypt -pass stdin - # @openssl bf -a -in scripts/profile-shr > scripts/profile-shr.encrypt - @rm -f scripts/profile-shr + @echo $(divider) + @echo "Deleting python virtual environment 'venv'." + @echo $(divider) + @rm -rf $(VENV) endif - ifneq ("$(wildcard make.env)","") - @echo "${password}" | openssl bf -a -in make.env -out make.env.encrypt -pass stdin - # @openssl bf -a -in make.env > make.env.encrypt - @rm -f make.env + ifeq ($(level),min) + @echo $(divider); + @echo "Minimum teardown selected."; + @echo "Deleting files = [make.env, mount-shr.sh, profile-shr]."; + @echo $(divider); + @rm -rf $(VENV)/make.env + @rm -rf $(VENV)/mount-shr.sh + @rm -rf $(VENV)/profile-shr endif + @if test -e tests/functional/modules/test_module_security.txt; then \ + echo $(divider); \ + echo "Restoring 'test_module_security.py', previously removed to avoid execution."; \ + echo $(divider); \ + mv -f tests/functional/modules/test_module_security.txt tests/functional/modules/test_module_security.py; \ + fi + + # Unsure really need or even want to do this as part of cleanup + # @if test -e make.env; then \ + # echo $(divider); \ + # echo "Found uncrypted files, encrypting them."; \ + # echo $(divider); \ + # make encrypt; \ + # fi else - ifneq ("$(wildcard scripts/mount-shr.sh)","") - @openssl bf -a -in scripts/mount-shr.sh -out scripts/mount-shr.sh.encrypt - # @openssl bf -a -in scripts/mount-shr.sh > scripts/mount-shr.sh.encrypt - @rm -f scripts/mount-shr.sh - endif + @echo $(divider) + @echo "Default teardown, deleting $(VENV)" + @echo $(divider) + @rm -rf $(VENV) + endif - ifneq ("$(wildcard scripts/profile-shr)","") - @openssl bf -a -in scripts/profile-shr -out scripts/profile-shr.encrypt - # @openssl bf -a -in scripts/profile-shr > scripts/profile-shr.encrypt - @rm -f scripts/profile-shr - endif - ifneq ("$(wildcard make.env)","") - @openssl bf -a -in make.env -out make.env.encrypt - # @openssl bf -a -in make.env > make.env.encrypt - @rm -f make.env - endif +## Cleanup and remove geneated doc for the collection if its not going to be +## checked in +## Example: +## $ make cleanDoc +cleanDoc: + @. $(VENV_BIN)/activate && make -C docs clean + + +## Copy your ssh key to a `host` or the default which is your username. If you are +## copying a key to a production server, a second key will be copied used by the +## jenkins node, this minimizes the number of times you must copy a key. You must +## have set up a venv `venv` as that is where the environment script and configurations +## get written to manage this make file. It avoids continued decryption prompts to +## force users to set up the venv via `vsetup` +## Options: +## host - choose from a known host or don't set a value for the default operation +## which is to user your username to look up your default system +## Example: +## $ make copyKey host=ec33012a +## $ make copyKey +copyKey: + @echo $(divider) + @echo "Copying SSH keys to the managed node authorized_keys." + @echo $(divider) + + ifdef host + @${VENV}/./make.env --cert ${host} + else + @$(eval username := $(shell whoami)) + @${VENV}/./make.env --cert ${username} endif + ## Decrypt all scripts used with this Makefile using the user specified password ## Files include: ["mount-shr.sh", "profile-shr", "make.env"] ## If no password is provided, you will be prompted to enter a password for each @@ -136,15 +223,15 @@ decrypt: @# -------------------------------------------------------------------------- @# Check configuration files exit @# -------------------------------------------------------------------------- - @if test ! -e scripts/mount-shr.sh.encrypt; then \ - echo "File 'mount-shr.sh.encrypt' not found in scripts/mount-shr.sh.encrypt"; \ - exit 1; \ - fi + #@if test ! -e scripts/mount-shr.sh.encrypt; then \ + # echo "File 'mount-shr.sh.encrypt' not found in scripts/mount-shr.sh.encrypt"; \ + # exit 1; \ + #fi - @if test ! -e scripts/profile-shr.encrypt; then \ - echo "File 'scripts/profile-shr.encrypt' not found in scripts/profile-shr.encrypt"; \ - exit 1; \ - fi + #@if test ! -e scripts/profile-shr.encrypt; then \ + # echo "File 'scripts/profile-shr.encrypt' not found in scripts/profile-shr.encrypt"; \ + # exit 1; \ + #fi @if test ! -e make.env.encrypt; then \ echo "File 'make.env.encrypt' not found in $(CURR_DIR)"; \ @@ -155,271 +242,139 @@ decrypt: @# Decrypt configuration files @# ------------------------------------------------------------------------- ifdef password - @echo "${password}" | openssl bf -d -a -in scripts/mount-shr.sh.encrypt -out scripts/mount-shr.sh -pass stdin - @chmod 700 scripts/mount-shr.sh + #@echo "${password}" | openssl bf -d -a -in scripts/mount-shr.sh.encrypt -out scripts/mount-shr.sh -pass stdin + #@chmod 700 scripts/mount-shr.sh - @echo "${password}" | openssl bf -d -a -in scripts/profile-shr.encrypt -out scripts/profile-shr -pass stdin - @chmod 700 scripts/profile-shr + #@echo "${password}" | openssl bf -d -a -in scripts/profile-shr.encrypt -out scripts/profile-shr -pass stdin + #@chmod 700 scripts/profile-shr @echo "${password}" | openssl bf -d -a -in make.env.encrypt -out make.env -pass stdin @chmod 700 make.env else - @openssl bf -d -a -in scripts/mount-shr.sh.encrypt -out scripts/mount-shr.sh - @chmod 700 scripts/mount-shr.sh + #@openssl bf -d -a -in scripts/mount-shr.sh.encrypt -out scripts/mount-shr.sh + #@chmod 700 scripts/mount-shr.sh - @openssl bf -d -a -in scripts/profile-shr.encrypt -out scripts/profile-shr - @chmod 700 scripts/profile-shr + #@openssl bf -d -a -in scripts/profile-shr.encrypt -out scripts/profile-shr + #@chmod 700 scripts/profile-shr @openssl bf -d -a -in make.env.encrypt -out make.env @chmod 700 make.env endif -# ============================================================================== -# Set up your venv, currently its hard coded to `venv` and designed to look first -# to see if you have one before trying to create one. -# @test -d $(VENV) || $(HOST_PYTHON) -m venv $(VENV) -# ============================================================================== -## Create a python virtual environment (venv) based on the systems python3 -## Options: -## req - a user provided requirements.txt, if this is not set one will be -## created for you. -## Example: -## $ make vsetup -## $ make vsetup req=tests/requirements.txt -vsetup: - @# ------------------------------------------------------------------------- - @# Create the virtual environment directory if it does not exist - @# ------------------------------------------------------------------------- - @if test ! -d $(VENV); then \ - echo $(divider); \ - echo "Creating python virtual environment directory $(VENV)."; \ - echo $(divider); \ - $(HOST_PYTHON) -m venv $(VENV); \ - else \ - echo "Virtual environment already exists, no changes made."; \ +## Encrypt the configuration files with a `.encrypt` suffix for files +## [make.env, mount-shr.sh, profile-shr] with user specified password. +## If no password is provided, you will be prompted to enter a password for each +## file being encrypted. +## Example: +## $ make encrypt password= +## $ make encrypt +## Note: This is not a common operation, unless you tend to edit the configuration, avoid using this feature. +encrypt: + @# -------------------------------------------------------------------------- + @# Check to see if there is an unencrypted file(s) to encrypt, you would not + @# want to delete the encrypted version if the unecrypted is not present as + @# there would be no recovery process. Then check to see if there an + @# encrypted version of the file, if so delete it. + @# -------------------------------------------------------------------------- + @if [ -e make.env ] && [ -e make.env.encrypt ]; then \ + echo "Removing encrypted file 'make.env.encrypt' in $(CURR_DIR)."; \ + rm -rf make.env.encrypt; \ fi - @# ------------------------------------------------------------------------- - @# Check if files exist in venv, if they do we should not decrypt/replace - @# them as they could have edits and risk losing them. - @# ------------------------------------------------------------------------- + # @if [ -e scripts/mount-shr.sh ] && [ -e scripts/mount-shr.sh.encrypt ]; then \ + # echo "Removing encrypted file 'scripts/mount-shr.sh.encrypt' in $(CURR_DIR)/scripts."; \ + # rm -rf scripts/mount-shr.sh.encrypt; \ + # fi - @if test ! -e $(VENV)/make.env && \ - test ! -e $(VENV)/mount-shr.sh && \ - test ! -e $(VENV)/profile-shr; then \ - echo $(divider); \ - echo "Decrypting files into $(VENV)."; \ - echo $(divider); \ - make decrypt; \ - mv make.env $(VENV)/; \ - mv scripts/mount-shr.sh $(VENV)/; \ - mv scripts/profile-shr $(VENV)/; \ - else \ - echo "Files $(VENV)/[make.env, mount-shr.sh,profile-shr] already exist, no changes made."; \ - fi + # @if [ -e scripts/profile-shr ] && [ -e scripts/profile-shr.encrypt ]; then \ + # echo "Removing encrypted file 'scripts/profile-shr.encrypt' in $(CURR_DIR)/scripts."; \ + # rm -rf scripts/profile-shr.encrypt; \ + # fi - ifdef req - @if test -f ${req}; then \ - echo $(divider); \ - echo "Installing user provided python requirements into $(VENV)."; \ - echo $(divider); \ - cp ${req} ${VENV}/requirements.txt; \ - . $(VENV_BIN)/activate && pip install -r $(VENV)/requirements.txt; \ - fi - else - @if test ! -e $(VENV)/requirements.txt; then \ - echo $(divider); \ - echo "Installing default python requirements into $(VENV)."; \ - echo $(divider); \ - echo $$(${VENV}/./make.env --req)>${VENV}/requirements.txt; \ - . $(VENV_BIN)/activate && pip install -r $(VENV)/requirements.txt; \ - else \ - echo "Requirements file $(VENV)/requirements.txt already exists, no new packages installed."; \ - fi - endif + @# -------------------------------------------------------------------------- + @# Encrypt the files since we have verified the uncrypted versions exist + @# Note: we should move make.env to scripts as well + @# -------------------------------------------------------------------------- -# ============================================================================== -# You don't need to activate your venv with this Makefile, but should you want -# to, you can with vstart. -# ============================================================================== -## Start the venv if you plan to work in a python virtual environment -## Example: -## $ make vstart -vstart: - @echo $(divider) - @echo "Activating python virtual environment 'venv', use 'vstop' to deactivate." - @echo $(divider) - @. $(VENV_BIN)/activate; exec /bin/sh -i - -# ============================================================================== -# Deactivate your venv -# ============================================================================== -## Deactivate (stop) the venv -## Example: -## $ make vstop -vstop: - @echo $(divider) - @echo "Deactivate python virtual environment 'venv'." - @echo $(divider) - @. deactivate - -# ============================================================================== -# Build the current collection based on the git branch local to the computer. -# Currently, venv's only manage python packages, colleciton installation is managed -# with paths, if we wwanted to install it in the venv to not dirty the host, we -# could try building a similar command to pythons venv: -# ansible-galaxy -vv collection install --force -p venv/lib/python3.8/site-packages/ansible_collections -# ============================================================================== -## Build and installa collection of the current branch checked out -## Example: -## $ make build -build: - @echo $(divider) - @echo "Building Ansible collection based on local branch and installing." - @echo $(divider) + ifdef password - @. $(VENV_BIN)/activate && rm -rf ibm-ibm_zos_core-*.tar.gz && \ - ansible-galaxy collection build && \ - ansible-galaxy collection install -f ibm-ibm_zos_core-* + #ifneq ("$(wildcard scripts/mount-shr.sh)","") + # @echo "${password}" | openssl bf -a -in scripts/mount-shr.sh -out scripts/mount-shr.sh.encrypt -pass stdin + # # @openssl bf -a -in scripts/mount-shr.sh > scripts/mount-shr.sh.encrypt + # @rm -f scripts/mount-shr.sh + #endif -# ============================================================================== -# Run functional tests: -# ============================================================================== -## Run collection functional tests inside the python virtual environment (venv) -## Options: -## host - z/OS managed node to run test cases, no selection will default to -## a system registerd to your user name, see make.env -## python - IBM enterprise python version, choices are 3.8, 3.9, 3.10, 3.11 -## no selection defauls to 3.8 -## zoau - Z Open Automation Utilites to use with the collection, choices are 1.0.3, 1.1.1, 1.2.0, 1.2.1 -## no selection defaults to 1.1.1 -## name - the absoulte path to a particluar test case to run, no selection -## will default to all test cases running. -## debug - enable debug for pytest (-s), any value will result in true enabling -## debug, default is to not define a value so that it evaluates to false -## Example: -## $ make test (runs all tests using default users system and dependencies) -## $ make test name=tests/functional/modules/test_zos_copy_func.py debug=true (run specific test and debug) -## $ make test host=ec33012a python=3.9 zoau=1.1.1 name=tests/functional/modules/test_zos_copy_func.py debug=true -test: - @# -------------------------------------------------------------------------- - @# Expecting the zOS host, python version and zoau version to use with - @# generating a configuration for us with zTest helper. - @# -------------------------------------------------------------------------- + #ifneq ("$(wildcard scripts/profile-shr)","") + # @echo "${password}" | openssl bf -a -in scripts/profile-shr -out scripts/profile-shr.encrypt -pass stdin + # # @openssl bf -a -in scripts/profile-shr > scripts/profile-shr.encrypt + # @rm -f scripts/profile-shr + #endif - ifdef host - ifdef python - ifdef zoau - @echo $$(${VENV}/./make.env --config ${host} ${python} ${zoau})>$(VENV)/config.yml - else - @echo "Option 'zoau=' was not set, eg zoau=1.1.1" - @exit 1 - endif - else - @echo "No python version option was set, eg python=3.8" - @exit 1 + ifneq ("$(wildcard make.env)","") + @echo "${password}" | openssl bf -a -in make.env -out make.env.encrypt -pass stdin + # @openssl bf -a -in make.env > make.env.encrypt + @rm -f make.env endif - else - @# -------------------------------------------------------------------------- - @# When a quick test with no options and defaults are acceptable, a - @# lookup using the users usersname is mapped to a default of known - @# zos targets registered in make.env - @# -------------------------------------------------------------------------- - - $(eval username := $(shell whoami)) - echo $$(${VENV}/./make.env --config ${username} ${ZOS_PYTHON_DEFAULT} ${ZOAU_DEFAULT})>$(VENV)/config.yml - - endif - - @# -------------------------------------------------------------------------- - @# Check configuration was created in venv/config.yml, else error and exit - @# -------------------------------------------------------------------------- - - @if test ! -e $(VENV)/config.yml; then \ - echo "No configuration created in $(VENV)/config.yml "; \ - exit 1; \ - fi - - @# -------------------------------------------------------------------------- - @# Check if name='a specific test' and if debug was set, else run all tests - @# -------------------------------------------------------------------------- - - @if test -e tests/functional/modules/test_module_security.py; then \ - mv -f tests/functional/modules/test_module_security.py tests/functional/modules/test_module_security.txt; \ - fi - ifdef name - ifdef debug - @. $(VENV_BIN)/activate && $(VENV_BIN)/pytest $(name) --host-pattern=all --zinventory=$(VENV)/config.yml -s - else - @. $(VENV_BIN)/activate && $(VENV_BIN)/pytest $(name) --host-pattern=all --zinventory=$(VENV)/config.yml - endif else - ifdef debug - @. $(VENV_BIN)/activate && $(VENV_BIN)/pytest --host-pattern=all --zinventory=$(VENV)/config.yml -s - else - @. $(VENV_BIN)/activate && $(VENV_BIN)/pytest --host-pattern=all --zinventory=$(VENV)/config.yml + #ifneq ("$(wildcard scripts/mount-shr.sh)","") + # @openssl bf -a -in scripts/mount-shr.sh -out scripts/mount-shr.sh.encrypt + # # @openssl bf -a -in scripts/mount-shr.sh > scripts/mount-shr.sh.encrypt + # @rm -f scripts/mount-shr.sh + #endif + + #ifneq ("$(wildcard scripts/profile-shr)","") + # @openssl bf -a -in scripts/profile-shr -out scripts/profile-shr.encrypt + # # @openssl bf -a -in scripts/profile-shr > scripts/profile-shr.encrypt + # @rm -f scripts/profile-shr + #endif + + ifneq ("$(wildcard make.env)","") + @openssl bf -a -in make.env -out make.env.encrypt + # @openssl bf -a -in make.env > make.env.encrypt + @rm -f make.env endif endif - @if test -e tests/functional/modules/test_module_security.txt; then \ - mv -f tests/functional/modules/test_module_security.txt tests/functional/modules/test_module_security.py; \ - fi # ============================================================================== -# Run the sanity test using docker given python version else default to venv +# Self documenting code that when comments are created as expected, the help +# is auto generated. Supports multiline comments when comments are prefixed with +# 2 pound signs and a space, see examples in this makefile. # ============================================================================== -## Run sanity tests either in the virtual environment (venv) or docker if there is a running docker engine -## Options: -## version - choose from '2.6', '2.7', '3.5', '3.6', '3.7', '3.8', '3.9', no selection will run all available python versions -## Example: -## $ make sanity version=3.8 -## $ make sanity -sanity: - ifeq ($(DOCKER_INFO),0) - ifdef version - @. $(VENV_BIN)/activate && cd ~/.ansible/collections/ansible_collections/ibm/ibm_zos_core && \ - ansible-test sanity --python $(version) --requirements --docker default && \ - cd $(CURR_DIR); - else - @. $(VENV_BIN)/activate && cd ~/.ansible/collections/ansible_collections/ibm/ibm_zos_core && \ - ansible-test sanity --requirements --docker default && \ - cd $(CURR_DIR); - endif - else - ifdef version - @. $(VENV_BIN)/activate && cd ~/.ansible/collections/ansible_collections/ibm/ibm_zos_core && \ - ansible-test sanity --python $(version) --requirements && \ - cd $(CURR_DIR); - else - @. $(VENV_BIN)/activate && cd ~/.ansible/collections/ansible_collections/ibm/ibm_zos_core && \ - ansible-test sanity --requirements && \ - cd $(CURR_DIR); - endif - endif +## Help on how how to use this Makefile, options and examples. +help: + @awk '{ \ + if ($$0 ~ /^.PHONY: [a-zA-Z\-\_0-9]+$$/) { \ + helpCommand = substr($$0, index($$0, ":") + 2); \ + if (helpMessage) { \ + printf "\033[36m%-20s\033[0m %s\n", \ + helpCommand, helpMessage; \ + helpMessage = ""; \ + } \ + } else if ($$0 ~ /^[a-zA-Z\-\_0-9.]+:/) { \ + helpCommand = substr($$0, 0, index($$0, ":")); \ + if (helpMessage) { \ + printf "\033[36m%-10s\033[0m %s\n", \ + helpCommand, helpMessage; \ + helpMessage = ""; \ + } \ + } else if ($$0 ~ /^##/) { \ + if (helpMessage) { \ + helpMessage = helpMessage"\n "substr($$0, 3); \ + } else { \ + helpMessage = substr($$0, 3); \ + } \ + } else { \ + if (helpMessage) { \ + print "\n "helpMessage"\n" \ + } \ + helpMessage = ""; \ + } \ + }' \ + $(MAKEFILE_LIST) -# ============================================================================== -# Run a bandit security scan on the plugin directory -# ============================================================================== -## Run a bandit security scan on the plugins directory, set the severity level. -## Options: -## level - choose from 'l', 'll', 'lll' -## - l all low, medium, high severity -## - ll all medium, high severity -## - lll all hight severity -## Example: -## $ make bandit sev=ll -## $ make bandit sev=l -bandit: - ifdef sev - @echo $(divider); - @echo "Running Bandit scan with sev=${sev}"; - @echo $(divider); - @. $(VENV_BIN)/activate && bandit -r plugins/* -${sev} - else - @echo "No bandit sev (severity) has been set." - endif # ============================================================================== # Install an ibm_zos_core collection from galaxy (or how you have ansible.cfg configured) @@ -443,19 +398,38 @@ install: @. $(VENV_BIN)/activate && ansible-galaxy collection install -fc ibm.ibm_zos_core endif -# ============================================================================== -# Check the version of the ibm_zos_core collection installed -# ============================================================================== -## Get the version of the ibm_zos_core collection installed + +## Copy your ssh key to a `host` or the default which is your username. Then +## copy the super share mount script and profile for the mounts, execute the +## mount script and exit, upon rmote ssh, `profile-shr` will be located +## at `/u/${user} where user is defined in the make.env `host_list`. You must +## have set up a venv `venv` as that is where the environment script and configurations +## get written to manage this make file. It avoids continued decryption prompts to +## force users to set up the venv via `vsetup` +## Options: +## host - choose from a known host or don't set a value for the default operation +## which is to user your username to look up your default system ## Example: -## $ make version -version: - @echo $(divider) - @echo "Obtaining Ansible collection version installed on this controller." - @echo $(divider) +## $ make mountProfile host=ec33012a +## $ make mountProfile +mountProfile: + ifdef host + @make copyKey host=${host} + @echo $(divider) + @echo "Copying mount script to managed node and executing." + @echo "Copying profile-shr to managed node." + @echo $(divider) + @${VENV}/./make.env --files "${host}" "${VENV}/mount-shr.sh" "${VENV}/profile-shr" + else + @make copyKey + @echo $(divider) + @echo "Copying mount script to managed node and executing." + @echo "Copying profile-shr to managed node." + @echo $(divider) + @$(eval username := $(shell whoami)) + @${VENV}/./make.env --files ${username} $(VENV)/mount-shr.sh $(VENV)/profile-shr + endif - @cat ~/.ansible/collections/ansible_collections/ibm/ibm_zos_core/MANIFEST.json \ - |grep version|cut -d ':' -f 2 | sed "s/,*$\//g" | tr -d '"'; # ============================================================================== # Print the configuration used to connect to the managed node for functional tests @@ -474,6 +448,7 @@ printConfig: echo "No configuration was found, consider creating a venv using `make vsetup` first."; \ fi + # ============================================================================== # Print the make.env contents # ============================================================================== @@ -489,6 +464,7 @@ printEnv: echo "No configuration was found, consider creating a venv using `make vsetup` first."; \ fi + # ============================================================================== # Print the make.env contents # ============================================================================== @@ -504,6 +480,7 @@ printMount: echo "No configuration was found, consider creating a venv using `make vsetup` first."; \ fi + # ============================================================================== # Print the make.env contents # ============================================================================== @@ -519,186 +496,235 @@ printProfile: echo "No configuration was found, consider creating a venv using `make vsetup` first."; \ fi -# ============================================================================== -# Cleanup and teardown based on user selection -# ============================================================================== -## Cleanup and teardown the environment based on the level selected. -## Options: -## level - choose from 'min', 'all' -## - 'all' will remove the venv, restore any temporarily located files -## and ensure config is encrypted -## - 'min' will restore any temporarily located files -## and ensure config is encrypted + +## Display the z/OS managed nodes available and configured. This will show which +## systems you can use in the host argument for `make test host<....>` ## Example: -## $ make clean level=all -## $ make clean level=min -clean: - ifdef level - ifeq ($(level),all) - @echo $(divider) - @echo "Complete teardown selected." - @echo $(divider) +## $ make printTargets +printTargets: + @${VENV}/./make.env --targets - @echo $(divider) - @echo "Deleting python virtual environment 'venv'." - @echo $(divider) - @rm -rf $(VENV) - endif - ifeq ($(level),min) - @echo $(divider); - @echo "Minimum teardown selected."; - @echo "Deleting files = [make.env, mount-shr.sh, profile-shr]."; - @echo $(divider); - @rm -rf $(VENV)/make.env - @rm -rf $(VENV)/mount-shr.sh - @rm -rf $(VENV)/profile-shr +# ============================================================================== +# Run the sanity test using docker given python version else default to venv +# ============================================================================== +## Run sanity tests either in the virtual environment (venv) or docker if there is a running docker engine +## Options: +## version - choose from '2.6', '2.7', '3.5', '3.6', '3.7', '3.8', '3.9', no selection will run all available python versions +## Example: +## $ make sanity version=3.8 +## $ make sanity +sanity: + ifeq ($(DOCKER_INFO),0) + ifdef version + @. $(VENV_BIN)/activate && cd ~/.ansible/collections/ansible_collections/ibm/ibm_zos_core && \ + ansible-test sanity --python $(version) --requirements --docker default && \ + cd $(CURR_DIR); + else + @. $(VENV_BIN)/activate && cd ~/.ansible/collections/ansible_collections/ibm/ibm_zos_core && \ + ansible-test sanity --requirements --docker default && \ + cd $(CURR_DIR); endif - - @if test -e tests/functional/modules/test_module_security.txt; then \ - echo $(divider); \ - echo "Restoring 'test_module_security.py', previously removed to avoid execution."; \ - echo $(divider); \ - mv -f tests/functional/modules/test_module_security.txt tests/functional/modules/test_module_security.py; \ - fi - - # Unsure really need or even want to do this as part of cleanup - # @if test -e make.env; then \ - # echo $(divider); \ - # echo "Found uncrypted files, encrypting them."; \ - # echo $(divider); \ - # make encrypt; \ - # fi else - @echo $(divider) - @echo "Default teardown, deleting $(VENV)" - @echo $(divider) - @rm -rf $(VENV) + ifdef version + @. $(VENV_BIN)/activate && cd ~/.ansible/collections/ansible_collections/ibm/ibm_zos_core && \ + ansible-test sanity --python $(version) --requirements && \ + cd $(CURR_DIR); + else + @. $(VENV_BIN)/activate && cd ~/.ansible/collections/ansible_collections/ibm/ibm_zos_core && \ + ansible-test sanity --requirements && \ + cd $(CURR_DIR); + endif endif -## Copy your ssh key to a `host` or the default which is your username. If you are -## copying a key to a production server, a second key will be copied used by the -# jenkins node, this minimizes the number of times you must copy a key. You must -## have set up a venv `venv` as that is where the environment script and configurations -## get written to manage this make file. It avoids continued decryption prompts to -## force users to set up the venv via `vsetup` + +# ============================================================================== +# Run functional tests: +# ============================================================================== +## Run collection functional tests inside the python virtual environment (venv) ## Options: -## host - choose from a known host or don't set a value for the default operation -## which is to user your username to look up your default system +## host - z/OS managed node to run test cases, no selection will default to +## a system registerd to your user name, see make.env +## python - IBM enterprise python version, choices are 3.8, 3.9, 3.10, 3.11 +## no selection defauls to 3.8 +## zoau - Z Open Automation Utilites to use with the collection, choices are 1.0.3, 1.1.1, 1.2.0, 1.2.1 +## no selection defaults to 1.1.1 +## name - the absoulte path to a particluar test case to run, no selection +## will default to all test cases running. +## debug - enable debug for pytest (-s), any value will result in true enabling +## debug, default is to not define a value so that it evaluates to false ## Example: -## $ make copyKey host=ec33012a -## $ make copyKey -copyKey: - @echo $(divider) - @echo "Copying SSH keys to the managed node authorized_keys." - @echo $(divider) +## $ make test (runs all tests using default users system and dependencies) +## $ make test name=tests/functional/modules/test_zos_copy_func.py debug=true (run specific test and debug) +## $ make test host=ec33012a python=3.9 zoau=1.1.1 name=tests/functional/modules/test_zos_copy_func.py debug=true +test: + @# -------------------------------------------------------------------------- + @# Expecting the zOS host, python version and zoau version to use with + @# generating a configuration for us with zTest helper. + @# -------------------------------------------------------------------------- ifdef host - @${VENV}/./make.env --cert ${host} + ifdef python + ifdef zoau + @echo $$(${VENV}/./make.env --config ${host} ${python} ${zoau})>$(VENV)/config.yml + else + @echo "Option 'zoau=' was not set, eg zoau=1.1.1" + @exit 1 + endif + else + @echo "No python version option was set, eg python=3.8" + @exit 1 + endif else - @$(eval username := $(shell whoami)) - @${VENV}/./make.env --cert ${username} + @# -------------------------------------------------------------------------- + @# When a quick test with no options and defaults are acceptable, a + @# lookup using the users usersname is mapped to a default of known + @# zos targets registered in make.env + @# -------------------------------------------------------------------------- + + $(eval username := $(shell whoami)) + echo $$(${VENV}/./make.env --config ${username} ${ZOS_PYTHON_DEFAULT} ${ZOAU_DEFAULT})>$(VENV)/config.yml + endif -## Copy your ssh key to a `host` or the default which is your username. Then -## copy the super share mount script and profile for the mounts, execute the -## mount script and exit, upon rmote ssh, `profile-shr` will be located -## at `/u/${user} where user is defined in the make.env `host_list`. You must -## have set up a venv `venv` as that is where the environment script and configurations -## get written to manage this make file. It avoids continued decryption prompts to -## force users to set up the venv via `vsetup` -## Options: -## host - choose from a known host or don't set a value for the default operation -## which is to user your username to look up your default system -## Example: -## $ make mountProfile host=ec33012a -## $ make mountProfile -mountProfile: - ifdef host - @make copyKey host=${host} - @echo $(divider) - @echo "Copying mount script to managed node and executing." - @echo "Copying profile-shr to managed node." - @echo $(divider) - @${VENV}/./make.env --files "${host}" "${VENV}/mount-shr.sh" "${VENV}/profile-shr" + @# -------------------------------------------------------------------------- + @# Check configuration was created in venv/config.yml, else error and exit + @# -------------------------------------------------------------------------- + + @if test ! -e $(VENV)/config.yml; then \ + echo "No configuration created in $(VENV)/config.yml "; \ + exit 1; \ + fi + + @# -------------------------------------------------------------------------- + @# Check if name='a specific test' and if debug was set, else run all tests + @# -------------------------------------------------------------------------- + + ifdef name + ifdef debug + @. $(VENV_BIN)/activate && $(VENV_BIN)/pytest --ignore=${SKIP} $(name) --host-pattern=all --zinventory=$(VENV)/config.yml -s + else + @. $(VENV_BIN)/activate && $(VENV_BIN)/pytest --ignore=${SKIP} $(name) --host-pattern=all --zinventory=$(VENV)/config.yml + endif else - @make copyKey - @echo $(divider) - @echo "Copying mount script to managed node and executing." - @echo "Copying profile-shr to managed node." - @echo $(divider) - @$(eval username := $(shell whoami)) - @${VENV}/./make.env --files ${username} $(VENV)/mount-shr.sh $(VENV)/profile-shr + ifdef debug + @. $(VENV_BIN)/activate && $(VENV_BIN)/pytest --ignore=${SKIP} --host-pattern=all --zinventory=$(VENV)/config.yml -s + else + @. $(VENV_BIN)/activate && $(VENV_BIN)/pytest --ignore=${SKIP} --host-pattern=all --zinventory=$(VENV)/config.yml + endif endif -## Display the z/OS managed nodes available and configured. This will show which -## systems you can use in the host argument for `make test host<....>` -## Example: -## $ make printTargets -printTargets: - @${VENV}/./make.env --targets -## Build the changelog, this should be a release activity otherwise the generated -## files should not be checked in. +# ============================================================================== +# Check the version of the ibm_zos_core collection installed +# ============================================================================== +## Get the version of the ibm_zos_core collection installed ## Example: -## $ make buildChglog -buildChglog: - @. $(VENV_BIN)/activate && antsibull-changelog release +## $ make version +version: + @echo $(divider) + @echo "Obtaining Ansible collection version installed on this controller." + @echo $(divider) -## Update the documentation for the collection after module doc changes have been -## made. This simply calls the make file in the docs directory, see the make file -## there for additional options. + @cat ~/.ansible/collections/ansible_collections/ibm/ibm_zos_core/MANIFEST.json \ + |grep version|cut -d ':' -f 2 | sed "s/,*$\//g" | tr -d '"'; + +# ============================================================================== +# Setup the python virtual environment, the default name is 'venv'. You can +# override the default name by exporting the variable VENV: +# (1) export VENV=venv-2.11 +# (2) make vsetup req=requirements-ac-2.11.12.txt +# @test -d $(VENV) || $(HOST_PYTHON) -m venv $(VENV) +# ============================================================================== +## Create a python virtual environment (venv) based on the hosts python3 +## Options: +## req - your requirements.txt else a default one will be used ## Example: -## $ make buildDoc -buildDoc: - @. $(VENV_BIN)/activate && make -C docs clean - @. $(VENV_BIN)/activate && make -C docs module-doc - @. $(VENV_BIN)/activate && make -C docs html - @. $(VENV_BIN)/activate && make -C docs view-html +## $ make vsetup +## $ make vsetup req=path/to/requirements.txt +## +## Override the default virtual environment name 'venv' by exporting var VENV +## $ export VENV=venv-2.11 +## $ make vsetup req=requirements-ac-2.11.12.txt +vsetup: -## Cleanup and remove geneated doc for the collection if its not going to be -## checked in + @# ------------------------------------------------------------------------- + @# Create the virtual environment directory if it does not exist + @# ------------------------------------------------------------------------- + @if test ! -d $(VENV); then \ + echo $(divider); \ + echo "Creating python virtual environment directory $(VENV)."; \ + echo $(divider); \ + $(HOST_PYTHON) -m venv $(VENV); \ + else \ + echo "Virtual environment already exists, no changes made."; \ + fi + + @# ------------------------------------------------------------------------- + @# Check if files exist in venv, if they do we should not decrypt/replace + @# them as they could have edits and risk losing them. + @# ------------------------------------------------------------------------- + + @if test ! -e $(VENV)/make.env && \ + test ! -e $(VENV)/mount-shr.sh && \ + test ! -e $(VENV)/profile-shr; then \ + echo $(divider); \ + echo "Decrypting files into $(VENV)."; \ + echo $(divider); \ + make decrypt; \ + mv make.env $(VENV)/; \ + mv scripts/mount-shr.sh $(VENV)/; \ + mv scripts/profile-shr $(VENV)/; \ + else \ + echo "Files $(VENV)/[make.env, mount-shr.sh,profile-shr] already exist, no changes made."; \ + fi + + ifdef req + @if test -f ${req}; then \ + echo $(divider); \ + echo "Installing user provided python requirements into $(VENV)."; \ + echo $(divider); \ + cp ${req} ${VENV}/requirements.txt; \ + . $(VENV_BIN)/activate && pip install -r $(VENV)/requirements.txt; \ + fi + else + @if test ! -e $(VENV)/requirements.txt; then \ + echo $(divider); \ + echo "Installing default python requirements into $(VENV)."; \ + echo $(divider); \ + echo $$(${VENV}/./make.env --req)>${VENV}/requirements.txt; \ + . $(VENV_BIN)/activate && pip install -r $(VENV)/requirements.txt; \ + else \ + echo "Requirements file $(VENV)/requirements.txt already exists, no new packages installed."; \ + fi + endif + + +# ============================================================================== +# You don't need to activate your venv with this Makefile, but should you want +# to, you can with vstart. +# ============================================================================== +## Start the venv if you plan to work in a python virtual environment ## Example: -## $ make cleanDoc -cleanDoc: - @. $(VENV_BIN)/activate && make -C docs clean +## $ make vstart +vstart: + @echo $(divider) + @echo "Activating python virtual environment 'venv', use 'vstop' to deactivate." + @echo $(divider) + @. $(VENV_BIN)/activate; exec /bin/sh -i + # ============================================================================== -# Self documenting code that when comments are created as expected, the help -# is auto generated. Supports multiline comments when comments are prefixed with -# 2 pound signs and a space, see examples in this makefile. +# Deactivate your venv # ============================================================================== -## Help on how how to use this Makefile, options and examples. -help: - @awk '{ \ - if ($$0 ~ /^.PHONY: [a-zA-Z\-\_0-9]+$$/) { \ - helpCommand = substr($$0, index($$0, ":") + 2); \ - if (helpMessage) { \ - printf "\033[36m%-20s\033[0m %s\n", \ - helpCommand, helpMessage; \ - helpMessage = ""; \ - } \ - } else if ($$0 ~ /^[a-zA-Z\-\_0-9.]+:/) { \ - helpCommand = substr($$0, 0, index($$0, ":")); \ - if (helpMessage) { \ - printf "\033[36m%-10s\033[0m %s\n", \ - helpCommand, helpMessage; \ - helpMessage = ""; \ - } \ - } else if ($$0 ~ /^##/) { \ - if (helpMessage) { \ - helpMessage = helpMessage"\n "substr($$0, 3); \ - } else { \ - helpMessage = substr($$0, 3); \ - } \ - } else { \ - if (helpMessage) { \ - print "\n "helpMessage"\n" \ - } \ - helpMessage = ""; \ - } \ - }' \ - $(MAKEFILE_LIST) +## Deactivate (stop) the venv +## Example: +## $ make vstop +vstop: + @echo $(divider) + @echo "Deactivate python virtual environment 'venv'." + @echo $(divider) + @. deactivate # ============================================================================== diff --git a/make.env.encrypt b/make.env.encrypt index f1b9636a2..d07e7032e 100644 --- a/make.env.encrypt +++ b/make.env.encrypt @@ -1,275 +1,287 @@ -U2FsdGVkX1986PbRMb2EokSrLE9lJ2+nW9OfuyA0vNn39kfHerFqT6axJCldzuZS -6cIbHi/WZtTpwjxUUKChgjSrLtZ9o4IlDPBn5qxMMxtVLhfJmVDwOvUy9NvAtHJz -tfIOOZvvZzDTJ7ewkywkyxk94JseMdn5/GhiHinpOin29cNYZ6cuxeYE5Lihz9tt -3vP/S211Oi1LGYjd5kfuoSrfr/7Pkdhd/nkyiL/r7yOtPtmSeU++D5VQsbLzYfRz -Umo983TNZJ25w3FMsrbZWR/1EPXd5Dv4+S+FAmZES8YSi4lwmnFbHnnJhB9XBV8R -K/+puWCgOH8UiAdymvaaAlG2rRzu8jQtitrOnSbONOCJn0+Fh/wW53JVsGdLjUS6 -a2ZKP+g8G3KR1aJkPG2NdjG+4IzoCaa5G9/YPP+tZI+6rKrPm3piabtxxerp+N4d -fzwWrmz8CL1ICnU/0ySzIGdKYDnF6oB57vRVWanUYknAFq9s+tofGnt0c/T3X9V5 -aOKuX+7XbLEapuvsR4ghd7uYhi6eh6s8vmc9gFYJo39tcU92M2w+8bz51CskwXDM -WEcYLnue9/yUK3fdE6CMTbFtIhlXDw2IAA82rracXoCEPZFtDSvROG0W5WLMb52J -xLE6DJ4nPAPYAfgcj7xzRzir7WRgclrWrnDXCCXlDcxXO38BgZNJ9Cu0+f6Ys3dI -1yyAagxixTJw1u6Etk9ictbr/QQYWhQwqXPkPSrelPbJ5chQrdoxN2MrPuRUm4Ui -QNa0ug6eV0bsolaqlwCbbxoqmZlf9Aga1ePoFhMicj3Jzy/8A8NIx1LhiZuLnqiV -QRhnlIVUL1fD3HojXxqVyQVM8pqAb22uzdS881gomH6BEK7B+v7gcYKGcglCMaFW -fUqx0EyHIzBTGtwldrymbshmLgcYlfuYl40eYCF9l0PDN9/azw4xstv91VF85ZR9 -5lZ88Q6/3rMBfDS8ZxwEXDIoJ78giMqjHReaQgUtVUzEgXJyTkXCH8GS5S9Ct7YB -09Gf/e4IU5EYEWO+Y6vCXyIpY23cE/mzBLTDichT4L64chc4qUX7ogr7YvEvU9LS -Ga1OOaS4mJHqmZUahGa3aDsCx5Aozs3R1Js5Em83Rr6lK0fQVevCqVagbcIrVUls -vOnugf+0wAo3YaOeJypT5JkH4JlwPO5Gfm9YJ6rvvQTugkwp6BfxnRt91oJzPoOU -LgzbLYcYnersdpXoQIvnUPsF7cPxdY5+rS/cllSx+dnoHzqaNrqOkfhn7PScIerT -fGOWrPd5gH4uOKshc/bTybp4lSgbBEQGjD3HUjuFxyfbt63MsqxB58BV25tzmabG -VJq3Z2HbD8xlKWhoplFR/QW7RpQ7yyuzyRbF9a2M4dwSP51XkNMzA30OjvqzWrUu -6s0vDppVM8iTT9XE6SyGSnKEEOIm9XzXEsVD1ZlE38QJxYo1kl6DMPRiDq+okaWl -Kbv0693AZulL2hHXtQvMufxFNNAfoO5jk8Jr6rRVXMpsRRneYn53cyAzMuk17SQD -X4LfN78mOTc/6qadfv3t7ZNCBeT9pEYWfqhed2hk3CzvfdqceX2dimTNySxcNMDu -ukrG6vJfaO16HuVnXDT/V+WDBmEDhaeadrjDS8u0AcBGbtGxjXIHqoMJf8sxEqDV -cRQuAKaRwQcAVbUoF8pwWLvEDBpw47kZCVm6202FwI8DQngWVHlJDCvDoU86n9ks -9WUSuHJSWXoYurLdXU+0HQPGcwEbjvn9GXK8UyPlF3CZmz9RJT5Wr4Wu4p1ZACqD -4QMW8vvST4yuT3mZEndqrSNCvWf1M24jhap4HY6eKSTdHaEAEhIpnbcaR+pBIiH8 -QiK/hTRsTqV5cFQYN3x3hIQQHGIFXgutSmjuIWQQ8kPVize0qE5qdgzWmCIQtqp+ -OVZ1sux46edT2sAN2cwuL4b2sc8G3yMrEA4L6imf8Ea4mRQEQvf4RD5A9Eq36z0K -SJzvva9HHZK7NIMsY4yRt1GWTyNyzApJ4dywzDc6cLvf42O/NSlHJ0NOANDUQ4eS -mbQT+oZHwCWA13W4XAlaLesakfc2E8KFMyIv95j8BeySp8gnrGz51wGMIeLeuDRP -g2CfvXZzPfea7jagkqfMJ1+q7pI/ItrQ7ccrIwfg4gQL7gEE4trGiOvw9RjTC+Lc -wm7Wj6bEVXzONR2izvFnF+PAmdPfV6xMFrGpHaQzUcvqQ5bHIgNLl11IIas2tZH5 -RK0c2K2COaX5ZmRflPUK32vCkRgCJ0x5b61X66q3J6XNxOb91RAU7BKhft9Ud+tn -gT7RWFZNQ11nfui7kplDaLEmQac4dIcYw69n3QobaQgGEzJyNqdKHFW9dJYRRyy5 -cKuHQUl4xvY/AA+/bL1HqXJFAobLQ0O/eginE0lG0qpqOuERizIUsV5ZDYLv4nWi -6qpxpIzkTiipNfyU2jNTZnSebSKlDJTFwsXM4RDHcecqFTRYwtnGQJYzaUnrRGPW -zkUy39VyT7BepcpPVbmi3PSW3LeB0FmIJ3dMihgiAHAa9fuAFItX16VnNuXuTvYf -ylHa04LNjl1iaqSo7vjejLeNrbUplSKAuhvlFVi1PolGjglX95qMyh4KiI/UYCE8 -7YngRWqYWEjlUK94UzYrCBI/snlwXUoMLfGXzFbbPnccPR0q6AbSf6bjNGns6EH7 -5349eOHjB9kSQsNU2viDxX6TIGi4T6X3Yu9NeE4mbIaU/nC2NjkYMfKDzer2G/aK -xQhC5P1k9LVKvtj4VTQqe+/sfCMvy5hJGvmKJfmT6MzQ2wRtqO+A4xDguNcPc/ti -uduNCcxhs0d4UmbchoSmE03C+oQ+Ql1Hj8OAOMoPeCisBuVhaFNQ7g68t6xVuS0d -YLlqtSVbE12W5mzZ3CRslKDOLzRWFW6vp7AY+eO1vXPg19B0BVLwuSG8t45AIzNd -GkkYiNABfZc7oj4OLi4ONxPzrPvNGuN8tytFtXbL9mAa4v0DagUu2O+4fdnGsShY -FrriLGry1GCd0wWECuZ4TeFB6+qYJs41Ksqe2aK5w4njZABKv4IrqMX3qdzGpzu9 -xS+ob7gzunScYlkV4epfAJIZXZYtf1qiHkK1V3ButA3rlQT0vK5c5UNLJRr+0ebc -DJtlHUaCixQ582mm4rjbu7yOeYek+Cu5Y/MVbHAcGJ6QC6wm5FQSE05pcD54JdZa -9LF6raw+APkNanBW8hJNSFf/ZN17Lf9bkACeq8TKlF6feH6mHOKokxCbQMTeJVLU -/4Z2eRhUajN0mXePITbxiDAvCWImsx0qD8BIX5CStGI0LGK1eAEbQWs3PKrctXm2 -l3RxNMhskAa7KX4NZr7tsd45oa5znMwCKxsLCqLEY8G13fAt6PfFeMWTC2AkrbPo -tRVVFcy7/VONbBl9+OGZjD2ZeOYKy8rqNLxKjwpxjzi0cQWx20NUHPjc+E6m5eiz -pSJoxvydLCyNwgoL5RkjZWHURfIaurb8dQx/08nQeiEtHC8RRxNcrE2nF8u3redw -14LTQNZkx1XLgWxFt/KoCjd/GEDxN9Z9sB9HNYIKxq4RA/bx398SPRYMe7NBJw4j -vshpelYHXb2Mq8jQQBehGEV0cTtr8yHekP2og4EevSXN8bcGZ6+kxADjaixjvg1W -uQL0omvE2rBGxkC5zADmAn4QAbbVhwu3xHU7/1fKFTTjCEumFzY7rvaWz2/Unvb+ -xd+FpwNH+rAzyXB93hJ5ZjmQkkzdOm0YbD/xS0wrPBeaziG4JXAIORSGGUL4QwRx -O0ae7N+cPzQbZAGty7YMo7twPY5IzeE0cF+7MDPirEI6oLfQAyqA84jLoFasQ0nE -aOKE63P92nQY5dxmsWP1YYTiGdRW/vUyC+6lhoufu/KB5gXO+n/HV0Iot6p7dX0X -FfCjBoT8Gco1zFgoa7OuLW5Z0ZaNLBTeYp2j519T0CTKFlXBN2Fd1Cj9hufELgXJ -mMa3Ykey0VO1N/Yv2CeVUR9KlKBT49Ax2EcJmNizIpdcdMJ0oXRSoBjHOPxHwMEG -BCyCh/BhSVOjp4mqkvhVmXHLJu0OV+QeF2A7pKQx3eVCQx1eEkO7mB0JUHFQkxUy -I10dWM77g/MBvOqFNvk8EI3ifeC11l+BXfq7FrX/Ne/MupgJPTllQBEVMGv0+LUE -UXshO4iCaR4UqGz4IN4TLSmVWo+FGGFvfFTd9CocwAf36OGf4p0lMYmceXCL0Ojm -Zr3JMa4XEblDwFQcfjyXxuFkzqGaRjCNn8hXvgabyyyywCBTKL5PLuGYNFsbjE8r -sCdQ3ggh8hdcJAHWMUKvj5PoPOlrHg0ZJLaYEXunpl/VpmJH0gGwP87KG/3MbDX9 -pijHjT0ba58uE2mFDkAz8ZDykdWKwpfoO3wVhZkvsHffxGkTM7hcRdPU9H+aTkdu -wgs3oVAI1MvsvyhWycNXG/Hl+KwgwhJDRbH58kKzjJApfkR72nPmHPeZJd0Ovt7L -FxgOGB5K4MewoJCMSz9uXALStvv182kyj2izsTCH9EcQjJMhYHcXSRra6x6HNWtS -KSqajU19bnWJb1kduxLY0HycVXhema9nvUVt2exmyL0q+0loDqh7MCZqtI0oMD11 -jlcYl/Krr2dSzun5rlNh1Q4ufCvFttwUiQvPakTqYvrGK6pU20LHTFm+AxrDZI2M -SIYT2vGLj1hZEU5pWpw+hiFFDsgWQ0ui/Gu9tNzzwmprtBsw//qmBTmVnXWCa1Sn -60bj9/8zGVTHN3iBFr190W5PsNh867kgX8D+sspSb//JCSCm9H87GsC94zjyGL2G -jlvM+Flmwm8DhWfa1tH8KEoKz9c1YPj0N07NNRy/XlyHP3t3srrdpiizTj3HCmxp -0mxyhaa8zsoMmWN+FabVGHeyv5j3faGPgmSpqc6q7hSl0CivAmUyPm51kfyjsAxZ -oDuP0ijIZtTUTQcW6V2yaMthc9lQbpwX/DfjK0VAwCeG2sQs8fPtUfFgbPa9NQAJ -lCB0r6s7B+ZEtlY7bkg4iOav82/RoXlPLDAKVeLRPYR2/v1hvLlFn5BEuJgnxBGB -b9yrIMb/qu4a45l3gXRfaPWSZqQo9/FKpD5DBWTVUsoayvMelA4KwYltFwinsD3b -SO0towg3JVQLMLCs3xKqVAj9A9Dfnvlia2draldJggb9gAQ+YA+2kxCYl9MeJWwL -sxSXdwO+3zfKnwaoGt1MadXUIldPS+ocWNoQq7Yk2aHZufxawpsF/5TfXAPb5J+r -5pgxLx83gqIFbrOlrJjrh7BRKEbEYaVUO6S0HdGAbkCQXGOi7XAP+EWze0Jht5As -sopeONoBxE5wk6pj/glcr46q0SJ72uqSsf/+nEvD2QmT6o+/fJKqrwSYo5X3K3pS -OCgeunKkqKvWWpXSH5pcy3cVVVjui5dgyBfYsDJtGvLnsk+gCudmkpJGuBiAr0Mp -amowujtzaYjYqsqjdAUj3L3ib55EUqHhCMo6JnykBJyiwXT4u5GSVVDWw7sb5cV6 -B9xUFXsrnT52+WdHaujJYNlFo+eV1dm0EGhWh65tVgwfKHl40nFjYY77Mk2/aTBi -RqAssGcn+ODSuDZuO4Le0HcH5VcdaLQt9Y6uV02fPs2D9Kjj5SszkCnjUAqYanio -L1naFHEer40zQ5hCyKio+Z1AdIWEYRxYHQKHy8ED2zTAjWjk2/eldUvCGC688fF8 -n0GmwJHZ34a5buSZ9Z0rYQnqdTxIsKCWvlmaonvC5QpiJveIhH/WKNQ2Hhjyt7I1 -VaQjnwKOC7qFjJyW+kRNF/gTjw+AiojWeaYsb2AVmKJJtepd1XvznRSdeuV0VAvF -oN365AHY2NYjosMBZIUbom1tma6HLb945PC1WE5SN2VJcp6kMBoRCMkparz8g0aP -Rgm64ecXkTnf4QuMIWsTB1PEeS1ZymfngxUPgDj7ltEpuJ1lU/kTxI6o78w+0JqC -ww/UnY+3c2ZjpFCKdIOB4b/SvpwVrO8vYh8i/75DS5J6Ouva/ea8HGx/I2dDDJbo -DqHDtA9ggJIBz3Z/T15ySBFVeosDWELVNwfF5hSI2J78b9j+4xDy3htkGtFBKhFg -mdJIz9x+N+1UdTjodc1o84fNi3BLGYgUQvK95UrEMeU+rDuDhoWH1kWQvjXqfcFO -DVNxTmtnKVG8/l/LDyarDLGmW/mBmf0pUYfC2+C3qX/5fxH2CLMhSGbrNsrfyugf -MhCwthOI8NX473fHvIc24WqdxK2yYl5NYfR0TGablw42JdUsnmf/30lOm9jIZfqU -EzA4kVni0RkfTzrttGnhcpbpud+a0JMeLT0eLi6hlL6CEO9c6xpvjp+nDj5kE9tU -Yc/Qyw5CvLhsuFWc3uRd2XBF3S0XYsPcQCRi+jyp7S444vr5aOOFwQ89QkT9Wxzw -AB9qH+oZ695AXhLnQV93v46LwxopmYJ2krF/YHqst9lT9DepvOa+Oh9CVpMmOwAv -u4XYrPSOnCq4pCOrd2ZfBofdpYl9jvVgEjXB+53TraThfNXTBjphv4Z9o9+hBVnH -CmBL43t0e6FKYZV2BbZUR0uBpTE2ri/Tw17ZXJnY6s0seDXKXPImiQiSoo1fQFEB -8VQMDSXFcMg9r0Ru8unF9C79gITfa7l17cIx1G2bYWCPEOggn5srcU8xB2tD6ywp -Cfcx1ztwbxMrpCaXbg4yTyd1rzha/LkcuoSU2Y/FbfjKgqafWTHsHAYyeazJTKC6 -JXl8mXT6jAO7+AXQ68dhyUjWs4pKMZ/rSUV+c3FHfDOzgDUHy3K9rDQUfILVv1n9 -HDNYoTgAjFA8OIF4OQIReHqudgGltj0M+V3EtU9yeaWbfiCRTcma08sAhCvTU7G+ -yR11hcgkOpgj0OupYQ34iRWabvDSXcqG1pzB0kz+MWJOSG69t8k1RjBdneTW9i6H -TkF7tMRil2Lftyx7ZckscC53ICCbrrNJkBzZw7SGnoDJNwKfXgnJ8l1gCJBNNncs -SWI6ke27Jr8EDk99vXBsZ1Vql9TWeFAz7PqlmUoWwcWQVtlUZO0kCCGXBFsz0oeY -m8kWpHNnh6GXG8+Q5vOwV+pLPbdaB+/qufiFLKdCDsUmErb9bz8bhVb0foStESFq -HpaH1B0y6fHbOwrTPt//4uaEIKQBvcKnl1EYHscPWHwZ5LB4QDIfaiCSP3GVAlcO -WPNMdCuucmcYv2vxKmgrGzrUnjYFc9pYuShCmZCjkb0VfDROjx1l/j4dDOsCGPpv -tUJx0HsFpIK+2dl3DCN0JNXBJ6PPreHqqXnq2kYkwYWZjPYnCnUKAq9A4eTuxmvz -bQWGLASSdXrNjNbTAQi8dQvPLDrSK+Ao3c3Ji7UT4sBH3CfawZhgM0HzNum0T3NB -5OHZ3YBvyYY8PNCrDipquhiiH2T21X26FKTGvh9lSBFvF2QOSgYHJ+uEo9X69BQ4 -jpqady1CIycSR6nd+ux2RJZVoQ7m/r8jQ+gfawd08zZdTcI1GMpoh2/gRwIJ6815 -GEduBBlu1gYLkM5XGmGvgCLnP0iNiFQ+UC9E3gnPpMx/NWV2eiVnI6M3DaRsNEt9 -rIhR3ll719lj+IdXybaVInxv6KwT7VWTVYmXf/DOoGplsC3sUQcQ9mxczGIEFKXj -eompVnbLjjLrB4XRRMQ/N+fCrdp+yplBE35RUUEQuYXPelAniO7zNRF6h988PK1L -QNJJxSzCveKxnYkUZbAAUJk0wXliCW38JlGvRsT/3LBZkd8BN8Cd2Hp457Gg1Xsp -YNBV6FLiSMJtxaJSojYlYkV+KvjwaAGMw80WQ7klAyA77DmBRlyDLorug64QVWDC -ooyJjkRcdZYe1Y+oUY1yREYWVA94F7zeoPCgWJlF1oZ5W6Ampc6gBx77yTZPz+WU -BX8GFYOP0347Dh2+OLEIALq2itQsw1/Wgb+tUa30xcUMjWdvadT2YYVGkTm5ffwS -bpYMTzhed+Slh4pJd+I86HBSWIqzi8qpN9G/G/X1iMA5ZLAFoK666lmInaFvsi1x -Bd0hoa0VXLsXKITJRXoKgom5E0A/0UXslreA4EdqAoS7ce4dcF0GbbmNC/abY4AZ -cMWde9XqUhW4qnl+A1CAUX11lwXiJjY/wfmosIneHGGIcKOYqdjVQuF/sZgsirM4 -HneU5zGVrxW9nlvceUJjEwJFvXVsIUBds9LVY/GjD5eBrd+waGO5KziAn5oAGbjv -cRVo/bqS72JSpx1vdVGDghoFJpLhfy62hInWE0ST1Ggmv4EYaxplp+KkT092F+BK -d2q4KMJ+dgIZ/zBCtB4OsXbpVLZLUO1cB4N6J4w+8gbKLG5RhUNIRycXJF3p0Mf0 -FjVmI1lCDi4m0I2+BfkUkrFgHoDo/DUsgSffzlvspq+keYLuzlUeZGbfU6QQuKFU -8yC7HdDEc1Y/TylbtqdCWslZisHGxsDaOWE/qubLA/Fb01V3OWk5f7ROfIenyfgF -HWA5wcRtUhhPyu1gENQDI1tXAN0quOgbrO0gjApPL42CR5VSUXmBmsNACtm8qkeV -Whvjem4ei8bbW+tUkNGidJMkArBBgISudFFLaIOymX+RV5JVQXSN+1O332VGT9RM -YPfRziBSfMgO+7OXEWN3t6p93KOZtZxoX098PYLFxNwSKAXMHU8Msscz1AkJbRTC -uZw9lAbWQd2CdJW8Us8gegujVftF2HCyj5XYeWzZvQUDklb0GhURzh/Thx9HbuWs -p14rat/v3NoHaE2WOhJkj4BuU3H4JizEMq1wMFksNKbtsQa9ms0UKH8jmMnZod0M -xTIOoelcOObeEoEztCX8lHVXYIkmaVPqOU8EPgfLk/O5HZKtDHuieR2K/2PDcLyK -uMM7hoPTZI8LTXw09bhML9dgJJY0xKycJbWcBsI29VDvwaEk7pztk1RcVFU4m3nz -SWnhinsCRkLZyy3FXWtNRaLUAJWHfCZdjrTM1OuJio1Jk1jkm/aGAESOt9F7++Qv -316WOZZ1cdLq5HqNlnmvZswM5xyM3tDt0Wjnjx93G4m8aDtG1f8+5+Q8tzeegmAz -9ksO7mKenJtX+9vCIkyit/6SWa+EZmwNVfpY/4n/xexsN74bSuZIwDEDYfTcCqbd -1iTiAMX1Kt02XhrLCW+MoZWWy2FXyTrR6JEp4pwvwzeDBNZpZrvM0Lti0M+cpVeF -7jK1EAUXskVL+wGTsc19O9Yg0VVK+o1h7GVRpogfHcPHPa+3558U7yb1S87p3Huy -0g8FX0O7AlUN8AQ4slZm+eXPOUmW7DAZ09RZwrUKih6tozpI+i8cImPo1WTQxmPX -WbLu1SbX4cX7FRLWci2puYaBsZifaasY0J6K6rkqzbMc1onB81QLH8T32VrdEdrT -a0lQXPbGMI3MLIAMxAyLHmkP/el60ZAQyqOHK89D3+fBdJL49cuPq66qdRmYkdqK -wJBqtJBjFMAh+WNMMzezV+fCI3+fSpZgsBlLqdUq/COz/8PdHpFFimkcjM+V5nE4 -BR/t3eioEfU1XjUZua2xrVKw3B4q7UpnKvbFQVKnzKOT9mEta01I0HjhLAApuAPP -G8ytLhf/SEwm0hrxP1XFN2+e2WXSOvXmnvVAjIaOxpw88yZAbKRhq4Vhdx179G01 -aXec3xWjrVzVqwKGbqCsyOukqmj/K2zu8R/eDeRDauCv/6J/JzNoFlBOp953va9B -PoAADmGLkUQJlWWtj4KKJTZxQIMhnsFs2FuZCY+7WGuZAVWxkIc0P3+SBlwIOKBI -Ob6KxQAI6K2NgDoLnZhJ5DsD5rDM15u7C3uWm7igjUVf1IOkCT5m9GZY3ZLpbLbB -3YMkOvCAR56FntmL8BtnRUUJ9cDZSvtaJhjq9UX5KCf5S3zN0+GhzLRRRuaTWT0Y -zF8DX8VFU8RybIssW2gM5DWz+k8UyhFmfpXObwxjNEm0ssoj3IHG4J1j2ssnPiso -WFO2W/U/dC4oc85mo4mvEcbAV2QxlYsSLpIdYzRfiQQGt7BmFOaE0saIqIxSD3lJ -6FHJsK2PA33uxRRzwgP13IAziOLdE6jAf2RGzjg7SrGc/kId90Kfn3C+oDAqOAkB -SikAA6SudIwEgwQiIA6XEgFEZNf6yFj7MbABgpD6pzJZiRx0b2AL1UzJQMnV6qPU -Y2WduJZIKmnwMYkEhVEVjUVQPbvdfoepHVoW1U+MrrgxyCFmQpT+GBDOJ5S4rzQ9 -kL6h9NJBWs+IRFbQDhWj3fXwhgBDxJggmL2tHerhEl2i+MVpEmgsLkj0ODGwxz43 -uBI9mrGbSf38rKIXQgInaqe3qQRV+gydpoatuOH6JOREURHUJSv7vt+glm7vUTOX -55JxxUorIEqDPD/AQeUxFNp68V6eoY3yirVMwOVG8VGW01t5KpFZulnlNzKKjGJY -v7Mj+Du3gheEpM8/cdEGML0SB7yhFiQfEXWWZNuyDaqLJTVqgX0bm3LB+Ir0rGxG -YTwtiOAZRCrBdXWTllTEf0XOiWLEIwbXSOicLvKRXNVE1NBLSjTVZBjN6IaXyIKb -vYB+NaAJEPGs4BqzpVrQFtQpoYho21k2s0WuJGtHo9leofy1LMwtkDjbtifIWXE3 -devuZ1CtpXKfit1lrX3g8dy0lxKJHJyBhMA9yS6aN++kecb8FdgXQIaWJsxP3ZsN -VXs7fc6w3nLIkYozGHpCB6GELolyQMfDDSt/yDSTcD2oRwtiR9MfjX902MrgVIdk -lVdcm7VvPqX5/Mvmouh/KEu9oAbZDxNbkCOJs487qJQ6p8ZzXx0vh7Co8eu3XAy4 -gFf2L4DI7O0q3iy1ObWZB+3vVvDzUZ8Jx8H0pRRHKSf+xito9XMuN2DpDrCzugEn -9x58sVbwnfdp+m2KjA24iLYLOQ7usb37jaju22Fr/nunb3wEFfBmsJ/cJzk0b6nD -Byilc46rD435al0fUAPeOZ5RqNzAefJth6jiFiPe09hi1bwUJrMNI6yekvsMQI2C -+VtW4n96Q1DVMCX91A9IYuXYpGCOBp+FACXMO8nEmCnGcm3z0HPJ7hIFV04/dBnP -RvCqNZGxjyT2o60Hq/rx0Vjdd9YRXEuT3ETlqIGgGag0wx7xWxPsCWEPxc0POZEw -I1Jq31EBXjzqRMsNoqxYkiu5yyLiFFrJ07l43qWDnHVEQiXSXlF1carexMrKvrr5 -HkCve9glK5Dbyd4RljTxinMIkXhC/IiU4g3SKOqDjYP/E0GOCqRx67S8bZ/iZ+Pr -yRaaPTVrcb72Wf8PxWDNIxl9DvM55vU0mL+/GJKu4+xZga8Gyr381fQ6nrqoRA4m -Ke9KE6bK+2N6RclMenfPHWeunGzHTVqYK8G+nXxPqEjDuKFMZmLdH8VSvb5ZrWnw -+Jxx726BOegl0F4E/9/qY0rqZQm2cMaBdrkkaIWjVe5BdZJfKyEk3thDkt8Dpzz7 -mWX/j+8KtIy61yAhevg7EMQ5LKqpUgbIqumtiyWYqkzm2gGiSM932velx1etu1C9 -6w0LzBibNuwNljpK8Jw1GWHTlAGm3hk32Zvpn4/wYEj8CERiNWV61aqZWZvUk8e+ -/oXOoL6c07EokMaBuCWuxo+/tMTEwP2P7Pj95SAN68lvu5kxkngVI6Pc8nlLT0Ld -X/dGtqpLzRvnd+yKP+XZmjNcKqzIMEN223h9HP3obrTSKGeHrsnNJCk4jwmfL0oE -LMK8Y16UN6wZmA5be6yA+aDmYV9RC5VxkgmpqJOz4mgxWwMCbZ3aKMf6+XRZgxS0 -IDmhNL930KuaNbdH1QSmc7qNNWMUz5Gvg2FKnsdBqw7RGxdtzBonQS26kxEmzNfJ -ghrEhC/plvsU/94zygto8whgSKoiQM3IS7U1FN5PPUjOsYqlpQxKmRtmgTajL+2s -MmhB0g/glCqxeK7g4OZcevyiUZ6JijCP48KRvQOnF12TE2aR1k8yeJyqk5nYixpu -9ff+cagN6bungJXymZK7kzccAF/hE/haV/dekliZjS/BBMSVdjapjGbQoQnn9LyI -30kXV5FCuiB4xwr5JG/ZAg+NaE0Te5zXbvzeKSsw4VAvuW8GIzmKGCtIyfGMuzCZ -MTxB7a99BsD2dJ5wbSS6j8JyYf24Jr7GzFoFadSlVvQIW/UR08/iekanboMu5lk+ -o/b+Pad8JCbXzHnItMbq/m8HI0ev9ZKE6EuZ/0Xne6tf6VtwZdpVdExTkFkVx41F -nWPCg6axY7ukmcPihvtZ8sDXAfL/1saVMHfaR0gfckFqqClLNruDn4b5hTRgfwgI -CT5phFswKva9uEHXJTWrXbjeujcFbgRTW7/qQkdmkODZr4E+UzJVxupax7CCurvP -4KLBUdOJeFWXtnKSlEhpEnK1zwTiS44yex7teLtOP04Yn2PlkeGw0F9MpovRjd16 -di4odTWjE2ht4QhjKyuOWQ0Kn9qRNQIQG6xAMLuSpuD6HilZ3k5ReoX1s3p0uPCT -6rc+yQNNKVLIfgwbeJuGBM3M45GJgdqOCOEQCAAyW67cWK9F7hPDzhbJxYshAuU2 -aFW3ZiWDV1qgc76k2RcQnjN1N/2hax8ZUOJ7IIQCHp2goyGYRWuaQcSg+wMvJdmP -r6+2gsB/0thTP3zqDBVXlx2xQoPxUAliUwZVH4Qv9VPHe98XTRhvuZ9lRflEn6C6 -iDG/Y4jTI7df5IM4hJkp60+S9S4HhVhIZk/vxdN8HHZ0/Skoz3/yMLMM+8UHQQ7V -uCOfNlnF0HMm1nL0sXBnmlgcPpYZ6gKV88pvS02J1nAYAC6KJuhSJudQPcQvUgvh -PmFiFnx0CuaK1EUhOhpuESAk99Yxeik+iPJlEdKv7AHMFGsF/vmmpd5C8uVZuitz -LVpXpo0Fr6LiLfYjV/Hmv8EEsBORBFIEpHw4DR0lAuqspbsTR42w1mYx61G9RKC7 -ANQPA9MB2C/4ZP0qmQBHCf5mD/9LHhicrDatmUaIu/a5/aMOjT4kkZE4NFPKqJ+9 -KWFYyRGFlDOtE4PCA1ydtCaIP4SAKdnZIW59f9MPyXS2XxEv+DkXBrz3P6HNuSEl -ZY7Mrn84t4sovlKgWNlWoKr+B4iQ/aZWkICuK6tLaBE0A0n2SNtL0drb3tIlzNlu -t9oi1lp+U6aha0gbi3HjHArCJGa5SBxIGIjRd2RiKh1vRMJcX2UgUdYLa5w7GSa+ -3nvYjZwb4bb0NeGV4xDtmh5u4FpX7d746+9B+ZIJdgxGOo4MESGUjfnHP5UQpcOo -id5Kv3VR8d1w5tOJQ+6S0z88eMMkJbekhaR0ITqxiD2LUSpMoJS6UjVppz7y24U6 -u0krSJgIpVjQdB65lJ9xxkrW/w11m4w1ngK5nETSSKXiw8jlrLHbm9N6D2+zVx7d -VGIwl+npyIZi3gXklpp/NMIZJjDBkWRxyRo4kSbvc8HHq8m1zHdjUGs2Ngt71De4 -oAyN8Aibx5MAFRpHQNLZTJDLRB4Ro0QiMXEFniXxO9Fl26KjZHyigzoyH8hfCQms -LRCFVarkPAhK7aGyvoqOnExIIqwROa82DGdWzyUbenwRVJkmFFJSe3NYO7KboGNr -iXJs7EgnIp00nWz8Pl0jOBGdFP8e9Pjt2Cjxphg2WbWLxwC11eea1nimiqTIDUA1 -35lzIFI+qvdQ7R0cGPJDflLeDuXBNuqmKLpr9RFXeFpYDkGwB+RIB7tOUzDskKqb -iCCJdagossRAHsfpOFU5kcmPXgi2R3IxkKT7D+nE1wVHbafuIOiHuF9MhihPPf/w -E0jSzpzsCh70Z76sZeOXwKMIYeqB+Zr56dawA+widA14x+CQ/SHfiWMtlJy7yUbq -du9uezQwO6Gg8H880OLAqYbLjSDRyL5YyWM1QmQ9btprxBWkmZK2Vyo+kot7+d8B -T3jeyeFLkYeZ/W0cwmf3WEf56NFyEKLG2p4Zw+uqLL1K9LcJycSXU/hDB02shh+c -w4H1rJwZL+CYka0ojJIWimvlHsT+EnUE+y6nwEAgXDcrH3cxHQzIBTBk4zKcMK8F -hZtRLI1l0M69dJGxmjSRR0N4n4lTGsHiESIXM+AjjmPOca/UP2OUx6XBE3Bq+xBu -UUpqax0ea1lzzoht6FNdbETzWtPaMd9FeSVwipuLuZJHPb72KBVThraCMLyZ3zGq -piakpfy1qRzvpDJJj1OOzp4p2FlIRefl3oisxFCYymunHWndTQADi+1cbMYo8d2H -CvsRGsPNl+CNRM3Bv5ZakhJb0edS6g7VeLe+Ow/dMsAYs5MC3+6WcRDcaDMo7rQr -yJkSzpqByFm36l++RlDtOfbdbGHY4L0uShmEZ27awpeB8uufgqIPqXEaLQVAJYQN -or9aIEA1d50m/MXlCLsdUnpDGCoVhUTGPDf4VAAlkdkaWq6pslIpzQVyDyA/qvT7 -oo5MHzK/a7I7pprryPjF/CcUUR+BivIEn6viUzhKfBUjbY02AZxFp6hJaXrJ7Omh -J2Clxbqa5U0ZduBV7XeJTeacWAZ+GuA2su+9fVXG+vo1WDuIbOcLTj7cFlfwh8Ed -HC6RoJ6TbRlH2achWLWOny9xGXsYzticMLR9EW4lAatXiggxmAA3o+LFmnOR3vR7 -U4MoGYmF5JazD9EQwU/rqOXaDawYY/uJORQ4b0RjVysYUQTGTVePG90mz++KrC99 -L5zARP+hJX3EwWO5Uam9cAFk4TQWnF3+2c8IIdmE4bx7v8PR+iD17KPvizzre8W/ -NbTbPm44fQtRjWIaes5wAthCpvN5XO8rEzWk+9/O67nt0dne2LtzwOboJBTakGHk -2Hq49Wbd673gj5F34RhiwfujFAShyYhAzJyyiwStnOR+sa4yyP7hPDg+2KQAH6rX -ShXsHFz1v9Ng3SgSi5sS+pE/KqzFNn7M4Pd9UnNxByR+uIGHbnd+AfIgOlhKBBoc -DSodFPx8nz+VGLNduY/dnR8G21xbuULrq9aIPbsGis1PoNJcA3cg3AMrIXhyxi3T -RS+azmw0dltctVEMDpg7pnkMCNS64y15evpjJgdjTj5QChU/Liwbix1iY6phnMFe -mf4b6TbKajmPoKZnC9eZc1Ik+wa2lLx8wlhfzjNMNhqHJpM3pQH/EG4znLKSXhOG -e55sdzwPxLSutKCj6SL66578mgotLieN717LmvPZuYJmSmENECfqgv9uktaxyMma -uLvpmQrALvM2hdt1jh7vxDHBpJzJXRIPkWOchE9DttfiG0n7tD6xAGWM6eCQ906w -cHFlprLy/xNtkepQFByTHrmDUCvBMcDy8Rbu9G5/NqtXbalXmCHtEEPKlPx1Vs0a -Aw8dmY1HNLfkOS/Vwkzrf2WQAtwvbkMxa/Ja3AF5sS83wYOjQYL04YYz9z1d+GRN -Kmydy0cuLuoa3+u2sjh/WxORG2ZD46wX1QTLJCom61aKB8gbvYFAKT63O82G0Yx9 -5Gm0SQb8Q3DLRpuvmah4UeLZNXy3LuNW2KTuKm9pO/a+ogHglW1q0jWW9id3X0vQ -f2uFkyD0LD2Y+st0YPaXFaBVSshVJAdpHVVAnXioEKM4hc9RAJ+8I0HxXP4Bsdit -gh3g7QaE9oRhluG0sA2ryQj5Mct0639KrrbEmic9uzVSif+vEkLTcjwKrrN6wH1I -oUbRCy06vj65u92OiU33OJxFpQNnFKAT9CYi1P/AGX8urnItEPH6PDOpqkPbVOj8 -sGFXyBk2gbVc4hGiRnh708ABSp6djx6WvHJiuc0n5cPmcRObCswx4IK6H0aVN/lc -fc/Nn/DELgii5KZ5JtLjIY7+kMZEq0Ga30l5W+RrNHhw+wrRYafeabdHSjL/3DXt -wEN51ZgFRoSw2OlLVz1iXcWPKLc0tqs5ln5ucYTj1hmff1tqGF691Mbg2DgDtKqr -tgWd3TgP9eF6HUL2Ly0BNMOhrqmVffFwUcZb5aU0jlafUCec6wQmQXsMj28gRAv0 -1N1zllXI+/InxaS6p2ZqGS7HhpWguJ8maXZpNjdwOWe+4tKT7bHsNgi90Fz8IJJY -Pv5ELHGgiHdrh0NOwKCiEa7ycQ2gSuvrFB2zPymznDOW38h+kEz7Lkt4N1NMWIBR -+Lf2GMxS2Wqr2Swoc+TtyNgDNgNQvOT/lQg6/f21+8BxU3P2duyKQ5IPsG5hkaDr -T6F3+baDjXVgfiAc6Z3pJyKG3hNzO97izTMbaY2dFZlNTDAIH7NG0gINZzEc6VNE -3ccqMDRvXkHVJwUschMhvW/GHuZf0ry9hcOpsHBd+sLkbY5ya1JV6AJ59wX/86P/ -GcXUXXBbJPxnN4wjTSvX94ApfjSJmMfPAkeLNktrpqvf49kSjmSmSzVeGHvO9yjI -k9dT6eu6pioHz+S5D5PI1zG1VhmMAvnOyefMPEFlkr1u9l7c++y9lQ4Jixsek1bC -kQoz6ikEN/tYsZe1GT3eTrZKXgpSX99uRFrdwIcsCRC9IMEfRo8y/Zy/DkGRiYkB -B5mgFgvfmWqjUVRI/vNCIhgabfx7yvMgcCc6U1lxwLcsZZKkj7zaajGukHhIbKy+ -EZAKJwTcLE/jkvuuyvGbGvuvqEI3+P7wXudFgMLc8vI6u6Lg07xW0Yyk3LKSglYn -N8D9gzVoOqglGHBqypgiNOtns+xPsm4p3xin5UFmysb3cIkHYcoDiP4Rj5S+rrfn -7Mq25ljKn93xKtQJzOLSF0dwWN+7NGAKP7yFBfyTXVp6uzJNcjHUfhcarfS/rvmT -pwEEzM7S5+Sv8+veOgudcSvF917baai/3niqqSc6tHEQm1nQNg5Mw9Iy2qIxiPkB -RKwCTKHBd9VG+H8ezqVjV2hEn9qo0u4ll/MT1wAoB4YIdWQou4VSAwrx81rf7QK9 -uppxed5GxyaLjnfOpxan1623gpyk+HFw8ezF2FpVXLU0hb3FBlUjTAbEBiM/yEfk -mJa1rZL7VBA4v5e1uUwO77P6ibaFMkd3wgfJrq93jTP1QEtXBnMpuGrQXSEmEy9Z -rjX7zGPrC32wYCTTmbfAwEetd2XtcrgupfbWm7fRR0ZXKKh7DK1Q4HpYZyd8f8ev -I29GLssQjiyTlj8q2Z4/MbvOq/nlgda0w3rrbHikxUlY63eVuuG2wKuRo1JVF+BI -BDwhlk6awAvK+OyEUyw7UTlDu10IEzaaN3oB6MbDnSD72e6Gd9vv4kja31KLm4hC -JckyLk1ZEsnzlP3GE01PtxMdjt/HXUGRLwEwaLxL550d252G/hzkObnrg+VhnV7O -qz1d5lU/pnDqBernB9Kwbzi1Tj2zXkort2i1q1t6Lf3ZX5M8htM1osAzbOIllMYa -/1LCED8yM2ZwOQYsY1R5fAzn0vk06c+FEvwxbBOywjpJaQSys2b9sxYMUEPi+xmX -xlDPPI3cDCeBh4GrDORPQwzlecd0pEG4+drBJsm2/sQfpz1xI/HekRpmidxCmrpc -9Zo7nNV6taI5Ob35P+Yez1rMw5lHcm0AZJRD3bk95ApLX0g3/n9FzVNWa9x66uGq -vQP9YH3kpvOEOpsKRCxIngaBUvH545XiQW5TAGTS5RnEdEL5jktVI25v635fA+Ae -Jy571OspVpxiJFiV2kux+b2aCrGQFMpGRDHY9Jrf7eNz3NMtzAtkwjUI0VYl1zlV -lDjdScpuYM8l94m9sUzipjzUktVQzqhO9Thjp5UQUDe30bTkH3YZ4A== +U2FsdGVkX1+EjJmnWmQEpgyUNArAe8yFuGLfSiIhYAY/a9bxa+ItpMqm5UnIsAIE +np5moXmd3pj9MXZTVJLJSSBQ/QuePjdEfL+HNZWX0Waw8GXdmIWkBkhdBLeRdXqy +0XHRXTptcXhFXx9fOenjgSxm8oblTy3P85EsbVcUrCP9DWEyvAnrFpLSFFcz2OB7 +6/wn5EgMHGhyjaTjcyfkNU+Ae7rNTCAj6Hcl049UzAFb3hGqaz8F/g0wp/mX/ThI +2qDZCsLlREPZsgfqQkW1zXPQxS08eRmUR9FsDnDH1X0dbp/9eSHu5IT3MrwawEz5 +hS/K3XkuHFpWQWWf3JBPXYl0A/m2WCM39bmg13mfnDBJsxmnEY8510zdNTHTqSUG +n9L/ffp2CpCjEDbKQJuzrfuzQ/h5I37XqKvE6OWdY/T48mDsqajGepPBmZF1WrlQ +y7ZMWIlhzxw8P2ZX1nWwchbTxy9kjKs71tk+ikKUoEU56SY+WmVjKvXZtUB3sY/Q +2Nho49hpGJuPV0tE6x8oAMW3ERZI0BYWMrLOf4bm76LGgs85WrEzz7dnaPkCaEbH +SGPx/U93f7zV0X05sdOPPRoZ4mxLxNOtgA8qAQLeAlFnzbLRlq0q4M2MQ4YZ/9HY +HTE+CW8sj35e6TRxPqfF38yisVv9JmQEhZ83qfMqrTaR4PqLWhCm/Hak3LMUNRTw +UrdGDmvhCk/BHqj/kmRyCY5Ts9Z3gktUD9OSuxBVnPxRo+exabbjGqMK0Eslwzgc +nvgba6ReKkh1xf/3PgI3A1ZOKJzkE3YctTcHV0+o26I7JW4kesTdXYIM30RgVF7X +N/akJFV42ZT8xEmYA0k/v3tM/xpQVzj8FbzAjP/d8L6etIolTJPWS/+rkgTaiZdD +EWN7kLfgzmC/OClSjuGRyLNbRed7rGuDLmKZKY+6Bd7tcLgBWHKJQKnZlJcoSx0C +MbTS29iZHmEkV1WWeTm/mrjqZsQ8/5ky4Ug73RePk+rlFovxCLSypFOxJRHR+nte +nYE7iJmTVwE008IoKTWbwxZaqbROSqKL9sw0r5Kps1Eve7oGSoNdzzCTArlteN5e +yU7pz1lZrOWGlT4pVnYnxuY7ayGkLvxqNWBG+bq6VYsh4W9oMHyW4hmRRTcvfB9B +/1B57RUM6rCxAWZYd+/T2TU0Me19NKxhwHct0zrllNwypWUCZQ4kfvQvVu5Xo2AJ +LawsQ1KUcFfVLUfq9ecQU8OSU4MYYIibPrjUqmQF7Sk22HM9cXghAWY0nPfiHTVd +jdWn1c9z8lVNk/jZVxN6H/MS8mC4BRw0tCJDWKWEkhxX3V8AvRdTOIsKYp+i2U5j +CIcVGMfAtmqg1WpJfVYLZrYI2uskjL6uNmy7XmFxdztbiZyuE37NEd7cw0q8n8d/ +mB+BBQ4x6Jd5qvlWNceaRv5QxNZOs36CxM/ta4bGBaAa47CuJ1EMo4EyoirT2Hh7 +tulT758q0HXYsKcg6EImwAYJvEws7MDGFdpWyWz2c9gQX6biNBRXfaGzoXE+hQ8A +e+qPaxO8s5yCGqiKu47KreUotbP0YePyEE+RlRIJGt8t9JGtMVz6k8MzcEVRhtWK +iX1mwjUR2saocyXATYggy7Xmooy8nLjvKqkYx5FKmL/csuwC1gdlDrLRl0MPIhtP +z5o3SSLDud0TzQdM0vnbaV33ynMbon1MRW1CL5OKMHFMh4Zoilm1ti26JiQZQGNS +1899V1IAjsVUgLtCctP4WiIlFw3BCwYjXHnD2r7gCJc+jSkjwrM+ayhZ9VIGFC9K +J7+Ynx9eon/QqvMT5Ek0pJnVsmjJtWLsmOEWmW57VFWKCgW2wW8s9+/YLdUnRCvd +B54x2MNHcGLWx09C/Or2Ir7U1Gcz0LXkkzbDifabUjaH3LjIfYafnyYcu1BIV9R9 +ucP9f4Kow/oWZHi5t0SFNO8ArqLOumTHxFpd0MtsV64xJf8TuZJkSEZbxrb7VoGM +p0zVVgZTp2Ryf8gN3kpTPQE8CCZInpUgipza59z5rtgpGR2X6y1Qswb7vMez9NrG +ihDA3iQX6v/osxNTVfBsTfEjE5tj1vWlm5JxteGZ44RiRmMk5GuvfO3atccFEmxf +NKtJy14jwmy0IepxSMrs9sa8/cLQGH6OBOPUqmE8OpnVtlu8BUt7y9BrUMtSU7g1 +ej3drUGzyuAMSXDPhRuJjL2KlR7/YPKvLpBvhGa+vn/VkFcaeZmx96Aq0dOA2dQD +v/y5xiW13f1s6/MNfm1pt4vGMoijlYHPPH6e4sFRGhqcgsHkJuJuF0HKCunXbrEU +eSgpZ+zzAEt0C/Bk9iaKYvqG9m8YDL+vG+hZETHucvmGATURoyB9DeyLAgLWSIGb +LCiL893yhbgVc4j7tns98X146xX3naHZcCUKzN7cFAn0Kzdxv4T0TMw10wTuQj7L +pzykhyrlSCatGK9ydrZWP2rg8xIiAxtvCPvhZf3Douk4KuF7VTAwOSM3wlgAnuZG +KqhmaFb2cl/UynMiREMYkAgnSo4oraLSIDT2tKkCm/Kv+Fx4HsUCAH205l5ew+/J +9ftje6gE2evhaycR7sbOWul2N3gWHPxNjq21SjZ61uTVfrdu1Ls0vPQ5VlEapFmL +UeHCCiRL6wDZx572zukaueonghq8PUO4GoPWzY3j8kX3PaObzNxyfC+Nt8n2yklJ +vyuWzEMTym5d2knQwt/FSsyz+KCHEfRwqd6Nj0q0KTyfwU2be92Zf8WPDhn6Gv+e +dNjGLdgjX/bvhsEJb1SarSPM6Va8x85qja5PUxcsWYyEhX2VOxWNnG/9HMwhPDDg +oHiBRUKTx2GQ0MShB+M/yG7yWu9JRcBSj8jk6+z5TfdFXfed1+3+h/4v5FNlvBQw +E5WRadVZRa9FjgpjZ/ABDZbTUTIM05akVyegoRxViX6Q2u93u3HpjA2BYM633uJs +l85czl4l6t19Qaes/wyStS5KXMYR4QbmHAmn95zz5C6UMYa/F1SXsbthooEuYMqp +WFBhceMu1d31POJImVbfAY7GQOjsy9Iv4ftbiG0SI+PvByP1/HOwlkyJi1nWRPTv +7yOclBXc3WRx/zvejsQule/Q2pgpeuvB/vw3LVR6GwZfsPzSWoyHurmIJd/+Mpe+ +NyCaUrTsB7IRwnwCX2eKwb9qzyLPZIW/iveJPL/2YrxK1IpHSvbq7fSMGmgWH4V+ +mYIEIiDavMCuZk+yS6WgEF7DtYT8ZAn8UCNmLvDtlMXMVXKHIF7rflSiv3zm1nul +o0giVpplQljtFdl0oOw7iqhpNzSNvTSzHGo/wxMT4ECMuWJ3/cYttFmaf0gY83W8 +tEKl7JG33jxf1MRQwLe99Tn2XTnZC6Mlc5uATQFZu6hwekhjzgO5HBkaK7yaGO2U +xS8m8QT/MWK2Wkqz9WFXjlerLAHR/p7GiWSuRdB5PaBYut1gZwAJvuE+X935qJ07 +nP6jAwuITvxBC3rUKBRcksthAVbVtfQgBw9OfgzUwdEuGf1a0GhrqjRVyC+bwLwa +SVAzu5nlMK0raxxoxTKJ41mWe0wFft8voz4XXCIg+Z9rnNLhxeeIyVCIFFbbDvjP +taYLmadGlxc7Q3aGphWwlfMqVmkyz+8OYzBpXxF3DH6cUFCuqQopiA+7YaJ0GJ7U +AuUYrFWTIAUXcoXpZaZfpirqpoCx7GTkKr8uxFer2KnJcgBtZaz4nIGypVyxYZw/ +x3yMHoCP3MgYJLeMklb0NQYpFhm+yl8T2emxXddDdDqFWKQJXrk2V2XTCsCLPNnQ +qs98SR/jnirtGctuNT/gHxBQjzQfBZNCMEfHLSbDjGt6Jd/KvwsfDC6Pc7a+RrRN +TBObD2KPs0i4mTn9dYjeesRtC9GLucGZ/uT8CsybvXRY2kZi4GV6uHJuQoCjZ5+b +/9+TMIXoWSw9TqA4kesARL/ieUYjUAvTGFsPnNmvG414exjPMFd1bOO2PZM5PMHb +3bj9csVHmF/+MvbUEzJSIl30f+G/4T/8pd/dvQBWbr5rIEys4mLGc6bQTKDp0kwo +9SXblV71Ff97UNpe0G9/m/RhL9PxwTnSOg9aFhUHT/6oWBNeg0TqZdzHcC+nZQ1P +Jjuw28ym8PFigJ0jIwEDoErN3ycIMbvksw7knPbNnGLzsKRaJlawVhijxE5gEpek +fUyUdqo8VCvKkBv3zZmr/QcUO9wF4Ru5ZpHIL3e7K61bgB436hmWMUkLfAiNAxrg +VW0i5HCo6oIrVTyEp7SoWfM9pKyE45EIcwHWlxfyQumJwP8hXPB335yqfyEWAZ6p +9VScSwvvLbe4ruRmjhTsB0Y3i7P6RmDf+0iVnt1g+BG/PAEeR3c9/Zq0G+CXf2he +EFBHPAA9Q2MLp6cF+FNorJ7uLFqCFK/S+IyvPbcf6RFJyIzG+IJf1V4tvmzbh3Bq +O2qhmfaCSDrtPefgIC/nJxgBzDPfJKBKLsuIBQFeapXS3LeZmgZ5btGPy6xasdic +KScJQdPQpF1/bMYmFkf2gdjBxVcIuJE0YgIH0w1snhR9yWRaSGxUafQisOvdMSFR +rWC+9IYHkxNDfhdX+HO9ZMpb6IrwBnw4vv/bHanpp5KIo96pcX85gxwQ7tEP7GZE +4gm3/8fFigthmOM3yLyUdaqKCgjnDqdVVqVldqnNRp81KsXhE+pMdZQCrTU0F6gP +89bYgTA1yQfoM7TEGUz78zA2JmMo0dOIu2dwu3tz24/l5tDxkUVOF0Tx99RaMCix +kvQKYfMFaaBqeMxJViJkfjPxHelqDqk3WAqXhC5t9m56aBqWbasd5rO+7BdsoraE +00m7R3WpMSFSBEEeYTYMmEqyEORVVEqRa376AMWLaFtKtWLu42XFQvJjYk6vbCoX +ZTvev9C/RKwQkrK+JidNJA20haG403OOACxDiDHn49E09KgrMIl7B62hdT13OCun +1mMe7QzPpN2VdzglWGQsz2cyMMBywCiD+A+bPku2B3w0pGGHjKS4IhKiOLkNeR2u +P1qQ2d5U3RRk5BmLevQVGgeV2bHTZP7lDbO7GqWL0uvgWijayoqW/RYxiniXpCNJ +ZK6QwWKyFyr+yaipB8wRm9gmpJdawo4QHl/0T+05jgTHdg2O7t22OVWNNYF4fzms +TqPYYV7KEp3sGk0jrhtzGwz0FE57NLZwFyz0E94afX0ZmnjBLWoFzX5R+YddoOVN +pikf5RHi7iAO8fVurVkoiH0G6osukNqS7myeylU2VjqmmB8u/b09Up3faAHPjLEH +yZewcVoAc+b9x9xQXU7Vk4RO54+HsyooRZAtp7ZXIyPrNpaGpdJV8p4oBtCuM4SR +2BPncWvmmwqae/NFMQnaLsZcDzmgUoyObzwf/fiiXdCYMvJFyFO0wt8yspUCgyPi +6+mkVOhH2mSZc8PFZKMb8nPEfpLsXN/1k9ePdXx3i43tTVeXnyAfdNU4hIjKFfgD +w4aXAvIPe3oHr+gYgwLFgJqTAlr8Egb4Ae+OmJgSI5DCepA0pPI1fmoN+aWT4lWT +q7xUGgQBzw6xU4rIptQfrpNXU1TpkUqsNYlLL1Uu4o4E8d8rdToziVAGZGgusPcT +LgbgMY6TcZMv01BLzq9L/u5acKQKf+AOMQveVupBsSlpVQda/ufj59Up7tPXq9J4 +w8U7qmsHSjv+DczwgzbYn4oX9au2aC5VtglZdNIoe2DT3QMpolblU+V9rMLOnC+F +19EDLn7U+nq0ZpxeSzH0LXDcslN7e/pGwcPJymdk7KWCXf3R4mtf7+IKup2xkO3c +PmOZBYGmESiiupBGYeB5TjmPBRyW/ESaMO+/kwBqV0v5lKG33ATCSDcZ0gq8DOTI +hJza2b4tF2V1qbbAn7FbUHlgxPKvcITsH7yOA9JtdBTSKpixTR6OyV6UNq4xWuyZ +MgzQUHH2ydLrWSiwc5K6KKclsBznaRcMv8Tjj6Ph3soWGRO9dtiV4Qp52OP1HMPA +lX/yybLm4wCjXUetaRkH+bg0+Cfh6NLylXh3vgs/0BSXAVX/zJriYOhpVFsS4nAX +5GVoM/n/C1oEfw/zF7KA2lTDuQHHXJ46PpOxBNfdC2Bx6mcLpconSrHmwRpfdaGT +dd3aeyDZr7wFgsB/gFcqLuOG8ZFW6m0rmMwNQXa6nHImFgereYE0Qt2uSSTWgFH6 +T7NaCsTfUlUvrXxLoWjx7V4yotbnxCP9g0HClewDeXKE7EBYjI5C4QCPvkVfS+2K +GghKjCROpHGnIrl2sghAtI2twcgKA5+wj1Zz9H1O8t75sPYneixHOZCPwNspO6QJ +GSzVCKM/90Qf4io6l0l919/40Y7RxlDJP2c/HaIYXq9aXb/DTOA4EM7nTFMNuPjC +Gl636cfvYEyV1NPEpjWP80398wAsBWePWAJAwlkedw1+Da5ETnmVxG6oIFIS+O78 +2grviqXNS2nzsY2hPU/6Sbe/FGIP5z++6f5dWFNdS9hRUgw57nwKMNp9emhPMeR3 +mFQEqfGNJ6GG4mttCpwXqxKOiNV5PGEHG6X8fQbkBVTApNar0lvTUVza046B7Prr +1agCfgvdSU7ScInXy/ptBEUzmcurc/Qpq3APhyU+4ekCzbkHOz8p8CaB18xHatV2 +Z7mmYp7m4DAHQjCB4Ax85wI5IzB3bzfgw/uDkSgkyTcfAEdT5WLplfBlqf+SDg68 +eWE1Qe/Oa34gwHAdcCsKpg214I7vmTaUbjmWtgqhfTL/98b/trF9bVoZONjld+rT +FRIAIU0JBGBRWbadlAmTnt/vZLGGlopKehw4MzVIrv7HKDxF9dy27nXtGwWFHnd8 +DgFEN07e5U1yjd5OJ3hs+RLjozjO+3cZGxn+rodXktcOXjduA1nAaSZrFPlwjURM +EwnEHWCNlEw1NxlGmpwmuJetcbyqkCPia6Adx5yhlvFE6J+a6n1jRdrbABDfMJ9K +8sCCLRP5sdmrPfc/EoHDocjgyN+khJzfcGQxe0gTzploiMnZi43nOZISh0LG69zC +ApBdQo5ycR+kHM6YMpR46EUl6MbDF5h50leSatflVOsuauLzCVtPHN/1CjaJ+uij +H0yT4ZPFCynxb0j/Zk6J4PuZmxdDVRNo4K4C3vqF7eYgniMTaGbsD/gSubbCC2qg +bIBDYI9nxB7of7Qt8u8CkfO6F7KrFrCDNT2elbRonKl5Pddw6KVaDLK8sfauGpgb +i/XQaL5fVT31e5uGNQYgi4zvNb/JxvY4Yl2K7qwVWecXvgnDfxdgGCDoy0m0h4iY +ZFKunIIPZq11ljy+PgwON955Nwka9aJSkQDKo/t7HKlZqQxYpYyOFd2gsnQEx1FS +rfj5ZgbOYjkPumuBzI4Thfwf+e/0I5stfUukveXsoVmSGt0ta8euOKPogvPZHlsu +67k2DWMsxce9N8E11AgoSQVboYI6YU+VUudyrRdVhUEkp0HvuqxbSB0uz/E70uWu +xZr2gZGhk6yfMAp+yQcGN6dJv2u1GBslV6yYpr0BfHEuJnePHJSwmL9NT3TBrcyS +D/Zh9NtDnuQsbWZXKB/1r3XRqqTfTetDyz4WvRcvBmkoQ4X5RMVT8ywlGzKFieqC +RTUVCWNdG20rVLeuJcaE5txkVfTgSudIEl5aL7ERK45yK13RqP9W4G4YyQHUUVPu +y/sKOCrpGGJHPJBcG8Q0urqeRPq0DkLP9qD626+By9cmMpXW9v+Cc+ZWV33J6dgA +hbBRxq6ruom4UEbNRASkcfVDjqGHUnIhbHI2zgJhNWFvHYt1cJoYPWHXza5lxt/f +NFw7GOwNZolB1QIxv92PAY/PAE7bU+vAjOc93BkI1V6zvNdl93ehCxB3froP0pTY +o4HxVT+9scFcopUH7yXEOvMGXErS+a83ma+0Lp3uEC5be9TdgcmHlHEDfRigiMoX +y1D2lRhMGC9n1kiXsIA6zMnZD7rqlzK+jJSth5yAqFyJOLzD7KKXmCTL2wBpspCM +kKHHvq+VuiLJC1fjkGdyb0gzA4YjLkGIchI/gFdFF6l8/vzGGIXwV5UiranVu2y/ +2gWUjohF17zeBc3O6PaLZLPaPj3g5nFvKGXSj3E7Qj5gtjXdYq2hU9vfShFFCXzg +vtQS+wJZ/mdZAYrHh0+BQ5v5hggXvyQzTUuoxWyAp1iZx0GmGuqHf3gy7o8jwD0K +KfHSnIepMaBR9e+XYQBmvLfFQLrt2KaoGyc6Js7PXMloPl34AiEu1Gr2wftZU6tE +BKeBJ0hY/dAG8s2zuW5SiyMPvpttLLBmfCY/SvlI9tPCbLHMs2NUof9Kkh6n2pwd +PVyM+HLZiLkfxux7ET/J6QrjU6dVts2RXtHDOu1ZJq0NkCu+lj6hgxGTyoliUnub +RRW/+HIbpFYkOLc1LJvJD/TCylTIe2y4ftuiAIVR5vNR6ZnC9Xfwy9pNkatkeeuD +P0J9gJliBjr7lUT9yEzpzFCntIuU7Z8GRDxuP9dnka8phYMHeUkUpWUfUPtVk3JV +NhuaTllC2zO4cfdgo1pMi47tgC8cOwHzkrf6jA5NeRX9hyoHPpDNaKS1QqYYdXQc +r4MiHwVxg3aUAa0j8EQa3V/XPT4frKAQCJvXs3lzh1TrNcAz3r/IVujvNLo2bXmx +B3BTDT6WTqYXFxBoHJKUV+AkMzE3L63o+SrHlOdcqDXf1aDs/YKhZGCuWNS1GE9z +Fo6kY2LUsnTLuN/dz/fG+FtMuvLtwJKKDGX2LG6vF9Fi+uWEFFnj7+RViXF3EqL+ +qOIs1A5XWQeDmFtAk+079sTii0/AdZPR9myF7rNJQOc344Rx+y1OMr7jV08tFugR +D/N3SldpFOQ9Yrc8Ks+cSgYJwUGvDLSg3awNq1wv89hWY7p5UwRtntlz9Evmbjql +CmZE3sbYKupamNaS6rQmyf+Q8kEy17l9Ld34cSd1n2slrUin1KyJZfMZ2F5f4If4 +iXWxlfrI1pJ94F+Ud2n/nQOOiJ/qN08dX/N7qHqV7qJWs3k8eoE3/rdllfaeyllg +OvFuF2XsylR/FIPjN9MA81FfGWV8tWfX/RtHWcTrPutwrV/G/OERKLBvSsTtUxDF +P2bZewouZqUnjXUiRQoPRLPFHLjnmjwHAaVVSHZaHcePFz7pkSlyl7ANfXcl++KT +yK8SnlXkeZji9e7M/unspiI5DWG9HNnEItgmwL4v/XeurN1uPpX/bqjSrkPPASFF +OvlIYqR+Z8kjPuy8oWKzlapquqaqgGrNjvThX57fbLYvgLCpAkRPk231cVXMJBi0 +rE80nIMB14mt3qM+t6FwsMILYrqb78ZI9f9FwGI5eP4mHmPwL8TUfnXty2ZslPW2 +lswgh8GG13272+U4jXVYQOyhcxPuAscTsspUxZH+Lo8EdqpweinxjcGh+DBrQgFq +W+Cfj253dR+y7u0v3lP859gnQKVZk/UxMebGZ3egRniUAZDp0vjeVoqY9RJxqAQ0 +NY3CU/Yi2AOa5bIaaCBbdJGKzTlQdphwMec7q8hlpJwE8f5j2CUmhHrZ/78FsSg9 +uq1zDf9oeouorQun2P4FTtSAa+Hf3MHCTtRoqCMgtlzdBsS4ZUQzSlnqkzKUVJ/9 +lHUeP9MliDwBba2dkCEU01g+BThz3cRn8qEVo6NTEWvxOCG4VB9bWlIDC5UyX1Fz +MhnJe8wLIQwU4DwUex6UWc4yvWvy+VBZvbokmojiYQtFxpcnbhTuXaRrEuSjv9PM +ucmG2i1E7ACQ7+a5u8Ot/ssip+Ox6p8V16LLSKskDt7mNagTG/QYiCZhC3qFyIGV +SdjatCvVIeKx1rYtdslFGxD/UzivV7xPfQHWPU8n6wHWfAVjpUtxsXBJ8/xok5VT +vVVEz/t5IU1E5PImAF/OVfr1kk3/OvG+0cA1KdVplpXE5UocNZwk9+xsX4oHxoto +W8N9ePxRFdCxp+OyVNQVWEmXziJb5orisDDzbekxGLpIQdTNK/IeMsLzA1xxoSJ8 +VEKtk+Bwi03Mj1CBoR5L7zJQiUuZi7rcalYfG2OeOK6BlRDo1o69a5Z29Mry3Zcq +x5lsU9wMhZJ85m7yBmSGMgKO87kbsgzOB+vDWv56SLAWaIMxgtQksDQeIHrBBHmI +ndhvv46XSzfTIeNkSYXTAzjg49ibqz/ftuTWzXrpeoqehUlV0DGoLfXO04vdpkhW +dsZN4qkbKAISGYWETldWrc4SReKewWfs9CqYNoKhAxDI11efUhTLp5o4GD+LVO5F +Zo9B8wZ6AwxqZZkzY1q+GWKab4MM5K0NhgkvuO2XmNMkHf2Pd5BLG0tf73UAXXsj +iFFkYUapjW76sp8zO9+2um8pmV7HIDxNOfaLQVnJW3E+4Qtx4jVoHiAaR6Kr0JFh +5u1cX41mU888IKunnc8DkdSFu6DoiHwIM913nKfCtE+2tp/kzkN7GYoNBqEmhkP/ +RHqiN8KcbmlYZbo0gMjzSRRgnTdJhfTPctaZOylBGJKAObj7hs8c2LB5jxuHyRMb +wov/cqPMJ1Hqx+dGcCzWtdi+6M10596P6KHgfUIMWJKqxFtwEkG6TWtG3ivQpFX8 +w30mImFWKtZSx7XMs/HeR6OkzFIOjm6EAaZ7PGf6DjGrQNFKo1CuS/EIq1+x2v2E +gnUhPiHEdf4sHhOVuZxmniCvnfWOiWF0hj4XmmdqpWyFIi8kJEqU9o88oDnmSMCI +D9lqLQBrgX+NefvxHNxSul1ZMFLQ4weVCcYSyjLsjokoFwTNqlrWQi7rLPPmMS/Z +4O6PDlTEdm8DS7bVgGrTlNEKMWyAE4TEwPDaPaqBYZ/Rp3cCFJSWohpXLenNXJSK +XF5Xx5z6BkY1zGmSJMd1sUPGb6AwxEceywsxDj/5pd9fnwDvqV6d5+g2YaB6Iq0p +PcURws9MzWpewBYUtBkjn6+FQfubVM5Bkr/u/fw2UP1aJ5+x9GDWyMKY5pzxgS0M +FpnuplbrV/30B95qLVykwal/aC2v/aECUOmJsjwXZtAaqoAyFMR9eZ+W0a64k/Tw +v75b4G7jCa681kKeZp6qSQhc3l+PcIG37SFrfsrwLNF47CWEyNULbCa/8jabLTxm +Swxi2tcE0zAoyUv7tpAkN0GR1knA4W+k2Sg2iMppSQ2lV9lGFhTYbRohF95kGZWp +7oht+oD3Y2nwx4TWDA6JjRPHVKBQs6M4I8T//+fbfZgJiD1lx/ZRPrR7jtm7M80s +GDRQvtZ/aul9R0KHweNZHr9o55ywYELf/BNAuUYbH8qeacg0Lc0y69rep/Dk46Hi +EgVrc/jCSKLXm5Gv033BJL2NEZK5nsyctVYXBrk9rA3iO827jZDk5o1l/znOETYh +Y488IK1wfbSvfllyWLoHgtHsr4I+jVg2cgPWK6+ST87GU12almhMPQ2/7ifO52RE +h+ZyHO9+E9y297aMqeozrqUX/7Tr0u3mnJ7bWB3OVz/yezIjw30N0/8zkEM4aIHU +itVupvRqQUxKPBsld/QJ7j/X3oVDX2AWHRMMsKRpES8fWxttWa+DEBxSFCPluAQP +zt9BFDSboyztzdsIsB2Gtd6WRJH0ETOuqvY6/MMqPwWZy8FYOaiJFAZ/Vt+6T+jE +jFAz3dBQfMcYYW+CYt1943atXCm5V2xB6pSqzfCxmgjT/Sq0LLEooME7lY1Pwgqn +wj7VhDuAEw3KtgRheg/pqD6Sf+0pYNx90ormkU83dk6T9aqi9H/mSkbAP6y3OeHM +B99f/CorcIntsAQo+BT7CFx5Dt6kjcVBgtiEJj2J6JMiNFaL1xNQDw+29d7aH5cl +XuKkO3FjTuyAUk8yyllbExK41yxQ5/97vmKa1hO6jQFrsYmeLj18wX7sBkvN/IHs +FVYrverD4enFrQhRRkEstB53+a6UAUJAB27gUbAJ6o8HdCFVm6UuVAOYJOI+ZrsA +ai7akwRGuAz+e5NTYOY8LwOWwuXpBlMklsIgb7Qf/NzncoZALOONNjZ3WS+EwdvI +c99VIvuTp6CUg6BZ8JwU2Hff2jVbSyduvu+0ZC2FclymJsNaFX2pcudwFHJCd2Om +FUNxQ7oCefgu7jA+ji82hGhLWjEM+bur4GesVQdLXxvx1W7BHRJKbp94gaJpvcFk +PVSeN2fdbwavapm+8S+IbB5ciebPpiK890v5LBsDPxR4yjVcPwd6Ssw7lxf9jaly +8LnNAqRAlVbU5DZmItQDC2NoncBCxgkFIE52w7nq6oZk5THNunApuQrGp9TdB6DH +W7kWj4k2ZUQcoyr8L68grduQT9Pc3QHfefwhasfUfg1Rn+I3JR95qXh5zI+ZKmx4 +bvRrtJpbSUt4mtY1B9pl9smhg0Fj7nvBMAcZ12967FUKQQL2VBRcpdFSQNgFR5ml +F/imKA5ll9DeNDBf2nIMwWLjIQK/j5rlDlaG5cfvmdEjfJAIXf3WrIEsDR9Q4AX/ +9xJM7TKW27TqaRJeeZM8b/GAIWzdpFNbqliKR2JpHwqlb7dNkfV0qSVoXopQvpUL +H7bJMEDzn2ruokLzymy7fk/OldDE0Cq41o8VVXpExxTIfMxUlTVJtn50xS2yByg5 +NQfhwlpffq4xOljWwyLaHbq6VQYmN7OuxhjTboow+M0s3tQqA8sosiKjaAg/t6vZ +6HzlmSNoLgd4GOg6AABqblf2fjbD/4mpSHqsnbIIqtVktFtnzWiEU7AphIsztXSf +0HnB7l7xlJxEM6m5sQcP8FNDfErWMdoowBcJgF16QMS545ZqzPANokeWMSl9D57Q +ZNisoZsMZ3UG/rL6TCPMQnqHv3kH3Co1yl+DNrr8dGUC4Bhn6nVg6BFBuiNSeQUQ +uVvJ3PuYeC3NonjSMiRxwhkeST4XfEg9S/UZ+ApLNCLlM5ql5ZJObui2uKFzTAed +jATO9xv44JZF8S8Uva59mQvfC/+CjDNHcOOrrJy2d9OZPu8t4INZs83UTynUxh9F +2Ln3kYZ39yCTwSvB3+kV30R7/n0p1Cfdl7M1+fTW99jKXJ9m9pqjb9QhunOY7T12 +3yCdyTsdwX/PiTIUwM/pKZjw7xuQixOXR+OZpiHmvMnrzUSeMZsxj629E1mrRMhc +jDZb9ZnQOoLNggbaKTRuaE/DjpACpuMpi6jmnjVPAzMyGDOtQ7eaZx5lrpBGgv7z +pzd7SxXJn7ytfWu201TStZd0FyLjiAoYiSn4wNc1itbAhjirRLQxvHvaUbGmkAiY +kdOjeebjg4duEErxaDcRLcKRWk6MWeLH+xP3GjWSeQdalpfXRr1Vbs+ErAC5yWoO +D5W24qzZbzFeWngKEh/Aykchm42ZY5oxsbg41frjD1eKiAqCs+Is8x9CwHtu1uvN +jwxcA5VxkrxZlrVyorm2+mmnZ6LPwccUk8fSUnR0HtI+g/lEVrr72ho8qGVfUBau +4Muc7tg48iggb8Qs6wTuhG2ykBI8agGMV4aIjV1P1pBniXi3RamcMphUkSSplFPV +nBEx5RzaRyQZrtoBsPfGglpeOIwr61l6zgTtD6gWFwCvFar83mihzMzaSo41hDnb +K0EiIZxK3W9jnqV0QsCdKxb9PMrBM3ztAdtV6HJfsZzXxqD+zCA8Hz8rO8+NFgHa +42G6n4nEfXCUuzZOGgRMFSyCFZ3g5c8l+ayfCSk8PLF2Lx5K8cbu5qZrhepicJI0 +2+sEjhpf72oyNmQ3B1lUkRcRRgKWQGfRU5raris7oSiou5zmZUgScowNSyZHObGI +9cDxBrTWbm5kkS9UlXMBWs1Div//UJEmTnbwp3evZ6kAjunedxIjI420jIGD1VCu ++QQ9OZFak0glvOZ9L+9AbcnznmHrOd18qHazDPTwmpf7bAvDIWMwj3K/m+bXIvjY +Oifh6YR9AQiZuvkvFehZR0lFA/SciFejES7UOyfTqLPYDl0YC8tywGaGtNOrXc+4 +p9kYOeYxeKZO2DuOLr9rWgebSi8OCQ1MsigPeYintUOjwDHbWhLZ6tnnpA0/5+V4 +S9ozLh0Zp3P8PyyYUCmRUGEr7Ly5Ec4DM3iU1JBp++ecGCNy9HY89LCmBcicsMV2 +uqP/umcyi3rCYR49SlvGlUNFeKwFxFGoU/zH7MXSFMBUfulLy2EPS/kX9CdgA7XM +Fu3QGYiuyw+0SbBRAAxKACUPiRy80TUidmxSzSWetRlqdW3T/uOMDMuxZN/UbEsD +nqVJJlH/7VBI50cME1SghRw8Dn2Y0wz+pSIFOR2ybzRZuQnsQ3hMZ/tqzuyBsMde +rtHFl6quRela25mnvKDsOuk0X2f5buDLP5Z8ypshXxOyy/MM8aXbvclq/VvpHLu+ +pmE2cztI6bWlMfEABeev9DZ7zjZ4ADPpm22LULe4ZGZlnkR2IjQthPSlnDZQ0Tiq +s6GQCPZbvxTOfQgtgVIQgHph/j+AuYOGGZDrGTyjAe3jeZyaAPL9mAPvKN0v+egr +eJ7dmFnjojFPYIY1JGQ++d3X4AGG/08HnxagRwEsaKdpF+w2ou70cW1TupdSOg7Q +6as3IlWRmPJLExz75MLKcu6jdX6i9b0LkuSZ81PVmXcnsCFaia2acbNytLyAMtOI +ggFvlhaaOueElE6G4LgfB57Qler3ipEO3D8Phl6bRBob3nZzmMSEJjICvgMWS6A2 +y0klebEHZQ7nAuSQRA73/YKty5HuwKK5Q+u7n+kud/nYuB15bCC7HsD9AnXUZWY5 +Wuv5r3H/TDmmqcACbFLjjgBQk6xhqcJ4n8SiN+wLhCSRudPhuwmNppHqC3akHwzH +b235z+FnKmTcbODzqJdiUDgSR12doVX6vMVhYm8rSWms99gou2dcAEtN/VhNXh7U +ZpYY3fjcp+32aSu/C+8TT6IBnUZuHvHiE+WbRf04xCzkOs9TxAz4v/8op9oirgln +cPnpxYY87jlMcH6P/fKzT3qZlzpCYfMAeTkFWkwtZ800nDhlqWVDqwaaPDiTskdo +MgbdeY9En07i3sQLCpLbTPUuJZAjJF4Xd6veORjRM+VAkTDHDZDNjW/sOYylh7W4 +8v//tJwAnpJkmeqmnfB/m72ezVup3LGLM1uCqk5F3JgHu8Nh7cK5Agl36fx9osva +fZARlXRGZHnLgIOsjTdxJhP6vTcQ5LQoObb8ZVmDwo24/6c+RuGDnYq14paDCqym +TXUoEXXVrNNgr7dlKnEULogAiEvL/KU8FN4e63TaxrE6vPBV2yjQTukEBS7tayBj +8rJzExkVX8DGJYQiE5KSkK3rLnJCtVC10w2JEpvS8MbMRH/pcRHDKWJ4K7g8Svk6 +S9157mRQTxwCUeJ1d5lJs8UZiyG491OJQpdDb9Nvk8+rl68CL9/+ecK5JDVCZEl6 +bz+GHeSNKpxlKVlJBnJ1EnGaoLhGxxeozCWWbSlfrg1MYXhfBGEaz4f0AcLLZUQK +6dDoJIEJgsr+z4IUfKp8/pBT85jHoVJjynpIpJO5ry6QmKevONKpCxIwZIF0hulW +B7DF5jLNXyxoK3rYMm3h6TyPFjMRpFqOWRiikanwqUkr/Wla/pVCg1klxijXYqlF +ppu+uk2u//3IjJTsL7z+3qHAzkz7YNvU9sG/jnW0cdVPKxgz3I8KwpDB5iTWBUs3 +S80gfR0KvyPC/OCORhkA4uEYuEQbLeS7EjHDkqGHvJ9Pbn0zbsMyQXThcVyO3vmn +ZvpvTyIXFoKmEsRHNisDv2+1CBLtUz9jguQp7p/Ivw7iUaTIJnXqFghSj6uX0QIo +9Kd90aKBSMpIsIPEq7QWaaqg7KptvE1iGV82OXFTk85yqiaaciLl48tTFBIhY0E6 +Hs3dDe8E+dEAkJsw+E77TdUSLxII3aMPVAfcHZ6EzCVTpYalGAcI+NQFF929WTNH +lp62ZXwm7dibcSs4IkGt8+H9nkDWI1Q0Rm7ssBwxY8oU2B+GAqCWVGFb7zvU6cO0 +bzNypUiHBAdRBLOL97DRE957pmo59guaUA4NwZ528BpWbhLm9tVr13QUYhkirB8r +VYYHYsjZt2PY2+EdG4jcX3tJ15KVL4Q51a7Xpiad6dKZOntHY6AUupnx3OFGIOHE +Hojn0BwNRdyJMOnAXU4ItrDtOFtDBZb3N311uZPMHbz9xt9UMXCKC8cRF6ThITOz +osALdrnkHTNCABIxpd/Lks8pStif7/wU7pHKEO1K0AP+JNS1ZEHsxUEmUFcqr1or +mopzC/nYcEdzE3GXYfdboA+zT9lRDc/2nIM2A7EAU8Nb4x6Th4P/bhG/psb/WXmv +mH7P8X4dYwHqF1jOM5sYLybVUgzhBEzlsnrL3ueu2Qee/2FegJnAqzoHVaGUYx9w +K2OZ02lVF1MyECYVwO1FhG2L8UgUIfdBrecDMnIYTmRmmHAHC8jcrvhPAMnw8wJR +VuQ73QMpOLpSefj1jpJHwk7Co4kEN4I/VSXt+RwRARbb39jJOGOvo5XiJGg2+sAm +m/RSjkK0NRksoTyB+rnVtS8LbOIT+HAyE6mJBGWM88lJiWcVZj0zT3OXZeN3rcwl +HO0AVuaXLmoiv35QB38t5ACxcNgP39rjqiA/HwOMXasPDnhegITXW1a1AykmQKxW +Fdr9m0f6+iFTZMBL1hT9ucBIKSHv7uTOR8gMR4CC+VLhaBp3JEOjkdsxEuyJGf6L +R/AdoGjmyI7nlm/UI8zPYDW5eY4zaNMRR/R3ahB+s1hqZZhk13vuvDhgB+tqt3Be +ni2vzphiSe/5aXCP5tA+gQ+eQ+mS08tFWqETQXTZ9ppywyli8Bg/5EAmpllE5Yt/ +tF5fhBmsTJn+7OWASQajpNnhDrTeuQE3+055f0Fuceqivu7w2OEioOSFBCASVGjJ +lw5boZ/00YMDL9069qNTzvQF1Lhaw3JbLCdxRzRdGkAkL/cvF8kP5YRr1Ez8dELr +eGI3e+gX9wi/ZrhBwlPa4Gzg24Kk0H8Cx176L3/OsD/xKvFyI8rXkylgOAAMsOjU +GMsBUcclRhz8lAMQzlLnUCyBzHVcIvmW1tosz61+FTYl+PsLa6vmQAUnTu5nL2qs +yaneyN0jXqxAEV1dwPTpelAKG50J54OCEdtLeYRufPtYffTekawLDb/Mvd21OywZ +SlQT+4vlcVVa9rBYXEArOjSSPyAnHl4yj3k7bqxdDOX6TfG00m7rOnEVZui2PZIM +AQRCxLINtm3meZ4LrYr7VM0/zoMh2cpxP0CIiSSv+dxM7HOKiFEJTdH0B5VqnYne +nMRjOq/vpOtsrBU+aTZA83AqlKhAyhh7Y5fMg0ELjb3Tf44iuhj2WYdvSjJa2UzE +PqCvT+KnyAaAcKnCltXMYcFIZvpnlrkkc8Uy67kqkblH1rnUUjTuKZOBXE2Ks7zP +tmvsVqK6ONfUH1jTJYcuxHADOswewXi2r/ZbypPZtplw5tVxNTuJsjGKrkR2rsXF +hu6hCC1DwXvyrwkcyNgoNsu8+NirtxlYJNuTHcDdw0ZPGw4fJO6uDFWv0csX8E5q +/sTodrI0dQLijidtYku8eCeqnUmYBXjVexheqrTnuelIV9TQJQzG1iXeZUphNWgN +ss+NQI5ht4KeWgqDM8bQJG48921O/rR7zy4dcsxn4PuJQ/fM1CWYBpfXGEnB7msW +bSFlsr8IgFukjKkOTqSiMmHBaur3/y8pyWtrJNGJdjqUjgiFr3nOc0uT8XWNThRf +r7eTTANk+8OvS8K82LcpmXMzaUkJ7rtgqfg6+cyMDtqQalEh7ggFqhRI6Kv0n+4h +eI8gX+McyUJo47f3ew9qg0ccO/zadKn6WdTNyzkPDXxy8a1UA/8xZArCTVjhAoRI +gkQem2x3clbdBnUsByBMl7rzIgOhdCqx+dC2ghI8ummmJLI99Kdhq3vy8qlbLA4o +zauw/b96C/GPrGVkOxHDGkhc5MPEadWiJZNuouqt3Qx42lgy97RWRBsjBrgk60F8 +XxlUXQ8bmI/aBe4bBXLDqZEiALgowYWCvS1JkMpmjSl+vcCzXL7XypGqek1ZFrQJ +D3hw+JnDseg/8xsle6RUR9MjrGZZXsgTQ8ULAmI6PC8K7WCJGvpHSIw5FXqGDuJM +v3/yRRWHDDLIwKC+3doaIAVD07ww53URJ7lweZk4fjTLIkRz6U7Q9fEl6hZVH6wU +fUqRQecAXcdc+2ZUKmmo1+yp1RHLuo3GM25bEg2TcNQkuwcfCgSlGprXQEOOScH/ +xj8fzxi+l60xgPNdg+TcbAECfpXbroVNkv3rw+axN5TDmaYa3pNDbiGykDpp+u2S +4ussqfHGXTt3SeXnDlRdqyZp9QNbn7cCfAivqZyR0CzFMOpIZf+6u9aIGpAdQVcS +eop6iEnbUeEP5/+EmcEulIlvqh+xKk5ze2YUqE6W4psxozpcVdv/B4hlOfmVmMBf +IcWUE4qyea0Gcpg2BcgovgmYMomUN+QTCRUYCB0l69k8gwjrUJ1H+yvxGhoZdwkO +7PCYujnZF4MZOqN6FSsT2MrUwlX9W15KljHspARPuL7gcAz9qmEAJg== diff --git a/scripts/mount-shr.sh b/scripts/mount-shr.sh new file mode 100755 index 000000000..7b2048aaa --- /dev/null +++ b/scripts/mount-shr.sh @@ -0,0 +1,92 @@ +# ============================================================================== +# Copyright (c) IBM Corporation 2023 +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== + +# ============================================================================== +# Mount data sets to USS mounts +# ============================================================================== + +set -A mount_list "/zoau/v1.2.0:IMSTESTU.ZOAU.V120.ZFS" \ +"/zoau/v1.0.0-ga:IMSTESTU.ZOAU.V100.GA.ZFS" \ +"/zoau/v1.0.1-ga:IMSTESTU.ZOAU.V101.GA.ZFS" \ +"/zoau/v1.0.1-ptf1:IMSTESTU.ZOAU.V101.PTF1.ZFS" \ +"/zoau/v1.0.1-ptf2:IMSTESTU.ZOAU.V101.PTF2.ZFS" \ +"/zoau/v1.0.2-ga:IMSTESTU.ZOAU.V102.GA.ZFS" \ +"/zoau/v1.0.3-ga5:IMSTESTU.ZOAU.V103.GA5.ZFS" \ +"/zoau/v1.0.3-ptf2:IMSTESTU.ZOAU.V103.PTF2.ZFS" \ +"/zoau/v1.1.0-spr:IMSTESTU.ZOAU.V110.SPRINT.ZFS" \ +"/zoau/v1.1.0-spr5:IMSTESTU.ZOAU.V1105.SPRINT.ZFS" \ +"/zoau/v1.1.0-spr7:IMSTESTU.ZOAU.V1107.SPRINT.ZFS" \ +"/zoau/v1.1.0-ga:IMSTESTU.ZOAU.V110.GA.ZFS" \ +"/zoau/v1.1.1-ptf1:IMSTESTU.ZOAU.V111.PTF1.ZFS" \ +"/zoau/v1.2.0f:IMSTESTU.ZOAU.V120F.ZFS" \ +"/zoau/v1.2.1:IMSTESTU.ZOAU.V121.ZFS" \ +"/zoau/v1.2.1-rc1:IMSTESTU.ZOAU.V121.RC1.ZFS" \ +"/zoau/v1.2.1g:IMSTESTU.ZOAU.V121G.ZFS" \ +"/zoau/v1.2.1h:IMSTESTU.ZOAU.V121H.ZFS" \ +"/zoau/v1.2.2:IMSTESTU.ZOAU.V122.ZFS" \ +"/zoau/latest:IMSTESTU.ZOAU.LATEST.ZFS" \ +"/python:IMSTESTU.PYZ.ROCKET.V362B.ZFS" \ +"/python2:IMSTESTU.PYZ.V380.GA.ZFS" \ +"/python3:IMSTESTU.PYZ.V383PLUS.ZFS" \ +"/allpython/3.10:IMSTESTU.PYZ.V3A0.ZFS" \ +"/allpython/3.11:IMSTESTU.PYZ.V3B0.ZFS" \ +"/allpython/3.11-ga:IMSTESTU.PYZ.V311GA.ZFS" + +mount(){ + unset path + unset data_set + for tgt in "${mount_list[@]}" ; do + # TODO: Can use something like the below to find ouf a mount is in place and act on that + # df /zoau/v1.0.0-ga | tail -n +2 |cut -d " " -f 2 | sed 's/(//' | sed 's/.$//' + path=`echo "${tgt}" | cut -d ":" -f 1` + data_set=`echo "${tgt}" | cut -d ":" -f 2` + mkdir -p ${path} + echo "Mouting data set ${data_set} to ${path}." + /usr/sbin/mount -r -t zfs -f ${data_set} ${path} + done +} + +unmount(){ + unset path + unset data_set + for tgt in "${mount_list[@]}" ; do + path=`echo "${tgt}" | cut -d ":" -f 1` + data_set=`echo "${tgt}" | cut -d ":" -f 2` + echo "Unmounting data set ${data_set} from ${path}." + /usr/sbin/unmount ${path} + done +} + +usage () { + echo "" + echo "Usage: $0 --mount, --unmount" + echo " $0 --mount" + echo "Choices:" + echo " - mount: will create paths and mount data sets." + echo " - unmount: will unmount data sets from paths." +} + +################################################################################ +# Main arg parse +################################################################################ +case "$1" in +--mount) + mount + ;; +--unmount) + unmount + ;; +*) + usage + ;; +esac diff --git a/scripts/mount-shr.sh.encrypt b/scripts/mount-shr.sh.encrypt deleted file mode 100644 index c5ed09837..000000000 --- a/scripts/mount-shr.sh.encrypt +++ /dev/null @@ -1,71 +0,0 @@ -U2FsdGVkX1/97joDHr2PfbtM8603L6QRNH7YOTwyx6+WEU5Emp9YwRLWVFdZPxrn -GfDQXpdmqZ0shGjAPjfhA7ZMVz3qmWZHMr2eCMZhFkoBT4PjBYf/GVfW751LYT29 -7EbpJZYIkXE45VWEw+79zetmPr8xxQ0A7Or1prUAqzUcjSkPvO4iUSvPpn7GcubI -3Pcmls6W+Jd3KrT3POaIPFQsFKmp1lBMQGVEk3/4GGP3+aDhcDqjuV5BmqFs55Ue -YhE7Rcc4nGeCPbKTYe0ouU5VR3WVNJX8+UIvzL0mH+01Ctjn3PcRODzklTtqOeHu -VBGHwAH+NRo36iMw9wYZJZkT/ay8UlVySvKrAQ+13lxpKA64Eo/p20rv3ht84QI6 -lc8JgNzJ3RKo9YCK+iz3UPfXqPXen1q3vto36mCdvP0pv/Epz6PtQaGxXO4gEFlL -jmP6IfGurMa7xiluPxjXFhhb/9I0VDTFk2dEsalu3qSsvabRjJtL0bSW0jWFMTAp -+Q7JH6WjzSJQ29qCR/NmiPjFbl3AYIcQjtMJdziVOksZl43IRM1v6XOVfvo4jwOt -C1iWfqIIcLzsW7ccFqDAoGFwDWBdqenL0KFhi0I5Goi/HJb1pJyYylD5PChOv8rL -OnXH4jDvKEeNnYv06rmtR8rEzngDLt6x0wKQx+EXmEfXKiYbl7Dfp2tFLbVdSNcz -j6uXgIFaRqOHzYD8S0J6FpRz4dTMK5hvKxDfhzg3x91alQ0zQr8+kzU6kOWq5FF2 -2WfEhFYKZTGnL+E3wgkeN4an6gd1mp2AGJm/aTDBIZaHe75uUpfg1Uqm1e/GhNNs -wsJlNZDEB5FjAi+11mrGUUuRTYSVLBVmkYvMbT2nG2QiUuC1tAsH/Cv7X9aEXAsT -oYNn4pAlXmHllws12e1RWaOuxkaX8R0rzfG+dA1DVXzzDNZkNBS06Oddrlp8X7Op -Beez9+PpvMzL3X2vuMiTJYLQi2kk/wATh3DHMP5W+9vHxGYcx7O9G1foFiQZsy0y -sDaH53ge6KNFnS9wBACE0R9vEps7oruTCvxAmvFXv8bXtJx+JmDoFcpg8aN8dqvM -B0hZJjtzpdPz0y/f5cXlkhgGXRORwZir/okdg8ffs5WqLFHZO/MWYdWH5/ws+DFs -eMVlxafcmTvgtMZjRNO0Z1s0TMCwDCBy+mv3YbyFLzBcRZFBuStHfdc3QvOQNJer -J09UpweP5N7aQnivgbAstAMBIHR6WfRBmASkDaN/YBKF4PiPWgOokEywiVXAF4Iz -ph2Z23q6b+fTC3tBYRdPqm55cx/IkLMLJmmOcGPCBY4fv7V9QpPuYZvPu75esCC3 -vzPNxKKmn4r6dwwwP/R9xiwAIPS8ZE0rtD1xzi7KECsp2AMGFtg8zu+Ve4xjJUxZ -HHCcydRxlb76rz6UNHMalw1ywtx7afJdzoAwQzef0tmnfAI1w5QIi5bB9fo7VeZg -SVWVemnTamFm0SZcgZmow7k+FiFsZAxwe1JE0FxKblgpdBqWEUYkfosYsmll6OsD -HOj64MsW4+X25ygJdBkBjFsY3xa56cnzAyIkuZ7j0ScuNimQzNpdt3PT/khY6CY4 -vSkIja4TapNAoTwlTwLYBVLzS0pT78VS8JsX0cRFNCsISLatGD42APeCcGDzXB+5 -fo0+nJsjI+hLTMG1gpkGpZugChtoHMmRBMcHsblPh1QPsyD3RqStpzNQQcM42Xw1 -+ZrVxoeePj2boMmu5R2l3Rx4O7bBDPbD4SvPSbxX6s7dXeu7Hlox18LMA6lhMaU2 -8DmtcdUWRVTNsi7KifAh168qH13M9xBRUXW5n3K65W+v1Fp87mqp7sbXFRXRK7Gw -OBTScWf99JRVzJiu77DsxJqilLo72KCSbVuZkYQtDMnagHukPWplomgmGkaQf1QH -bAMAlcFMm8Oy6EDQjsAjtxj4hoFMYilN6svuau4UwImfy2Dw7glRppYJOtsa/lgY -89K5JntejJbBxObvOhatH2Lybi6xOZIBDueYzrefXJxb9l2Ul7U3AUhFlcjCv0uc -fTwJrRzVpzOqkzjWOZpS3KSSu/+5LCN4+75Y7/2A2OicpuD8dAVGeSPBxyiMziVN -aBt00GKdLe6WnyofDTnEmEQ+LLdS9fyqPjBquHTlZyF9BRwU1xM4hd5IrNlmusq5 -57MqDaL9RRMgB8+bx9TTSUQQRAeSdumk/ekUykMFG/aItjFNR8PEth340Cz0x/ZZ -n7xgo6mam6yTnYVcA0lBfhRd6X5OzZEkj+y9zQmhKUBe8NKI+bdxEuy1McdCeRb+ -XRI2SBiW/k4PiTZLDwrP2UmeR58fO4OHguyO0oPfnTmbYunqKNg27TxuN6R/MqBn -YDV/gS1RYO+lWSfll2cQO6cWSBCgJVw3Z+XAlGGRxiK5pMHictO8Tq2O0w8AFWzm -85CT7Xut9fJafdom6h6lK299UKmVJXzqesw5keYUv4yI4wAgdjJsQ1450jMT4CkK -WknpIHbwcloCM20OL9hVhTWtUbUKQGWwnauRabLDPSoh582JjCAAxi4keSeWyah+ -z4TGFi/ft+ZAFxhmspSjILUye7GtWJgXg/GFK0G2YoB01bXcINduXVbLfT6FvWOn -NzXgzEQF9/BfEkBQYKeAbUyOP5izaV7fJ+UPlub1Cl5/v96zstDfKie+OHHKELWA -3WZbxcRQdJKere+ELXCFRxCEqnPXY34ZhwAiPYswKRBPIkBbTrtdRYaiEO4XuDtu -g6hPhgQRvG66usxHnpGxPONtDLN54uQbkUMnLYV1mNQXCtaFekBH4J/qZGI9rpMt -7Q8bE8PLCX6aHs4+v2IqMaEj8LhxvyXu0Pewo9TrGAnL6TBg2rA6mB6UyccR3fh4 -pYaLq+P8oUCKm/ZzI9k6zFHZIsLsBtIFx+KTZMYbpX4TUGa+0Xd9DgEJ7wH83dTS -hdoJzL3dEXwfnGeqVVhEYLUi9NXhEEEMBEyPoH3XLRLXXHZEPdhDSg3kB4md/Q/a -Xf151A6UsETsRdZaJBNK4RsjdcIkheCsE3rAuRiErm8rAASThxoAJ0VWfq96lMar -D/OkvVXgEYVDRc04DyH4W2DQTqvi+37hjxxOoxUVK4sQYR4wkJHYhgvyYCzi6ju9 -1+ZBexKErbnfuOgDpBeUsyzLKrSuCFMz36RpslgBGe7GeKRfUjL9ehaBIhsDntbJ -z7aIzsnUls/yCA2O7Cc8tTvteZy98HW9krgqmD/QR/EKNq09kw5EiKlq/dUpSgsm -Ql4x0N0nFirqnocRKwHWJ6eQokD120/OjB6m0unYp4l4fRll+fjOeZOGkS1NM9nN -OSXe9vM9IGR7ZPrdeIUJjFKQVYzDJNe5FhbHqfaMCzbo30haEfXqqKY7EhiUQ80V -5Jbqu47byjFmJ7mtDoQKvpMXCH8edxaZqZU8sR1zROjLUo56UJa3g/lAmXrle9mD -b5lphZBOuYuzFIWov6DEUoTmUuyvvfMJLMtPBdo4bHsZl/XfQzFyDmpZqyM1OHGU -TgUGfmBDbCXYsXWCE6i/l3eTrn21LXRHwavaeuex0NNrT6S6fQdSbh0kTGiXpgG8 -oOPXO37BZkogXLGgHnPlpBaTtIRbKiq1fkgdNNec3aNhsI4oUCQycrL0tjiynwEd -GRCHsNdMpzLBN10xc9zOmn+Jfth+qw12COSCL+3OtduKOkFOTr9GtPU3KtPww8Xw -/8XZ7vQsyArdluRD9SHDcv2M4EIZPI/LqRSxvGFLzsu55W0/byW72dygv7dRSvd5 -geZXPO0WYDbYdJX3Ix5oCIdjWXDoU3MY6/GFEFQULdRgvGmPN2gl33IkBCcHOJxQ -JVglnmqb4W8v+veUbXgYOVCrWcRSMC7xhzMkxmt92sxg56K+C0JnLBx3848CzFLx -O3WlizRLYwV5sYx/fHvPiNgzq9BpCKozkbuh01wKo7n7Hs8TOBftzxtT97lQqSWJ -NdzZTGw4M3Vx5Qh4n7rLO0rG7XCJ6QcNf3rzY4Jsb40cMC530+4sZc8Cnoa34Q3Z -3wRAcZy5pjqOp67siYT5n1/c0TraiT065pdjwAO1GRQ7lBq1R3pc8rRx56tuHI6S -vUFptTIBTiYTfGUTNNV+plbuHc/Ke3YNWWI6IGRl38CmacOi57YbWKQfodjABgQn -B/y7EIGQmmcLyotiAR+aPenJxcODWGk8pzyiPhOkdnkZahQibxRha9ozv7kbx9H2 -eJPzW42Klv2y1EHjl6goF6ZOb4J83WwejTeBH51PudryI5ALm4t52dhDLE0gjhvG -s9OtZyIu7LaJ33BJHrZGqZOhhugbVqep99kbK9lV9Yapq+f26nzuQB9PENtizZlB -jF1zJedzfWmCtzzD+WyNR0UFaivew9dqopZUtxTEdEmrxyBQjBq/58qS7TF1U0th -Eh6KGOdwDf48IEgfteQKqRodkUjxdk7P2iymYB3HuBnB64DrM2j0GrPw3fW4/eBn -5vA/1aUyAQg5x1eFb9XMbf9umaAyulCB diff --git a/scripts/profile-shr b/scripts/profile-shr new file mode 100755 index 000000000..22da6d2f7 --- /dev/null +++ b/scripts/profile-shr @@ -0,0 +1,230 @@ +#!/bin/sh +# ============================================================================== +# Copyright (c) IBM Corporation 2022, 2023 +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== + +################################################################################ +# Global vars +################################################################################ +zoau_version="" +python_version="" +python_path="" +bash_enabled=false + +################################################################################ +# zoau case match +################################################################################ +zoau_choice () { + case "$1" in + [a]* ) zoau_version="/zoau/v1.2.0";; + [b]* ) zoau_version="/zoau/v1.0.0-ga";; + [c]* ) zoau_version="/zoau/v1.0.1-ga";; + [d]* ) zoau_version="/zoau/v1.0.1-ptf1";; + [e]* ) zoau_version="/zoau/v1.0.1-ptf2";; + [f]* ) zoau_version="/zoau/v1.0.2-ga";; + [g]* ) zoau_version="/zoau/v1.0.3-ga5";; + [h]* ) zoau_version="/zoau/v1.0.3-ptf2";; + [i]* ) zoau_version="/zoau/v1.1.0-spr";; + [j]* ) zoau_version="/zoau/v1.1.0-spr5";; + [k]* ) zoau_version="/zoau/v1.1.0-spr7";; + [l]* ) zoau_version="/zoau/v1.1.0-ga";; + [m]* ) zoau_version="/zoau/v1.1.1-ptf1";; + [n]* ) zoau_version="/zoau/v1.2.0f";; + [o]* ) zoau_version="/zoau/v1.2.1";; + [p]* ) zoau_version="/zoau/v1.2.1-rc1";; + [q]* ) zoau_version="/zoau/v1.2.1g";; + [r]* ) zoau_version="/zoau/v1.2.1h";; + [s]* ) zoau_version="/zoau/v1.2.2";; + [t]* ) zoau_version="/zoau/latest";; + * ) echo "" + usage + ;; + esac +} + +################################################################################ +# zoau case match +################################################################################ +python_choice () { + case $1 in + [1]* ) python_version="3.8"; + python_path="/python3/usr/lpp/IBM/cyp/v3r8/pyz";; + [2]* ) python_version="3.9"; + python_path="/python2/usr/lpp/IBM/cyp/v3r9/pyz";; + [3]* ) python_version="3.10"; + python_path="/allpython/3.10/usr/lpp/IBM/cyp/v3r10/pyz";; + [4]* ) python_version="3.11"; + python_path="/allpython/3.11-ga/usr/lpp/IBM/cyp/v3r11/pyz";; + *) echo "";usage;; + esac +} + +################################################################################ +# zoau case match +################################################################################ +bash_choice() { + case "$1" in + [b]* ) bash_enabled=true;; + * ) echo "";usage;; + esac +} +################################################################################ +# User input for Python +################################################################################ +usage () { + echo "" + echo "Usage: $0 [abcdefghijklmn] [123] b" + echo "ZOAU Choices:" + echo "\ta) ZOAU /zoau/v1.2.0" + echo "\tb) ZOAU /zoau/v1.0.0-ga" + echo "\tc) ZOAU /zoau/v1.0.1-ga" + echo "\td) ZOAU /zoau/v1.0.1-ptf1" + echo "\te) ZOAU /zoau/v1.0.1-ptf2" + echo "\tf) ZOAU /zoau/v1.0.2-ga" + echo "\tg) ZOAU /zoau/v1.0.3-ga5" + echo "\th) ZOAU /zoau/v1.0.3-ptf2" + echo "\ti) ZOAU /zoau/v1.1.0-spr" + echo "\tj) ZOAU /zoau/v1.1.0-spr5" + echo "\tk) ZOAU /zoau/v1.1.0-spr7" + echo "\tl) ZOAU /zoau/v1.1.0-ga" + echo "\tm) ZOAU /zoau/v1.1.1-ptf1" + echo "\tn) ZOAU /zoau/v1.2.0f" + echo "\to) ZOAU /zoau/v1.2.1" + echo "\tp) ZOAU /zoau/v1.2.1-rc1" + echo "\tq) ZOAU /zoau/v1.2.1g" + echo "\tr) ZOAU /zoau/v1.2.1h" + echo "\ts) ZOAU /zoau/v1.2.2" + echo "\tt) ZOAU /zoau/latest" + echo "" + echo "Python Choices:" + echo "\t1) Python 3.8" + echo "\t2) Python 3.9" + echo "\t3) Python 3.10" + echo "\t4) Python 3.11" + echo "" + echo "Bash shell" + echo "\tb) 'b' to enable bash shell" +} + +################################################################################ +# Message to user +################################################################################ +print_choices () { + echo "Using ZOAU version="$zoau_version + echo "Using python version="$python_version + echo "Bash = ${bash_enabled}" +} + +################################################################################ +# Configure all exports +################################################################################ +set_exports (){ + + export PATH=/bin:. + + ################################################################################ + # Set the ported tools directory on the EC, see the tools you can use, eg: + # vim, bash, etc + ################################################################################ + export TOOLS_DIR=/usr/lpp/rsusr/ported + export PATH=$PATH:$TOOLS_DIR/bin + + ################################################################################ + # Set the editor to VI + ################################################################################ + export TERM=xterm + + ################################################################################ + # Standard exports used in EBCDIC/ASCII conversion needed by tools like pyz/zoau + ################################################################################ + export _BPXK_AUTOCVT='ON' + export _CEE_RUNOPTS='FILETAG(AUTOCVT,AUTOTAG) POSIX(ON)' + export _TAG_REDIR_ERR=txt + export _TAG_REDIR_IN=txt + export _TAG_REDIR_OUT=txt + export LANG=C + + ################################################################################ + # Set Java + ################################################################################ + export JAVA_HOME=/usr/lpp/java170/J7.0 + + ################################################################################ + # Configure Python + ################################################################################ + export PYTHON_HOME=$python_path + export PYTHON=$PYTHON_HOME/bin + export LIBPATH=$PYTHON_HOME/lib:$LIBPATH + + ################################################################################ + # ZOAU 1.0.2 or or earlier ueses ZOAU_ROOT and not ZOAU_HOME + ################################################################################ + export ZOAU_HOME=/zoau/${zoau_version} + export PATH=$ZOAU_HOME/bin:$PATH:$PYTHON:$JAVA_HOME/bin:$TOOLS_DIR/bin + export MANPATH=$MANPATH:$TOOLS_DIR/man + export ZOAU_ROOT=${ZOAU_HOME} + export PYTHONPATH=${ZOAU_HOME}/lib/:${PYTHONPATH} + export LIBPATH=${ZOAU_HOME}/lib:${LIBPATH} + + ################################################################################ + # Custom terminal configurations + ################################################################################ + # Append home directory to the current path + export PATH=$PATH:$HOME: + + # Set the prompt to display your login name & current directory + export PS1='[ $LOGNAME':'$PWD':' ]' + + ################################################################################ + # Run bash shell: + # I have have seen many issues using this version of bash to edit files on the + # EC, for example of you edit your .profile with VI under BASH, it will render + # unreable, for times I have to edit, I type exit it defaults be back into + # the zos_ssh shell which does not have any issues with VI or editing files. + # I generally use bash only for history and running commands. + ################################################################################ + if [ "{$bash_enabled}" = true ]; then + bash; + fi + + alias python="python3" + alias pip="pip3" +} +################################################################################ +# Main +################################################################################ +# User enters choices for zoau, python and bash +if [ $# -eq 3 ];then + zoau_choice $1 + python_choice $2 + bash_choice $3 + print_choices + set_exports + +# User enters choices for zoau and python, bash defaults to false +elif [ $# -eq 2 ];then + bash_enabled=false + zoau_choice $1 + python_choice $2 + print_choices + set_exports + +# User enters choice for zoau, python defaults to 3.8 and bash to false +elif [ $# -eq 1 ]; then + zoau_choice $1 + python_choice 1 + bash_enabled=false + print_choices + set_exports +else + usage +fi diff --git a/scripts/profile-shr.encrypt b/scripts/profile-shr.encrypt deleted file mode 100644 index 195e4f62c..000000000 --- a/scripts/profile-shr.encrypt +++ /dev/null @@ -1,197 +0,0 @@ -U2FsdGVkX1+Ro2OkwERnqQxRgqE22raIG0nuZNU+s4yMx5bMTOFAwynkvI/D4vy2 -rb+wSWs/HErbUngILXkAWf8Slq8hjumbVIBoST1PyBWIaw61cIBHVRZymyfwv62o -NPNkcegh24mT16s8zGjHItZ65hxbgfWo7mKZ0hO8sC/fCzeTPQEvc0fuz9KacTez -sqtaz/ZgTpUG+Oq+a+rhhabVfBwpZQ/U0Rm/U4MnY3Ixhzeo6xWIETMWO8oSxc9N -GUVcNk5soo6rwOvuL/s2tHm8rIyHsxSxDtoCfLy5euIwEOriPgvjJ9fLV1KVqmQR -WHAxVXuBKOu+vA2mojPuPYkf9ltYW3yLDkZJ41YSl9KznjlEEYhFgQQK3Rkiqq9h -C1MjNwP2DalMLE628BXT8AUc6HODuHUuSq/fB9UiWqrsHal16quyOTekb+Q96J8j -V1JzfNGS90qhQTWUlAQ36NdD+60bchtf3+A8enaMqCequyaY3PLDDi+OYFgVRwcM -pXfcLfDeqmb3/Q1Jd8RBoz7kZZ19os0ad07HOJEPQHkFSNUfSEGwQ1vbk7R77zj5 -u/gyK3/RIYLSIMpgbdweAx78tYmnABXJ/8yBace/MpyHmUo2jxL+hweLH/aT6xzD -MyISNtK/Xw2aButSh3RsVegNDz0gkid/c4dVb2lhZ+i/rDaY2kZ+8vYr4A1nEgXP -n8A4IhIx0u3ssEOnLoTvBMBurxi0aiqyknKIm8GDveA9l9d7J8x41s/SXWM9mI9j -2/rcXbTQRn8nCmFJazdhsnY23b1O1cdHhw2MMkzQ76yE1iTMHLHS+nDJiR9tf1Sd -da1MXgdOPV+rtCvMJqFPizBR+lRWHEj0w3zJW0dPIbZXySUgZRuFa1L6sQ6DmHDf -Noo46fdezm+usBoJHwvc/K23yhYDrYAPPlsk5eK9mQEvlgFLnPZtduKMwys9NXUt -a10rlLQvMC2NeP93dZCabYmCYnFdIAl/kTqpjs+uC8kFDsdY5W5sS3Z863DBX/pa -mB4aR0B97mfWaGHRqxXYwqehvYmy6Dhq11aO4syKOERqrzQ+xw9NrOJ66SmxZdMJ -ckPx6yoIRew/3fdkGIdgtr7Jd0vDkoKSouXAOR1sdq42ZrLse/Ee7tSiSuy6kwvX -BBzPxKoNyKaNXTg28W0N61aJGn3BrEG6Eutx37VgC7wSk4awjWhxS0ld8DjIbxFk -hQK3+BJqW5BWQQh4FkHfvsWgKSDqzLWbXeMc0WacQJ5tSaMJs+Wc6GTbhs4zcEdm -XyBYsl4eagtr7KMNtLJGzFd06QQzb7T/wbE4Z7V4A35LyGcz/KAr8jPaFT+Z5bp3 -FgukvBkjCaN+BHOakmXPph7zeF93akIySrNm/igFauuhQ2DgqMTtMOCg2/XewoKO -ZbOWcxDUPy2yqlSg0srKYHX+8IKLX96zmmryQA1PpLYJmVZQ/t2muyCoMV137DrD -F9gy8Vul+pxkmXQ9ezWeeaBLjGU+7JFCOMJBkFGbUt4Qsjm539FrGZ3Tne3YZ2cu -PlFJlpYjZAsBnhIKXY7AX9vkT5L2rjuh4VZrLNbcuNDxM2OnH414r5uWoyieFFG8 -W5xhHJ6i6icYjab9PeWkl08hLLEL160ib1oPJFdNiFVL9H1OBhpO5krYLMG3/N/Y -B0u+u3ujhxP5xZkCI0mvMZWO4zDlYUp8w7ci+iD9NzOK4DUJgXmxTswtLEa49mOw -hVD9ZuZByfwZChPCJ+7i27CVrZWir0O9ffjOD4AZ/bpNQ6/TAyS6W+W5fmiT+Rfa -49YbVmuPFS+uEzeDJ/rfpbO32bJ9fGgLICyp90dCss8lBoS6BHGurkf7XimmSkT1 -3onAZ+rs6T+pYrcAqBhscjNQpxjX2Tyc+6M/lxqsNFfG3Dd2SE1W9Box/lo3NS9/ -EOvNOevjjpBlK95tv0EiShPrBROcBvNxbxlteCtfVsMXldM8Oz12MtOZiT3nQPSV -TVbH7v/QNN4E/2Szb4sAO86Rwv6fGXTKsmOM6k1yiqi0y6j9WivwuKn1IiTpAajW -Cf8uQrXFJq0nzklq0b+W9xGHlCQJIHGCUFRHFqLIf1WHfi/jqYsjyUsAGMtcmNoN -1Ggzh2tX8vEWH4Qy0g+bEixlfn0eP22UWOyxmeWpOkDtQuBFBau0Dtns6I917X4Z -INRls60WtPoxWwUJqWfw0PnvkZef6e7qzzT66QSmuyMtIseTq3DYHVd46Rgq9c0d -ZjO6+irreIfmzoZuzHJnNuzj1mZeladcbzimGjrB9mJggspNBrM50q7YcuMv+y1o -mQuAIPb6L5u401Ti3BLMC/+fsgj1gT90r+cWwckXmtZZVaYB6xP8K3qlAzcf1lP6 -TXmHRC3wywS3aDFLHU446KU0NwjQ98Jnc/f1ZNYNTtnlx0Cn8JMihXVFTDn71fJl -papY+tk1LGp+ViZzkLoQ7jrjrOmkQ/uKnTxqITpBRlZrv0W1upz5nqdET4bd4D8n -+5r5ctsMCY/0Aa8VksgRxsE1fp14XItlH94aI0UFv+eZ1gJwsal5qyTP6phtP2FB -i2mZP9tSjggkB15YRzlQ5q9NHG8uBktTVVC9E4blDTVaYXfxe7RiQ4eFNIKGbkET -4a07zb+wMIOXSxhlj0+qK6LJubZsDYQYSSjaaNL4hN4dn5Zfh20WtuF1lD6+Ujcb -Rqqa3CEVe763CSolSijrmHOd/GbfKQneeo1nYD8TvwSiGiI7iIcF1Y6LcMBS1fIx -FIfiwCjbwlS5C6lfW4ZsqMutbH33WjLlgsibrPG5t9kIRm+AeGrdpu79Dr4VYBjc -FPZ11LbN/aKaokjCXomK9grUpIgT4Xxra4yLVprUjgzfhDBdWVO3011SO77OgCtQ -xJYC3V+nmh26VoAPHUEw1Ep+Nr3PA59TdI87tNqDq1fG17Nc9cOOeUbgu/84oP2J -KEGfZnNsryTF5rhIagFqUICWvJLn+IuOF8HQbtYnvQxoJLy30YoFij3QX6K3f/42 -BokjnHXI5RELnVWL/Bqfh90x+cqIaRKUWzrK0P9dirTDdpiiI3wIXcUhysjvoBTw -Gd2OQTAWyQH9K5bkiaeloJaemPWcnbB5ELJTNnAtu1uNIP/khHI+rGO9NNfNTMJZ -MC3R7B/VCa5MhTt24l3dHeiEKbBjG5Va8D9zEVj17DQJWS8/TuqeV5CTCoBM+Z0n -pae0ldCJEG8yuoLEmy3qcTZC97RwSqHoJ04HxFIzAKon79o6TaNKCeuSlWP14X6/ -uuNvFvEj5MCkJKH+P0lZuAtcheZQqA4V85tWg/a2K912CtAdmaCA+kz8DzV5kMNT -CNZT2mnGP8C4Zzi2ntX/+NuitZIeVZYdxyLUj0dIv/5HL8lRxjRonI37Bvs3zu9o -edN85602I9fkI2mYCgI2IeFRb5zulrDIurrlrupl4r81h1M3uaFfFWtZOqtWr5qp -Sr61dCr7Mm4+/o/ex20tl7YFT3UcdvLZYV2oX3UVIqnhCQfEDDFmQ1kX+0sime1w -9yXWVlMhERrlA0VxABj540SVtA/gpGIP6/Vk6qcyW1k/QlKldtqHpMDpnX6JF11j -EbXFf49NExU7COmWb3TJZqTa+P8mYaMpVI6pe331gyIcuFuVKB46o+LtbVYhwfpA -g8O1xb1qsFZI8D+6sccq+4C62EkFDxWyyXf8BpmgWR1+asiJOquNlZS+//IXyCx8 -xLcePsMS6CfXY9uZcc+JYDdKDDNDYokaXt8rJeo4AFsgyzp2yZ8KiVbian7k7uc2 -5JpxMSB9wop1ZR33TT4T19cuHeedvnSFvJugu13TT52qoE3Ho2IOQ9kGUpjSBXXP -K38dQvKT+NlTmhCspbrzdfvAQEQDijPhITXR5GvqmxkwZgDGgygvjDxRMiwdRQXw -ZL3ifs6XE71pmZmK6MpmT1Bec75mVoiX2bNqBZccWJC98jMUCMwSdA0RAr6PCk0E -VB2bRxes0dnuztnV4zuJFGOG/oH6r7QMwKp61KGwIeZhwvjeLIvYWWzO/oM4a5Nu -cm1fKzp4c2J+FU+ogJWSF5Ek76oYdo0E6RSB42LBAPxa/HjIBDPAR9nqJ9j5mhvT -4ZYb/1PuoBc4uwCG3DqbxZTjwT6TvIdLBkHMGkYw27TtCO7KksepUKzez5jwOXZv -oQCXjop3J2LbQ4NkASStidjv+zCJIO9Cl/G1izcgAWSyijb1lEtGnD1lguR7rraP -co27WJZ7aI/OtSCGM0ezSfOodT6Am6Bg6AnzsZ7OCHadbsmp7GubZLCyE0dQf3BF -c8cHqk44h18oCT4ieiuYBQ5H1MQWDwgCAj4Ji5DoiPhg8FTr36PIeaIARl5L/cWa -lRbv5IW1+LRvnLamOBJ0EncQvdt9ohIigZHpVrSV+f10WulM9pFvRNOKADl0qKbB -ylln62Qg/gNSt+VQOBH5AwBDm/PTyKkSzn9N0Rs4mnSiRyjjOPi3sle/d6zJNnUf -RILRFHcfMGeKS7m9GTOVRCyhDTP5wnBVYr6YXn+o96Cx85Uakp08nqCpQqUerGss -XW8o0aaWSjv3M1HVf4ceMgtAqWDCcpUuCrO+l3USFztsy/Y+yoP/kN0a4UCFn0Lf -ccrvQdHxJo9qfqGFSa1W7dlDYJKGDq2oSyREa5J00lmkroexhJUPIeY8PTKE/SC8 -jaaoFBG4PPiOVZhEgO6KtjiDqukrBoEgvu891wU3i1vhVxnFmnkpEKlZlnxnIHsJ -eHXSwhHwzXrBVp7osbgV9SkuIasxGL8WEgsjQviBuopvNXMUBjP711K4EJ/T8NZl -u7tAW8SuqTPdlk3XdyarE9gnZceVtyj8j3RmLV4tW8jfskYouXHDsiosURbT4TDV -xXenZXNvKLOL3SjYwZeae1kRACLq31dcIbdF/l/W84c1vapME6CEBIyhGdhQG5HS -ftnJqB+0bTBbguKCnnFuNv/thnXqF1SmyKJ4TXg5FSOumkIPmqhaQIueZMDz3/kX -X+IBh0T4x7C8TdbPOa3i0JI047jr6ML8yXUt4vG0aGPFAOeuyunvbiFU+ARVaTJ4 -W33TcIoTXW+nphl89cPcum4pdslO6qO+kcBrsR1hI/7aPia3NdujIctFunil4Ryz -xl8o2tJIpYSoehnJly2/ZCN0sk7cxmQVrY/KizsQX6cGHhFDLhRV4lvJpYoCfnxo -EP3rmLWQvjuzxr4GG/TG++8eo2WSZ/r+d/qbNvK1lRHqKhWlgHWbw9xlQc8wg4zd -K9pp2vIU9j2/cE3v/v1VjuTzomNGOl3wZa6soI1qi0q1kGqr7UOitqrfMccrZhdX -neogbgnIIYXjJQQHW5uvLJDTg3BhCmaJCNyi73+8QL3jASJMGpQIY8pEd91IuDVs -QiSU1sxV/BTdULUR3Zy4h/nwC+wHUxhZbunGCanPE+a8ZM+KM1j0xSR7qUafGwF8 -V7OeUhKowoNpJJMCt2r32cFJLLGqicjGr8Ir8U/8VR/g4XiNHOgUlIVca8OmME/T -6T0LJ86f78uQ/cApB4IoW0XOP6bZ60aRRuN/Laeu7dLglXqiP2PNyMq1kpKoKOPi -GZOnwlPer0uRVL2f9EpAH+L6qKM4/h9FD/6bs1xdADJR3PaDndm3SjwRJuzTL7Nx -vSKV6EuzHc5JDGil9QA9DtmzhxlIYoleIE+5VJsn01Sv9FAlFSPo3r5+GTtAoBrg -6xrg0irx7n4eTY52L6cK0Ml8HcS5ePCGNBbKF1w3bBpK8GoOrBevtW1mfTKgyaHU -7FLVf2z2ei5fFhlYdMyX1iUoOv97m6WBN9HMmYXmmKkFatN3xfMXFNYW24dNe8lT -oUojB+mJDrOFD6NJqR+LcHIRB25XTY4oEUC8navhEafcEQXaodEdfp4tjxBMVV/8 -UUuTjkIJQc9wB4ndIErP9jwGzfzX7NZaWfKYN5oLD0E52U16l9qlSKTJU0JP8XYD -nLxKZoLUP3KJE4a8mJOp9INOVWqdJNNfToD/gBMtcCg0HwFk5VIqgy2Q5QsaNE/k -vaCa+MORH+jPFJSRseKg7qvVGlieinsxT+ilNwtbmPjp+3uzoD0BIad4TJ1kKcab -5eXW2K7tb8IjRgq6l0A3GGlu3aIXE6IQiM5OdDZwiUPPp/X77mfK8KxcDkZTU+W/ -zwMwKisnVaG5jPC9TMAHnZapj9U8y5lsXaOgH7fY6Ov1VBwelyiL8qvVeNOo5HR6 -ag8l5qP7LOGRkoPESNftCfiJxBS17qN4nTCepp4Ku2w0VLrz9N9mEz2Ul2am4LoA -X5EjmqZnxwiLRtOKhEjnf382VXmb71QRLB59jKhmbBmOq/DZ5EEJyOZRIHDZ6f50 -JvTm1NJZNQ66ZE2elUrFbgYqArCzimKvj+INmOz7CICotSL+6xPPLwq+69w+mw5Y -jfNJ0T74W6q8yZcAooSlybxwOjCd3RmaV8Qo4eBc/ew3UYPP1Kd96C9qxsqTPSvi -iNZucGvTOQV70p3vSSzCv0Doyi+mrTcrE9UITD01urx5zWPrvn9a8hhvtYtLYkOs -5kWGqgWLu/pvT+dD5jKU0yh6az90j/b/g7fjk1vVO2XHaTYs7gO60hSUWfPKC6W6 -cqeywNPcnv+bYZtoUwpdOLI4S7UwLTOrFxF3qjr0CV8vSzk0PuBIrTr0Xpwx1Q9z -NyJNInrZAEXiabxqruFHziylACSXsNXuCFo4FtpZkpmx4sABfNn1BvwF6lemy6iV -sH0I9mXtsao7NodERGrHSGjyDaohFlgmUXrIzypDcnGc32XoE7+AvYAiYgAOdzX/ -7rRzTDTlTv5ss9fHs2jtKSPERNx124Mj2K9VX7E6KGuy193cZyzhAEmrYKOaKOY1 -5jyX2hiQleB1xjOWg4cNhZTdEoGP4DTcQ13Egx6khUBALTTJhcJU/FX0JQJb7kCp -7/A83zaNAkcHzc4FdnhF08VECQRn/ZUvU2e342AsmxZv9opI7CSD9UjfCXYvfXnN -od7wrLGsBUChrVHLDGst22GM3vIFwC6gAKvCYrf3WL8wFO3KJGXT8AyGHijV5OyG -X7OeFHJCWoKU63G24FGHm4rKRW9S0mn1sxYVfhf5THjJXs8RpsUsH3+G6d3paSb7 -v58VAEkW2p2dyLA2cjqHLaaGJBgYuqyb/hP2kVu+lUMnzcyabncrcq0g0anvrA4F -R+WdNtV6KvWYToiOBQcpl0176f/11dGU/sWtGYkbYF8JXhf0GUVadM4VAC6BCWtG -fC4TTN0FuL1ZJjRVf3mLGOTQfPzag8Y0b2JxvdQo5dJ5BSZfB42LFj2VEQhKcAM6 -rj9/rJj0TcsG2B8wSrJn8KpvyIXTk8CluxMRSZszYOF44EitBv/gldotIS2uTWNN -8t1dQvHGoV/71U1QfzQW1ovg5B4HGIg3z21ju45bbQYvR2Ay8//vjHQmfvyE/PKN -a2JnX9tf6ElaAkr6/mrvskafHTktE+ttKQwBkwgfFUu5hX74wcKY1/JCq9wOujA5 -HP9xsGpKB/U+M2MHWuCqhCx7hBYeb/9/iJuFLFAPJfr/X3cJq7LRqPNmAJRQd5nf -yZod1fQ4AS0VM2bWeN1+pqDuZlGu5bOJ3S+9/B1AvE+QgAXWWNf6dwS2KvbiHdih -y0EYjldzN+fY1UMt+z7djTq51sdBg/1dS4xxsu8Bxdud1yf0su1TsENfkDI3SsP+ -V8QG0/FjQcA8ajCJSLNo72SJ/vH6QxtQ5fcIbb26vdAPY9ar5/Db2nypeGABpZnF -BNyBhqj95iO3JZf1fbsBdqFPK56NNDVuWqLMSFi9nYBuEeetnY82FNTvlae/Otos -qx5LN0rKoD4q/nLS01lrCOJRv43g3vdPZA3DvQaGO5rt4bkFibWf8bZ2yetHhIIa -dXKfi9tRkTXzYFZlT44DSf439CrVrOf6B8wuy4K2xHWft1jIMVnRuXBjY9fynVUd -ulMMPTi9WGvrwSPgGRlql4i7wXQv/efCrZNgQJnxmNOHofvs6ju8VKRh9jiTBOrd -ei3bOspnIS/kuCylxQUgJe8u+JeRJu+YahVShoCrW2MtiDFP/tYxOZmKVYf1/k4j -aHpulKoukPlr1QlnY8CDaEDXk9S+ZnPjxLjbvgBQaIVrxJj1OQteJK6S04fOjF2j -epyEJJmoFqL1FVVhOKhVBh0M/hq18CN9rCicpVfUVjIRe8zMf97HcfVeSUQ9G9RA -2TVl5qCL16B2SS4LD+41+6kHiKkuiHqXRKekUqDQCFOp1J10thiZWF15EA6ms+39 -ab54P+ZmfzNERyvzTTc8bgEhiepB1y0YrgG6U6MuaYpp+Jq9TPwkAVqkaJbe3vtO -21gUdQFW3mzUlGhWnCDJShKiCPwtF6zYfwcG4yERxoNUYCm+JYnRgRIpMQXyARyk -inbmp1mc+DJY1CrfQu02mrcvRDopfndyxruIt/RKdTpFqh8VFp8H0fCQWJdfy4Gr -zSx8rRZBxL7I9w7tWXXWuRcaZGAUEAFZYM4PN1aAcpwXt3rePli5Wr9Lg/5dFLyk -fbhaswP2i/x3HZlOZB5xb25mW+c7PeSGbx8A0zrGS+/oRy7Zy4ONZ2cNy2PEg0x+ -CimyDQqdrnkA8r4EYndlprsKhq9r5VSuzgfZwTYHqAzPkWLDGbBH1NC2vBM0Jwz7 -5y4dctP39I7s2K+E31+xVTOlpY7xhhvXjERmjIOKQ+dCNP87ay3fxd4pmM73i7L5 -qFkvNdgDe3bIfOsoiXFpM462cAyYiX9otH+eyvE3O5CTbYO3jLA7DhnYttGB2XRz -Povj8ODHY6F8P8HNVPU0wl18f/cqvdkhrndOfAocOdPKTUgmw1OW6r8gw/AeAoY9 -q49plnYTPKRNyk0u+kKmcudzMpzvzbE+F4t4iTYWI3NzdUNdljPcpKz6PegYw9yU -DtXYS85a/PSLfaAuoWHh7HkLL34Oles2a/7afyKRaLdefoMlTfMGAJn6smy3wVTK -APr5Rl2PZkhVDEeslB/vAd7We7oYo7JVyrpT2NOT9kYxCmvONnFu1jDJ3x3vg12I -idfOnfNTmhxab1Zsil7aIoqWgE4UQh5CsbChzGDcMZCGFbnP3hxRVPxFR6GCfSVP -Ja6W0ZWZCK1cXkayz9PywlQ5JdJKmjdH1qHci8arBPp89OTL9CL9k+bOPTvp5UvD -3KMEaxoumwsrCw92OO3nptHLXv2mcaFGC/Y1YEvvv0Bpb4lD56Kv/6Uizv5tud2E -0WWY63fJ+31C9pweMAZVMEHZwSI1iq5fgNfFGMnS1h8dxJLolozEgHZY8lTaL4fx -BazgRGbsEX/Qb93Ld91ZXXXLgMcBNN5shsa7K/IsmMUGWtiUzlUi4tBIit5QmOvj -ICQ7UxkzLE/LGat/8tk8823NSrLHMTetjhpHY8PTQf497E+rAnIvcHlZok3HTiHX -YMxGSJxOJI2ff0x3byiwZPf/dgZwK3/LDn5ck4LZx1qXdY6/3Dg6vFxCP4avJdnj -rpJk5BstSwca7l8d7PwfAVz9tKYxzhSBDcnk06o728KNfdYALy6h05jSbxq3d6Gm -0+Mh+ydr9E9uRT913TOVcsyXVwRrIHqBOJfxlyG/0HYKpN36WfXMDPeDZAzZ6Xbq -7SPjjVhEyPCK/gCOHDriBppWHDn/GhGwS8Jv+fWiSyLmqSSXtgO1oxrGo19ugV1S -K3or6AbePviDUkk/1NRDokMyHeE7TWZmzllmiZ2d2rwI+/+l43zWitStRW7xH9Nd -YFo343oWHkX/se1jB3EKHmSmDmLa4etmKXN1oIxzl7lRedRMfIb8RtqZ2lX0pcw8 -UwRcVAe6tMqRvUNMArZO55AUJ/3PyFL0m/OaxUqDt8JzsM2i4V4qfnoELo8vGmzq -UWYyDlcpPBznvEPRGa6mDq5359VIFAQGlySpXmFIwoTH3EkirY3j7DnmFC1d5kvM -+JggJElqkY/QUM0BrXrmUEzKH2AxjfffUKJNBrufTnkzyJLxasUbBO5f5wiIMLXU -IlEZaepep5tcgIkQsL5Kff9p5vIL2IrikhmdYCX9Npy8g3Ks+18mPXGb5wG2rgts -ZzDg7y0tyjMeLM7YZzHDjMD4qqIOmirEgfHxEGgLYn+fW4N1JiftzmU8dH9Z/Edr -xnNT+uxlNboT3e/QMD00/xDsI4vl8Wknk3YQEGvD6a89sBUBkEg4sAUMA+WZfaUP -Z/D8M9vhDsR1V70LtOWA6fld07az6JBer+A8M1Wq55hjVrR47DNa4xy3CeTxqLji -Yk3Dp4KAWJMAR14i2aLWTSshwkdHg8Sy1adCvrR/NdwOiBQq3uUAuA8jiCvSxQTu -LVU/atAAtOFRIGWitiDDn5l2Zyqqeo4dgHlB51cNvv0xRc0csi/ijddguEX+Ok10 -xbhylmCKDh3h7Lh1fA7kTdfW6joTSnIvJHJR4BRU7+41bLkxWZfLpkRivvFGKFm/ -lBx2HPZo9DAa5owqkNb0NzINWwSlgR3NQ8wNGmjHsrKiHGzi3vgNlYvlX653X71l -2n/5iDmmPR0uxJJm2nux2BhzYHr8rT09FLJOElSJLv5yDKkrgkNm9HNdFB6ZDsEw -as3rz31RjCnjp6SRUcw/+cjVrPklN/CFH3t44OAUnF+zGCrMAZ2Gvu7pStnYEf4q -cpLyw/KxCJSwOrwp4+Oe08y+6YH8ja+HXzUlsIrtJRaBdKnXbzUNX43Hz/0Ku2ov -D71ID06Oj26FWpE0EzfAKzPXxwKOMOK0I588v1EzBxMPMAkX5vKNv1ibs0V1whYl -ZdQd4Cyudq0NPSzr9TGRlCd6i4YsNqfkvXqPjwuXTNxzLZTxhUFIrkOwFKZ3UzeD -dLaksE4Aknpe699BHwfbDDk5Sb5tXl0VHZvSoCSckLHryULXysINJKEWbPDiap1S -nOxpJUTd5FI6bOVM1UzHtg4E+M7n1+zCIjKRi+1JFKTzvzKJEWyhIwaSdzyCScI3 -Pl7DsleNI9cnAObdHSg/kZpqJyO1NUgHm3X1KXoI3A/NtmCOfIcg1vceur5G0ZiY -SWEYvKgqNqZ+FpxgQuTt1hXkyaLyvFs1k54MTurPE3ht3oZP/FvDP0YS9W7U3yIB -5CrRWMOKq8j91ollzBwCPuGQ7+TSSHVVJafkYsAQdVe5y5rdMcWfHLN3U9hDQnlJ -jSii/4+AtuUR95BVojtIaw/FcfH+LS1Wnersy3SGNJ2j0wMSwy9oqFAMdWYFGjZU -iMNMt3BFxAaQwLOz+WWAFh6PMissdM5B5OLzYxZ7gQ+0ohYp7pO+snwfQIQHjzJM -CC33CqOjiB1bJP0tCnPUCidXwuqHn78o8hzesexx9HRbtScdZehj5R0ccyq5yY4C -qAMj6mTvrQ2/EKalfMnFS+UmGyD9W+ZkgMF62HIh+0x2Kce4e32mkWji9MRFBtL9 -Yhn6qMciA8noDdaQb/lnDgI/kBMNUSFsQcCynkHffvRWumaSm2+e55Mga8LpLpMb -47JcYBhcNn02S8znhP15z3b96SkRh7xHAGc5ALpIy/k8GNjr/b/bWACy2npliLQz -GFqwMhRSNmiKZ4v1CPaGE+2/Dy5DWpq/7sobQFNCOnnDO/BYX5/vZYzLZF37chSd -FaPZa/0pXiGr3z3O7xZGx9rLon2TgITzoHIxw/vSNt5saI+/iyeGfmN7EnfTzNfd -Iy9DTS5FqVddAdFtPc407zb8AsuRVW5Hw38Fri4B1Vu/JflJPfIGr2HIgll4wsFN -JKtzYcx34/Xldak4wNfPIlj/UoQ3zFjj5Ov/01MDO3cafvoL5l313W/g2v9k+J5k -iLA8gu693kAqH6zL3Zn0jCS7aIoTdN8P49GHs5xGAMR3n65Kw/Ow+9v8KrQ6JRk3 -ugMqqg6wsC07SiJ+zJOsN2HnYCX4xhI8RnDixzdYxWx7kmtMbgLedzhafgMoHx+v -E3vWnDegioTGMuoIjFGpxu/3PpL+tkHypx8AWz0PLumYsj+8KlV4haNBu6v6w1Q0 -rtkZ3NE6ywu82mVrMiD+wUOx5F0cqBp8IUOzMdUmJmz+NQxepBSXLJySFqHOBUGh -yRvVLtVefdg3UqyW2oiL3jNRUksZmZDEcM9djhWNJE7wmbIAoE/gH5fWulavMvp+ -3MVdS0KXmQGXiXqK4VF4yspoSdPsmG8VnZO3YRH+FEJPzjV8oN0LaAMAHNrrw+YN -4j/V4pJkVSdEYVyMJq1w+rICEds1KG6XGngryyh6OlR6kdQDzcUDm29IY8Ml603K -/LeG1roUjniL62u1UeZngyZilY4bi1FETg7+ckCwfmAwLyH8SJFEvmpPK/H2NrF9 -w/AE+QHTL8BNDbM4NBgqmqfrKggFFf/eFE7AxrUceMZZXBfG9N8DfhOiyt70JjSj -9+UjBeRLuZ/JhjrKo01bFPjtbQFoy/2yo2IqYPYCo8G6VN2y/qQKHLs7IQ/zyShQ -XNhPezFO3P0wpw4QpDfkQJVyrCEzoEohmlCoiSelFgMhHywvFowLA3xHNM519O7i -ZROF16uDE3qcOcIPQA4Me4g5ZCM8aouWwRbV45zpRMV4gnMoCBp4VUIrnkXQmfHv -hlV5uZZE9PB5Ms6Xb9GPRbpFkTbFXaan2PoetESI+cfw3HtjSdUv2w== From 8d89965277d92f76ba1926238ca102f0a3ab6fa7 Mon Sep 17 00:00:00 2001 From: ddimatos Date: Sun, 26 Feb 2023 18:11:28 -0800 Subject: [PATCH 026/495] update profile created for mount points Signed-off-by: ddimatos --- scripts/profile-shr | 40 ++++++++++++++++++++-------------------- 1 file changed, 20 insertions(+), 20 deletions(-) diff --git a/scripts/profile-shr b/scripts/profile-shr index 22da6d2f7..c827f3037 100755 --- a/scripts/profile-shr +++ b/scripts/profile-shr @@ -25,26 +25,26 @@ bash_enabled=false ################################################################################ zoau_choice () { case "$1" in - [a]* ) zoau_version="/zoau/v1.2.0";; - [b]* ) zoau_version="/zoau/v1.0.0-ga";; - [c]* ) zoau_version="/zoau/v1.0.1-ga";; - [d]* ) zoau_version="/zoau/v1.0.1-ptf1";; - [e]* ) zoau_version="/zoau/v1.0.1-ptf2";; - [f]* ) zoau_version="/zoau/v1.0.2-ga";; - [g]* ) zoau_version="/zoau/v1.0.3-ga5";; - [h]* ) zoau_version="/zoau/v1.0.3-ptf2";; - [i]* ) zoau_version="/zoau/v1.1.0-spr";; - [j]* ) zoau_version="/zoau/v1.1.0-spr5";; - [k]* ) zoau_version="/zoau/v1.1.0-spr7";; - [l]* ) zoau_version="/zoau/v1.1.0-ga";; - [m]* ) zoau_version="/zoau/v1.1.1-ptf1";; - [n]* ) zoau_version="/zoau/v1.2.0f";; - [o]* ) zoau_version="/zoau/v1.2.1";; - [p]* ) zoau_version="/zoau/v1.2.1-rc1";; - [q]* ) zoau_version="/zoau/v1.2.1g";; - [r]* ) zoau_version="/zoau/v1.2.1h";; - [s]* ) zoau_version="/zoau/v1.2.2";; - [t]* ) zoau_version="/zoau/latest";; + [a]* ) zoau_version="v1.2.0";; + [b]* ) zoau_version="v1.0.0-ga";; + [c]* ) zoau_version="v1.0.1-ga";; + [d]* ) zoau_version="v1.0.1-ptf1";; + [e]* ) zoau_version="v1.0.1-ptf2";; + [f]* ) zoau_version="v1.0.2-ga";; + [g]* ) zoau_version="v1.0.3-ga5";; + [h]* ) zoau_version="v1.0.3-ptf2";; + [i]* ) zoau_version="v1.1.0-spr";; + [j]* ) zoau_version="v1.1.0-spr5";; + [k]* ) zoau_version="v1.1.0-spr7";; + [l]* ) zoau_version="v1.1.0-ga";; + [m]* ) zoau_version="v1.1.1-ptf1";; + [n]* ) zoau_version="v1.2.0f";; + [o]* ) zoau_version="v1.2.1";; + [p]* ) zoau_version="v1.2.1-rc1";; + [q]* ) zoau_version="v1.2.1g";; + [r]* ) zoau_version="v1.2.1h";; + [s]* ) zoau_version="v1.2.2";; + [t]* ) zoau_version="latest";; * ) echo "" usage ;; From f04e2c908b5af765180d8585966b0b7c693593e8 Mon Sep 17 00:00:00 2001 From: Demetri Date: Mon, 27 Feb 2023 10:23:49 -0800 Subject: [PATCH 027/495] Correct f-string usage to remain 2.7 compatible (#659) * Correct f-string usage to remain 2.7 compatible Signed-off-by: ddimatos * Update changelog fragments Signed-off-by: ddimatos --------- Signed-off-by: ddimatos --- .../588-zos_copy-emergency-backup.yml | 5 ----- .../588-zos_copy-emergenxy-backup.yml | 6 ------ .../fragments/599-copy-carriage-return.yml | 4 ---- .../600-zos_copy-special-characters.yml | 4 ---- .../fragments/601-copy-loadlib-member.yml | 3 --- .../fragments/602-text-converter-import.yml | 6 ++++++ changelogs/fragments/627-all-modules.yml | 19 ------------------- .../647-zos_data_set_record_format.yml | 6 ------ .../fragments/648-zos_operator-examples.yml | 4 ---- .../fragments/650-doc-meta-data-updates.yml | 11 ----------- .../fragments/659-zos-lineinfile-f-string.yml | 8 ++++++++ .../enhancement-518-text-converter-import.yml | 3 --- plugins/modules/zos_lineinfile.py | 2 +- 13 files changed, 15 insertions(+), 66 deletions(-) delete mode 100644 changelogs/fragments/588-zos_copy-emergency-backup.yml delete mode 100644 changelogs/fragments/588-zos_copy-emergenxy-backup.yml delete mode 100644 changelogs/fragments/599-copy-carriage-return.yml delete mode 100644 changelogs/fragments/600-zos_copy-special-characters.yml delete mode 100644 changelogs/fragments/601-copy-loadlib-member.yml create mode 100644 changelogs/fragments/602-text-converter-import.yml delete mode 100644 changelogs/fragments/627-all-modules.yml delete mode 100644 changelogs/fragments/647-zos_data_set_record_format.yml delete mode 100644 changelogs/fragments/648-zos_operator-examples.yml delete mode 100644 changelogs/fragments/650-doc-meta-data-updates.yml create mode 100644 changelogs/fragments/659-zos-lineinfile-f-string.yml delete mode 100644 changelogs/fragments/enhancement-518-text-converter-import.yml diff --git a/changelogs/fragments/588-zos_copy-emergency-backup.yml b/changelogs/fragments/588-zos_copy-emergency-backup.yml deleted file mode 100644 index 393a0f50d..000000000 --- a/changelogs/fragments/588-zos_copy-emergency-backup.yml +++ /dev/null @@ -1,5 +0,0 @@ -bugfixes: - - zos_copy - fixed wrongful creation of destination backups when module option - `force` is true, creating emergency backups meant to restore the system - to its initial state in case of a module failure only when force is false. - (https://github.com/ansible-collections/ibm_zos_core/pull/590) diff --git a/changelogs/fragments/588-zos_copy-emergenxy-backup.yml b/changelogs/fragments/588-zos_copy-emergenxy-backup.yml deleted file mode 100644 index 752131ddc..000000000 --- a/changelogs/fragments/588-zos_copy-emergenxy-backup.yml +++ /dev/null @@ -1,6 +0,0 @@ -bugfixes: - - zos_copy - fixed wrongful creation of destination backups when module option - `force` is true, creating emergency backups meant to restore the system - to its initial state in case of a module failure only when force is false. - (https://github.com/ansible-collections/ibm_zos_core/pull/590) - diff --git a/changelogs/fragments/599-copy-carriage-return.yml b/changelogs/fragments/599-copy-carriage-return.yml deleted file mode 100644 index 6e61ded4a..000000000 --- a/changelogs/fragments/599-copy-carriage-return.yml +++ /dev/null @@ -1,4 +0,0 @@ -bugfixes: -- zos_copy - fixes a bug where the computed record length for a new destination - dataset would include newline characters. - (https://github.com/ansible-collections/ibm_zos_core/pull/620) diff --git a/changelogs/fragments/600-zos_copy-special-characters.yml b/changelogs/fragments/600-zos_copy-special-characters.yml deleted file mode 100644 index 3eb9c4247..000000000 --- a/changelogs/fragments/600-zos_copy-special-characters.yml +++ /dev/null @@ -1,4 +0,0 @@ -bugfixes: - - zos_copy - fixes a bug where if a destination has accented characters in - its content, the module would fail when trying to determine if it is empty. - (https://github.com/ansible-collections/ibm_zos_core/pull/634) diff --git a/changelogs/fragments/601-copy-loadlib-member.yml b/changelogs/fragments/601-copy-loadlib-member.yml deleted file mode 100644 index bd704d41d..000000000 --- a/changelogs/fragments/601-copy-loadlib-member.yml +++ /dev/null @@ -1,3 +0,0 @@ -bugfixes: -- zos_copy - fixes a bug where copying a member from a loadlib to another - loadlib fails. (https://github.com/ansible-collections/ibm_zos_core/pull/640) \ No newline at end of file diff --git a/changelogs/fragments/602-text-converter-import.yml b/changelogs/fragments/602-text-converter-import.yml new file mode 100644 index 000000000..24f719c26 --- /dev/null +++ b/changelogs/fragments/602-text-converter-import.yml @@ -0,0 +1,6 @@ +minor_changes: +- Updated the text converter import from "from ansible.module_utils._text" + to "from ansible.module_utils.common.text.converters" to remove + warning".. warn Use ansible.module_utils.common.text.converters instead.". + (https://github.com/ansible-collections/ibm_zos_core/pull/602) + diff --git a/changelogs/fragments/627-all-modules.yml b/changelogs/fragments/627-all-modules.yml deleted file mode 100644 index 9d7cec183..000000000 --- a/changelogs/fragments/627-all-modules.yml +++ /dev/null @@ -1,19 +0,0 @@ -trivial: - - Update documentation to use link L(...) over M(...) meta. - (https://github.com/ansible-collections/ibm_zos_core/pull/627) - - Update modules such doc defaults match module defaults. - (https://github.com/ansible-collections/ibm_zos_core/pull/627) - - Initialize variables to meet linting requirements. - (https://github.com/ansible-collections/ibm_zos_core/pull/627) - - Remove unused global vars. - (https://github.com/ansible-collections/ibm_zos_core/pull/627) - - Update Makefile tooling. - (https://github.com/ansible-collections/ibm_zos_core/pull/627) - - Remove requirements.txt because it is maintained in the pipeline. - (https://github.com/ansible-collections/ibm_zos_core/pull/627) - - Complete Ansible certification for versions 2.12, 2.13, 2.14 and 2.15. - (https://github.com/ansible-collections/ibm_zos_core/pull/627) - - Update and add certification files ignore.txt. - (https://github.com/ansible-collections/ibm_zos_core/pull/627) - - Update test cases with added checks. - (https://github.com/ansible-collections/ibm_zos_core/pull/627) \ No newline at end of file diff --git a/changelogs/fragments/647-zos_data_set_record_format.yml b/changelogs/fragments/647-zos_data_set_record_format.yml deleted file mode 100644 index 1f26a0f5d..000000000 --- a/changelogs/fragments/647-zos_data_set_record_format.yml +++ /dev/null @@ -1,6 +0,0 @@ -bugfixes: -- zos_data_set - fixes a bug where the default record format FB was actually - never enforced and when enforced it would cause VSAM creation to fail with a - Dynalloc failure. This also cleans up some of the options that are set by - default when they have no bearing for batch. - (https://github.com/ansible-collections/ibm_zos_core/pull/647) diff --git a/changelogs/fragments/648-zos_operator-examples.yml b/changelogs/fragments/648-zos_operator-examples.yml deleted file mode 100644 index bb6e4d29a..000000000 --- a/changelogs/fragments/648-zos_operator-examples.yml +++ /dev/null @@ -1,4 +0,0 @@ -bugfixes: - - zos_operator - fixed incorrect example descriptions and updated the doc to - highlight the deprecated option `wait`. - (https://github.com/ansible-collections/ibm_zos_core/pull/648) diff --git a/changelogs/fragments/650-doc-meta-data-updates.yml b/changelogs/fragments/650-doc-meta-data-updates.yml deleted file mode 100644 index 46827405a..000000000 --- a/changelogs/fragments/650-doc-meta-data-updates.yml +++ /dev/null @@ -1,11 +0,0 @@ -trivial: - - Update meta/* files to reflect release content and version, issue 433. - (https://github.com/ansible-collections/ibm_zos_core/pull/650) - - Update community docs with Ansible version support statement, issue 630. - (https://github.com/ansible-collections/ibm_zos_core/pull/650) - - Update documentation to align to corporate wording, issue 649 - (https://github.com/ansible-collections/ibm_zos_core/pull/650) - - zos_job_output - Update documentation to remove unicode text in doc, issue 651. - (https://github.com/ansible-collections/ibm_zos_core/pull/650) - - zos_operator - update documentation and examples, issue 390. - (https://github.com/ansible-collections/ibm_zos_core/pull/650) \ No newline at end of file diff --git a/changelogs/fragments/659-zos-lineinfile-f-string.yml b/changelogs/fragments/659-zos-lineinfile-f-string.yml new file mode 100644 index 000000000..bd5e0b269 --- /dev/null +++ b/changelogs/fragments/659-zos-lineinfile-f-string.yml @@ -0,0 +1,8 @@ +bugfixes: +- zos_lineinfile - Fixed a bug where a Python f-string was used and thus removed + to ensure support for Python 2.7 on the controller. + (https://github.com/ansible-collections/ibm_zos_core/pull/659) +trivial: +- Remove changelog fragments no longer needed as they are already recorded in + the prior version of IBM z/OS Core. + (https://github.com/ansible-collections/ibm_zos_core/pull/659) \ No newline at end of file diff --git a/changelogs/fragments/enhancement-518-text-converter-import.yml b/changelogs/fragments/enhancement-518-text-converter-import.yml deleted file mode 100644 index 691a57273..000000000 --- a/changelogs/fragments/enhancement-518-text-converter-import.yml +++ /dev/null @@ -1,3 +0,0 @@ -minor_changes: - - Updated the text converter import from "from ansible.module_utils._text" to "from ansible.module_utils.common.text.converters" to remove warning ".. warn:: Use ansible.module_utils.common.text.converters instead.". - diff --git a/plugins/modules/zos_lineinfile.py b/plugins/modules/zos_lineinfile.py index e72bfc6b1..7a26ce299 100644 --- a/plugins/modules/zos_lineinfile.py +++ b/plugins/modules/zos_lineinfile.py @@ -424,7 +424,7 @@ def main(): # Check if dest/src exists if not ds_utils.exists(): - module.fail_json(msg=f"{src} does not exist") + module.fail_json(msg="{0} does not exist".format(src)) file_type = ds_utils.ds_type() if file_type == 'USS': From 6b54f3e07fd0bad34205c3f1a09d45c1e98e05d7 Mon Sep 17 00:00:00 2001 From: ddimatos Date: Sun, 12 Mar 2023 23:58:53 -0700 Subject: [PATCH 028/495] Updated shell scripts for development tooling Signed-off-by: ddimatos --- scripts/mounts.sh | 70 +++++++++++ scripts/profile.sh | 287 +++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 357 insertions(+) create mode 100644 scripts/mounts.sh create mode 100755 scripts/profile.sh diff --git a/scripts/mounts.sh b/scripts/mounts.sh new file mode 100644 index 000000000..0fcfecb38 --- /dev/null +++ b/scripts/mounts.sh @@ -0,0 +1,70 @@ + # ============================================================================== + # Copyright (c) IBM Corporation 2023 + # Licensed under the Apache License, Version 2.0 (the "License"); + # you may not use this file except in compliance with the License. + # You may obtain a copy of the License at + # http://www.apache.org/licenses/LICENSE-2.0 + # Unless required by applicable law or agreed to in writing, software + # distributed under the License is distributed on an "AS IS" BASIS, + # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + # See the License for the specific language governing permissions and + # limitations under the License. + # ============================================================================== + + # ============================================================================== + # KSH (Korn Shell) Array of mounts index delimited by " ", etries delimited by ":" + # More on ksh arrays: https://docstore.mik.ua/orelly/unix3/korn/ch06_04.htm + # This `mounts.sh` is sourced by serveral other files, only these lists needs to + # be maintained. + # ============================================================================== + + # ------------------------------------------------------------------------------ + # zoau_mount_list[0]=":::" + # e.g: zoau_mount_list[0]="1:v1.2.0:/zoau/v1.2.0:IMSTESTU.ZOAU.V120.ZFS" + # Format: + # index - used by the generated profile so a user can select an option + # version - describes the option a user can select + # mount - the mount point path the data set will be mounted to + # data_set - the z/OS data set containing the binaries to mount + # ------------------------------------------------------------------------------ + set -A zoau_mount_list "1:1.2.0:/zoau/v1.2.0:IMSTESTU.ZOAU.V120.ZFS" \ + "2:1.0.0-ga:/zoau/v1.0.0-ga:IMSTESTU.ZOAU.V100.GA.ZFS" \ + "3:1.0.1-ga:/zoau/v1.0.1-ga:IMSTESTU.ZOAU.V101.GA.ZFS" \ + "4:1.0.1-ptf1:/zoau/v1.0.1-ptf1:IMSTESTU.ZOAU.V101.PTF1.ZFS" \ + "5:1.0.1-ptf2:/zoau/v1.0.1-ptf2:IMSTESTU.ZOAU.V101.PTF2.ZFS" \ + "6:1.0.2-ga:/zoau/v1.0.2-ga:IMSTESTU.ZOAU.V102.GA.ZFS" \ + "7:1.0.3-ga5:/zoau/v1.0.3-ga5:IMSTESTU.ZOAU.V103.GA5.ZFS" \ + "8:1.0.3-ptf2:/zoau/v1.0.3-ptf2:IMSTESTU.ZOAU.V103.PTF2.ZFS" \ + "9:1.1.0-spr:/zoau/v1.1.0-spr:IMSTESTU.ZOAU.V110.SPRINT.ZFS" \ + "10:1.1.0-spr5:/zoau/v1.1.0-spr5:IMSTESTU.ZOAU.V1105.SPRINT.ZFS" \ + "11:1.1.0-spr7:/zoau/v1.1.0-spr7:IMSTESTU.ZOAU.V1107.SPRINT.ZFS" \ + "12:1.1.0-ga:/zoau/v1.1.0-ga:IMSTESTU.ZOAU.V110.GA.ZFS" \ + "13:1.1.1-ptf1:/zoau/v1.1.1-ptf1:IMSTESTU.ZOAU.V111.PTF1.ZFS" \ + "14:1.2.0f:/zoau/v1.2.0f:IMSTESTU.ZOAU.V120F.ZFS" \ + "15:1.2.1:/zoau/v1.2.1:IMSTESTU.ZOAU.V121.ZFS" \ + "16:1.2.1-rc1:/zoau/v1.2.1-rc1:IMSTESTU.ZOAU.V121.RC1.ZFS" \ + "17:1.2.1g:/zoau/v1.2.1g:IMSTESTU.ZOAU.V121G.ZFS" \ + "18:1.2.1h:/zoau/v1.2.1h:IMSTESTU.ZOAU.V121H.ZFS" \ + "19:1.2.2:/zoau/v1.2.2:IMSTESTU.ZOAU.V122.ZFS" \ + "20:latest:/zoau/latest:IMSTESTU.ZOAU.LATEST.ZFS" + + # ------------------------------------------------------------------------------ + # python_mount_list[0]=":" + # python_mount_list[0]="/python2:IMSTESTU.PYZ.ROCKET.V362B.ZFS" + # ------------------------------------------------------------------------------ + set -A python_mount_list "/python:IMSTESTU.PYZ.ROCKET.V362B.ZFS" \ + "/python2:IMSTESTU.PYZ.V380.GA.ZFS" \ + "/python3:IMSTESTU.PYZ.V383PLUS.ZFS" \ + "/allpython/3.10:IMSTESTU.PYZ.V3A0.ZFS" \ + "/allpython/3.11:IMSTESTU.PYZ.V3B0.ZFS" \ + "/allpython/3.11-ga:IMSTESTU.PYZ.V311GA.ZFS" + + # ------------------------------------------------------------------------------ + # python_path_list[0]="::" + # python_path_list[0]="1:3.8:/python3/usr/lpp/IBM/cyp/v3r8/pyz" + # ------------------------------------------------------------------------------ + set -A python_path_list "1:3.8:/python3/usr/lpp/IBM/cyp/v3r8/pyz" \ + "2:3.9:/python2/usr/lpp/IBM/cyp/v3r9/pyz" \ + "3:3.10:/allpython/3.10/usr/lpp/IBM/cyp/v3r10/pyz" \ + "4:3.11:/allpython/3.11-ga/usr/lpp/IBM/cyp/v3r11/pyz" + diff --git a/scripts/profile.sh b/scripts/profile.sh new file mode 100755 index 000000000..4a10fd3bd --- /dev/null +++ b/scripts/profile.sh @@ -0,0 +1,287 @@ +#!/bin/sh +# ============================================================================== +# Copyright (c) IBM Corporation 2022, 2023 +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== + +# ------------------------------------------------------------------------------ +# If the current shell is bash, exit it because the ported rocket shell misbaves +# when VI'ing scripts and this script is specifically written to Korn Shell (ksh) +# ------------------------------------------------------------------------------ +CURR_SHELL=`echo $0` + +if [ "$CURR_SHELL" = "bash" ]; then + # Have not found a good way to exit the bash shell without ending the profile + echo "This script can not run in a bash emulator, exiting bash and and thus"\ + "you must exit this profile again." + exit 1 +fi + +# ------------------------------------------------------------------------------ +# Source the known mount points +# ------------------------------------------------------------------------------ +. ./mounts.sh + +################################################################################ +# Global vars - since ksh is the default shell and local ksh vars are defined +# with `typeset`, e.g. `typeset var foo`, I don't want to script this solely for +# ksh given there are othe ported shells for z/OS. +################################################################################ +ZOAU_INDEX="" +ZOAU_VERSION="" +ZOAU_MOUNT="" +ZOAU_DATA_SET="" + +PYTHON_INDEX="" +PYTHON_VERSION="" +PYTHON_PATH="" + +BASH_SELECTED=false + +# ****************************************************************************** +# Search the array `zoau_mount_list` for a matching arg, if it matches set the +# global zoau_version var to the zoau version. +# ****************************************************************************** + +get_option_zoau(){ + + arg=$1 + unset zoau_index + unset zoau_version + unset zoau_mount + unset zoau_data_set + for tgt in "${zoau_mount_list[@]}" ; do + zoau_index=`echo "${tgt}" | cut -d ":" -f 1` + zoau_version=`echo "${tgt}" | cut -d ":" -f 2` + zoau_mount=`echo "${tgt}" | cut -d ":" -f 3` + zoau_data_set=`echo "${tgt}" | cut -d ":" -f 4` + + if [ "$zoau_index" = "$arg" ]; then + ZOAU_INDEX="$zoau_index" + ZOAU_VERSION="$zoau_version" + ZOAU_MOUNT="$zoau_mount" + ZOAU_DATA_SET="$zoau_data_set" + fi + done +} + +get_option_python(){ + + arg=$1 + unset python_index + unset python_version + unset python_path + for tgt in "${python_path_list[@]}" ; do + python_index=`echo "${tgt}" | cut -d ":" -f 1` + python_version=`echo "${tgt}" | cut -d ":" -f 2` + python_path=`echo "${tgt}" | cut -d ":" -f 3` + + if [ "$python_index" = "$arg" ]; then + PYTHON_INDEX="$python_index" + PYTHON_VERSION="$python_version" + PYTHON_PATH="$python_path" + fi + done +} + +get_option_shell(){ + + arg=$1 + case "$1" in + [yY][eE][sS]|[yY]* ) BASH_SELECTED=true;; + esac +} + +################################################################################ +# User input for Python +################################################################################ +help_option_zoau(){ + unset zoau_index + unset zoau_version + unset zoau_mount + unset zoau_data_set + echo "" + echo "ZOAU Options:" + for tgt in "${zoau_mount_list[@]}" ; do + zoau_index=`echo "${tgt}" | cut -d ":" -f 1` + zoau_version=`echo "${tgt}" | cut -d ":" -f 2` + zoau_mount=`echo "${tgt}" | cut -d ":" -f 3` + zoau_data_set=`echo "${tgt}" | cut -d ":" -f 4` + echo "\t[${zoau_index}] - ZOAU ${zoau_version}" + done +} + +help_option_python(){ + unset python_index + unset python_version + unset python_path + echo "Python Options:" + for tgt in "${python_path_list[@]}" ; do + python_index=`echo "${tgt}" | cut -d ":" -f 1` + python_version=`echo "${tgt}" | cut -d ":" -f 2` + python_path=`echo "${tgt}" | cut -d ":" -f 3` + echo "\t[${python_index}] - Python ${python_version}" + done +} + +help_option_shell(){ + echo "Bash shell:" + echo "\t[Y/N] - Default no." +} + +usage () { + echo "" + echo "Usage: $0 [1-n] [1-n] Y/N" + echo "Example: $0 12 1 Y" + echo "Default: $0 19 2 N" + help_option_zoau + help_option_python + help_option_shell +} + +################################################################################ +# Message to user +################################################################################ +selected_option () { + echo "Using ZOAU version `zoaversion`" + echo "Using python version `python --version`" + if [ "${BASH_SELECTED}" = true ]; then + echo "Bash is enabled." + fi +} + +################################################################################ +# Configure all exports +################################################################################ +set_exports (){ + + export PATH=/bin:. + + ################################################################################ + # Set the ported tools directory on the EC, see the tools you can use, eg: + # vim, bash, etc + ################################################################################ + export TOOLS_DIR=/usr/lpp/rsusr/ported + export PATH=$PATH:$TOOLS_DIR/bin + + ################################################################################ + # Set the editor to VI + ################################################################################ + export TERM=xterm + + ################################################################################ + # Standard exports used in EBCDIC/ASCII conversion needed by tools like pyz/zoau + ################################################################################ + export _BPXK_AUTOCVT='ON' + export _CEE_RUNOPTS='FILETAG(AUTOCVT,AUTOTAG) POSIX(ON)' + export _TAG_REDIR_ERR=txt + export _TAG_REDIR_IN=txt + export _TAG_REDIR_OUT=txt + export LANG=C + + ################################################################################ + # Set Java + ################################################################################ + export JAVA_HOME=/usr/lpp/java170/J7.0 + + ################################################################################ + # Configure Python + ################################################################################ + export PYTHON_HOME=$PYTHON_PATH + export PYTHON=$PYTHON_HOME/bin + export LIBPATH=$PYTHON_HOME/lib:$LIBPATH + + ################################################################################ + # ZOAU 1.0.2 or or earlier ueses ZOAU_ROOT and not ZOAU_HOME + ################################################################################ + export ZOAU_HOME=${ZOAU_MOUNT} + export PATH=$ZOAU_HOME/bin:$PATH:$PYTHON:$JAVA_HOME/bin:$TOOLS_DIR/bin + export MANPATH=$MANPATH:$TOOLS_DIR/man + export ZOAU_ROOT=${ZOAU_HOME} + export PYTHONPATH=${ZOAU_HOME}/lib/:${PYTHONPATH} + export LIBPATH=${ZOAU_HOME}/lib:${LIBPATH} + + ################################################################################ + # Custom terminal configurations + ################################################################################ + # Append home directory to the current path + export PATH=$PATH:$HOME: + + # Set the prompt to display your login name & current directory + export PS1='[ $LOGNAME':'$PWD':' ]' + + alias python="python3" + alias pip="pip3" +} + +set_bash (){ + ################################################################################ + # Run bash shell: + # I have have seen many issues using this version of bash to edit files on the + # EC, for example of you edit your .profile with VI under BASH, it will render + # unreable, for times I have to edit, I type exit it defaults be back into + # the zos_ssh shell which does not have any issues with VI or editing files. + # I generally use bash only for history and running commands. + ################################################################################ + if [ "${BASH_SELECTED}" = true ]; then + bash; + fi +} +################################################################################ +# Main +################################################################################ +# User enters choices for zoau, python and bash +if [ $# -eq 3 ];then + get_option_zoau $1 + get_option_python $2 + get_option_shell $3 + set_exports + selected_option + set_bash +# User enters choices for zoau and python, bash defaults to false +elif [ $# -eq 2 ];then + get_option_zoau $1 + get_option_python $2 + get_option_shell false + set_exports + selected_option + set_bash +# Default zoau 1.2.2 and python 3.9 +elif [ $# -eq 0 ]; then + get_option_zoau 19 + get_option_python 2 + get_option_shell false + set_exports + selected_option + set_bash +elif [ "$1" = help]; then + usage +else + usage +fi + + +# Source should have array mount_list +xxxx(){ + unset index + unset name + unset mount_point + unset data_set + for tgt in "${zoau_mount_list[@]}" ; do + index=`echo "${tgt}" | cut -d ":" -f 1` + name=`echo "${tgt}" | cut -d ":" -f 2` + mount_point=`echo "${tgt}" | cut -d ":" -f 3` + data_set=`echo "${tgt}" | cut -d ":" -f 4` + mkdir -p ${mount_point} + echo "Mouting ZOAU ${name} on data set ${data_set} to path ${mount_point}." + /usr/sbin/mount -r -t zfs -f ${data_set} ${mount_point} + done +} \ No newline at end of file From ab645ffe8697f320c14862ee0506f845c27c96c0 Mon Sep 17 00:00:00 2001 From: ddimatos Date: Thu, 16 Mar 2023 13:37:34 -0700 Subject: [PATCH 029/495] Add issue template updates Signed-off-by: ddimatos --- .github/ISSUE_TEMPLATE/bug_issue.yml | 66 ++++++++++++----- .../ISSUE_TEMPLATE/collaboration_issue.yml | 71 +++++++++++++++++++ .github/ISSUE_TEMPLATE/enabler_issue.yml | 23 +++--- 3 files changed, 134 insertions(+), 26 deletions(-) create mode 100644 .github/ISSUE_TEMPLATE/collaboration_issue.yml diff --git a/.github/ISSUE_TEMPLATE/bug_issue.yml b/.github/ISSUE_TEMPLATE/bug_issue.yml index 85743b84b..2050bd3fc 100644 --- a/.github/ISSUE_TEMPLATE/bug_issue.yml +++ b/.github/ISSUE_TEMPLATE/bug_issue.yml @@ -8,8 +8,53 @@ body: - type: markdown attributes: value: | - Before reporting a bug, please review existing isssues to avoid duplication. + Before reporting a bug, please review existing issues to avoid duplication. + Issues can only be opened on supported combinations. --- + - type: checkboxes + attributes: + label: Is there an existing issue for this? + description: Please search to see if an issue already exists for the bug you encountered. + options: + - label: There are no existing issues. + required: true + - type: checkboxes + attributes: + label: Have you reviewed the required dependencies? + description: Please review that the version of ZOAU and IBM Enterprise Python are supported in the **Reference** section of the [release notes](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html). + options: + - label: The dependencies are supported. + required: true + - type: dropdown + id: zoau-version + attributes: + label: IBM ZOAU version + description: Which version of ZOAU are you using. Ensure you are using a supported version. + multiple: true + options: + - v1.0.3 + - v1.1.0 + - v1.1.1 + - v1.2.0 + - v1.2.1 + - v1.2.1.1 + - v1.2.2 + - v1.2.3 + validations: + required: true + - type: dropdown + id: python-version + attributes: + label: IBM Enterprise Python + description: Which version of IBM Enterprise Python are you using. Ensure you are using a supported version. + multiple: true + options: + - v3.8.x + - v3.9.x + - v3.10.x + - v3.11.x + validations: + required: true - type: textarea id: issue-description attributes: @@ -61,7 +106,7 @@ body: id: ansible-version attributes: label: Ansible version - description: What is the verson of Ansible on the controller. + description: What is the version of Ansible on the controller. placeholder: Paste verbatim output from `ansible --version`. render: shell validations: @@ -101,22 +146,7 @@ body: - v1.11.0 validations: required: true - - type: dropdown - id: zoau-version - attributes: - label: IBM ZOAU version - description: Which version of ZOAU are you using. Ensure you are on the latest PTF. - multiple: true - options: - - v1.0.3 - - v1.1.0 - - v1.1.1 - - v1.2.0 - - v1.2.1 - - v1.2.1.1 - - v1.2.2 - validations: - required: true + - type: input id: zos-version attributes: diff --git a/.github/ISSUE_TEMPLATE/collaboration_issue.yml b/.github/ISSUE_TEMPLATE/collaboration_issue.yml new file mode 100644 index 000000000..3c46b8f81 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/collaboration_issue.yml @@ -0,0 +1,71 @@ +name: Collaboration task +description: Identify a collaboration between this development team and another party. e.g, A support case, dependency effort,etc +title: "[Collaboration] " +labels: [Collaboration] +assignees: + - IBMAnsibleHelper +body: + - type: markdown + attributes: + value: | + Before authoring a task, please review existing issues to avoid duplication. + --- + - type: checkboxes + attributes: + label: Is there an existing issue for this? + description: Please search to see if an issue already exists for the bug you encountered. + options: + - label: There are no existing issues. + required: true + - type: checkboxes + id: support-issue + attributes: + label: Support and service? + description: Is support and service involved in this collaboration + options: + - label: Yes, support and service is involved. + required: true + - type: textarea + id: issue-description + attributes: + label: Collaboration description + description: Describe the task, this is the equivalent of a agile story. + placeholder: Verbosity is encouraged, the more you share the better for us to understand. + validations: + required: true + - type: textarea + id: collaborators + attributes: + label: collaborators + description: Who or what product is part of this collaboration. + placeholder: GH IDs, product, etc + validations: + required: true + - type: dropdown + id: modules + attributes: + label: Ansible module + description: Select which modules are being reported for this task. You can select more than one. + multiple: true + options: + - zos_apf + - zos_backup_restore + - zos_blockinfile + - zos_copy + - zos_data_set + - zos_encode + - zos_fetch + - zos_find + - zos_gather_facts + - zos_job_output + - zos_job_query + - zos_job_submit + - zos_lineinfile + - zos_mount + - zos_mvs_raw + - zos_operator + - zos_operator_action_query + - zos_ping + - zos_tso_command + validations: + required: false diff --git a/.github/ISSUE_TEMPLATE/enabler_issue.yml b/.github/ISSUE_TEMPLATE/enabler_issue.yml index 305c15b8f..acce5523e 100644 --- a/.github/ISSUE_TEMPLATE/enabler_issue.yml +++ b/.github/ISSUE_TEMPLATE/enabler_issue.yml @@ -1,5 +1,5 @@ name: Enabler task -description: Identify a development task that does not correspond to other git issue types, eg this could be an enabler task. +description: Identify a development task that does not correspond to other git issue types, eg this could be a pipeline task. title: "[Enabler] <title> " labels: [Enabler] assignees: @@ -8,15 +8,14 @@ body: - type: markdown attributes: value: | - Before authoring a task, please review existing isssues to avoid duplication. + Before authoring a task, please review existing issues to avoid duplication. --- - - type: textarea - id: issue-description + - type: checkboxes attributes: - label: Enabler description - description: Describe the task, this is the equivilant of a agile story. - placeholder: Verbosity is encouraged, the more you share the better for us to understand. - validations: + label: Is there an existing issue for this? + description: Please search to see if an issue already exists for the bug you encountered. + options: + - label: There are no existing issues. required: true - type: dropdown id: modules @@ -46,3 +45,11 @@ body: - zos_tso_command validations: required: false + - type: textarea + id: issue-description + attributes: + label: Enabler description + description: Describe the task, this is the equivalent of a agile story. + placeholder: Verbosity is encouraged, the more you share the better for us to understand. + validations: + required: true From 10215589f97af381ede53f9f4175774f84ae1433 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Thu, 16 Mar 2023 13:48:32 -0700 Subject: [PATCH 030/495] Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> --- .github/ISSUE_TEMPLATE/bug_issue.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/ISSUE_TEMPLATE/bug_issue.yml b/.github/ISSUE_TEMPLATE/bug_issue.yml index 2050bd3fc..c472423a2 100644 --- a/.github/ISSUE_TEMPLATE/bug_issue.yml +++ b/.github/ISSUE_TEMPLATE/bug_issue.yml @@ -13,12 +13,14 @@ body: --- - type: checkboxes attributes: + id: review-issues label: Is there an existing issue for this? description: Please search to see if an issue already exists for the bug you encountered. options: - label: There are no existing issues. required: true - type: checkboxes + id: review-dependencies attributes: label: Have you reviewed the required dependencies? description: Please review that the version of ZOAU and IBM Enterprise Python are supported in the **Reference** section of the [release notes](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html). @@ -146,7 +148,6 @@ body: - v1.11.0 validations: required: true - - type: input id: zos-version attributes: From 39f6a0f2184764b157cdce91520ef634365bfa4c Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Thu, 16 Mar 2023 13:49:48 -0700 Subject: [PATCH 031/495] Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> --- .github/ISSUE_TEMPLATE/bug_issue.yml | 78 +++++----------------------- 1 file changed, 14 insertions(+), 64 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/bug_issue.yml b/.github/ISSUE_TEMPLATE/bug_issue.yml index c472423a2..5ad715b99 100644 --- a/.github/ISSUE_TEMPLATE/bug_issue.yml +++ b/.github/ISSUE_TEMPLATE/bug_issue.yml @@ -8,55 +8,8 @@ body: - type: markdown attributes: value: | - Before reporting a bug, please review existing issues to avoid duplication. - Issues can only be opened on supported combinations. + Before reporting a bug, please review existing isssues to avoid duplication. --- - - type: checkboxes - attributes: - id: review-issues - label: Is there an existing issue for this? - description: Please search to see if an issue already exists for the bug you encountered. - options: - - label: There are no existing issues. - required: true - - type: checkboxes - id: review-dependencies - attributes: - label: Have you reviewed the required dependencies? - description: Please review that the version of ZOAU and IBM Enterprise Python are supported in the **Reference** section of the [release notes](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html). - options: - - label: The dependencies are supported. - required: true - - type: dropdown - id: zoau-version - attributes: - label: IBM ZOAU version - description: Which version of ZOAU are you using. Ensure you are using a supported version. - multiple: true - options: - - v1.0.3 - - v1.1.0 - - v1.1.1 - - v1.2.0 - - v1.2.1 - - v1.2.1.1 - - v1.2.2 - - v1.2.3 - validations: - required: true - - type: dropdown - id: python-version - attributes: - label: IBM Enterprise Python - description: Which version of IBM Enterprise Python are you using. Ensure you are using a supported version. - multiple: true - options: - - v3.8.x - - v3.9.x - - v3.10.x - - v3.11.x - validations: - required: true - type: textarea id: issue-description attributes: @@ -108,7 +61,7 @@ body: id: ansible-version attributes: label: Ansible version - description: What is the version of Ansible on the controller. + description: What is the verson of Ansible on the controller. placeholder: Paste verbatim output from `ansible --version`. render: shell validations: @@ -131,21 +84,18 @@ body: - v1.3.6 - v1.4.0-beta.1 - v1.4.0-beta.2 - - v1.4.0 - - v1.5.0-beta.1 - - v1.5.0 - - v1.6.0-beta.1 - - v1.6.0 - - v1.7.0-beta.1 - - v1.7.0 - - v1.8.0-beta.1 - - v1.8.0 - - v1.9.0-beta.1 - - v1.9.0 - - v1.10.0-beta.1 - - v1.10.0 - - v1.11.0-beta.1 - - v1.11.0 + validations: + required: true + - type: dropdown + id: zoau-version + attributes: + label: IBM ZOAU version + description: Which version of ZOAU are you using. Ensure you are on the latest PTF. + multiple: true + options: + - v1.0.3 + - v1.1.1 + - v1.2.0 validations: required: true - type: input From f3577cc019059b0ab162880398ed59056da600d1 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Thu, 16 Mar 2023 13:56:13 -0700 Subject: [PATCH 032/495] Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> --- .github/ISSUE_TEMPLATE/bug_issue.yml | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/.github/ISSUE_TEMPLATE/bug_issue.yml b/.github/ISSUE_TEMPLATE/bug_issue.yml index 5ad715b99..074a9fe98 100644 --- a/.github/ISSUE_TEMPLATE/bug_issue.yml +++ b/.github/ISSUE_TEMPLATE/bug_issue.yml @@ -133,3 +133,11 @@ body: - zos_tso_command validations: required: false + - type: checkboxes + id: terms + attributes: + label: Code of Conduct + description: By submitting this issue, you agree to follow our [Code of Conduct](https://example.com) + options: + - label: I agree to follow this project's Code of Conduct + required: true \ No newline at end of file From 5fac429d114c5e5eebf7adf3b824682144ed22d1 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Thu, 16 Mar 2023 13:58:00 -0700 Subject: [PATCH 033/495] Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> --- .github/ISSUE_TEMPLATE/bug_issue.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/bug_issue.yml b/.github/ISSUE_TEMPLATE/bug_issue.yml index 074a9fe98..0c79b74ca 100644 --- a/.github/ISSUE_TEMPLATE/bug_issue.yml +++ b/.github/ISSUE_TEMPLATE/bug_issue.yml @@ -136,8 +136,8 @@ body: - type: checkboxes id: terms attributes: - label: Code of Conduct - description: By submitting this issue, you agree to follow our [Code of Conduct](https://example.com) + label: Is there an existing issue for this? + description: Please search to see if an issue already exists for the bug you encountered. options: - - label: I agree to follow this project's Code of Conduct + - label: There are no existing issues. required: true \ No newline at end of file From 9ae69316269a407b1b4565ec242d7c376cc8c96d Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Thu, 16 Mar 2023 13:58:45 -0700 Subject: [PATCH 034/495] Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> --- .github/ISSUE_TEMPLATE/bug_issue.yml | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/bug_issue.yml b/.github/ISSUE_TEMPLATE/bug_issue.yml index 0c79b74ca..98e7f0f91 100644 --- a/.github/ISSUE_TEMPLATE/bug_issue.yml +++ b/.github/ISSUE_TEMPLATE/bug_issue.yml @@ -10,6 +10,14 @@ body: value: | Before reporting a bug, please review existing isssues to avoid duplication. --- + - type: checkboxes + id: terms + attributes: + label: Is there an existing issue for this? + description: Please search to see if an issue already exists for the bug you encountered. + options: + - label: There are no existing issues. + required: true - type: textarea id: issue-description attributes: @@ -133,11 +141,3 @@ body: - zos_tso_command validations: required: false - - type: checkboxes - id: terms - attributes: - label: Is there an existing issue for this? - description: Please search to see if an issue already exists for the bug you encountered. - options: - - label: There are no existing issues. - required: true \ No newline at end of file From 7208aeb352bd7ee9aea13375078fdcf416df1294 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Thu, 16 Mar 2023 15:33:44 -0700 Subject: [PATCH 035/495] Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> --- .github/ISSUE_TEMPLATE/bug_issue.yml | 130 ++++++++++++++------------- 1 file changed, 70 insertions(+), 60 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/bug_issue.yml b/.github/ISSUE_TEMPLATE/bug_issue.yml index 98e7f0f91..463a40a00 100644 --- a/.github/ISSUE_TEMPLATE/bug_issue.yml +++ b/.github/ISSUE_TEMPLATE/bug_issue.yml @@ -18,6 +18,74 @@ body: options: - label: There are no existing issues. required: true + - type: checkboxes + id: terms + attributes: + label: Are the dependencies a supported version? + description: Please review the ZOAU and IBM Enterprise Python versions in the reference section of the [release notes](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html). + options: + - label: The dependencies are supported. + required: true + - type: dropdown + id: collection-version + attributes: + label: IBM z/OS Ansible core Version + description: | + Which version of the z/OS Ansible core collection are you using. If you are unsure, run the command: `cat ~/.ansible/collections/ansible_collections/ibm/ibm_zos_core/MANIFEST.json | grep version` + multiple: true + options: + - v1.0.0 + - v1.1.0 + - v1.2.1 + - v1.3.0 + - v1.3.1 + - v1.3.3 + - v1.3.5 + - v1.3.6 + - v1.4.0-beta.1 + - v1.4.0-beta.2 + validations: + required: true + - type: dropdown + id: zoau-version + attributes: + label: IBM ZOAU version + description: Which version of ZOAU are you using. Ensure you are on the latest PTF. + multiple: true + options: + - v1.0.3 + - v1.1.1 + - v1.2.0 + validations: + required: true + - type: dropdown + id: modules + attributes: + label: Ansible module + description: Select which modules are being reported in this bug. You can select more than one. + multiple: true + options: + - zos_apf + - zos_backup_restore + - zos_blockinfile + - zos_copy + - zos_data_set + - zos_encode + - zos_fetch + - zos_find + - zos_gather_facts + - zos_job_output + - zos_job_query + - zos_job_submit + - zos_lineinfile + - zos_mount + - zos_mvs_raw + - zos_operator + - zos_operator_action_query + - zos_ping + - zos_tso_command + validations: + required: false - type: textarea id: issue-description attributes: @@ -74,38 +142,7 @@ body: render: shell validations: required: true - - type: dropdown - id: collection-version - attributes: - label: IBM z/OS Ansible core Version - description: | - Which version of the z/OS Ansible core collection are you using. If you are unsure, run the command: `cat ~/.ansible/collections/ansible_collections/ibm/ibm_zos_core/MANIFEST.json | grep version` - multiple: true - options: - - v1.0.0 - - v1.1.0 - - v1.2.1 - - v1.3.0 - - v1.3.1 - - v1.3.3 - - v1.3.5 - - v1.3.6 - - v1.4.0-beta.1 - - v1.4.0-beta.2 - validations: - required: true - - type: dropdown - id: zoau-version - attributes: - label: IBM ZOAU version - description: Which version of ZOAU are you using. Ensure you are on the latest PTF. - multiple: true - options: - - v1.0.3 - - v1.1.1 - - v1.2.0 - validations: - required: true + - type: input id: zos-version attributes: @@ -113,31 +150,4 @@ body: description: What is the version of z/OS on the managed node. validations: required: false - - type: dropdown - id: modules - attributes: - label: Ansible module - description: Select which modules are being reported in this bug. You can select more than one. - multiple: true - options: - - zos_apf - - zos_backup_restore - - zos_blockinfile - - zos_copy - - zos_data_set - - zos_encode - - zos_fetch - - zos_find - - zos_gather_facts - - zos_job_output - - zos_job_query - - zos_job_submit - - zos_lineinfile - - zos_mount - - zos_mvs_raw - - zos_operator - - zos_operator_action_query - - zos_ping - - zos_tso_command - validations: - required: false + From dfab5e3a06d1439d3f251a74dffb275b4ffd7c0d Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Thu, 16 Mar 2023 15:34:29 -0700 Subject: [PATCH 036/495] Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> --- .github/ISSUE_TEMPLATE/bug_issue.yml | 58 ++++++++++++++-------------- 1 file changed, 29 insertions(+), 29 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/bug_issue.yml b/.github/ISSUE_TEMPLATE/bug_issue.yml index 463a40a00..fc60de354 100644 --- a/.github/ISSUE_TEMPLATE/bug_issue.yml +++ b/.github/ISSUE_TEMPLATE/bug_issue.yml @@ -58,34 +58,7 @@ body: - v1.2.0 validations: required: true - - type: dropdown - id: modules - attributes: - label: Ansible module - description: Select which modules are being reported in this bug. You can select more than one. - multiple: true - options: - - zos_apf - - zos_backup_restore - - zos_blockinfile - - zos_copy - - zos_data_set - - zos_encode - - zos_fetch - - zos_find - - zos_gather_facts - - zos_job_output - - zos_job_query - - zos_job_submit - - zos_lineinfile - - zos_mount - - zos_mvs_raw - - zos_operator - - zos_operator_action_query - - zos_ping - - zos_tso_command - validations: - required: false + - type: textarea id: issue-description attributes: @@ -150,4 +123,31 @@ body: description: What is the version of z/OS on the managed node. validations: required: false - + - type: dropdown + id: modules + attributes: + label: Ansible module + description: Select which modules are being reported in this bug. You can select more than one. + multiple: true + options: + - zos_apf + - zos_backup_restore + - zos_blockinfile + - zos_copy + - zos_data_set + - zos_encode + - zos_fetch + - zos_find + - zos_gather_facts + - zos_job_output + - zos_job_query + - zos_job_submit + - zos_lineinfile + - zos_mount + - zos_mvs_raw + - zos_operator + - zos_operator_action_query + - zos_ping + - zos_tso_command + validations: + required: false From 36d29fbbe56f9d67545566aa39eca95a78f24f67 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Thu, 16 Mar 2023 15:35:32 -0700 Subject: [PATCH 037/495] Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> --- .github/ISSUE_TEMPLATE/bug_issue.yml | 65 ++++++++++++++-------------- 1 file changed, 32 insertions(+), 33 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/bug_issue.yml b/.github/ISSUE_TEMPLATE/bug_issue.yml index fc60de354..3e0bdb4c9 100644 --- a/.github/ISSUE_TEMPLATE/bug_issue.yml +++ b/.github/ISSUE_TEMPLATE/bug_issue.yml @@ -26,38 +26,6 @@ body: options: - label: The dependencies are supported. required: true - - type: dropdown - id: collection-version - attributes: - label: IBM z/OS Ansible core Version - description: | - Which version of the z/OS Ansible core collection are you using. If you are unsure, run the command: `cat ~/.ansible/collections/ansible_collections/ibm/ibm_zos_core/MANIFEST.json | grep version` - multiple: true - options: - - v1.0.0 - - v1.1.0 - - v1.2.1 - - v1.3.0 - - v1.3.1 - - v1.3.3 - - v1.3.5 - - v1.3.6 - - v1.4.0-beta.1 - - v1.4.0-beta.2 - validations: - required: true - - type: dropdown - id: zoau-version - attributes: - label: IBM ZOAU version - description: Which version of ZOAU are you using. Ensure you are on the latest PTF. - multiple: true - options: - - v1.0.3 - - v1.1.1 - - v1.2.0 - validations: - required: true - type: textarea id: issue-description @@ -115,7 +83,38 @@ body: render: shell validations: required: true - + - type: dropdown + id: collection-version + attributes: + label: IBM z/OS Ansible core Version + description: | + Which version of the z/OS Ansible core collection are you using. If you are unsure, run the command: `cat ~/.ansible/collections/ansible_collections/ibm/ibm_zos_core/MANIFEST.json | grep version` + multiple: true + options: + - v1.0.0 + - v1.1.0 + - v1.2.1 + - v1.3.0 + - v1.3.1 + - v1.3.3 + - v1.3.5 + - v1.3.6 + - v1.4.0-beta.1 + - v1.4.0-beta.2 + validations: + required: true + - type: dropdown + id: zoau-version + attributes: + label: IBM ZOAU version + description: Which version of ZOAU are you using. Ensure you are on the latest PTF. + multiple: true + options: + - v1.0.3 + - v1.1.1 + - v1.2.0 + validations: + required: true - type: input id: zos-version attributes: From 362f742fb43d844edf2f29ccfd4ae1e406c33542 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Thu, 16 Mar 2023 15:37:25 -0700 Subject: [PATCH 038/495] Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> --- .github/ISSUE_TEMPLATE/bug_issue.yml | 127 ++++++++++++++------------- 1 file changed, 64 insertions(+), 63 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/bug_issue.yml b/.github/ISSUE_TEMPLATE/bug_issue.yml index 3e0bdb4c9..4e5a8b1e9 100644 --- a/.github/ISSUE_TEMPLATE/bug_issue.yml +++ b/.github/ISSUE_TEMPLATE/bug_issue.yml @@ -11,7 +11,7 @@ body: Before reporting a bug, please review existing isssues to avoid duplication. --- - type: checkboxes - id: terms + id: existing-issue attributes: label: Is there an existing issue for this? description: Please search to see if an issue already exists for the bug you encountered. @@ -19,14 +19,73 @@ body: - label: There are no existing issues. required: true - type: checkboxes - id: terms + id: valid-dependencies attributes: label: Are the dependencies a supported version? description: Please review the ZOAU and IBM Enterprise Python versions in the reference section of the [release notes](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html). options: - label: The dependencies are supported. required: true - + - type: dropdown + id: collection-version + attributes: + label: IBM z/OS Ansible core Version + description: | + Which version of the z/OS Ansible core collection are you using. If you are unsure, run the command: `cat ~/.ansible/collections/ansible_collections/ibm/ibm_zos_core/MANIFEST.json | grep version` + multiple: true + options: + - v1.0.0 + - v1.1.0 + - v1.2.1 + - v1.3.0 + - v1.3.1 + - v1.3.3 + - v1.3.5 + - v1.3.6 + - v1.4.0-beta.1 + - v1.4.0-beta.2 + validations: + required: true + - type: dropdown + id: zoau-version + attributes: + label: IBM ZOAU version + description: Which version of ZOAU are you using. Ensure you are on the latest PTF. + multiple: true + options: + - v1.0.3 + - v1.1.1 + - v1.2.0 + validations: + required: true + - type: dropdown + id: modules + attributes: + label: Ansible module + description: Select which modules are being reported in this bug. You can select more than one. + multiple: true + options: + - zos_apf + - zos_backup_restore + - zos_blockinfile + - zos_copy + - zos_data_set + - zos_encode + - zos_fetch + - zos_find + - zos_gather_facts + - zos_job_output + - zos_job_query + - zos_job_submit + - zos_lineinfile + - zos_mount + - zos_mvs_raw + - zos_operator + - zos_operator_action_query + - zos_ping + - zos_tso_command + validations: + required: false - type: textarea id: issue-description attributes: @@ -83,38 +142,7 @@ body: render: shell validations: required: true - - type: dropdown - id: collection-version - attributes: - label: IBM z/OS Ansible core Version - description: | - Which version of the z/OS Ansible core collection are you using. If you are unsure, run the command: `cat ~/.ansible/collections/ansible_collections/ibm/ibm_zos_core/MANIFEST.json | grep version` - multiple: true - options: - - v1.0.0 - - v1.1.0 - - v1.2.1 - - v1.3.0 - - v1.3.1 - - v1.3.3 - - v1.3.5 - - v1.3.6 - - v1.4.0-beta.1 - - v1.4.0-beta.2 - validations: - required: true - - type: dropdown - id: zoau-version - attributes: - label: IBM ZOAU version - description: Which version of ZOAU are you using. Ensure you are on the latest PTF. - multiple: true - options: - - v1.0.3 - - v1.1.1 - - v1.2.0 - validations: - required: true + - type: input id: zos-version attributes: @@ -122,31 +150,4 @@ body: description: What is the version of z/OS on the managed node. validations: required: false - - type: dropdown - id: modules - attributes: - label: Ansible module - description: Select which modules are being reported in this bug. You can select more than one. - multiple: true - options: - - zos_apf - - zos_backup_restore - - zos_blockinfile - - zos_copy - - zos_data_set - - zos_encode - - zos_fetch - - zos_find - - zos_gather_facts - - zos_job_output - - zos_job_query - - zos_job_submit - - zos_lineinfile - - zos_mount - - zos_mvs_raw - - zos_operator - - zos_operator_action_query - - zos_ping - - zos_tso_command - validations: - required: false + From c5743d692e2e4a0e7a7baf2bae51dde792d2c3e0 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Thu, 16 Mar 2023 15:52:44 -0700 Subject: [PATCH 039/495] Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> --- .github/ISSUE_TEMPLATE/bug_issue.yml | 48 ++++++++++++++++++---------- 1 file changed, 31 insertions(+), 17 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/bug_issue.yml b/.github/ISSUE_TEMPLATE/bug_issue.yml index 4e5a8b1e9..8eaee478e 100644 --- a/.github/ISSUE_TEMPLATE/bug_issue.yml +++ b/.github/ISSUE_TEMPLATE/bug_issue.yml @@ -8,7 +8,7 @@ body: - type: markdown attributes: value: | - Before reporting a bug, please review existing isssues to avoid duplication. + Please complete all required fields. --- - type: checkboxes id: existing-issue @@ -26,12 +26,38 @@ body: options: - label: The dependencies are supported. required: true + - type: dropdown + id: zoau-version + attributes: + label: IBM Z Open Automation Utilities + description: Which version of ZOAU are you using? + multiple: true + options: + - v1.0.3 + - v1.1.1 + - v1.2.0 + - v1.2.1 + - v1.2.2 + validations: + required: true + - type: dropdown + id: collection-version + attributes: + label: IBM Enterprise Python + description: Which version of IBM Enterprise Python are you using? + multiple: true + options: + - v3.8.x + - v3.9.x + - v3.10.x + - v3.11.x + validations: + required: true - type: dropdown id: collection-version attributes: label: IBM z/OS Ansible core Version - description: | - Which version of the z/OS Ansible core collection are you using. If you are unsure, run the command: `cat ~/.ansible/collections/ansible_collections/ibm/ibm_zos_core/MANIFEST.json | grep version` + description: Which version of z/OS Ansible core collection are you using. If you are unsure, review the [documentation](https://ibm.github.io/z_ansible_collections_doc/faqs/faqs.html#how-do-i-update-a-collection-to-the-latest-version). multiple: true options: - v1.0.0 @@ -42,20 +68,8 @@ body: - v1.3.3 - v1.3.5 - v1.3.6 - - v1.4.0-beta.1 - - v1.4.0-beta.2 - validations: - required: true - - type: dropdown - id: zoau-version - attributes: - label: IBM ZOAU version - description: Which version of ZOAU are you using. Ensure you are on the latest PTF. - multiple: true - options: - - v1.0.3 - - v1.1.1 - - v1.2.0 + - v1.4.0 + - v1.5.0 validations: required: true - type: dropdown From 7eca65b2a6105b500d9ec2fd0a26956a6d5c73af Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Thu, 16 Mar 2023 15:58:05 -0700 Subject: [PATCH 040/495] Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> --- .github/ISSUE_TEMPLATE/bug_issue.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/ISSUE_TEMPLATE/bug_issue.yml b/.github/ISSUE_TEMPLATE/bug_issue.yml index 8eaee478e..92c1eca9b 100644 --- a/.github/ISSUE_TEMPLATE/bug_issue.yml +++ b/.github/ISSUE_TEMPLATE/bug_issue.yml @@ -41,7 +41,7 @@ body: validations: required: true - type: dropdown - id: collection-version + id: python-version attributes: label: IBM Enterprise Python description: Which version of IBM Enterprise Python are you using? From 25b3f306e946bf9d9bcb83fca6e32a78e8ff5c4d Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Thu, 16 Mar 2023 16:00:30 -0700 Subject: [PATCH 041/495] Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> --- .github/ISSUE_TEMPLATE/bug_issue.yml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/bug_issue.yml b/.github/ISSUE_TEMPLATE/bug_issue.yml index 92c1eca9b..6fe2f793d 100644 --- a/.github/ISSUE_TEMPLATE/bug_issue.yml +++ b/.github/ISSUE_TEMPLATE/bug_issue.yml @@ -1,5 +1,5 @@ name: Report a bug -description: Request that a bug be reviewed. +description: Request that a bug be reviewed. Complete all required fields. title: "[Bug] <title> " labels: [Bug] assignees: @@ -8,7 +8,6 @@ body: - type: markdown attributes: value: | - Please complete all required fields. --- - type: checkboxes id: existing-issue From c512ec9457f9a8c58dc8dca56a661e7ebde1830e Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Thu, 16 Mar 2023 16:03:30 -0700 Subject: [PATCH 042/495] Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> --- .github/ISSUE_TEMPLATE/bug_issue.yml | 21 ++++++++++++--------- 1 file changed, 12 insertions(+), 9 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/bug_issue.yml b/.github/ISSUE_TEMPLATE/bug_issue.yml index 6fe2f793d..d3570d78a 100644 --- a/.github/ISSUE_TEMPLATE/bug_issue.yml +++ b/.github/ISSUE_TEMPLATE/bug_issue.yml @@ -99,6 +99,18 @@ body: - zos_tso_command validations: required: false + - type: dropdown + id: z/OS version + attributes: + label: z/OS version + description: What is the version of z/OS on the managed node. + multiple: true + options: + - v2.3 + - v2.4 + - v2.5 + validations: + required: false - type: textarea id: issue-description attributes: @@ -155,12 +167,3 @@ body: render: shell validations: required: true - - - type: input - id: zos-version - attributes: - label: z/OS version - description: What is the version of z/OS on the managed node. - validations: - required: false - From 2089d33ffb84530e8f10feb11c3f804f69870401 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Thu, 16 Mar 2023 16:09:36 -0700 Subject: [PATCH 043/495] Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> --- .github/ISSUE_TEMPLATE/bug_issue.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/bug_issue.yml b/.github/ISSUE_TEMPLATE/bug_issue.yml index d3570d78a..641789470 100644 --- a/.github/ISSUE_TEMPLATE/bug_issue.yml +++ b/.github/ISSUE_TEMPLATE/bug_issue.yml @@ -100,11 +100,11 @@ body: validations: required: false - type: dropdown - id: z/OS version + id: zos-version attributes: label: z/OS version - description: What is the version of z/OS on the managed node. - multiple: true + description: What is the version of z/OS on the managed node? + multiple: false options: - v2.3 - v2.4 From d7d32ac8d6a0cf7ea1f1569c646db436e8f445c3 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Thu, 16 Mar 2023 16:12:48 -0700 Subject: [PATCH 044/495] Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> --- .github/ISSUE_TEMPLATE/bug_issue.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/bug_issue.yml b/.github/ISSUE_TEMPLATE/bug_issue.yml index 641789470..556e5b0af 100644 --- a/.github/ISSUE_TEMPLATE/bug_issue.yml +++ b/.github/ISSUE_TEMPLATE/bug_issue.yml @@ -30,7 +30,7 @@ body: attributes: label: IBM Z Open Automation Utilities description: Which version of ZOAU are you using? - multiple: true + multiple: false options: - v1.0.3 - v1.1.1 @@ -44,7 +44,7 @@ body: attributes: label: IBM Enterprise Python description: Which version of IBM Enterprise Python are you using? - multiple: true + multiple: false options: - v3.8.x - v3.9.x @@ -57,7 +57,7 @@ body: attributes: label: IBM z/OS Ansible core Version description: Which version of z/OS Ansible core collection are you using. If you are unsure, review the [documentation](https://ibm.github.io/z_ansible_collections_doc/faqs/faqs.html#how-do-i-update-a-collection-to-the-latest-version). - multiple: true + multiple: false options: - v1.0.0 - v1.1.0 From 717c82170c96e3cad7930ac54755e51b40e38a11 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Thu, 16 Mar 2023 17:07:31 -0700 Subject: [PATCH 045/495] Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> --- .github/ISSUE_TEMPLATE/bug_issue.yml | 93 +++++++++++++++------------- 1 file changed, 51 insertions(+), 42 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/bug_issue.yml b/.github/ISSUE_TEMPLATE/bug_issue.yml index 556e5b0af..694b07359 100644 --- a/.github/ISSUE_TEMPLATE/bug_issue.yml +++ b/.github/ISSUE_TEMPLATE/bug_issue.yml @@ -32,11 +32,11 @@ body: description: Which version of ZOAU are you using? multiple: false options: - - v1.0.3 - - v1.1.1 - - v1.2.0 - - v1.2.1 - v1.2.2 + - v1.2.1 + - v1.2.0 + - v1.1.1 + - v1.0.3 validations: required: true - type: dropdown @@ -46,10 +46,10 @@ body: description: Which version of IBM Enterprise Python are you using? multiple: false options: - - v3.8.x - - v3.9.x - - v3.10.x - v3.11.x + - v3.10.x + - v3.9.x + - v3.8.x validations: required: true - type: dropdown @@ -59,18 +59,45 @@ body: description: Which version of z/OS Ansible core collection are you using. If you are unsure, review the [documentation](https://ibm.github.io/z_ansible_collections_doc/faqs/faqs.html#how-do-i-update-a-collection-to-the-latest-version). multiple: false options: - - v1.0.0 - - v1.1.0 - - v1.2.1 - - v1.3.0 - - v1.3.1 - - v1.3.3 - - v1.3.5 - - v1.3.6 - - v1.4.0 - v1.5.0 + - v1.4.0 + - v1.3.6 + - v1.3.5 + - v1.3.3 + - v1.3.1 + - v1.3.0 + - v1.2.1 + - v1.1.0 + - v1.0.0 validations: required: true + - type: dropdown + id: zos-version + attributes: + label: ansible-version + description: What is the version of Ansible on the controller (`ansible --version`)? + multiple: false + options: + - latest + - v2.14.x + - v2.13.x + - v2.12.x + - v2.11.x + - v2.9.x + validations: + required: false + - type: dropdown + id: zos-version + attributes: + label: z/OS version + description: What is the version of z/OS on the managed node? + multiple: false + options: + - v2.5 + - v2.4 + - v2.3 + validations: + required: false - type: dropdown id: modules attributes: @@ -99,18 +126,6 @@ body: - zos_tso_command validations: required: false - - type: dropdown - id: zos-version - attributes: - label: z/OS version - description: What is the version of z/OS on the managed node? - multiple: false - options: - - v2.3 - - v2.4 - - v2.5 - validations: - required: false - type: textarea id: issue-description attributes: @@ -127,18 +142,20 @@ body: - type: textarea id: issue-output attributes: - label: Playbook verbosity output + label: Playbook verbosity output. description: Provide the command line output with debug and verbosity enabled. placeholder: | - Insert the ouput using this form of the playbook command. + Insert the output using this form of the playbook command. - `ANSIBLE_DEBUG=1 ansible-playbook -i inventory your-playbook.yml -vvvv` validations: required: false - type: textarea id: ansible-cfg attributes: - label: Contents of `ansible.cfg` - description: Provide the contents of `ansible.cfg`. + label: Ansible configuration. + description: Show the current **ansible.cfg** settings. + placeholder: | + Insert for this command: `ansible-config view` render: YAML validations: required: false @@ -146,7 +163,7 @@ body: id: ansible-inventory attributes: label: Contents of the inventory - description: Provide the contents of the inventory + description: Provide the contents of the inventory. render: YAML validations: required: false @@ -158,12 +175,4 @@ body: render: YAML validations: required: false - - type: textarea - id: ansible-version - attributes: - label: Ansible version - description: What is the verson of Ansible on the controller. - placeholder: Paste verbatim output from `ansible --version`. - render: shell - validations: - required: true + From 5f63a9aa7a7bc0494b67c081fdfd9a8a1bb4652c Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Thu, 16 Mar 2023 17:13:17 -0700 Subject: [PATCH 046/495] Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> --- .github/ISSUE_TEMPLATE/bug_issue.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/ISSUE_TEMPLATE/bug_issue.yml b/.github/ISSUE_TEMPLATE/bug_issue.yml index 694b07359..6daddefcb 100644 --- a/.github/ISSUE_TEMPLATE/bug_issue.yml +++ b/.github/ISSUE_TEMPLATE/bug_issue.yml @@ -72,7 +72,7 @@ body: validations: required: true - type: dropdown - id: zos-version + id: ansible-version attributes: label: ansible-version description: What is the version of Ansible on the controller (`ansible --version`)? From 7beff18204f0e49d9622c29b80698077649035cb Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Thu, 16 Mar 2023 17:15:03 -0700 Subject: [PATCH 047/495] Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> --- .github/ISSUE_TEMPLATE/bug_issue.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/ISSUE_TEMPLATE/bug_issue.yml b/.github/ISSUE_TEMPLATE/bug_issue.yml index 6daddefcb..1ab6eb602 100644 --- a/.github/ISSUE_TEMPLATE/bug_issue.yml +++ b/.github/ISSUE_TEMPLATE/bug_issue.yml @@ -85,7 +85,7 @@ body: - v2.11.x - v2.9.x validations: - required: false + required: true - type: dropdown id: zos-version attributes: From 097e1ac44ddf516467178998bc6adc1fd3b54618 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Thu, 16 Mar 2023 20:45:27 -0700 Subject: [PATCH 048/495] Update collab template Signed-off-by: ddimatos <dimatos@gmail.com> --- .../ISSUE_TEMPLATE/collaboration_issue.yml | 121 +++++++++++++++--- 1 file changed, 102 insertions(+), 19 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/collaboration_issue.yml b/.github/ISSUE_TEMPLATE/collaboration_issue.yml index 3c46b8f81..ca9ab8a49 100644 --- a/.github/ISSUE_TEMPLATE/collaboration_issue.yml +++ b/.github/ISSUE_TEMPLATE/collaboration_issue.yml @@ -1,5 +1,5 @@ -name: Collaboration task -description: Identify a collaboration between this development team and another party. e.g, A support case, dependency effort,etc +name: Collaboration Issue +description: A collaboration with the development team and another external resource. e.g, Support case, dependency, community, etc title: "[Collaboration] <title> " labels: [Collaboration] assignees: @@ -8,44 +8,109 @@ body: - type: markdown attributes: value: | - Before authoring a task, please review existing issues to avoid duplication. --- - type: checkboxes + id: existing-issue attributes: label: Is there an existing issue for this? description: Please search to see if an issue already exists for the bug you encountered. - options: - - label: There are no existing issues. - required: true + options: + - label: There are no existing issues. + required: true - type: checkboxes id: support-issue attributes: label: Support and service? - description: Is support and service involved in this collaboration + description: Is support and service involved in this collaboration? options: - label: Yes, support and service is involved. required: true - - type: textarea - id: issue-description + - type: checkboxes + id: valid-dependencies attributes: - label: Collaboration description - description: Describe the task, this is the equivalent of a agile story. - placeholder: Verbosity is encouraged, the more you share the better for us to understand. + label: Are the dependencies a supported version? + description: Please review the ZOAU and IBM Enterprise Python versions in the reference section of the [release notes](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html). + options: + - label: The dependencies are supported. + required: false + - type: dropdown + id: zoau-version + attributes: + label: IBM Z Open Automation Utilities + description: Which version of ZOAU are you using? + multiple: false + options: + - v1.2.2 + - v1.2.1 + - v1.2.0 + - v1.1.1 + - v1.0.3 validations: - required: true - - type: textarea - id: collaborators + required: false + - type: dropdown + id: python-version attributes: - label: collaborators - description: Who or what product is part of this collaboration. - placeholder: GH IDs, product, etc + label: IBM Enterprise Python + description: Which version of IBM Enterprise Python are you using? + multiple: false + options: + - v3.11.x + - v3.10.x + - v3.9.x + - v3.8.x + validations: + required: false + - type: dropdown + id: collection-version + attributes: + label: IBM z/OS Ansible core Version + description: Which version of z/OS Ansible core collection are you using. If you are unsure, review the [documentation](https://ibm.github.io/z_ansible_collections_doc/faqs/faqs.html#how-do-i-update-a-collection-to-the-latest-version). + multiple: false + options: + - v1.5.0 + - v1.4.0 + - v1.3.6 + - v1.3.5 + - v1.3.3 + - v1.3.1 + - v1.3.0 + - v1.2.1 + - v1.1.0 + - v1.0.0 validations: required: true + - type: dropdown + id: ansible-version + attributes: + label: ansible-version + description: What is the version of Ansible on the controller (`ansible --version`)? + multiple: false + options: + - latest + - v2.14.x + - v2.13.x + - v2.12.x + - v2.11.x + - v2.9.x + validations: + required: false + - type: dropdown + id: zos-version + attributes: + label: z/OS version + description: What is the version of z/OS on the managed node? + multiple: false + options: + - v2.5 + - v2.4 + - v2.3 + validations: + required: false - type: dropdown id: modules attributes: label: Ansible module - description: Select which modules are being reported for this task. You can select more than one. + description: Select which modules are being reported in this bug. You can select more than one. multiple: true options: - zos_apf @@ -69,3 +134,21 @@ body: - zos_tso_command validations: required: false + - type: textarea + id: issue-description + attributes: + label: Collaboration description + description: Describe the collaboration issue. + placeholder: | + For example + 1. Working with IBM Enterprise Python to resolve issue xyz. + 2. Working with z/OS application team DFSMS to resolve xyz. + 3. Assisting IBM support to resolve an ibm_zos_copy issue. + validations: + required: true + + + + + + From 753bfbd626a639ef55d8aa09d51cfea58011514c Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Thu, 16 Mar 2023 20:47:21 -0700 Subject: [PATCH 049/495] Update collab template Signed-off-by: ddimatos <dimatos@gmail.com> --- .github/ISSUE_TEMPLATE/collaboration_issue.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/collaboration_issue.yml b/.github/ISSUE_TEMPLATE/collaboration_issue.yml index ca9ab8a49..420658709 100644 --- a/.github/ISSUE_TEMPLATE/collaboration_issue.yml +++ b/.github/ISSUE_TEMPLATE/collaboration_issue.yml @@ -22,9 +22,9 @@ body: attributes: label: Support and service? description: Is support and service involved in this collaboration? - options: - - label: Yes, support and service is involved. - required: true + options: + - label: Yes, support and service is involved. + required: true - type: checkboxes id: valid-dependencies attributes: From ebb770a535ffebebff10b3b66f7acae530305e8f Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Thu, 16 Mar 2023 20:49:55 -0700 Subject: [PATCH 050/495] Update collab template Signed-off-by: ddimatos <dimatos@gmail.com> --- .github/ISSUE_TEMPLATE/collaboration_issue.yml | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/collaboration_issue.yml b/.github/ISSUE_TEMPLATE/collaboration_issue.yml index 420658709..a1dfe96e8 100644 --- a/.github/ISSUE_TEMPLATE/collaboration_issue.yml +++ b/.github/ISSUE_TEMPLATE/collaboration_issue.yml @@ -1,14 +1,10 @@ -name: Collaboration Issue +name: Request a Collaboration description: A collaboration with the development team and another external resource. e.g, Support case, dependency, community, etc title: "[Collaboration] <title> " labels: [Collaboration] assignees: - IBMAnsibleHelper body: - - type: markdown - attributes: - value: | - --- - type: checkboxes id: existing-issue attributes: From b83571d3b3b0a717ea759d3f40080829fe629f4d Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Thu, 16 Mar 2023 20:58:38 -0700 Subject: [PATCH 051/495] Template updates Signed-off-by: ddimatos <dimatos@gmail.com> --- .github/ISSUE_TEMPLATE/bug_issue.yml | 4 --- .../ISSUE_TEMPLATE/collaboration_issue.yml | 2 +- .github/ISSUE_TEMPLATE/doc_issue.yml | 15 ++++++---- .github/ISSUE_TEMPLATE/enabler_issue.yml | 12 +++----- .../enhancement_feature.issue.yml | 30 +++++++++++-------- .github/ISSUE_TEMPLATE/module_issue.yml | 11 ++++--- 6 files changed, 38 insertions(+), 36 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/bug_issue.yml b/.github/ISSUE_TEMPLATE/bug_issue.yml index 1ab6eb602..359add494 100644 --- a/.github/ISSUE_TEMPLATE/bug_issue.yml +++ b/.github/ISSUE_TEMPLATE/bug_issue.yml @@ -5,10 +5,6 @@ labels: [Bug] assignees: - IBMAnsibleHelper body: - - type: markdown - attributes: - value: | - --- - type: checkboxes id: existing-issue attributes: diff --git a/.github/ISSUE_TEMPLATE/collaboration_issue.yml b/.github/ISSUE_TEMPLATE/collaboration_issue.yml index a1dfe96e8..4f9db151e 100644 --- a/.github/ISSUE_TEMPLATE/collaboration_issue.yml +++ b/.github/ISSUE_TEMPLATE/collaboration_issue.yml @@ -24,7 +24,7 @@ body: - type: checkboxes id: valid-dependencies attributes: - label: Are the dependencies a supported version? + label: Are the dependencies a supported? description: Please review the ZOAU and IBM Enterprise Python versions in the reference section of the [release notes](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html). options: - label: The dependencies are supported. diff --git a/.github/ISSUE_TEMPLATE/doc_issue.yml b/.github/ISSUE_TEMPLATE/doc_issue.yml index 9485a79a7..c7bced03d 100644 --- a/.github/ISSUE_TEMPLATE/doc_issue.yml +++ b/.github/ISSUE_TEMPLATE/doc_issue.yml @@ -5,11 +5,14 @@ labels: [Documentation] assignees: - IBMAnsibleHelper body: - - type: markdown + - type: checkboxes + id: existing-issue attributes: - value: | - Before reporting a documentation issue, please review existing isssues to avoid duplication. - --- + label: Is there an existing issue for this? + description: Please search to see if an issue already exists for the bug you encountered. + options: + - label: There are no existing issues. + required: true - type: textarea id: issue-description attributes: @@ -17,7 +20,7 @@ body: description: Describe the documentation issue. placeholder: | Verbosity is encouraged, the more you share the better for us to understand. - 1. Include links to the page you are reffering to if applicable + 1. Include links to the page you are referring to if applicable 2. Include reproduction steps if applicable 3. Include any additional information that will help us 4. Include screen captures of applicable @@ -28,7 +31,7 @@ body: id: ansible-version attributes: label: Ansible version - description: What is the verson of Ansible on the controller if applicable. + description: What is the version of Ansible on the controller if applicable. placeholder: Paste verbatim output from `ansible --version`. render: SHELL validations: diff --git a/.github/ISSUE_TEMPLATE/enabler_issue.yml b/.github/ISSUE_TEMPLATE/enabler_issue.yml index acce5523e..18abe0400 100644 --- a/.github/ISSUE_TEMPLATE/enabler_issue.yml +++ b/.github/ISSUE_TEMPLATE/enabler_issue.yml @@ -5,18 +5,14 @@ labels: [Enabler] assignees: - IBMAnsibleHelper body: - - type: markdown - attributes: - value: | - Before authoring a task, please review existing issues to avoid duplication. - --- - type: checkboxes + id: existing-issue attributes: label: Is there an existing issue for this? description: Please search to see if an issue already exists for the bug you encountered. - options: - - label: There are no existing issues. - required: true + options: + - label: There are no existing issues. + required: true - type: dropdown id: modules attributes: diff --git a/.github/ISSUE_TEMPLATE/enhancement_feature.issue.yml b/.github/ISSUE_TEMPLATE/enhancement_feature.issue.yml index 597bebbf2..02901be8c 100644 --- a/.github/ISSUE_TEMPLATE/enhancement_feature.issue.yml +++ b/.github/ISSUE_TEMPLATE/enhancement_feature.issue.yml @@ -5,19 +5,14 @@ labels: [Enhancement] assignees: - IBMAnsibleHelper body: - - type: markdown + - type: checkboxes + id: existing-issue attributes: - value: | - Before requesting an enhancement or feature, please review existing isssues to avoid duplication. - --- - - type: textarea - id: issue-description - attributes: - label: Enhancement or featture description - description: Describe the enhancement or feature you are requesting. - placeholder: Verbosity is encouraged, the more you share the better for us to understand. - validations: - required: true + label: Is there an existing issue for this? + description: Please search to see if an issue already exists for the bug you encountered. + options: + - label: There are no existing issues. + required: true - type: dropdown id: modules attributes: @@ -45,4 +40,13 @@ body: - zos_ping - zos_tso_command validations: - required: false + required: true + - type: textarea + id: issue-description + attributes: + label: Enhancement or feature description + description: Describe the enhancement or feature you are requesting. + placeholder: Verbosity is encouraged, the more you share the better for us to understand. + validations: + required: true + diff --git a/.github/ISSUE_TEMPLATE/module_issue.yml b/.github/ISSUE_TEMPLATE/module_issue.yml index f11ca537a..60dee4415 100644 --- a/.github/ISSUE_TEMPLATE/module_issue.yml +++ b/.github/ISSUE_TEMPLATE/module_issue.yml @@ -5,11 +5,14 @@ labels: [Module] assignees: - IBMAnsibleHelper body: - - type: markdown + - type: checkboxes + id: existing-issue attributes: - value: | - Before requesting a new module, please review existing isssues to avoid duplication. - --- + label: Is there an existing issue for this? + description: Please search to see if an issue already exists for the bug you encountered. + options: + - label: There are no existing issues. + required: true - type: textarea id: issue-description attributes: From 1b938f9fdd2bc103cf2f6efa79e9dc583fb5c582 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Thu, 16 Mar 2023 21:04:29 -0700 Subject: [PATCH 052/495] Template updates Signed-off-by: ddimatos <dimatos@gmail.com> --- .github/ISSUE_TEMPLATE/collaboration_issue.yml | 2 +- .github/ISSUE_TEMPLATE/doc_issue.yml | 2 +- .github/ISSUE_TEMPLATE/enabler_issue.yml | 4 +++- .github/ISSUE_TEMPLATE/enhancement_feature.issue.yml | 2 +- .github/ISSUE_TEMPLATE/module_issue.yml | 2 +- 5 files changed, 7 insertions(+), 5 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/collaboration_issue.yml b/.github/ISSUE_TEMPLATE/collaboration_issue.yml index 4f9db151e..4ea4e4108 100644 --- a/.github/ISSUE_TEMPLATE/collaboration_issue.yml +++ b/.github/ISSUE_TEMPLATE/collaboration_issue.yml @@ -1,5 +1,5 @@ name: Request a Collaboration -description: A collaboration with the development team and another external resource. e.g, Support case, dependency, community, etc +description: Request collaboration with a member of this team. Complete all required fields. title: "[Collaboration] <title> " labels: [Collaboration] assignees: diff --git a/.github/ISSUE_TEMPLATE/doc_issue.yml b/.github/ISSUE_TEMPLATE/doc_issue.yml index c7bced03d..07ddbc40e 100644 --- a/.github/ISSUE_TEMPLATE/doc_issue.yml +++ b/.github/ISSUE_TEMPLATE/doc_issue.yml @@ -1,5 +1,5 @@ name: Report a documentation issue -description: Request that documentation be reviewed. +description: Request that documentation be reviewed. Complete all required fields. title: "[Documentation] <title> " labels: [Documentation] assignees: diff --git a/.github/ISSUE_TEMPLATE/enabler_issue.yml b/.github/ISSUE_TEMPLATE/enabler_issue.yml index 18abe0400..37131e500 100644 --- a/.github/ISSUE_TEMPLATE/enabler_issue.yml +++ b/.github/ISSUE_TEMPLATE/enabler_issue.yml @@ -1,5 +1,7 @@ name: Enabler task -description: Identify a development task that does not correspond to other git issue types, eg this could be a pipeline task. +description: | + Identify a development task that does not correspond to other git issue types, eg this could be a pipeline task. + Complete all required fields. title: "[Enabler] <title> " labels: [Enabler] assignees: diff --git a/.github/ISSUE_TEMPLATE/enhancement_feature.issue.yml b/.github/ISSUE_TEMPLATE/enhancement_feature.issue.yml index 02901be8c..d39840872 100644 --- a/.github/ISSUE_TEMPLATE/enhancement_feature.issue.yml +++ b/.github/ISSUE_TEMPLATE/enhancement_feature.issue.yml @@ -1,5 +1,5 @@ name: Request an enhancement or new feature -description: Request a new feature or that content be enhanced. +description: Request a new feature or an enhancement. Complete all required fields. title: "[Enhancement] <title> " labels: [Enhancement] assignees: diff --git a/.github/ISSUE_TEMPLATE/module_issue.yml b/.github/ISSUE_TEMPLATE/module_issue.yml index 60dee4415..beea537e9 100644 --- a/.github/ISSUE_TEMPLATE/module_issue.yml +++ b/.github/ISSUE_TEMPLATE/module_issue.yml @@ -1,5 +1,5 @@ name: Request a new module -description: Request a new module be added to the collection. +description: Request a new module be added to the collection. Complete all required fields. title: "[Module] <title> " labels: [Module] assignees: From 8b1796aeaf69090dbef9e55288cd9113b4a45ab5 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Sat, 18 Mar 2023 21:17:32 -0700 Subject: [PATCH 053/495] add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> --- .github/ISSUE_TEMPLATE/collaboration_issue.yml | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/collaboration_issue.yml b/.github/ISSUE_TEMPLATE/collaboration_issue.yml index 4ea4e4108..f71fcf355 100644 --- a/.github/ISSUE_TEMPLATE/collaboration_issue.yml +++ b/.github/ISSUE_TEMPLATE/collaboration_issue.yml @@ -20,14 +20,16 @@ body: description: Is support and service involved in this collaboration? options: - label: Yes, support and service is involved. - required: true + - label: No, support and service is involved. + validations: + required: false - type: checkboxes id: valid-dependencies attributes: label: Are the dependencies a supported? description: Please review the ZOAU and IBM Enterprise Python versions in the reference section of the [release notes](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html). options: - - label: The dependencies are supported. + - label: Yes, the dependencies are supported. required: false - type: dropdown id: zoau-version From ab87d126702db1b10f8a1b59b5246468a27872f0 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Sat, 18 Mar 2023 21:18:51 -0700 Subject: [PATCH 054/495] add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> --- .github/ISSUE_TEMPLATE/collaboration_issue.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/ISSUE_TEMPLATE/collaboration_issue.yml b/.github/ISSUE_TEMPLATE/collaboration_issue.yml index f71fcf355..ebb81d8e3 100644 --- a/.github/ISSUE_TEMPLATE/collaboration_issue.yml +++ b/.github/ISSUE_TEMPLATE/collaboration_issue.yml @@ -22,7 +22,7 @@ body: - label: Yes, support and service is involved. - label: No, support and service is involved. validations: - required: false + required: true - type: checkboxes id: valid-dependencies attributes: From 2a92a72b614bdaa16dff3b38a11664d9ddf420dc Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Sat, 18 Mar 2023 21:19:55 -0700 Subject: [PATCH 055/495] add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> --- .github/ISSUE_TEMPLATE/collaboration_issue.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/collaboration_issue.yml b/.github/ISSUE_TEMPLATE/collaboration_issue.yml index ebb81d8e3..2137baddf 100644 --- a/.github/ISSUE_TEMPLATE/collaboration_issue.yml +++ b/.github/ISSUE_TEMPLATE/collaboration_issue.yml @@ -20,9 +20,9 @@ body: description: Is support and service involved in this collaboration? options: - label: Yes, support and service is involved. + required: true - label: No, support and service is involved. - validations: - required: true + required: true - type: checkboxes id: valid-dependencies attributes: From 65df75647edcf990fd6001ce1c7dbb390940fa07 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Sat, 18 Mar 2023 21:21:05 -0700 Subject: [PATCH 056/495] add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> --- .github/ISSUE_TEMPLATE/collaboration_issue.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/collaboration_issue.yml b/.github/ISSUE_TEMPLATE/collaboration_issue.yml index 2137baddf..eab9c5b33 100644 --- a/.github/ISSUE_TEMPLATE/collaboration_issue.yml +++ b/.github/ISSUE_TEMPLATE/collaboration_issue.yml @@ -20,9 +20,9 @@ body: description: Is support and service involved in this collaboration? options: - label: Yes, support and service is involved. - required: true + required: false - label: No, support and service is involved. - required: true + required: false - type: checkboxes id: valid-dependencies attributes: From 85180bd94384fd201fdf3be99625130a74de7f0b Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Sat, 18 Mar 2023 21:27:24 -0700 Subject: [PATCH 057/495] add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> --- .github/ISSUE_TEMPLATE/collaboration_issue.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/ISSUE_TEMPLATE/collaboration_issue.yml b/.github/ISSUE_TEMPLATE/collaboration_issue.yml index eab9c5b33..fcd264828 100644 --- a/.github/ISSUE_TEMPLATE/collaboration_issue.yml +++ b/.github/ISSUE_TEMPLATE/collaboration_issue.yml @@ -31,6 +31,8 @@ body: options: - label: Yes, the dependencies are supported. required: false + - label: Not applicable to this collaboration. + required: false - type: dropdown id: zoau-version attributes: From 8f9faec3dd41d0486b5d6c4d87dc3e2c8077ed3a Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Sat, 18 Mar 2023 21:29:57 -0700 Subject: [PATCH 058/495] add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> --- .github/ISSUE_TEMPLATE/collaboration_issue.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/collaboration_issue.yml b/.github/ISSUE_TEMPLATE/collaboration_issue.yml index fcd264828..bf6db4778 100644 --- a/.github/ISSUE_TEMPLATE/collaboration_issue.yml +++ b/.github/ISSUE_TEMPLATE/collaboration_issue.yml @@ -52,7 +52,7 @@ body: attributes: label: IBM Enterprise Python description: Which version of IBM Enterprise Python are you using? - multiple: false + multiple: true options: - v3.11.x - v3.10.x @@ -78,7 +78,7 @@ body: - v1.1.0 - v1.0.0 validations: - required: true + required: false - type: dropdown id: ansible-version attributes: From a6a30a5500b519cc86050a979ecda445a4d28699 Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Wed, 22 Mar 2023 17:05:55 -0400 Subject: [PATCH 059/495] changed zos_job_query to allow multi/embedded wildcard job_values. job.py/_get_job_status now uses fnmatch to test for wildcarded job_names. added test looking for a wildcard job name. added basic changelog text as 'enhancement'. --- ...os-job-query-handle-multiple-wildcards.yml | 4 ++ plugins/module_utils/job.py | 5 ++- plugins/modules/zos_job_query.py | 38 +++++++++++++------ .../modules/test_zos_job_query_func.py | 10 +++++ 4 files changed, 45 insertions(+), 12 deletions(-) create mode 100644 changelogs/fragments/323-zos-job-query-handle-multiple-wildcards.yml diff --git a/changelogs/fragments/323-zos-job-query-handle-multiple-wildcards.yml b/changelogs/fragments/323-zos-job-query-handle-multiple-wildcards.yml new file mode 100644 index 000000000..43c6f0525 --- /dev/null +++ b/changelogs/fragments/323-zos-job-query-handle-multiple-wildcards.yml @@ -0,0 +1,4 @@ +enhancements: + - zos_job_query - This bugfix adjusts the job_name parameter to handle multiple embedded wildcards. + This also required change to job.py/_get_job_status to follow the wildcard feature. + (https://github.com/ansible-collections/ibm_zos_core/pull/---) diff --git a/plugins/module_utils/job.py b/plugins/module_utils/job.py index d7c156673..e97e30784 100644 --- a/plugins/module_utils/job.py +++ b/plugins/module_utils/job.py @@ -13,6 +13,7 @@ __metaclass__ = type +import fnmatch import re from time import sleep from timeit import default_timer as timer @@ -207,7 +208,9 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, duration= if owner != entry.owner: continue if job_name != "*": - if job_name != entry.name: + # if job_name != entry.name: + # continue + if not fnmatch.fnmatch( entry.name, job_name ): continue job = {} diff --git a/plugins/modules/zos_job_query.py b/plugins/modules/zos_job_query.py index 8f7d7fc93..0c1878816 100644 --- a/plugins/modules/zos_job_query.py +++ b/plugins/modules/zos_job_query.py @@ -208,8 +208,8 @@ def run_module(): return result try: - validate_arguments(module.params) - jobs_raw = query_jobs(module.params) + name, id, owner = validate_arguments(module.params) + jobs_raw = query_jobs(name, id, owner) jobs = parsing_jobs(jobs_raw) except Exception as e: module.fail_json(msg=e, **result) @@ -217,8 +217,10 @@ def run_module(): module.exit_json(**result) +# validate_arguments rturns a tuple, so we don't have to rebuild the job_name string def validate_arguments(params): job_name_in = params.get("job_name") + job_name_final = job_name_in job_id = params.get("job_id") owner = params.get("owner") if job_name_in or job_id: @@ -229,10 +231,26 @@ def validate_arguments(params): ) m = job_name_pattern.search(job_name_in) n = job_name_pattern_with_star.search(job_name_in) - if m or n: - pass - else: + # logic twist: o must be non-null value from m or n + o = m + if n: + o = n + + # if neither m nor n were non-null, check if the string needed to be truncated to the first * + if not o: + ix = job_name_in.find("*") + if ix >= 0: + job_name_short = job_name_in[0, ix+1] + o = job_name_pattern.search(job_name_short) + if not o: + o = job_name_pattern_with_star.search(job_name_short) + if o: + job_name_final = job_name_short + + # so now, fail if neither m, n, or o=m/n(short) found a match + if not o: raise RuntimeError("Failed to validate the job name: " + job_name_in) + if job_id: job_id_pattern = re.compile("(JOB|TSU|STC)[0-9]{5}|(J|T|S)[0-9]{7}$") if not job_id_pattern.search(job_id): @@ -242,19 +260,17 @@ def validate_arguments(params): if job_id and owner: raise RuntimeError("Argument Error:job id can not be co-exist with owner") + return job_name_final, job_id, owner; -def query_jobs(params): - job_name_in = params.get("job_name") - job_id = params.get("job_id") - owner = params.get("owner") +def query_jobs(job_name, job_id, owner): jobs = [] if job_id: jobs = job_status(job_id=job_id) elif owner: - jobs = job_status(owner=owner, job_name=job_name_in) + jobs = job_status(owner=owner, job_name=job_name) else: - jobs = job_status(job_name=job_name_in) + jobs = job_status(job_name=job_name) if not jobs: raise RuntimeError("List FAILED! no such job was found.") return jobs diff --git a/tests/functional/modules/test_zos_job_query_func.py b/tests/functional/modules/test_zos_job_query_func.py index b94be19a7..32914731c 100644 --- a/tests/functional/modules/test_zos_job_query_func.py +++ b/tests/functional/modules/test_zos_job_query_func.py @@ -33,3 +33,13 @@ def test_zos_job_query_func(ansible_zos_module): for result in results.contacted.values(): assert result.get("changed") is False assert result.get("jobs") is not None + + +# test to show multi wildcard won't crash the search +def test_zos_job_query_multi_wildcards_func(ansible_zos_module): + hosts = ansible_zos_module + results = hosts.all.zos_job_query(job_name="JOB*1*", owner="*") + pprint(vars(results)) + for result in results.contacted.values(): + assert result.get("changed") is False + assert result.get("jobs") is not None \ No newline at end of file From e4b28dea7f3bc1538c29142381e57fe7d915c73d Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Thu, 23 Mar 2023 15:37:21 -0400 Subject: [PATCH 060/495] expanded wildcard support to job_id as well as job_name corrected error-handling issue in query routine playbook tests complete --- ...os-job-query-handle-multiple-wildcards.yml | 4 +-- plugins/module_utils/job.py | 7 ++-- plugins/modules/zos_job_query.py | 33 ++++++++++++++++--- 3 files changed, 35 insertions(+), 9 deletions(-) diff --git a/changelogs/fragments/323-zos-job-query-handle-multiple-wildcards.yml b/changelogs/fragments/323-zos-job-query-handle-multiple-wildcards.yml index 43c6f0525..ae2871b9f 100644 --- a/changelogs/fragments/323-zos-job-query-handle-multiple-wildcards.yml +++ b/changelogs/fragments/323-zos-job-query-handle-multiple-wildcards.yml @@ -1,4 +1,4 @@ enhancements: - - zos_job_query - This bugfix adjusts the job_name parameter to handle multiple embedded wildcards. - This also required change to job.py/_get_job_status to follow the wildcard feature. + - zos_job_query - This enhancement adjusts the job_name and job_id parameters to handle embedded wildcards. + This also required change to job.py/_get_job_status to follow the wildcard feature, using fnmatch logic. (https://github.com/ansible-collections/ibm_zos_core/pull/---) diff --git a/plugins/module_utils/job.py b/plugins/module_utils/job.py index e97e30784..8253b7ee5 100644 --- a/plugins/module_utils/job.py +++ b/plugins/module_utils/job.py @@ -132,9 +132,9 @@ def job_status(job_id=None, owner=None, job_name=None, dd_name=None): """ arg_defs = dict( - job_id=dict(arg_type="qualifier_pattern"), + job_id=dict(arg_type="str"), owner=dict(arg_type="qualifier_pattern"), - job_name=dict(arg_type="qualifier_pattern"), + job_name=dict(arg_type="str"), dd_name=dict(arg_type="str"), ) @@ -212,6 +212,9 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, duration= # continue if not fnmatch.fnmatch( entry.name, job_name ): continue + if job_id_temp != None: + if not fnmatch.fnmatch( entry.id, job_id ): + continue job = {} job["job_id"] = entry.id diff --git a/plugins/modules/zos_job_query.py b/plugins/modules/zos_job_query.py index 0c1878816..a6d905933 100644 --- a/plugins/modules/zos_job_query.py +++ b/plugins/modules/zos_job_query.py @@ -210,7 +210,11 @@ def run_module(): try: name, id, owner = validate_arguments(module.params) jobs_raw = query_jobs(name, id, owner) - jobs = parsing_jobs(jobs_raw) + if jobs_raw: + jobs = parsing_jobs(jobs_raw) + else: + jobs = None + except Exception as e: module.fail_json(msg=e, **result) result["jobs"] = jobs @@ -221,7 +225,10 @@ def run_module(): def validate_arguments(params): job_name_in = params.get("job_name") job_name_final = job_name_in + job_id = params.get("job_id") + job_id_final = job_id + owner = params.get("owner") if job_name_in or job_id: if job_name_in and job_name_in != "*": @@ -240,7 +247,7 @@ def validate_arguments(params): if not o: ix = job_name_in.find("*") if ix >= 0: - job_name_short = job_name_in[0, ix+1] + job_name_short = job_name_in[0:ix+1] o = job_name_pattern.search(job_name_short) if not o: o = job_name_pattern_with_star.search(job_name_short) @@ -249,18 +256,34 @@ def validate_arguments(params): # so now, fail if neither m, n, or o=m/n(short) found a match if not o: - raise RuntimeError("Failed to validate the job name: " + job_name_in) + raise RuntimeError("Failed to validate the job name: " + job_name_in + " ix was " + ix + " short was " + job_name_short) if job_id: job_id_pattern = re.compile("(JOB|TSU|STC)[0-9]{5}|(J|T|S)[0-9]{7}$") - if not job_id_pattern.search(job_id): + m = job_id_pattern.search(job_id) + o = None + + if not m: + ix = job_id.find("*") + if ix > 0: + # this differs from job_name, in that we'll drop the star for the search + job_id_short = job_id[0:ix] + + if job_id_short[0:3] in ['JOB','TSU','STC'] or job_id_short[0:1] in ['J','T','S']: + o = job_id_short + + if o: + job_id_final = job_id_short + '*' + + if not m and not o: raise RuntimeError("Failed to validate the job id: " + job_id) else: raise RuntimeError("Argument Error:Either job name(s) or job id is required") if job_id and owner: raise RuntimeError("Argument Error:job id can not be co-exist with owner") - return job_name_final, job_id, owner; + # return job_name_final, job_id_final, owner; + return job_name_in, job_id, owner; def query_jobs(job_name, job_id, owner): From 556dd2ff4e213f5ed9dab3307713d6eb5840166f Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Thu, 23 Mar 2023 15:53:04 -0400 Subject: [PATCH 061/495] cleaned up pep8 issues --- plugins/module_utils/job.py | 6 +++--- plugins/modules/zos_job_query.py | 27 ++++++++++++++------------- 2 files changed, 17 insertions(+), 16 deletions(-) diff --git a/plugins/module_utils/job.py b/plugins/module_utils/job.py index 8253b7ee5..00ec0407f 100644 --- a/plugins/module_utils/job.py +++ b/plugins/module_utils/job.py @@ -210,10 +210,10 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, duration= if job_name != "*": # if job_name != entry.name: # continue - if not fnmatch.fnmatch( entry.name, job_name ): + if not fnmatch.fnmatch(entry.name, job_name): continue - if job_id_temp != None: - if not fnmatch.fnmatch( entry.id, job_id ): + if job_id_temp is not None: + if not fnmatch.fnmatch(entry.id, job_id): continue job = {} diff --git a/plugins/modules/zos_job_query.py b/plugins/modules/zos_job_query.py index a6d905933..22968bc62 100644 --- a/plugins/modules/zos_job_query.py +++ b/plugins/modules/zos_job_query.py @@ -247,12 +247,12 @@ def validate_arguments(params): if not o: ix = job_name_in.find("*") if ix >= 0: - job_name_short = job_name_in[0:ix+1] - o = job_name_pattern.search(job_name_short) - if not o: - o = job_name_pattern_with_star.search(job_name_short) - if o: - job_name_final = job_name_short + job_name_short = job_name_in[0:ix + 1] + o = job_name_pattern.search(job_name_short) + if not o: + o = job_name_pattern_with_star.search(job_name_short) + if o: + job_name_final = job_name_short # so now, fail if neither m, n, or o=m/n(short) found a match if not o: @@ -266,14 +266,14 @@ def validate_arguments(params): if not m: ix = job_id.find("*") if ix > 0: - # this differs from job_name, in that we'll drop the star for the search - job_id_short = job_id[0:ix] + # this differs from job_name, in that we'll drop the star for the search + job_id_short = job_id[0:ix] - if job_id_short[0:3] in ['JOB','TSU','STC'] or job_id_short[0:1] in ['J','T','S']: - o = job_id_short + if job_id_short[0:3] in ['JOB', 'TSU', 'STC'] or job_id_short[0:1] in ['J', 'T', 'S']: + o = job_id_short - if o: - job_id_final = job_id_short + '*' + if o: + job_id_final = job_id_short + '*' if not m and not o: raise RuntimeError("Failed to validate the job id: " + job_id) @@ -283,7 +283,8 @@ def validate_arguments(params): raise RuntimeError("Argument Error:job id can not be co-exist with owner") # return job_name_final, job_id_final, owner; - return job_name_in, job_id, owner; + return job_name_in, job_id, owner + def query_jobs(job_name, job_id, owner): From 7b93feca57a0f4f59e3d1ef55213297b439c3dd6 Mon Sep 17 00:00:00 2001 From: Demetri <dimatos@gmail.com> Date: Mon, 27 Mar 2023 15:19:35 -0700 Subject: [PATCH 062/495] Bug fix to correct job log returining and other edge cases (#683) * Update zos_job_submit to handle some edge cases and return the job log nearly always Signed-off-by: ddimatos <dimatos@gmail.com> * Linting correction Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragement for pr 683 Signed-off-by: ddimatos <dimatos@gmail.com> * Clean up comments in code Signed-off-by: ddimatos <dimatos@gmail.com> * Update restructured text for modules and changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Add support to check for security exception to job submit module Signed-off-by: ddimatos <dimatos@gmail.com> * Add additonal logic for jobs that use typerun=scan Signed-off-by: ddimatos <dimatos@gmail.com> * Update chnagelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update zos_job_submit.py Grammar change and correction. --------- Signed-off-by: ddimatos <dimatos@gmail.com> --- .../fragments/683-zos_job_submit-bugs.yml | 35 ++++ docs/source/modules/zos_job_submit.rst | 26 +-- docs/source/modules/zos_operator.rst | 16 +- plugins/action/zos_job_submit.py | 16 ++ plugins/module_utils/job.py | 16 +- plugins/modules/zos_job_submit.py | 126 ++++++++---- .../modules/test_zos_job_submit_func.py | 192 +++++++++++++++++- 7 files changed, 350 insertions(+), 77 deletions(-) create mode 100644 changelogs/fragments/683-zos_job_submit-bugs.yml diff --git a/changelogs/fragments/683-zos_job_submit-bugs.yml b/changelogs/fragments/683-zos_job_submit-bugs.yml new file mode 100644 index 000000000..b77fbdbc9 --- /dev/null +++ b/changelogs/fragments/683-zos_job_submit-bugs.yml @@ -0,0 +1,35 @@ +bugfixes: +- zos_job_submit - Fixes the issue when invalid JCL syntax is submitted that a + stack trace would result in the response, issue 623. + (https://github.com/ansible-collections/ibm_zos_core/pull/683) +- zos_job_submit - Fixes the issue when a job is purged by the system that a + stack trace would result in the response, issue 681. + (https://github.com/ansible-collections/ibm_zos_core/pull/683) +- zos_job_submit - Fixes the issue where the response did not include the + job log when a non-zero return code would occur, issue 655. + (https://github.com/ansible-collections/ibm_zos_core/pull/683) +- zos_job_submit - Fixes the issue when resources (data sets) identified in JCL + did not exist such that a stack trace would result in the response, issue 624. + (https://github.com/ansible-collections/ibm_zos_core/pull/683) +- zos_job_submit - Fixes the issue when `wait_time_s` was set to 0 that would + result in a `type` error that a stack trace would result in the response, + issue 670. (https://github.com/ansible-collections/ibm_zos_core/pull/683) +- zos_job_submit - Fixes the issue when a job encounters a security exception no + job log would would result in the response, issue 684. + (https://github.com/ansible-collections/ibm_zos_core/pull/683) +- zos_job_submit - Fixes the issue when a job is configured for a syntax check + using TYPRUN=SCAN that it would wait the full duration set by `wait_time_s` + to return a response, issue 685. + (https://github.com/ansible-collections/ibm_zos_core/pull/683) +- zos_job_submit - Fixes the issue when a job is configured for a syntax check + using TYPRUN=SCAN that no job log would result in the response, issue 685. + (https://github.com/ansible-collections/ibm_zos_core/pull/683) +trivial: +- zos_job_submit - Update documentation to for deprecated `wait` option and + expand on the `wait_time_s` description, issue 670. + (https://github.com/ansible-collections/ibm_zos_core/pull/683) +- zos_job_submit - Update documentation to describing the significance of '?' + for the 'ret_code' properties 'msg_text', 'msg_code' and 'msg', issue 685. + (https://github.com/ansible-collections/ibm_zos_core/pull/683) +- zos_operator - Update restructured text to include the updated examples. + (https://github.com/ansible-collections/ibm_zos_core/pull/683) \ No newline at end of file diff --git a/docs/source/modules/zos_job_submit.rst b/docs/source/modules/zos_job_submit.rst index bcf0c5383..bb438f8a5 100644 --- a/docs/source/modules/zos_job_submit.rst +++ b/docs/source/modules/zos_job_submit.rst @@ -16,9 +16,9 @@ zos_job_submit -- Submit JCL Synopsis -------- -- Submit JCL from DATA_SET , USS, or LOCAL location. -- Submit a job and optionally monitor for its execution. -- Optionally wait a designated time until the job finishes. +- Submit JCL from a data set, USS, or from the controller. +- Submit a job and optionally monitor for completion. +- Optionally, wait a designated time until the job finishes. - For an uncataloged dataset, specify the volume serial number. @@ -32,7 +32,7 @@ Parameters src The source file or data set containing the JCL to submit. - It could be physical sequential data set or a partitioned data set qualified by a member or a path. (e.g "USER.TEST","USER.JCL(TEST)") + It could be a physical sequential data set, a partitioned data set qualified by a member or a path. (e.g "USER.TEST","USER.JCL(TEST)") Or a USS file. (e.g "/u/tester/demo/sample.jcl") @@ -58,20 +58,20 @@ location wait - Configuring wait used by the `zos_job_submit <./zos_job_submit.html>`_ module has been deprecated and will be removed in ibm.ibm_zos_core collection. + Setting this option will yield no change, it is deprecated. There is no no need to set *wait*; setting *wait_times_s* is the correct way to configure the amount of tme to wait for a job to execute. - Setting this option will yield no change, it is deprecated. + Configuring wait used by the `zos_job_submit <./zos_job_submit.html>`_ module has been deprecated and will be removed in ibm.ibm_zos_core collection. - See option ``wait_time_s``. + See option *wait_time_s*. | **required**: False | **type**: bool wait_time_s - When *wait* is true, the module will wait for the number of seconds for Job completion. + Option *wait_time_s* is the total time that module `zos_job_submit <./zos_job_submit.html>`_ will wait for a submitted job to complete. The time begins when the module is executed on the managed node. - User can set the wait time manually with this option. + *wait_time_s* is measured in seconds and must be a value greater than 0 and less than 86400. | **required**: False | **type**: int @@ -100,7 +100,7 @@ volume When configured, the `zos_job_submit <./zos_job_submit.html>`_ will try to catalog the data set for the volume serial. If it is not able to, the module will fail. - Ignored for USS and LOCAL. + Ignored for *location=USS* and *location=LOCAL*. | **required**: False | **type**: str @@ -548,18 +548,18 @@ jobs } msg - Return code resulting from the job submission. + Return code resulting from the job submission. Jobs that take longer to assign a value can have a value of '?'. | **type**: str | **sample**: CC 0000 msg_code - Return code extracted from the `msg` so that it can be evaluated as a string. + Return code extracted from the `msg` so that it can be evaluated as a string. Jobs that take longer to assign a value can have a value of '?'. | **type**: str msg_txt - Returns additional information related to the job. + Returns additional information related to the job. Jobs that take longer to assign a value can have a value of '?'. | **type**: str | **sample**: The job completion code (CC) was not available in the job output, please review the job log." diff --git a/docs/source/modules/zos_operator.rst b/docs/source/modules/zos_operator.rst index 7742e60cd..868c78a10 100644 --- a/docs/source/modules/zos_operator.rst +++ b/docs/source/modules/zos_operator.rst @@ -59,10 +59,12 @@ wait_time_s wait - Configuring wait used by the `zos_operator <./zos_operator.html>`_ module has been deprecated and will be removed in ibm.ibm_zos_core collection. + Configuring wait used by the `zos_operator <./zos_operator.html>`_ module has been deprecated and will be removed in a future ibm.ibm_zos_core collection. Setting this option will yield no change, it is deprecated. + Review option *wait_time_s* to instruct operator commands to wait. + | **required**: False | **type**: bool | **default**: True @@ -76,13 +78,13 @@ Examples .. code-block:: yaml+jinja - - name: Execute an operator command to show active jobs + - name: Execute an operator command to show device status and allocation zos_operator: - cmd: 'd u,all' + cmd: 'd u' - - name: Execute an operator command to show active jobs with verbose information + - name: Execute an operator command to show device status and allocation with verbose information zos_operator: - cmd: 'd u,all' + cmd: 'd u' verbose: true - name: Execute an operator command to purge all job logs (requires escaping) @@ -91,12 +93,12 @@ Examples - name: Execute operator command to show jobs, waiting up to 5 seconds for response zos_operator: - cmd: 'd u,all' + cmd: 'd a,all' wait_time_s: 5 - name: Execute operator command to show jobs, always waiting 7 seconds for response zos_operator: - cmd: 'd u,all' + cmd: 'd a,all' wait_time_s: 7 diff --git a/plugins/action/zos_job_submit.py b/plugins/action/zos_job_submit.py index dd4d8e06f..7247f6b7b 100644 --- a/plugins/action/zos_job_submit.py +++ b/plugins/action/zos_job_submit.py @@ -136,4 +136,20 @@ def run(self, tmp=None, task_vars=None): ) ) + def delete_dict_entries(entries, dictionary): + """ Deletes entries from a dictionary when provided key and dictionary. + + Arguments: + entries (tuple) - entries to delete from dictionary + dictionary (dic) - dictionary to remove entries + """ + for key in entries: + if key in dictionary: + del dictionary[key] + + # Currently the direction is undecided if we should continue to use the + # community action plugins or transition to SFTP, so this code + # can remain should we want to clean up unrelated response values. + # entries = ('checksum', 'dest', 'gid', 'group', 'md5sum', 'mode', 'owner', 'size', 'src', 'state', 'uid') + # delete_dict_entries(entries, result) return result diff --git a/plugins/module_utils/job.py b/plugins/module_utils/job.py index d7c156673..478a605e5 100644 --- a/plugins/module_utils/job.py +++ b/plugins/module_utils/job.py @@ -35,10 +35,13 @@ def job_output(job_id=None, owner=None, job_name=None, dd_name=None, duration=0, """Get the output from a z/OS job based on various search criteria. Keyword Arguments: - job_id {str} -- The job ID to search for (default: {None}) - owner {str} -- The owner of the job (default: {None}) - job_name {str} -- The job name search for (default: {None}) - dd_name {str} -- The data definition to retrieve (default: {None}) + job_id (str) -- The job ID to search for (default: {None}) + owner (str) -- The owner of the job (default: {None}) + job_name (str) -- The job name search for (default: {None}) + dd_name (str) -- The data definition to retrieve (default: {None}) + duration (int) -- The time the submitted job ran for + timeout (int) - how long to wait in seconds for a job to complete + start_time (int) - time the JCL started its submission Returns: list[dict] -- The output information for a list of jobs matching specified criteria. @@ -220,7 +223,8 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, duration= job["ret_code"] = {} job["ret_code"]["msg"] = entry.status + " " + entry.rc job["ret_code"]["msg_code"] = entry.rc - job["ret_code"]["code"] = "" + # Why was this set to an empty string? + job["ret_code"]["code"] = None if len(entry.rc) > 0: if entry.rc.isdigit(): job["ret_code"]["code"] = int(entry.rc) @@ -312,7 +316,7 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, duration= job["ret_code"]["msg"] = tmptext.strip() job["ret_code"]["msg_code"] = None job["ret_code"]["code"] = None - if len(list_of_dds) > 1: + if len(list_of_dds) > 0: # The duration should really only be returned for job submit but the code # is used job_output as well, for now we can ignore this point unless # we want to offer a wait_time_s for job output which might be reasonable. diff --git a/plugins/modules/zos_job_submit.py b/plugins/modules/zos_job_submit.py index 6b3df1506..a58e138a1 100644 --- a/plugins/modules/zos_job_submit.py +++ b/plugins/modules/zos_job_submit.py @@ -25,9 +25,9 @@ - "Demetrios Dimatos (@ddimatos)" short_description: Submit JCL description: - - Submit JCL from DATA_SET , USS, or LOCAL location. - - Submit a job and optionally monitor for its execution. - - Optionally wait a designated time until the job finishes. + - Submit JCL from a data set, USS, or from the controller. + - Submit a job and optionally monitor for completion. + - Optionally, wait a designated time until the job finishes. - For an uncataloged dataset, specify the volume serial number. version_added: "1.0.0" options: @@ -36,7 +36,7 @@ type: str description: - The source file or data set containing the JCL to submit. - - It could be physical sequential data set or a partitioned data set + - It could be a physical sequential data set, a partitioned data set qualified by a member or a path. (e.g "USER.TEST","USER.JCL(TEST)") - Or a USS file. (e.g "/u/tester/demo/sample.jcl") - Or a LOCAL file in ansible control node. @@ -59,17 +59,23 @@ default: false type: bool description: + - Setting this option will yield no change, it is deprecated. There is no + no need to set I(wait); setting I(wait_times_s) is the correct way to + configure the amount of tme to wait for a job to execute. - Configuring wait used by the L(zos_job_submit,./zos_job_submit.html) module has been deprecated and will be removed in ibm.ibm_zos_core collection. - - Setting this option will yield no change, it is deprecated. - - See option ``wait_time_s``. + - See option I(wait_time_s). wait_time_s: required: false default: 10 type: int description: - - When I(wait) is true, the module will wait for the number of seconds for Job completion. - - User can set the wait time manually with this option. + - Option I(wait_time_s) is the total time that module + L(zos_job_submit,./zos_job_submit.html) will wait for a submitted job + to complete. The time begins when the module is executed on the managed + node. + - I(wait_time_s) is measured in seconds and must be a value greater than 0 + and less than 86400. max_rc: required: false type: int @@ -91,7 +97,7 @@ - When configured, the L(zos_job_submit,./zos_job_submit.html) will try to catalog the data set for the volume serial. If it is not able to, the module will fail. - - Ignored for USS and LOCAL. + - Ignored for I(location=USS) and I(location=LOCAL). encoding: description: - Specifies which encoding the local JCL file should be converted from @@ -218,18 +224,21 @@ contains: msg: description: - Return code resulting from the job submission. + Return code resulting from the job submission. Jobs that take + longer to assign a value can have a value of '?'. type: str sample: CC 0000 msg_code: description: Return code extracted from the `msg` so that it can be evaluated - as a string. + as a string. Jobs that take longer to assign a value can have a + value of '?'. type: str sample: 0000 msg_txt: description: - Returns additional information related to the job. + Returns additional information related to the job. Jobs that take + longer to assign a value can have a value of '?'. type: str sample: The job completion code (CC) was not available in the job output, please review the job log." @@ -580,29 +589,31 @@ JOB_COMPLETION_MESSAGES = frozenset(["CC", "ABEND", "SEC ERROR", "JCL ERROR", "JCLERR"]) -JOB_ERROR_MESSAGES = frozenset(["ABEND", "SEC ERROR", "JCL ERROR", "JCLERR"]) +JOB_ERROR_MESSAGES = frozenset(["ABEND", "SEC ERROR", "SEC", "JCL ERROR", "JCLERR"]) MAX_WAIT_TIME_S = 86400 -def submit_src_jcl(module, src, timeout=0, hfs=True, volume=None, start_time=timer()): +def submit_src_jcl(module, src, src_name=None, timeout=0, hfs=True, volume=None, start_time=timer()): """ Submit src JCL whether JCL is local (Ansible Controller), USS or in a data set. Arguments: module - module instnace to access the module api - src (str) - JCL, can be relative or absolute paths either on controller or USS - - Data set, can be PS, PDS, PDSE Member - timeout (int) - how long to wait in seconds for a job to complete - hfs (boolean) - True if JCL is a file in USS, otherwise False; Note that all - JCL local to a controller is transfered to USS thus would be - True - volume (str) - volume the data set JCL is located on that will be cataloged before - being submitted - start_time - time the JCL started its submission + src (str) - JCL, can be relative or absolute paths either on controller or USS + - Data set, can be PS, PDS, PDSE Member + src_name (str) - the src name that was provided in the module because through + the runtime src could be replace with a temporary file name + timeout (int) - how long to wait in seconds for a job to complete + hfs (boolean) - True if JCL is a file in USS, otherwise False; Note that all + JCL local to a controller is transfered to USS thus would be + True + volume (str) - volume the data set JCL is located on that will be cataloged before + being submitted + start_time - time the JCL started its submission Returns: - job_submitted_id - the JCL job ID returned from submitting a job, else if no - job submits, None will be returned - duration - how long the job ran for in this method + job_submitted_id - the JCL job ID returned from submitting a job, else if no + job submits, None will be returned + duration - how long the job ran for in this method """ kwargs = { @@ -652,6 +663,7 @@ def submit_src_jcl(module, src, timeout=0, hfs=True, volume=None, start_time=tim # drop through and get analyzed in the main as it will scan the job ouput # Any match to JOB_ERROR_MESSAGES ends our processing and wait times while (job_listing_status not in JOB_ERROR_MESSAGES and + job_listing_status == 'AC' and ((job_listing_rc is None or len(job_listing_rc) == 0 or job_listing_rc == '?') and duration < timeout)): current_time = timer() @@ -660,16 +672,20 @@ def submit_src_jcl(module, src, timeout=0, hfs=True, volume=None, start_time=tim job_listing_rc = jobs.listing(job_submitted.id)[0].rc job_listing_status = jobs.listing(job_submitted.id)[0].status - # ZOAU throws a ZOAUException when the job sumbission fails, not when the - # JCL is non-zero, for non-zero JCL RCs that is caught in the job_output - # processing + # ZOAU throws a ZOAUException when the job sumbission fails thus there is no + # JCL RC to share with the user, if there is a RC, that will be processed + # in the job_output parser. except ZOAUException as err: result["changed"] = False result["failed"] = True result["stderr"] = str(err) - result["msg"] = ("Unable to submit job {0}, a job sumission has returned " - "a non-zero return code, please review the standard error " - "and contact a system administrator.".format(src)) + result["duration"] = duration + result["job_id"] = job_submitted.id if job_submitted else None + result["msg"] = ("Unable to submit job {0}, the job submission has failed. " + "Without the job id, the error can not be determined. " + "Consider using module `zos_job_query` to poll for the " + "job by name or review the system log for purged jobs " + "resulting from an abend.".format(src_name)) module.fail_json(**result) # ZOAU throws a JobSubmitException when timeout has execeeded in that no job_id @@ -684,7 +700,29 @@ def submit_src_jcl(module, src, timeout=0, hfs=True, volume=None, start_time=tim "within the allocated time of {1} seconds. Consider using " " module zos_job_query to poll for a long running " "jobs or increasing the value for " - "'wait_times_s`.".format(src, str(timeout))) + "`wait_times_s`.".format(src_name, str(timeout))) + module.fail_json(**result) + + # Between getting a job_submitted and the jobs.listing(job_submitted.id)[0].rc + # is enough time for the system to purge an invalid job, so catch it and let + # it fall through to the catchall. + except IndexError: + job_submitted = None + + # There appears to be a small fraction of time when ZOAU has a handle on the + # job and and suddenly its purged, this check is to ensure the job is there + # long after the purge else we throw an error here if its been purged. + if job_submitted is None: + result["changed"] = False + result["failed"] = True + result["duration"] = duration + result["job_id"] = job_submitted.id if job_submitted else None + result["msg"] = ("The job {0} has been submitted and no job id was returned " + "within the allocated time of {1} seconds. Without the " + "job id, the error can not be determined, consider using " + "module `zos_job_query` to poll for the job by name or " + "review the system log for purged jobs resulting from an " + "abend.".format(src_name, str(timeout))) module.fail_json(**result) return job_submitted.id if job_submitted else None, duration @@ -786,27 +824,28 @@ def run_module(): # temporary file names for copied files when user sets location to LOCAL temp_file = parsed_args.get("temp_file") temp_file_encoded = None - if temp_file: - temp_file_encoded = NamedTemporaryFile(delete=True) # Default 'changed' is False in case the module is not able to execute result = dict(changed=False) if wait_time_s <= 0 or wait_time_s > MAX_WAIT_TIME_S: result["failed"] = True - result["msg"] = ("The value for option wait_time_s is not valid, it must " - "be greater than 0 and less than " + MAX_WAIT_TIME_S) + result["msg"] = ("The value for option `wait_time_s` is not valid, it must " + "be greater than 0 and less than {0}.".format(str(MAX_WAIT_TIME_S))) module.fail_json(**result) + if temp_file: + temp_file_encoded = NamedTemporaryFile(delete=True) + job_submitted_id = None duration = 0 start_time = timer() if location == "DATA_SET": job_submitted_id, duration = submit_src_jcl( - module, src, wait_time_s, False, volume, start_time=start_time) + module, src, src_name=src, timeout=wait_time_s, hfs=False, volume=volume, start_time=start_time) elif location == "USS": - job_submitted_id, duration = submit_src_jcl(module, src, wait_time_s, True) + job_submitted_id, duration = submit_src_jcl(module, src, src_name=src, timeout=wait_time_s, hfs=True) else: # added -c to iconv to prevent '\r' from erroring as invalid chars to EBCDIC conv_str = "iconv -c -f {0} -t {1} {2} > {3}".format( @@ -823,7 +862,7 @@ def run_module(): if conv_rc == 0: job_submitted_id, duration = submit_src_jcl( - module, temp_file_encoded.name, wait_time_s, True) + module, temp_file_encoded.name, src_name=src, timeout=wait_time_s, hfs=True) else: result["failed"] = True result["stdout"] = stdout @@ -847,6 +886,8 @@ def run_module(): if duration >= wait_time_s: result["failed"] = True result["changed"] = False + if job_output_txt is not None: + result["jobs"] = job_output_txt result["msg"] = ( "The JCL submitted with job id {0} but appears to be a long " "running job that exceeded its maximum wait time of {1} " @@ -860,6 +901,7 @@ def run_module(): is_changed = True if job_output_txt: + result["jobs"] = job_output_txt job_ret_code = job_output_txt[0].get("ret_code") if job_ret_code: @@ -893,8 +935,6 @@ def run_module(): raise Exception("The job return code {0} was non-zero in the " "job output, this job has failed.".format(str(job_code))) - result["jobs"] = job_output_txt - if not return_output: for job in result.get("jobs", []): job["ddnames"] = [] @@ -914,7 +954,7 @@ def run_module(): result["changed"] = False result["msg"] = ("The JCL submitted with job id {0} but " "there was an error, please review " - "the error for further details: {1}.".format + "the error for further details: {1}".format (str(job_submitted_id), str(err))) module.exit_json(**result) diff --git a/tests/functional/modules/test_zos_job_submit_func.py b/tests/functional/modules/test_zos_job_submit_func.py index 3106aa292..888281712 100644 --- a/tests/functional/modules/test_zos_job_submit_func.py +++ b/tests/functional/modules/test_zos_job_submit_func.py @@ -19,8 +19,24 @@ import tempfile import pytest import re +from pprint import pprint -JCL_FILE_CONTENTS = """//HELLO JOB (T043JM,JM00,1,0,0,0),'HELLO WORLD - JRM',CLASS=R, + + +# ############################################################################## +# Configure the job card as needed, most common keyword parameters: +# CLASS: Used to achieve a balance between different types of jobs and avoid +# contention between jobs that use the same resources. +# MSGLEVEL: controls hpw the allocation messages and termination messages are +# printed in the job's output listing (SYSOUT). +# MSGCLASS: assign an output class for your output listing (SYSOUT) +# ############################################################################## + +JCL_FILE_CONTENTS = """//* +//****************************************************************************** +//* Happy path job that prints hello world, returns RC 0 as is. +//****************************************************************************** +//HELLO JOB (T043JM,JM00,1,0,0,0),'HELLO WORLD - JRM',CLASS=R, // MSGCLASS=X,MSGLEVEL=1,NOTIFY=S0JM //STEP0001 EXEC PGM=IEBGENER //SYSIN DD DUMMY @@ -31,7 +47,13 @@ //SYSUT2 DD SYSOUT=* // """ -JCL_FILE_CONTENTS_R = """//HELLO JOB (T043JM,JM00,1,0,0,0),'HELLO WORLD - JRM',CLASS=R, + +JCL_FILE_CONTENTS_BACKSLASH_R = """//* +//****************************************************************************** +//* Happy path job containing backslash r's, returns RC 0 after +//* zos_job_sbumit strips backslash r's, prints Hello world. +//****************************************************************************** +//HELLOR JOB (T043JM,JM00,1,0,0,0),'HELLO WORLD - JRM',CLASS=R, // MSGCLASS=X,MSGLEVEL=1,NOTIFY=S0JM //STEP0001 EXEC PGM=IEBGENER //SYSIN DD DUMMY @@ -42,7 +64,18 @@ //SYSUT2 DD SYSOUT=* // """ -JCL_FILE_CONTENTS_BAD = """//HELLO JOB (T043JM,JM00,1,0,0,0),'HELLO WORLD - JRM',CLASS=R, + +JCL_FILE_CONTENTS_BAD = """//* +//****************************************************************************** +//* Negative path job containing !!'s. +//* Returns: +//* ret_code->(code=null, msg=JCL ERROR <int>, msg_text=JCLERR) +//* msg --> The JCL submitted with job id JOB00604 but there was an error, +//* please review the error for further details: The job completion +//* code (CC) was not in the job log. Please review the error +//* JCL ERROR 555 and the job log.", +//****************************************************************************** +//HELLO JOB (T043JM,JM00,1,0,0,0),'HELLO WORLD - JRM',CLASS=R, // MSGCLASS=X,MSGLEVEL=1,NOTIFY=S0JM //STEP0001 EXEC PGM=IEBGENER //SYSIN DD DUMMY @@ -54,7 +87,7 @@ // """ -JCL_FILE_CONTENTS_30_SEC = """//BPXSLEEP JOB MSGCLASS=A,MSGLEVEL=(1,1),NOTIFY=&SYSUID,REGION=0M +JCL_FILE_CONTENTS_30_SEC = """//SLEEP30 JOB MSGCLASS=A,MSGLEVEL=(1,1),NOTIFY=&SYSUID,REGION=0M //USSCMD EXEC PGM=BPXBATCH //STDERR DD SYSOUT=* //STDOUT DD SYSOUT=* @@ -65,7 +98,7 @@ // """ -JCL_FILE_CONTENTS_05_SEC = """//BPXSLEEP JOB MSGCLASS=A,MSGLEVEL=(1,1),NOTIFY=&SYSUID,REGION=0M +JCL_FILE_CONTENTS_05_SEC = """//SLEEP05 JOB MSGCLASS=A,MSGLEVEL=(1,1),NOTIFY=&SYSUID,REGION=0M //USSCMD EXEC PGM=BPXBATCH //STDERR DD SYSOUT=* //STDOUT DD SYSOUT=* @@ -75,6 +108,7 @@ /* // """ + # Should return a max RC of 8 JCL_FILE_CONTENTS_RC_8 = """//RCBADJCL JOB MSGCLASS=A,MSGLEVEL=(1,1),NOTIFY=&SYSUID,REGION=0M //S1 EXEC PGM=IDCAMS @@ -84,6 +118,105 @@ /* """ +JCL_FILE_CONTENTS_NO_DSN = """//* +//****************************************************************************** +//* Job containing a non existent DSN that will force an error. +//* Returns: +//* ret_code->(code=null, msg=JCLERR ?, msg_text=JCLERR, msg_code=?) +//* msg --> The JCL submitted with job id JOB00532 but there was an error, +//* please review the error for further details: The job completion +//* code (CC) was not in the job log. Please review the error +//* JCLERR ? and the job log.", +//****************************************************************************** +//JOBLIBPM JOB MSGCLASS=A,MSGLEVEL=(1,1),NOTIFY=&SYSUID,REGION=0M +//JOBLIB DD DSN=DATASET.NOT.EXIST,DISP=SHR +//STEP1 EXEC PGM=HELLOPGM +//SYSPRINT DD SYSOUT=* +//SYSOUT DD SYSOUT=* +// +""" + +# Do not use this test case, although its fine, the problem is it does not trigger +# the correct behavior because ZOAU has a bug such that it will return the last +# job when it can not find what we requested, so this causes the wrong job +# go be found and analyzed. See JCL_FILE_CONTENTS_JCL_ERROR_INT that does actually +# force the code properly find the correct job. +# Fix coming in zoau 1.2.3 +# JCL_FILE_CONTENTS_NO_JOB_CARD = """//STEP0001 EXEC PGM=IEBGENER +# //SYSIN DD DUMMY +# //SYSPRINT DD SYSOUT=* +# //SYSUT1 DD * +# HELLO, WORLD +# /* +# //SYSUT2 DD SYSOUT=* +# // +# """ + + +JCL_FILE_CONTENTS_JCL_ERROR_INT = """//* +//****************************************************************************** +//* Another job containing no job card resulting in a JCLERROR with an value. It +//* won't always be 952, it will increment. +//* Returns: +//* ret_code->(code=null, msg=JCL ERROR 952, msg_text=JCLERR, msg_code=null) +//* msg --> The JCL submitted with job id JOB00728 but there was an error, +//* please review the error for further details: The job completion +//* code (CC) was not in the job log. Please review the error +//* JCL ERROR 952 and the job log. +//****************************************************************************** +//CLGP JOB +//CLG EXEC IGYWCLG +//COBOL.SYSIN DD DSN=IBMUSER.ANSIBLE.COBOL(HELLO),DISP=SHR +""" + +JCL_FILE_CONTENTS_INVALID_USER = """//* +//****************************************************************************** +//* Job containing a USER=FOOBAR that will cause JES to return a SEC ERROR which +//* is a security error. +//* Returns: +//* ret_code->(code=null, msg=SEC ?, msg_text=SEC, msg_code=?) +//* msg --> The JCL submitted with job id JOB00464 but there was an error, +//* please review the error for further details: The job return code +//* was not available in the job log, please review the job log +//* and error SEC ?.", +//****************************************************************************** +//INVUSER JOB (T043JM,JM00,1,0,0,0),'HELLO WORLD - JRM',CLASS=R, +// MSGCLASS=X,MSGLEVEL=1,NOTIFY=S0JM,USER=FOOBAR +//STEP0001 EXEC PGM=IEBGENER +//SYSIN DD DUMMY +//SYSPRINT DD SYSOUT=* +//SYSUT1 DD * +HELLO, WORLD +/* +//SYSUT2 DD SYSOUT=* +// +""" + + +JCL_FILE_CONTENTS_TYPRUN_SCAN = """//* +//****************************************************************************** +//* Job containing a TYPRUN=SCAN that will cause JES to run a syntax check and +//* not actually run the JCL. +//* Returns: +//* ret_code->(code=null, msg=? ?, msg_text=?, msg_code=?) +//* msg --> The JCL submitted with job id JOB00620 but there was an error, +//* please review the error for further details: The job return code +//* was not available in the job log, please review the job log +//* and error ? ?.", +//****************************************************************************** +//TYPESCAN JOB (T043JM,JM00,1,0,0,0),'HELLO WORLD - JRM',CLASS=R, +// MSGCLASS=X,MSGLEVEL=1,NOTIFY=S0JM,TYPRUN=SCAN +//STEP0001 EXEC PGM=IEBGENER +//SYSIN DD DUMMY +//SYSPRINT DD SYSOUT=* +//SYSUT1 DD * +HELLO, WORLD +/* +//SYSUT2 DD SYSOUT=* +// +""" + + TEMP_PATH = "/tmp/jcl" DATA_SET_NAME = "imstestl.ims1.test05" DATA_SET_NAME_SPECIAL_CHARS = "imstestl.im@1.xxx05" @@ -177,7 +310,7 @@ def test_job_submit_LOCAL(ansible_zos_module): def test_job_submit_LOCAL_extraR(ansible_zos_module): tmp_file = tempfile.NamedTemporaryFile(delete=True) with open(tmp_file.name, "w") as f: - f.write(JCL_FILE_CONTENTS_R) + f.write(JCL_FILE_CONTENTS_BACKSLASH_R) hosts = ansible_zos_module results = hosts.all.zos_job_submit(src=tmp_file.name, location="LOCAL", wait=True) @@ -262,6 +395,7 @@ def test_job_submit_PDS_5_SEC_JOB_WAIT_15(ansible_zos_module): hosts.all.file(path=TEMP_PATH, state="absent") hosts.all.zos_data_set(name=DATA_SET_NAME, state="absent") + def test_job_submit_PDS_30_SEC_JOB_WAIT_60(ansible_zos_module): try: hosts = ansible_zos_module @@ -375,6 +509,48 @@ def test_job_submit_max_rc(ansible_zos_module, args): assert result.get("msg") is None assert result.get('changed') is False assert result.get("jobs")[0].get("ret_code").get("code") < 12 - finally: - hosts.all.file(path=tmp_file.name, state="absent") \ No newline at end of file + hosts.all.file(path=tmp_file.name, state="absent") + + +def test_negative_job_submit_local_jcl_no_dsn(ansible_zos_module): + tmp_file = tempfile.NamedTemporaryFile(delete=True) + with open(tmp_file.name, "w") as f: + f.write(JCL_FILE_CONTENTS_NO_DSN) + hosts = ansible_zos_module + results = hosts.all.zos_job_submit(src=tmp_file.name, location="LOCAL") + for result in results.contacted.values(): + # Expecting: The job completion code (CC) was not in the job log....." + assert result.get("changed") is False + assert re.search(r'completion code', repr(result.get("msg"))) + assert result.get("jobs")[0].get("job_id") is not None + + +# Should have a JCL ERROR <int> +def test_negative_job_submit_local_jcl_invalid_user(ansible_zos_module): + tmp_file = tempfile.NamedTemporaryFile(delete=True) + with open(tmp_file.name, "w") as f: + f.write(JCL_FILE_CONTENTS_INVALID_USER) + hosts = ansible_zos_module + results = hosts.all.zos_job_submit(src=tmp_file.name, location="LOCAL") + for result in results.contacted.values(): + # Expecting: The job completion code (CC) was not in the job log....." + assert result.get("changed") is False + assert re.search(r'return code was not available', repr(result.get("msg"))) + assert re.search(r'error SEC', repr(result.get("msg"))) + assert result.get("jobs")[0].get("job_id") is not None + assert re.search(r'SEC', repr(result.get("jobs")[0].get("ret_code").get("msg_text"))) + +def test_negative_job_submit_local_jcl_typrun_scan(ansible_zos_module): + tmp_file = tempfile.NamedTemporaryFile(delete=True) + with open(tmp_file.name, "w") as f: + f.write(JCL_FILE_CONTENTS_TYPRUN_SCAN) + hosts = ansible_zos_module + results = hosts.all.zos_job_submit(src=tmp_file.name, location="LOCAL") + for result in results.contacted.values(): + # Expecting: The job completion code (CC) was not in the job log....." + assert result.get("changed") is False + assert re.search(r'return code was not available', repr(result.get("msg"))) + assert re.search(r'error ? ?', repr(result.get("msg"))) + assert result.get("jobs")[0].get("job_id") is not None + assert result.get("jobs")[0].get("ret_code").get("msg_text") == "?" From cbfc4cbd33206498c16252553e7dc4d0bfc75c62 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Thu, 30 Mar 2023 14:53:37 -0600 Subject: [PATCH 063/495] Added uss_tag_encoding function --- plugins/module_utils/encode.py | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/plugins/module_utils/encode.py b/plugins/module_utils/encode.py index 7ecabbb5a..9e2ab1d89 100644 --- a/plugins/module_utils/encode.py +++ b/plugins/module_utils/encode.py @@ -453,6 +453,23 @@ def mvs_convert_encoding( return convert_rc + def uss_tag_encoding(self, file_path, tag): + """Tag the file/directory specified with the given code set. + If `file_path` is a directory, all of the files and subdirectories will + be tagged recursively. + Arguments: + file_path {str} -- Absolute file path to tag. + tag {str} -- Code set to tag the file/directory. + Raises: + TaggingError: When the chtag command fails. + """ + is_dir = os.path.isdir(file_path) + + tag_cmd = "chtag -{0}c {1} {2}".format("R" if is_dir else "t", tag, file_path) + rc, out, err = self.module.run_command(tag_cmd) + if rc != 0: + raise TaggingError(file_path, tag, rc, out, err) + def uss_file_tag(self, file_path): """Returns the current tag set for a file. Arguments: From 22517bc41a6dbe4cc4f05e93dffc70d91ee629dd Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Thu, 30 Mar 2023 15:11:58 -0600 Subject: [PATCH 064/495] Fixing linter issues --- plugins/module_utils/encode.py | 30 +++++++++++++++--------------- 1 file changed, 15 insertions(+), 15 deletions(-) diff --git a/plugins/module_utils/encode.py b/plugins/module_utils/encode.py index 9e2ab1d89..cfcfd2bf0 100644 --- a/plugins/module_utils/encode.py +++ b/plugins/module_utils/encode.py @@ -454,21 +454,21 @@ def mvs_convert_encoding( return convert_rc def uss_tag_encoding(self, file_path, tag): - """Tag the file/directory specified with the given code set. - If `file_path` is a directory, all of the files and subdirectories will - be tagged recursively. - Arguments: - file_path {str} -- Absolute file path to tag. - tag {str} -- Code set to tag the file/directory. - Raises: - TaggingError: When the chtag command fails. - """ - is_dir = os.path.isdir(file_path) - - tag_cmd = "chtag -{0}c {1} {2}".format("R" if is_dir else "t", tag, file_path) - rc, out, err = self.module.run_command(tag_cmd) - if rc != 0: - raise TaggingError(file_path, tag, rc, out, err) + """Tag the file/directory specified with the given code set. + If `file_path` is a directory, all of the files and subdirectories will + be tagged recursively. + Arguments: + file_path {str} -- Absolute file path to tag. + tag {str} -- Code set to tag the file/directory. + Raises: + TaggingError: When the chtag command fails. + """ + is_dir = os.path.isdir(file_path) + + tag_cmd = "chtag -{0}c {1} {2}".format("R" if is_dir else "t", tag, file_path) + rc, out, err = self.module.run_command(tag_cmd) + if rc != 0: + raise TaggingError(file_path, tag, rc, out, err) def uss_file_tag(self, file_path): """Returns the current tag set for a file. From cf793d900c5ae2a999a64f63fe8ba67acac3ab2b Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Mon, 3 Apr 2023 12:06:19 -0400 Subject: [PATCH 065/495] removed extraneous comment on query, eliminated unused variable --- plugins/module_utils/job.py | 2 -- plugins/modules/zos_job_query.py | 8 +------- 2 files changed, 1 insertion(+), 9 deletions(-) diff --git a/plugins/module_utils/job.py b/plugins/module_utils/job.py index 00ec0407f..c870573a6 100644 --- a/plugins/module_utils/job.py +++ b/plugins/module_utils/job.py @@ -208,8 +208,6 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, duration= if owner != entry.owner: continue if job_name != "*": - # if job_name != entry.name: - # continue if not fnmatch.fnmatch(entry.name, job_name): continue if job_id_temp is not None: diff --git a/plugins/modules/zos_job_query.py b/plugins/modules/zos_job_query.py index 22968bc62..56646055a 100644 --- a/plugins/modules/zos_job_query.py +++ b/plugins/modules/zos_job_query.py @@ -227,7 +227,6 @@ def validate_arguments(params): job_name_final = job_name_in job_id = params.get("job_id") - job_id_final = job_id owner = params.get("owner") if job_name_in or job_id: @@ -251,8 +250,6 @@ def validate_arguments(params): o = job_name_pattern.search(job_name_short) if not o: o = job_name_pattern_with_star.search(job_name_short) - if o: - job_name_final = job_name_short # so now, fail if neither m, n, or o=m/n(short) found a match if not o: @@ -272,9 +269,6 @@ def validate_arguments(params): if job_id_short[0:3] in ['JOB', 'TSU', 'STC'] or job_id_short[0:1] in ['J', 'T', 'S']: o = job_id_short - if o: - job_id_final = job_id_short + '*' - if not m and not o: raise RuntimeError("Failed to validate the job id: " + job_id) else: @@ -282,7 +276,7 @@ def validate_arguments(params): if job_id and owner: raise RuntimeError("Argument Error:job id can not be co-exist with owner") - # return job_name_final, job_id_final, owner; + # return job_name_final, id, owner; return job_name_in, job_id, owner From 67448dbe8a93e3f2bc3a62c5e84cd6d7868158c5 Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Mon, 3 Apr 2023 13:40:22 -0400 Subject: [PATCH 066/495] responding to reviewer comments --- ...os-job-query-handle-multiple-wildcards.yml | 4 +-- plugins/modules/zos_job_query.py | 33 +++++++++---------- .../modules/test_zos_job_query_func.py | 11 ++++++- 3 files changed, 28 insertions(+), 20 deletions(-) diff --git a/changelogs/fragments/323-zos-job-query-handle-multiple-wildcards.yml b/changelogs/fragments/323-zos-job-query-handle-multiple-wildcards.yml index ae2871b9f..a35827e24 100644 --- a/changelogs/fragments/323-zos-job-query-handle-multiple-wildcards.yml +++ b/changelogs/fragments/323-zos-job-query-handle-multiple-wildcards.yml @@ -1,4 +1,4 @@ -enhancements: +minor_changes: - zos_job_query - This enhancement adjusts the job_name and job_id parameters to handle embedded wildcards. This also required change to job.py/_get_job_status to follow the wildcard feature, using fnmatch logic. - (https://github.com/ansible-collections/ibm_zos_core/pull/---) + (https://github.com/ansible-collections/ibm_zos_core/pull/721) diff --git a/plugins/modules/zos_job_query.py b/plugins/modules/zos_job_query.py index 56646055a..646032c5a 100644 --- a/plugins/modules/zos_job_query.py +++ b/plugins/modules/zos_job_query.py @@ -224,7 +224,6 @@ def run_module(): # validate_arguments rturns a tuple, so we don't have to rebuild the job_name string def validate_arguments(params): job_name_in = params.get("job_name") - job_name_final = job_name_in job_id = params.get("job_id") @@ -235,48 +234,48 @@ def validate_arguments(params): job_name_pattern_with_star = re.compile( r"^[a-zA-Z$#@%][0-9a-zA-Z$#@%]{0,6}\*$" ) - m = job_name_pattern.search(job_name_in) - n = job_name_pattern_with_star.search(job_name_in) + test_basic = job_name_pattern.search(job_name_in) + test_star = job_name_pattern_with_star.search(job_name_in) # logic twist: o must be non-null value from m or n - o = m - if n: - o = n + test_result = test_basic + if test_star: + test_result = test_star + job_name_short = "unused" # if neither m nor n were non-null, check if the string needed to be truncated to the first * - if not o: + if not test_result: ix = job_name_in.find("*") if ix >= 0: job_name_short = job_name_in[0:ix + 1] - o = job_name_pattern.search(job_name_short) - if not o: - o = job_name_pattern_with_star.search(job_name_short) + test_result = job_name_pattern.search(job_name_short) + if not test_result: + test_result = job_name_pattern_with_star.search(job_name_short) # so now, fail if neither m, n, or o=m/n(short) found a match - if not o: + if not test_result: raise RuntimeError("Failed to validate the job name: " + job_name_in + " ix was " + ix + " short was " + job_name_short) if job_id: job_id_pattern = re.compile("(JOB|TSU|STC)[0-9]{5}|(J|T|S)[0-9]{7}$") - m = job_id_pattern.search(job_id) - o = None + test_basic = job_id_pattern.search(job_id) + test_result = None - if not m: + if not test_basic: ix = job_id.find("*") if ix > 0: # this differs from job_name, in that we'll drop the star for the search job_id_short = job_id[0:ix] if job_id_short[0:3] in ['JOB', 'TSU', 'STC'] or job_id_short[0:1] in ['J', 'T', 'S']: - o = job_id_short + test_result = job_id_short - if not m and not o: + if not test_basic and not test_result: raise RuntimeError("Failed to validate the job id: " + job_id) else: raise RuntimeError("Argument Error:Either job name(s) or job id is required") if job_id and owner: raise RuntimeError("Argument Error:job id can not be co-exist with owner") - # return job_name_final, id, owner; return job_name_in, job_id, owner diff --git a/tests/functional/modules/test_zos_job_query_func.py b/tests/functional/modules/test_zos_job_query_func.py index 32914731c..947b79c70 100644 --- a/tests/functional/modules/test_zos_job_query_func.py +++ b/tests/functional/modules/test_zos_job_query_func.py @@ -35,7 +35,16 @@ def test_zos_job_query_func(ansible_zos_module): assert result.get("jobs") is not None -# test to show multi wildcard won't crash the search +# test to show multi wildcard in Job_id query won't crash the search +def test_zos_job_query_multi_wildcards_func(ansible_zos_module): + hosts = ansible_zos_module + results = hosts.all.zos_job_query(job_id="STC*3*") + pprint(vars(results)) + for result in results.contacted.values(): + assert result.get("changed") is False + assert result.get("jobs") is not None + +# test to show multi wildcard in Job_name query won't crash the search def test_zos_job_query_multi_wildcards_func(ansible_zos_module): hosts = ansible_zos_module results = hosts.all.zos_job_query(job_name="JOB*1*", owner="*") From 9b5f063d4c62eca730837419c2141f2bebf8b32c Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Wed, 5 Apr 2023 16:04:50 -0400 Subject: [PATCH 067/495] Expanded query tests to run a hello jcl file to make sure we are searching for a job id that exists. --- plugins/modules/zos_job_query.py | 6 +- .../modules/test_zos_job_query_func.py | 95 +++++++++++++++---- 2 files changed, 80 insertions(+), 21 deletions(-) diff --git a/plugins/modules/zos_job_query.py b/plugins/modules/zos_job_query.py index 646032c5a..c7758da33 100644 --- a/plugins/modules/zos_job_query.py +++ b/plugins/modules/zos_job_query.py @@ -236,13 +236,13 @@ def validate_arguments(params): ) test_basic = job_name_pattern.search(job_name_in) test_star = job_name_pattern_with_star.search(job_name_in) - # logic twist: o must be non-null value from m or n + # logic twist: test_result should be a non-null value from test_basic or test_star test_result = test_basic if test_star: test_result = test_star job_name_short = "unused" - # if neither m nor n were non-null, check if the string needed to be truncated to the first * + # if neither test_basic nor test_star were non-null, check if the string needed to be truncated to the first * if not test_result: ix = job_name_in.find("*") if ix >= 0: @@ -251,7 +251,7 @@ def validate_arguments(params): if not test_result: test_result = job_name_pattern_with_star.search(job_name_short) - # so now, fail if neither m, n, or o=m/n(short) found a match + # so now, fail if neither test_basic, test_star or test_base from job_name_short found a match if not test_result: raise RuntimeError("Failed to validate the job name: " + job_name_in + " ix was " + ix + " short was " + job_name_short) diff --git a/tests/functional/modules/test_zos_job_query_func.py b/tests/functional/modules/test_zos_job_query_func.py index 947b79c70..f0e53a556 100644 --- a/tests/functional/modules/test_zos_job_query_func.py +++ b/tests/functional/modules/test_zos_job_query_func.py @@ -15,17 +15,16 @@ __metaclass__ = type -import os -import sys -import warnings - import ansible.constants import ansible.errors import ansible.utils import pytest from pprint import pprint +from shellescape import quote +import tempfile +# Make sure job list * returns something def test_zos_job_query_func(ansible_zos_module): hosts = ansible_zos_module results = hosts.all.zos_job_query(job_name="*", owner="*") @@ -34,21 +33,81 @@ def test_zos_job_query_func(ansible_zos_module): assert result.get("changed") is False assert result.get("jobs") is not None +JCLQ_FILE_CONTENTS = """//HELLO JOB (T043JM,JM00,1,0,0,0),'HELLO WORLD - JRM',CLASS=R, +// MSGCLASS=X,MSGLEVEL=1,NOTIFY=S0JM +//STEP0001 EXEC PGM=IEBGENER +//SYSIN DD DUMMY +//SYSPRINT DD SYSOUT=* +//SYSUT1 DD * +HELLO, WORLD +/* +//SYSUT2 DD SYSOUT=* +// +""" + +TEMP_PATH = "/tmp/jcl" +JDATA_SET_NAME = "imstestl.ims1.testq1" +NDATA_SET_NAME = "imstestl.ims1.testq2" +DEFAULT_VOLUME = "000000" # test to show multi wildcard in Job_id query won't crash the search -def test_zos_job_query_multi_wildcards_func(ansible_zos_module): - hosts = ansible_zos_module - results = hosts.all.zos_job_query(job_id="STC*3*") - pprint(vars(results)) - for result in results.contacted.values(): - assert result.get("changed") is False - assert result.get("jobs") is not None +def test_zos_job_id_query_multi_wildcards_func(ansible_zos_module): + try: + hosts = ansible_zos_module + hosts.all.file(path=TEMP_PATH, state="directory") + hosts.all.shell( + cmd="echo {0} > {1}/SAMPLE".format(quote(JCLQ_FILE_CONTENTS), TEMP_PATH) + ) + hosts.all.zos_data_set( + name=JDATA_SET_NAME, state="present", type="pds", replace=True + ) + hosts.all.shell( + cmd="cp {0}/SAMPLE \"//'{1}(SAMPLE)'\"".format(TEMP_PATH, JDATA_SET_NAME) + ) + results = hosts.all.zos_job_submit( + src="{0}(SAMPLE)".format(JDATA_SET_NAME), location="DATA_SET", wait=True + ) + for result in results.contacted.values(): + assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" + assert result.get("jobs")[0].get("ret_code").get("code") == 0 + + fulljobid = result.get("jobs")[0].get("res_code").get("job_id") + jobmask = fulljobid[0:3] + '*' + fulljobid[5:6] + '*' + qresults = hosts.all.zos_job_query(jobmask) + for qresult in qresults.contacted.values(): + assert qresult.get("jobs") is not None + + finally: + hosts.all.file(path=TEMP_PATH, state="absent") + hosts.all.zos_data_set(name=JDATA_SET_NAME, state="absent") + # test to show multi wildcard in Job_name query won't crash the search -def test_zos_job_query_multi_wildcards_func(ansible_zos_module): - hosts = ansible_zos_module - results = hosts.all.zos_job_query(job_name="JOB*1*", owner="*") - pprint(vars(results)) - for result in results.contacted.values(): - assert result.get("changed") is False - assert result.get("jobs") is not None \ No newline at end of file +def test_zos_job_name_query_multi_wildcards_func(ansible_zos_module): + try: + hosts = ansible_zos_module + hosts.all.file(path=TEMP_PATH, state="directory") + hosts.all.shell( + cmd="echo {0} > {1}/SAMPLE".format(quote(JCLQ_FILE_CONTENTS), TEMP_PATH) + ) + hosts.all.zos_data_set( + name=NDATA_SET_NAME, state="present", type="pds", replace=True + ) + hosts.all.shell( + cmd="cp {0}/SAMPLE \"//'{1}(SAMPLE)'\"".format(TEMP_PATH, NDATA_SET_NAME) + ) + results = hosts.all.zos_job_submit( + src="{0}(SAMPLE)".format(NDATA_SET_NAME), location="DATA_SET", wait=True + ) + for result in results.contacted.values(): + assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" + assert result.get("jobs")[0].get("ret_code").get("code") == 0 + + jobname = "HE*L*" + qresults = hosts.all.zos_job_query(job_name=jobname, owner="*") + for qresult in qresults.contacted.values(): + assert qresult.get("jobs") is not None + + finally: + hosts.all.file(path=TEMP_PATH, state="absent") + hosts.all.zos_data_set(name=NDATA_SET_NAME, state="absent") From 8c716d32d27fe6fa94de2f073c7ddb3577a4d6cb Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Wed, 5 Apr 2023 16:23:52 -0400 Subject: [PATCH 068/495] Expanded documentation in code to highlight asterisk availability. Added examples showing multi-asterisk search. --- plugins/modules/zos_job_query.py | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/plugins/modules/zos_job_query.py b/plugins/modules/zos_job_query.py index c7758da33..64e3ad09b 100644 --- a/plugins/modules/zos_job_query.py +++ b/plugins/modules/zos_job_query.py @@ -35,6 +35,7 @@ job_name: description: - The job name to query. + - Job name can now contain multiple, embedded asterisks (e.g.: JC*NAM*) type: str required: False default: "*" @@ -51,6 +52,7 @@ with STC, JOB, TSU and are followed by 5 digits. When job are potentially greater than 99,999, the job number format will begin with S, J, T and are followed by 7 digits. + - Job id can now contain multiple, embedded asterisks (e.g.: JOB*14*) type: str required: False """ @@ -64,6 +66,14 @@ zos_job_query: job_name: "IYK3*" +- name: list the jobs that match 'IYKsomethingNAsomething' + zos_job_query: + job_name: "IYK*NA*" + +- name: list the jobs with JOB in the x014x range only + zos_job_query: + job_idname: JOB*014* + - name: list the job with a jobname 'IYK3ZNA*' and jobid as JOB01427 zos_job_query: job_name: IYK3ZNA* @@ -253,7 +263,7 @@ def validate_arguments(params): # so now, fail if neither test_basic, test_star or test_base from job_name_short found a match if not test_result: - raise RuntimeError("Failed to validate the job name: " + job_name_in + " ix was " + ix + " short was " + job_name_short) + raise RuntimeError("Unable to locate job name {0}.".format(job_name_in)) if job_id: job_id_pattern = re.compile("(JOB|TSU|STC)[0-9]{5}|(J|T|S)[0-9]{7}$") From d5fc637474fac83f5b7669ce39483ab7aca74be9 Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Thu, 6 Apr 2023 09:55:04 -0400 Subject: [PATCH 069/495] Corrected 2 documentation errors --- plugins/modules/zos_job_query.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/plugins/modules/zos_job_query.py b/plugins/modules/zos_job_query.py index 64e3ad09b..ae6c5a9ac 100644 --- a/plugins/modules/zos_job_query.py +++ b/plugins/modules/zos_job_query.py @@ -34,8 +34,8 @@ options: job_name: description: - - The job name to query. - - Job name can now contain multiple, embedded asterisks (e.g.: JC*NAM*) + - The job name to query. Job name can now contain multiple, + embedded asterisks (e.g.: JC*NAM*) type: str required: False default: "*" @@ -51,8 +51,8 @@ - The job number that has been assigned to the job. These normally begin with STC, JOB, TSU and are followed by 5 digits. When job are potentially greater than 99,999, the job number format will begin with - S, J, T and are followed by 7 digits. - - Job id can now contain multiple, embedded asterisks (e.g.: JOB*14*) + S, J, T and are followed by 7 digits. Job id can now contain multiple, + embedded asterisks (e.g.: JOB*14*) type: str required: False """ From 5f56158619ef4e774eb465a761eb3aa9917a8214 Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Thu, 6 Apr 2023 10:04:55 -0400 Subject: [PATCH 070/495] Change to documentation text (indent on multi line string?) --- plugins/modules/zos_job_query.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/plugins/modules/zos_job_query.py b/plugins/modules/zos_job_query.py index ae6c5a9ac..2a149cb46 100644 --- a/plugins/modules/zos_job_query.py +++ b/plugins/modules/zos_job_query.py @@ -35,7 +35,7 @@ job_name: description: - The job name to query. Job name can now contain multiple, - embedded asterisks (e.g.: JC*NAM*) + embedded asterisks (e.g.: JC*NAM*) type: str required: False default: "*" From baaabe874700ad29e1e889c552df3978b7d953ed Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Thu, 6 Apr 2023 10:14:47 -0400 Subject: [PATCH 071/495] Still trying to get documentation to pass --- plugins/modules/zos_job_query.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/plugins/modules/zos_job_query.py b/plugins/modules/zos_job_query.py index 2a149cb46..7b383d668 100644 --- a/plugins/modules/zos_job_query.py +++ b/plugins/modules/zos_job_query.py @@ -34,8 +34,7 @@ options: job_name: description: - - The job name to query. Job name can now contain multiple, - embedded asterisks (e.g.: JC*NAM*) + - The job name to query. Job name can now contain multiple embedded asterisks. type: str required: False default: "*" @@ -52,7 +51,7 @@ with STC, JOB, TSU and are followed by 5 digits. When job are potentially greater than 99,999, the job number format will begin with S, J, T and are followed by 7 digits. Job id can now contain multiple, - embedded asterisks (e.g.: JOB*14*) + embedded asterisks. type: str required: False """ From 5e6cc4c3c6fcb2e1f3387f532808ed93749b264a Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Thu, 6 Apr 2023 10:24:06 -0400 Subject: [PATCH 072/495] Looks like '---' was killing documentation block. --- plugins/modules/zos_job_query.py | 1 - 1 file changed, 1 deletion(-) diff --git a/plugins/modules/zos_job_query.py b/plugins/modules/zos_job_query.py index 7b383d668..3870440d8 100644 --- a/plugins/modules/zos_job_query.py +++ b/plugins/modules/zos_job_query.py @@ -18,7 +18,6 @@ DOCUMENTATION = r""" ---- module: zos_job_query version_added: '1.0.0' short_description: Query job status From 165863093ed66b94cff987f35320e5fee16a7028 Mon Sep 17 00:00:00 2001 From: Demetri <dimatos@gmail.com> Date: Fri, 7 Apr 2023 10:20:41 -0700 Subject: [PATCH 073/495] Update zos_blockinfile with complex examples (#727) * Update zos_blockinfile with complex examples Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragement Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> --- .../727-zos-blockinfile-examples.yml | 5 +++ docs/source/modules/zos_blockinfile.rst | 38 +++++++++++++++++-- plugins/modules/zos_blockinfile.py | 38 +++++++++++++++++-- 3 files changed, 75 insertions(+), 6 deletions(-) create mode 100644 changelogs/fragments/727-zos-blockinfile-examples.yml diff --git a/changelogs/fragments/727-zos-blockinfile-examples.yml b/changelogs/fragments/727-zos-blockinfile-examples.yml new file mode 100644 index 000000000..f1c94c12b --- /dev/null +++ b/changelogs/fragments/727-zos-blockinfile-examples.yml @@ -0,0 +1,5 @@ +trivial: +- zos_blockinfile - was missing examples using Jinja2 and files. This change + adds a Jinja2 example in both the src and block content. It also includes + an example using a file as source. + (https://github.com/ansible-collections/ibm_zos_core/pull/727) \ No newline at end of file diff --git a/docs/source/modules/zos_blockinfile.rst b/docs/source/modules/zos_blockinfile.rst index 6e6aae737..5608a0ebb 100644 --- a/docs/source/modules/zos_blockinfile.rst +++ b/docs/source/modules/zos_blockinfile.rst @@ -38,7 +38,9 @@ src state - Whether the block should be inserted/replaced (present) or removed (absent). + Whether the block should be inserted or replaced using *state=present*. + + Whether the block should be removed using *state=absent*. | **required**: False | **type**: str @@ -165,7 +167,7 @@ force This is helpful when a data set is being used in a long running process such as a started task and you are wanting to update or read. - The ``-f`` option enables sharing of data sets through the disposition *DISP=SHR*. + The ``force`` option enables sharing of data sets through the disposition *DISP=SHR*. | **required**: False | **type**: bool @@ -244,6 +246,36 @@ Examples LIB('{{ DB2RUN }}.RUNLIB.LOAD') indentation: 16 + - name: Set facts for the following two tasks. + set_fact: + HLQ: 'ANSIBLE' + MLQ: 'MEMBER' + LLQ: 'TEST' + MEM: '(JCL)' + MSG: 'your first JCL program' + CONTENT: "{{ lookup('file', 'files/content.txt') }}" + + - name: Update JCL in a PDS member with Jinja2 variable syntax. + zos_blockinfile: + src: "{{ HLQ }}.{{MLQ}}.{{LLQ}}{{MEM}}" + insertafter: "HELLO, WORLD" + marker: "//* {mark} *//" + marker_begin: "Begin Ansible Block Insertion 1" + marker_end: "End Ansible Block Insertion 1" + state: present + block: | + This is {{ MSG }}, and its now + managed by Ansible. + + - name: Update JCL in PDS member with content from a file. + zos_blockinfile: + src: "{{ HLQ }}.{{MLQ}}.{{LLQ}}{{MEM}}" + insertafter: "End Ansible Block Insertion 1" + marker: "//* {mark} *//" + marker_begin: "Begin Ansible Block Insertion 2" + marker_end: "End Ansible Block Insertion 2" + block: "{{ CONTENT }}" + @@ -257,7 +289,7 @@ Notes For supported character sets used to encode data, refer to the `documentation <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/resources/character_set.html>`_. - When using 'with_*' loops be aware that if you do not set a unique mark the block will be overwritten on each iteration. + When using ``with_*`` loops be aware that if you do not set a unique mark the block will be overwritten on each iteration. When more then one block should be handled in a file you must change the *marker* per task. diff --git a/plugins/modules/zos_blockinfile.py b/plugins/modules/zos_blockinfile.py index b7bda8211..9beceab68 100644 --- a/plugins/modules/zos_blockinfile.py +++ b/plugins/modules/zos_blockinfile.py @@ -22,6 +22,7 @@ version_added: '1.3.0' author: - "Behnam (@balkajbaf)" + - "Demetrios Dimatos (@ddimatos)" short_description: Manage block of multi-line textual data on z/OS description: - Manage block of multi-lines in z/OS UNIX System Services (USS) files, @@ -42,7 +43,8 @@ required: true state: description: - - Whether the block should be inserted/replaced (present) or removed (absent). + - Whether the block should be inserted or replaced using I(state=present). + - Whether the block should be removed using I(state=absent). type: str choices: - absent @@ -156,7 +158,7 @@ updated by others. - This is helpful when a data set is being used in a long running process such as a started task and you are wanting to update or read. - - The C(-f) option enables sharing of data sets through the disposition + - The C(force) option enables sharing of data sets through the disposition I(DISP=SHR). required: false type: bool @@ -179,7 +181,7 @@ data sets. - For supported character sets used to encode data, refer to the L(documentation,https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/resources/character_set.html). - - When using 'with_*' loops be aware that if you do not set a unique mark + - When using ``with_*`` loops be aware that if you do not set a unique mark the block will be overwritten on each iteration. - When more then one block should be handled in a file you must change the I(marker) per task. @@ -245,6 +247,36 @@ RUN PROGRAM(DSNTEP2) PLAN(DSNTEP12) - LIB('{{ DB2RUN }}.RUNLIB.LOAD') indentation: 16 + +- name: Set facts for the following two tasks. + set_fact: + HLQ: 'ANSIBLE' + MLQ: 'MEMBER' + LLQ: 'TEST' + MEM: '(JCL)' + MSG: 'your first JCL program' + CONTENT: "{{ lookup('file', 'files/content.txt') }}" + +- name: Update JCL in a PDS member with Jinja2 variable syntax. + zos_blockinfile: + src: "{{ HLQ }}.{{MLQ}}.{{LLQ}}{{MEM}}" + insertafter: "HELLO, WORLD" + marker: "//* {mark} *//" + marker_begin: "Begin Ansible Block Insertion 1" + marker_end: "End Ansible Block Insertion 1" + state: present + block: | + This is {{ MSG }}, and its now + managed by Ansible. + +- name: Update JCL in PDS member with content from a file. + zos_blockinfile: + src: "{{ HLQ }}.{{MLQ}}.{{LLQ}}{{MEM}}" + insertafter: "End Ansible Block Insertion 1" + marker: "//* {mark} *//" + marker_begin: "Begin Ansible Block Insertion 2" + marker_end: "End Ansible Block Insertion 2" + block: "{{ CONTENT }}" ''' RETURN = r""" From 1123f97b0d2c7e39edf8d039f024fe1dd86a74c5 Mon Sep 17 00:00:00 2001 From: Demetri <dimatos@gmail.com> Date: Fri, 7 Apr 2023 12:58:30 -0700 Subject: [PATCH 074/495] Added additional D SYMBOLS example to zos_operator documentation (#730) * Update zos_operator with an additional example Signed-off-by: ddimatos <dimatos@gmail.com> * Added changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> --- changelogs/fragments/729-zos_operator-example-added.yml | 4 ++++ docs/source/modules/zos_operator.rst | 4 ++++ plugins/modules/zos_operator.py | 4 ++++ 3 files changed, 12 insertions(+) create mode 100644 changelogs/fragments/729-zos_operator-example-added.yml diff --git a/changelogs/fragments/729-zos_operator-example-added.yml b/changelogs/fragments/729-zos_operator-example-added.yml new file mode 100644 index 000000000..46cb6ab84 --- /dev/null +++ b/changelogs/fragments/729-zos_operator-example-added.yml @@ -0,0 +1,4 @@ +trivial: +- zos_operator - had a need for more command examples. This change adds the + D SYMBOLS example. + (https://github.com/ansible-collections/ibm_zos_core/pull/730) \ No newline at end of file diff --git a/docs/source/modules/zos_operator.rst b/docs/source/modules/zos_operator.rst index 868c78a10..b05b0331a 100644 --- a/docs/source/modules/zos_operator.rst +++ b/docs/source/modules/zos_operator.rst @@ -101,6 +101,10 @@ Examples cmd: 'd a,all' wait_time_s: 7 + - name: Display the system symbols and associated substitution texts. + zos_operator: + cmd: 'D SYMBOLS' + diff --git a/plugins/modules/zos_operator.py b/plugins/modules/zos_operator.py index 56f2170c5..a0f66c302 100644 --- a/plugins/modules/zos_operator.py +++ b/plugins/modules/zos_operator.py @@ -90,6 +90,10 @@ zos_operator: cmd: 'd a,all' wait_time_s: 7 + +- name: Display the system symbols and associated substitution texts. + zos_operator: + cmd: 'D SYMBOLS' """ RETURN = r""" From 198476984fe2297ac15aeaf3d64d0ad11079512a Mon Sep 17 00:00:00 2001 From: ketankelkar <ktnklkr@gmail.com> Date: Fri, 7 Apr 2023 14:37:03 -0700 Subject: [PATCH 075/495] zos_gather_facts - add sample output to RETURN docstring (#722) * zos_gather_facts - add sample output to RETURN docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add changelog fragment Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * zos_gather_facts RETURN docstring - add sample sysplex_name and lpar_name values Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * Update generated module doc Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: ddimatos <dimatos@gmail.com> --- .../663-zos_gather_facts-update-docstring.yml | 2 ++ docs/source/modules/zos_gather_facts.rst | 24 +++++++++++++++++++ plugins/modules/zos_gather_facts.py | 19 +++++++++++++++ 3 files changed, 45 insertions(+) create mode 100644 changelogs/fragments/663-zos_gather_facts-update-docstring.yml diff --git a/changelogs/fragments/663-zos_gather_facts-update-docstring.yml b/changelogs/fragments/663-zos_gather_facts-update-docstring.yml new file mode 100644 index 000000000..d6ba48dd7 --- /dev/null +++ b/changelogs/fragments/663-zos_gather_facts-update-docstring.yml @@ -0,0 +1,2 @@ +trivial: +- zos_gather_facts - add sample output to RETURN docstring. (https://github.com/ansible-collections/ibm_zos_core/pull/722) \ No newline at end of file diff --git a/docs/source/modules/zos_gather_facts.rst b/docs/source/modules/zos_gather_facts.rst index 836421256..63bd22701 100644 --- a/docs/source/modules/zos_gather_facts.rst +++ b/docs/source/modules/zos_gather_facts.rst @@ -96,4 +96,28 @@ ansible_facts | **returned**: when collected | **type**: dict + | **sample**: + + .. code-block:: json + + [ + { + "ansible_facts": { + "arch_level": "2", + "hw_name": "SYSZD6", + "ipl_volume": "RES820", + "lpar_name": "SVLLAB01", + "primary_jes": "JES2", + "product_mod_level": "00", + "product_name": "z/OS", + "product_owner": "IBM CORP", + "product_release": "05", + "product_version": "02", + "smf_name": "3090", + "sys_name": "EC33018A", + "sysplex_name": "SVPLEX1", + "vm_name": "EC33018A" + } + } + ] diff --git a/plugins/modules/zos_gather_facts.py b/plugins/modules/zos_gather_facts.py index e18dcb288..beff12cd2 100644 --- a/plugins/modules/zos_gather_facts.py +++ b/plugins/modules/zos_gather_facts.py @@ -88,6 +88,25 @@ description: Collection of facts that are gathered from the z/OS systems. returned: when collected type: dict + sample: + [ + "ansible_facts": { + "arch_level": "2", + "hw_name": "SYSZD6", + "ipl_volume": "RES820", + "lpar_name": "SVLLAB01", + "primary_jes": "JES2", + "product_mod_level": "00", + "product_name": "z/OS", + "product_owner": "IBM CORP", + "product_release": "05", + "product_version": "02", + "smf_name": "3090", + "sys_name": "EC33018A", + "sysplex_name": "SVPLEX1", + "vm_name": "EC33018A" + } + ] """ from fnmatch import fnmatch From 455c9c099e1e3fda6ced3dc84a387a61e1aa2796 Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Fri, 7 Apr 2023 19:38:42 -0400 Subject: [PATCH 076/495] 574find (#668) * change to allow find loop to continue when one particular data set element is not found This has been validated with a zos-check-find.yaml playbook * added functional test that includes non-existant pds entry, which should be skipped over * added changelog fragment. --- .../574-zos_find_stoppedonnotfound.yml | 4 +++ plugins/modules/zos_find.py | 3 ++- .../functional/modules/test_zos_find_func.py | 26 +++++++++++++++++++ 3 files changed, 32 insertions(+), 1 deletion(-) create mode 100644 changelogs/fragments/574-zos_find_stoppedonnotfound.yml diff --git a/changelogs/fragments/574-zos_find_stoppedonnotfound.yml b/changelogs/fragments/574-zos_find_stoppedonnotfound.yml new file mode 100644 index 000000000..48eebe523 --- /dev/null +++ b/changelogs/fragments/574-zos_find_stoppedonnotfound.yml @@ -0,0 +1,4 @@ +bugfixes: + - zos_find - fixes a bug where find result values stopped being returned after + first value in a list was 'not found'. + (https://github.com/ansible-collections/ibm_zos_core/pull/668) diff --git a/plugins/modules/zos_find.py b/plugins/modules/zos_find.py index c290657ac..b49d65f04 100644 --- a/plugins/modules/zos_find.py +++ b/plugins/modules/zos_find.py @@ -336,7 +336,8 @@ def data_set_filter(module, pds_paths, patterns): rc, out, err = _dls_wrapper(pattern, list_details=True) if rc != 0: if "BGYSC1103E" in err: - return filtered_data_sets + # return filtered_data_sets + continue module.fail_json( msg="Non-zero return code received while executing ZOAU shell command 'dls'", diff --git a/tests/functional/modules/test_zos_find_func.py b/tests/functional/modules/test_zos_find_func.py index 7349b134f..04dfb7368 100644 --- a/tests/functional/modules/test_zos_find_func.py +++ b/tests/functional/modules/test_zos_find_func.py @@ -312,3 +312,29 @@ def test_find_non_existent_data_set_members(ansible_zos_module): for val in find_res.contacted.values(): assert len(val.get('data_sets')) == 0 assert val.get('matched') == 0 + + +def test_find_mixed_members_from_pds_paths(ansible_zos_module): + hosts = ansible_zos_module + try: + hosts.all.zos_data_set( + batch=[dict(name=i, type='pds', state='present') for i in PDS_NAMES] + ) + hosts.all.zos_data_set( + batch=[dict(name=i + "(MEMBER)", type="MEMBER") for i in PDS_NAMES] + ) + hosts.all.zos_data_set( + batch=[dict(name=i + "(FILE)", type="MEMBER") for i in PDS_NAMES] + ) + find_res = hosts.all.zos_find( + pds_paths=['TEST.NONE.PDS.*','TEST.FIND.PDS.FUNCTEST.*'], excludes=['.*FILE$'], patterns=['.*'] + ) + print(vars(find_res)) + for val in find_res.contacted.values(): + assert len(val.get('data_sets')) == 3 + for ds in val.get('data_sets'): + assert len(ds.get('members')) == 1 + finally: + hosts.all.zos_data_set( + batch=[dict(name=i, state='absent') for i in PDS_NAMES] + ) From 413461fd953ef14456046954552bbbcc4a7a8afe Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Fri, 7 Apr 2023 18:00:02 -0600 Subject: [PATCH 077/495] zos_blockinfile , can quotes in content can be supported (#680) * Verify coomand of ZOAU support the doble quotes and get better output of message * Verify coomand of ZOAU support the doble quotes and get better output of message * Restore to the one tyme function solving TypeError * Test about cases with quotes supported * Solve comments * Comments in the changelog * Adjust test for working accord the text * A needed space * All in structured * Comments solved * Better order --------- Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> Co-authored-by: Demetri <dimatos@gmail.com> --- ...can-quotes-in-content-can-be-supported.yml | 5 +++ plugins/modules/zos_blockinfile.py | 24 ++++++++++--- .../modules/test_zos_blockinfile_func.py | 35 +++++++++++++++++++ 3 files changed, 60 insertions(+), 4 deletions(-) create mode 100644 changelogs/fragments/417-can-quotes-in-content-can-be-supported.yml diff --git a/changelogs/fragments/417-can-quotes-in-content-can-be-supported.yml b/changelogs/fragments/417-can-quotes-in-content-can-be-supported.yml new file mode 100644 index 000000000..ebd99af7a --- /dev/null +++ b/changelogs/fragments/417-can-quotes-in-content-can-be-supported.yml @@ -0,0 +1,5 @@ +bugfixes: +- zos_blockinfile - was unable to use double quotes which prevented some use + cases and did not display an approriate message. The fix now allows for + double quotes to be used with the module. + (https://github.com/ansible-collections/ibm_zos_core/pull/680) \ No newline at end of file diff --git a/plugins/modules/zos_blockinfile.py b/plugins/modules/zos_blockinfile.py index 9beceab68..c9e504740 100644 --- a/plugins/modules/zos_blockinfile.py +++ b/plugins/modules/zos_blockinfile.py @@ -248,6 +248,14 @@ LIB('{{ DB2RUN }}.RUNLIB.LOAD') indentation: 16 +- name: Update a script with commands containing quotes. + zos_blockinfile: + src: "/u/scripts/script.sh" + insertafter: "EOF" + block: | + cat "//'{{ DS_NAME }}'" + cat "//'{{ DS_NAME_2 }}'" + - name: Set facts for the following two tasks. set_fact: HLQ: 'ANSIBLE' @@ -414,6 +422,12 @@ def quotedString(string): return string.replace('"', "") +def quoted_string_output_json(string): + if not isinstance(string, str): + return string + return string.replace('"', "u'") + + def main(): module = AnsibleModule( argument_spec=dict( @@ -570,7 +584,7 @@ def main(): # state=present, insert/replace a block with matching regex pattern # state=absent, delete blocks with matching regex pattern if parsed_args.get('state') == 'present': - return_content = present(src, quotedString(block), quotedString(marker), quotedString(ins_aft), quotedString(ins_bef), encoding, force) + return_content = present(src, block, quotedString(marker), quotedString(ins_aft), quotedString(ins_bef), encoding, force) else: return_content = absent(src, quotedString(marker), encoding, force) stdout = return_content.stdout_response @@ -584,13 +598,15 @@ def main(): stdout = stdout.replace('$ a\\', '$ a\\\\') stdout = stdout.replace('1 i\\', '1 i\\\\') if block: - stdout = stdout.replace(block, quotedString(block)) + stdout = stdout.replace(block, quoted_string_output_json(block)) if ins_aft: - stdout = stdout.replace(ins_aft, quotedString(ins_aft)) + stdout = stdout.replace(ins_aft, quoted_string_output_json(ins_aft)) if ins_bef: - stdout = stdout.replace(ins_bef, quotedString(ins_bef)) + stdout = stdout.replace(ins_bef, quoted_string_output_json(ins_bef)) # Try to extract information from stdout ret = json.loads(stdout) + ret['cmd'] = ret['cmd'].replace("u'", '"') + result['cmd'] = ret['cmd'] result['changed'] = ret['changed'] result['found'] = ret['found'] diff --git a/tests/functional/modules/test_zos_blockinfile_func.py b/tests/functional/modules/test_zos_blockinfile_func.py index 5e29674e4..37f1818d4 100644 --- a/tests/functional/modules/test_zos_blockinfile_func.py +++ b/tests/functional/modules/test_zos_blockinfile_func.py @@ -133,6 +133,16 @@ export PYTHON_HOME export _BPXK_AUTOCVT""" +TEST_CONTENT_DOUBLEQUOTES = """//BPXSLEEP JOB MSGCLASS=A,MSGLEVEL=(1,1),NOTIFY=&SYSUID,REGION=0M +//USSCMD EXEC PGM=BPXBATCH +//STDERR DD SYSOUT=* +//STDOUT DD SYSOUT=* +//STDPARM DD * +SH ls -la /; +sleep 30; +/* +//""" + # supported data set types # DS_TYPE = ['SEQ', 'PDS', 'PDSE'] DS_TYPE = ['SEQ'] @@ -204,6 +214,9 @@ test_uss_block_insert_with_indentation_level_specified=dict( insertafter="EOF", block="export ZOAU_ROOT\nexport ZOAU_HOME\nexport ZOAU_DIR", state="present", indentation=16), + test_uss_block_insert_with_doublequotes=dict( + insertafter="sleep 30;", block='cat \"//OMVSADMI.CAT\"\ncat \"//OMVSADM.COPYMEM.TESTS\" > test.txt', + marker="// {mark} ANSIBLE MANAGED BLOCK",state="present"), test_ds_block_insertafter_regex=dict(test_name="T1"), test_ds_block_insertbefore_regex=dict(test_name="T2"), test_ds_block_insertafter_eof=dict(test_name="T3"), @@ -264,6 +277,19 @@ export PKG_CONFIG_PATH export PYTHON_HOME export _BPXK_AUTOCVT""", + test_uss_block_insert_with_doublequotes="""//BPXSLEEP JOB MSGCLASS=A,MSGLEVEL=(1,1),NOTIFY=&SYSUID,REGION=0M +//USSCMD EXEC PGM=BPXBATCH +//STDERR DD SYSOUT=* +//STDOUT DD SYSOUT=* +//STDPARM DD * +SH ls -la /; +sleep 30; +// BEGIN ANSIBLE MANAGED BLOCK +cat "//OMVSADMI.CAT" +cat "//OMVSADM.COPYMEM.TESTS" > test.txt +// END ANSIBLE MANAGED BLOCK +/* +//""", test_uss_block_insertbefore_regex_defaultmarker="""if [ -z STEPLIB ] && tty -s; then export STEPLIB=none @@ -1174,6 +1200,15 @@ def test_uss_block_insert_with_indentation_level_specified(ansible_zos_module): TEST_INFO["expected"]["test_uss_block_insert_with_indentation_level_specified"]) +@pytest.mark.uss +def test_uss_block_insert_with_doublequotes(ansible_zos_module): + TEST_ENV["TEST_CONT"] = TEST_CONTENT_DOUBLEQUOTES + UssGeneral( + "test_uss_block_insert_with_doublequotes", ansible_zos_module,TEST_ENV, + TEST_INFO["test_uss_block_insert_with_doublequotes"], + TEST_INFO["expected"]["test_uss_block_insert_with_doublequotes"]) + TEST_ENV["TEST_CONT"] = TEST_CONTENT + @pytest.mark.uss def test_uss_block_insertafter_eof_with_backup(ansible_zos_module): try: From d361802aa8f7a97c25d61682c5d11c2a91656783 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Sat, 8 Apr 2023 00:56:59 -0600 Subject: [PATCH 078/495] zos_copy mode is applied to the destination directory, a deviation from the communtiy module behavior. (#723) * Verify coomand of ZOAU support the doble quotes and get better output of message * Verify coomand of ZOAU support the doble quotes and get better output of message * Restore to the one tyme function solving TypeError * Test about cases with quotes supported * Solve comments * Comments in the changelog * Adjust test for working accord the text * Solve the dest functional mode set for the applied to destination directory * Identation and spaces * To work well * To work well * To work well * To work well * Changelogs added * Solved the fragment test and separte the cases --------- Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> --- ...es-is-applied-to-destination-directory.yml | 3 +++ plugins/module_utils/encode.py | 19 +++++++++++++++++++ plugins/modules/zos_blockinfile.py | 12 ------------ plugins/modules/zos_copy.py | 4 ++-- .../modules/test_zos_blockinfile_func.py | 2 +- 5 files changed, 25 insertions(+), 15 deletions(-) create mode 100644 changelogs/fragments/619-Mode-set-for-files-is-applied-to-destination-directory.yml diff --git a/changelogs/fragments/619-Mode-set-for-files-is-applied-to-destination-directory.yml b/changelogs/fragments/619-Mode-set-for-files-is-applied-to-destination-directory.yml new file mode 100644 index 000000000..970741107 --- /dev/null +++ b/changelogs/fragments/619-Mode-set-for-files-is-applied-to-destination-directory.yml @@ -0,0 +1,3 @@ +minor_changes: +- zos_copy - Fixed a bug where the module would change the mode for a directory when copying into it the contents of another. + (https://github.com/ansible-collections/ibm_zos_core/pull/723) \ No newline at end of file diff --git a/plugins/module_utils/encode.py b/plugins/module_utils/encode.py index cfcfd2bf0..fa84c6fb3 100644 --- a/plugins/module_utils/encode.py +++ b/plugins/module_utils/encode.py @@ -496,6 +496,25 @@ def uss_file_tag(self, file_path): except Exception: return None + def uss_tag_encoding(self, file_path, tag): + """Tag the file/directory specified with the given code set. + If `file_path` is a directory, all of the files and subdirectories will + be tagged recursively. + + Arguments: + file_path {str} -- Absolute file path to tag. + tag {str} -- Code set to tag the file/directory. + + Raises: + TaggingError: When the chtag command fails. + """ + is_dir = os.path.isdir(file_path) + + tag_cmd = "chtag -{0}c {1} {2}".format("R" if is_dir else "t", tag, file_path) + rc, out, err = self.module.run_command(tag_cmd) + if rc != 0: + raise TaggingError(file_path, tag, rc, out, err) + class EncodeError(Exception): def __init__(self, message): diff --git a/plugins/modules/zos_blockinfile.py b/plugins/modules/zos_blockinfile.py index c9e504740..014382f1e 100644 --- a/plugins/modules/zos_blockinfile.py +++ b/plugins/modules/zos_blockinfile.py @@ -197,13 +197,11 @@ block: | MOUNT FILESYSTEM('SOME.DATA.SET') TYPE(ZFS) MODE(READ) MOUNTPOINT('/tmp/src/somedirectory') - - name: Remove a library as well as surrounding markers zos_blockinfile: state: absent src: SYS1.PARMLIB(PROG00) marker: "/* {mark} ANSIBLE MANAGED BLOCK FOR SOME.DATA.SET */" - - name: Add ZOAU path to PATH in /etc/profile zos_blockinfile: src: /etc/profile @@ -212,7 +210,6 @@ ZOAU=/path/to/zoau_dir/bin export ZOAU PATH=$ZOAU:$PATH - - name: Insert/Update HTML surrounded by custom markers after <body> line zos_blockinfile: path: /var/www/html/index.html @@ -221,13 +218,11 @@ block: | <h1>Welcome to {{ ansible_hostname }}</h1> <p>Last updated on {{ ansible_date_time.iso8601 }}</p> - - name: Remove HTML as well as surrounding markers zos_blockinfile: path: /var/www/html/index.html state: absent marker: "<!-- {mark} ANSIBLE MANAGED BLOCK -->" - - name: Add mappings to /etc/hosts zos_blockinfile: path: /etc/hosts @@ -238,7 +233,6 @@ - { name: host1, ip: 10.10.1.10 } - { name: host2, ip: 10.10.1.11 } - { name: host3, ip: 10.10.1.12 } - - name: Add a code block to a member using a predefined indentation. zos_blockinfile: path: SYS1.PARMLIB(BPXPRM00) @@ -348,12 +342,10 @@ def transformBlock(block, indentation_char, indentation_spaces): """Prepends the specified number of spaces to the block in all lines - Arguments: block: {str} -- The block text to be transformed. indentation_char: {str} -- The indentation char to be used. indentation_spaces: {int} -- Number of times the indentation char to prepend. - Returns: block: {str} -- The text block after applying the necessary transformations. """ @@ -372,7 +364,6 @@ def present(src, block, marker, ins_aft, ins_bef, encoding, force): """Replace a block with the matching regex pattern Insert a block before/after the matching pattern Insert a block at BOF/EOF - Arguments: src: {str} -- The z/OS USS file or data set to modify. block: {str} -- The block to insert/replace into the src. @@ -387,7 +378,6 @@ def present(src, block, marker, ins_aft, ins_bef, encoding, force): - '*regex*' encoding: {str} -- Encoding of the src. force: {str} -- If not empty passes the -f option to dmod cmd. - Returns: str -- Information in JSON format. keys: cmd: {str} -- dmod shell command @@ -399,13 +389,11 @@ def present(src, block, marker, ins_aft, ins_bef, encoding, force): def absent(src, marker, encoding, force): """Delete blocks with matching regex pattern - Arguments: src: {str} -- The z/OS USS file or data set to modify. marker: {str} -- Identifies the block to be removed. encoding: {str} -- Encoding of the src. force: {str} -- If not empty passes the -f option to dmod cmd. - Returns: str -- Information in JSON format. keys: cmd: {str} -- dmod shell command diff --git a/plugins/modules/zos_copy.py b/plugins/modules/zos_copy.py index 57a16545e..2fe9ffd4c 100644 --- a/plugins/modules/zos_copy.py +++ b/plugins/modules/zos_copy.py @@ -1056,8 +1056,8 @@ def copy_to_uss( group = self.common_file_args.get("group") owner = self.common_file_args.get("owner") if mode is not None: - self.module.set_mode_if_different(dest, mode, False) - + if not os.path.isdir(dest): + self.module.set_mode_if_different(dest, mode, False) if changed_files: for filepath in changed_files: self.module.set_mode_if_different(os.path.join(dest, filepath), mode, False) diff --git a/tests/functional/modules/test_zos_blockinfile_func.py b/tests/functional/modules/test_zos_blockinfile_func.py index 37f1818d4..f6b735487 100644 --- a/tests/functional/modules/test_zos_blockinfile_func.py +++ b/tests/functional/modules/test_zos_blockinfile_func.py @@ -1545,4 +1545,4 @@ def test_ds_not_supported(ansible_zos_module, dstype): DsNotSupportedHelper( TEST_INFO["test_ds_block_insertafter_regex"]["test_name"], ansible_zos_module, TEST_ENV, TEST_INFO["test_uss_block_insertafter_regex"] - ) + ) \ No newline at end of file From 2df8bfe407d0da5ba6e6879fe795fef2e24d2291 Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Sun, 9 Apr 2023 23:23:51 -0400 Subject: [PATCH 079/495] corrected job test case that wanted to extract job id. --- tests/functional/modules/test_zos_job_query_func.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/functional/modules/test_zos_job_query_func.py b/tests/functional/modules/test_zos_job_query_func.py index f0e53a556..3386467b5 100644 --- a/tests/functional/modules/test_zos_job_query_func.py +++ b/tests/functional/modules/test_zos_job_query_func.py @@ -71,7 +71,7 @@ def test_zos_job_id_query_multi_wildcards_func(ansible_zos_module): assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" assert result.get("jobs")[0].get("ret_code").get("code") == 0 - fulljobid = result.get("jobs")[0].get("res_code").get("job_id") + fulljobid = result.get("jobs")[0].get("job_id") jobmask = fulljobid[0:3] + '*' + fulljobid[5:6] + '*' qresults = hosts.all.zos_job_query(jobmask) for qresult in qresults.contacted.values(): From 18126332afb9f99c2af44b35285fd1977d068ba5 Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Mon, 10 Apr 2023 11:30:56 -0400 Subject: [PATCH 080/495] changed call to zos_job_query in the functional test. --- tests/functional/modules/test_zos_job_query_func.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/functional/modules/test_zos_job_query_func.py b/tests/functional/modules/test_zos_job_query_func.py index 3386467b5..0231cc874 100644 --- a/tests/functional/modules/test_zos_job_query_func.py +++ b/tests/functional/modules/test_zos_job_query_func.py @@ -73,7 +73,7 @@ def test_zos_job_id_query_multi_wildcards_func(ansible_zos_module): fulljobid = result.get("jobs")[0].get("job_id") jobmask = fulljobid[0:3] + '*' + fulljobid[5:6] + '*' - qresults = hosts.all.zos_job_query(jobmask) + qresults = hosts.all.zos_job_query(job_id=jobmask) for qresult in qresults.contacted.values(): assert qresult.get("jobs") is not None From 4f4c2644c31cfd178a032336f1e730a0e181dc4a Mon Sep 17 00:00:00 2001 From: ketankelkar <ktnklkr@gmail.com> Date: Mon, 10 Apr 2023 14:11:21 -0700 Subject: [PATCH 081/495] zos_data_set - add force parameter to enable member delete via disp shr (#718) * add force parameter to enable member delete via disp shr Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update link to PR instead of issue Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * fix minor mistake in func test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update DOCstring for missed items highlighted in PR review Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update copyright years in data set module util Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> --- ...8-zos-data-set-support-disposition-shr.yml | 2 + plugins/module_utils/data_set.py | 19 ++- plugins/modules/zos_data_set.py | 71 ++++++++- .../modules/test_zos_data_set_func.py | 140 ++++++++++++++++++ 4 files changed, 226 insertions(+), 6 deletions(-) create mode 100644 changelogs/fragments/358-zos-data-set-support-disposition-shr.yml diff --git a/changelogs/fragments/358-zos-data-set-support-disposition-shr.yml b/changelogs/fragments/358-zos-data-set-support-disposition-shr.yml new file mode 100644 index 000000000..4102bab0d --- /dev/null +++ b/changelogs/fragments/358-zos-data-set-support-disposition-shr.yml @@ -0,0 +1,2 @@ +minor_changes: + - zos_data_set - add force parameter to enable member delete while pdse is in use (https://github.com/ansible-collections/ibm_zos_core/pull/718). \ No newline at end of file diff --git a/plugins/module_utils/data_set.py b/plugins/module_utils/data_set.py index 2549c345c..8295a6541 100644 --- a/plugins/module_utils/data_set.py +++ b/plugins/module_utils/data_set.py @@ -1,4 +1,4 @@ -# Copyright (c) IBM Corporation 2020 +# Copyright (c) IBM Corporation 2020, 2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -116,6 +116,7 @@ def ensure_present( sms_management_class=None, volumes=None, tmp_hlq=None, + force=None, ): """Creates data set if it does not already exist. @@ -171,6 +172,8 @@ def ensure_present( has GUARANTEED_SPACE=YES specified. Otherwise, the allocation will fail. Defaults to None. tmp_hlq (str, optional): High level qualifier for temporary datasets. + force (bool, optional): Used to determine behavior when performing member operations on a pdse. + Defaults to None. Returns: bool -- Indicates if changes were made. @@ -247,11 +250,11 @@ def ensure_member_present(name, replace=False): return True @staticmethod - def ensure_member_absent(name): + def ensure_member_absent(name, force=False): """Deletes provided data set member if it exists. Returns a boolean indicating if changes were made.""" if DataSet.data_set_member_exists(name): - DataSet.delete_member(name) + DataSet.delete_member(name, force) return True return False @@ -772,6 +775,7 @@ def replace( sms_management_class=None, volumes=None, tmp_hlq=None, + force=None, ): """Attempts to replace an existing data set. @@ -826,6 +830,8 @@ def replace( has GUARANTEED_SPACE=YES specified. Otherwise, the allocation will fail. Defaults to None. tmp_hlq (str, optional): High level qualifier for temporary datasets. + force (bool, optional): Used to determine behavior when performing member operations on a pdse. + Defaults to None. """ arguments = locals() DataSet.delete(name) @@ -884,6 +890,7 @@ def create( sms_management_class=None, volumes=None, tmp_hlq=None, + force=None, ): """A wrapper around zoautil_py Dataset.create() to raise exceptions on failure. @@ -940,6 +947,8 @@ def create( has GUARANTEED_SPACE=YES specified. Otherwise, the allocation will fail. Defaults to None. tmp_hlq (str, optional): High level qualifier for temporary datasets. + force (bool, optional): Used to determine behavior when performing member operations on a pdse. + Defaults to None. Raises: DatasetCreateError: When data set creation fails. """ @@ -992,7 +1001,7 @@ def create_member(name): raise DatasetMemberCreateError(name, rc) @staticmethod - def delete_member(name): + def delete_member(name, force=False): """A wrapper around zoautil_py Dataset.delete_members() to raise exceptions on failure. @@ -1002,7 +1011,7 @@ def delete_member(name): Raises: DatasetMemberDeleteError: When data set member deletion fails. """ - rc = datasets.delete_members(name) + rc = datasets.delete_members(name, force=force) if rc > 0: raise DatasetMemberDeleteError(name, rc) diff --git a/plugins/modules/zos_data_set.py b/plugins/modules/zos_data_set.py index c3a6936d7..3e7ee1700 100644 --- a/plugins/modules/zos_data_set.py +++ b/plugins/modules/zos_data_set.py @@ -43,6 +43,10 @@ - > If I(state=absent) and the data set does exist on the managed node, remove the data set, module completes successfully with I(changed=True). + - > + If I(state=absent) and I(type=MEMBER) and I(force=True), the data set + will be opened with I(DISP=SHR) such that the entire data set can be + accessed by other processes while the specified member is deleted. - > If I(state=absent) and I(volumes) is provided, and the data set is not found in the catalog, the module attempts to perform catalog using supplied @@ -247,6 +251,20 @@ that is not available, then the value C(TMPHLQ) is used. required: false type: str + force: + description: + - Specifies that the data set can be shared with others during a member + delete operation which results in the data set you are updating to be + simultaneously updated by others. + - This is helpful when a data set is being used in a long running process + such as a started task and you are wanting to delete a member. + - The I(force=True) option enables sharing of data sets through the + disposition I(DISP=SHR). + - The I(force=True) only applies to data set members when I(state=absent) + and I(type=MEMBER). + type: bool + required: false + default: false batch: description: - Batch can be used to perform operations on multiple data sets in a single module call. @@ -271,6 +289,11 @@ - > If I(state=absent) and the data set does exist on the managed node, remove the data set, module completes successfully with I(changed=True). + - > + If I(state=absent) and I(type=MEMBER) and I(force=True), the data + set will be opened with I(DISP=SHR) such that the entire data set + can be accessed by other processes while the specified member is + deleted. - > If I(state=absent) and I(volumes) is provided, and the data set is not found in the catalog, the module attempts to perform catalog using supplied @@ -467,6 +490,21 @@ type: bool required: false default: false + force: + description: + - Specifies that the data set can be shared with others during a member + delete operation which results in the data set you are updating to + be simultaneously updated by others. + - This is helpful when a data set is being used in a long running + process such as a started task and you are wanting to delete a + member. + - The I(force=True) option enables sharing of data sets through the + disposition I(DISP=SHR). + - The I(force=True) only applies to data set members when + I(state=absent) and I(type=MEMBER). + type: bool + required: false + default: false """ EXAMPLES = r""" @@ -552,6 +590,13 @@ state: absent type: MEMBER +- name: Remove a member from an existing PDS/E by opening with disposition DISP=SHR + zos_data_set: + name: someds.name.here(mydata) + state: absent + type: MEMBER + force: yes + - name: Create multiple partitioned data sets and add one or more members to each zos_data_set: batch: @@ -894,6 +939,9 @@ def perform_data_set_operations(name, state, **extra_args): """Calls functions to perform desired operations on one or more data sets. Returns boolean indicating if changes were made.""" changed = False + # passing in **extra_args forced me to modify the acceptable parameters + # for multiple functions in data_set.py including ensure_present, replace + # and create where the force parameter has no bearing. if state == "present" and extra_args.get("type") != "MEMBER": changed = DataSet.ensure_present(name, **extra_args) elif state == "present" and extra_args.get("type") == "MEMBER": @@ -901,7 +949,7 @@ def perform_data_set_operations(name, state, **extra_args): elif state == "absent" and extra_args.get("type") != "MEMBER": changed = DataSet.ensure_absent(name, extra_args.get("volumes")) elif state == "absent" and extra_args.get("type") == "MEMBER": - changed = DataSet.ensure_member_absent(name) + changed = DataSet.ensure_member_absent(name, extra_args.get("force")) elif state == "cataloged": changed = DataSet.ensure_cataloged(name, extra_args.get("volumes")) elif state == "uncataloged": @@ -1017,6 +1065,11 @@ def parse_and_validate_args(params): aliases=["volume"], dependencies=["state"], ), + force=dict( + type="bool", + required=False, + default=False, + ), ), ), # For individual data set args @@ -1086,6 +1139,11 @@ def parse_and_validate_args(params): required=False, default=None ), + force=dict( + type="bool", + required=False, + default=False, + ), mutually_exclusive=[ ["batch", "name"], # ["batch", "state"], @@ -1102,6 +1160,7 @@ def parse_and_validate_args(params): ["batch", "key_length"], # ["batch", "replace"], ["batch", "volumes"], + # ["batch", "force"], ], ) parser = BetterArgParser(arg_defs) @@ -1162,6 +1221,11 @@ def run_module(): default=False, ), volumes=dict(type="raw", required=False, aliases=["volume"]), + force=dict( + type="bool", + required=False, + default=False, + ), ), ), # For individual data set args @@ -1213,6 +1277,11 @@ def run_module(): required=False, default=None ), + force=dict( + type="bool", + required=False, + default=False + ), ) result = dict(changed=False, message="", names=[]) diff --git a/tests/functional/modules/test_zos_data_set_func.py b/tests/functional/modules/test_zos_data_set_func.py index 991ce07ca..37bdcb682 100644 --- a/tests/functional/modules/test_zos_data_set_func.py +++ b/tests/functional/modules/test_zos_data_set_func.py @@ -16,9 +16,12 @@ __metaclass__ = type import pytest +import time +import subprocess from pipes import quote from pprint import pprint + # TODO: determine if data set names need to be more generic for testcases # TODO: add additional tests to check additional data set creation parameter combinations @@ -460,6 +463,143 @@ def test_batch_data_set_and_member_creation(ansible_zos_module): hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") +c_pgm="""#include <stdio.h> +#include <stdlib.h> +#include <string.h> +int main(int argc, char** argv) +{ + char dsname[ strlen(argv[1]) + 4]; + sprintf(dsname, "//'%s'", argv[1]); + FILE* member; + member = fopen(dsname, "rb,type=record"); + sleep(300); + fclose(member); + return 0; +} +""" + +call_c_jcl="""//PDSELOCK JOB MSGCLASS=A,MSGLEVEL=(1,1),NOTIFY=&SYSUID,REGION=0M +//LOCKMEM EXEC PGM=BPXBATCH +//STDPARM DD * +SH /tmp/disp_shr/pdse-lock '{0}({1})' +//STDIN DD DUMMY +//STDOUT DD SYSOUT=* +//STDERR DD SYSOUT=* +//""" + +def test_data_member_force_delete(ansible_zos_module): + MEMBER_1, MEMBER_2, MEMBER_3, MEMBER_4 = "MEM1", "MEM2", "MEM3", "MEM4" + try: + hosts = ansible_zos_module + + # set up: + # create pdse + results = hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="present", type="pdse", replace=True) + for result in results.contacted.values(): + assert result.get("changed") is True + + # add members + results = hosts.all.zos_data_set( + batch=[ + { + "name": DEFAULT_DATA_SET_NAME + "({0})".format(MEMBER_1), + "type": "member", + "state": "present", + "replace": True, + }, + { + "name": DEFAULT_DATA_SET_NAME + "({0})".format(MEMBER_2), + "type": "member", + "state": "present", + "replace": True, + }, + { + "name": DEFAULT_DATA_SET_NAME + "({0})".format(MEMBER_3), + "type": "member", + "state": "present", + "replace": True, + }, + { + "name": DEFAULT_DATA_SET_NAME + "({0})".format(MEMBER_4), + "type": "member", + "state": "present", + "replace": True, + }, + ] + ) + # ensure data set/members create successful + for result in results.contacted.values(): + assert result.get("changed") is True + + # copy/compile c program and copy jcl to hold data set lock for n seconds in background(&) + hosts.all.zos_copy(content=c_pgm, dest='/tmp/disp_shr/pdse-lock.c', force=True) + hosts.all.zos_copy( + content=call_c_jcl.format(DEFAULT_DATA_SET_NAME, MEMBER_1), + dest='/tmp/disp_shr/call_c_pgm.jcl', + force=True + ) + hosts.all.shell(cmd="xlc -o pdse-lock pdse-lock.c", chdir="/tmp/disp_shr/") + + # submit jcl + hosts.all.shell(cmd="submit call_c_pgm.jcl", chdir="/tmp/disp_shr/") + + # pause to ensure c code acquires lock + time.sleep(5) + + # non-force attempt to delete MEMBER_2 - should fail since pdse in in use. + results = hosts.all.zos_data_set( + name="{0}({1})".format(DEFAULT_DATA_SET_NAME, MEMBER_2), + state="absent", + type="MEMBER" + ) + for result in results.contacted.values(): + assert result.get("failed") is True + assert "DatasetMemberDeleteError" in result.get("msg") + + # attempt to delete MEMBER_3 with force option. + results = hosts.all.zos_data_set( + name="{0}({1})".format(DEFAULT_DATA_SET_NAME, MEMBER_3), state="absent", type="MEMBER", force=True + ) + for result in results.contacted.values(): + assert result.get("changed") is True + assert result.get("module_stderr") is None + + # attempt to delete MEMBER_4 with force option in batch mode. + results = hosts.all.zos_data_set( + batch=[ + { + "name": "{0}({1})".format(DEFAULT_DATA_SET_NAME, MEMBER_4), + "state": "absent", + "type": "MEMBER", + "force": True + } + ] + ) + for result in results.contacted.values(): + assert result.get("changed") is True + assert result.get("module_stderr") is None + + # confirm member deleted with mls -- mem1 and mem2 should be present but no mem3 and no mem4 + results = hosts.all.command(cmd="mls {0}".format(DEFAULT_DATA_SET_NAME)) + for result in results.contacted.values(): + assert MEMBER_1 in result.get("stdout") + assert MEMBER_2 in result.get("stdout") + assert MEMBER_3 not in result.get("stdout") + assert MEMBER_4 not in result.get("stdout") + + finally: + # extract pid + ps_list_res = hosts.all.shell(cmd="ps -e | grep -i 'pdse-lock'") + + # kill process - release lock - this also seems to end the job + pid = list(ps_list_res.contacted.values())[0].get('stdout').strip().split(' ')[0] + hosts.all.shell(cmd="kill 9 {0}".format(pid.strip())) + # clean up c code/object/executable files, jcl + hosts.all.shell(cmd='rm -r /tmp/disp_shr') + # remove pdse + hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") + + def test_repeated_operations(ansible_zos_module): try: hosts = ansible_zos_module From 574d0d8a7ad8f5828297b98ed50d6f6f85ad1a80 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Tue, 11 Apr 2023 13:00:40 -0700 Subject: [PATCH 082/495] Update changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> --- .../323-zos-job-query-handle-multiple-wildcards.yml | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/changelogs/fragments/323-zos-job-query-handle-multiple-wildcards.yml b/changelogs/fragments/323-zos-job-query-handle-multiple-wildcards.yml index a35827e24..060df2fb1 100644 --- a/changelogs/fragments/323-zos-job-query-handle-multiple-wildcards.yml +++ b/changelogs/fragments/323-zos-job-query-handle-multiple-wildcards.yml @@ -1,4 +1,7 @@ minor_changes: - - zos_job_query - This enhancement adjusts the job_name and job_id parameters to handle embedded wildcards. - This also required change to job.py/_get_job_status to follow the wildcard feature, using fnmatch logic. - (https://github.com/ansible-collections/ibm_zos_core/pull/721) +- zos_job_query - ansible module does not support positional wild card placement + for `job_name1 or `job_id`. This enhancement allows embedded wildcards + throughout the `job_name` and `job_id`. + (https://github.com/ansible-collections/ibm_zos_core/pull/721) +- module_utils - job.py utility did not support positional wiled card placement, + this enhancement uses `fnmatch` logic to support wild cards. From ded116a442b06d0648df6dec6423931dbeab6d6b Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Tue, 11 Apr 2023 14:22:15 -0700 Subject: [PATCH 083/495] Update to doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> --- plugins/modules/zos_job_query.py | 50 ++++++++++++++++++-------------- 1 file changed, 28 insertions(+), 22 deletions(-) diff --git a/plugins/modules/zos_job_query.py b/plugins/modules/zos_job_query.py index 3870440d8..18ad27072 100644 --- a/plugins/modules/zos_job_query.py +++ b/plugins/modules/zos_job_query.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019, 2020 +# Copyright (c) IBM Corporation 2019, 2020, 2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -23,9 +23,10 @@ short_description: Query job status description: - List z/OS job(s) and the current status of the job(s). + - Uses job_name to filter the jobs by the job name. + - Uses job_id to filter the jobs by the job identifier. - Uses owner to filter the jobs by the job owner. - Uses system to filter the jobs by system where the job is running (or ran) on. - - Uses job_id to filter the jobs by the job id. author: - "Ping Xiao (@xiaopingBJ)" - "Demetrios Dimatos (@ddimatos)" @@ -33,7 +34,10 @@ options: job_name: description: - - The job name to query. Job name can now contain multiple embedded asterisks. + - The job name to query. + - A job name can be up to 8 characters long. + - The I(job_name) can contain include multiple wildcards. + - The asterisk (`*`) wildcard will match zero or more specified characters. type: str required: False default: "*" @@ -46,41 +50,43 @@ required: False job_id: description: - - The job number that has been assigned to the job. These normally begin - with STC, JOB, TSU and are followed by 5 digits. When job are - potentially greater than 99,999, the job number format will begin with - S, J, T and are followed by 7 digits. Job id can now contain multiple, - embedded asterisks. + - The job id that has been assigned to the job. + - A job id begins must begin with `STC`, `JOB`, `TSU` and are + followed by up to 5 digits. + - When a job id is greater than 99,999, the job id format will begin + with `S`, `J`, `T` and are followed by 7 digits. + - The I(job_id) can contain include multiple wildcards. + - The asterisk (`*`) wildcard will match zero or more specified characters. type: str required: False """ EXAMPLES = r""" -- name: list zos jobs with a jobname 'IYK3ZNA1' +- name: Query a job with a job name of 'JOB12345' zos_job_query: - job_name: "IYK3ZNA1" + job_name: "JOB12345" -- name: list the jobs matching jobname 'IYK3*' +- name: Query jobs using a wildcard to match any job id begging with 'JOB12' zos_job_query: - job_name: "IYK3*" + job_id: "JOB12*" -- name: list the jobs that match 'IYKsomethingNAsomething' +- name: Query jobs using wildcards to match any job name begging with 'H' and ending in 'O'. zos_job_query: - job_name: "IYK*NA*" + job_name: "H*O" -- name: list the jobs with JOB in the x014x range only +- name: Query jobs using a wildcards to match a range of job id(s) that include 'JOB' and '014'. zos_job_query: - job_idname: JOB*014* + job_id: JOB*014* -- name: list the job with a jobname 'IYK3ZNA*' and jobid as JOB01427 +- name: Query all job names beginning wih 'H' that match job id range that include '14'. zos_job_query: - job_name: IYK3ZNA* - job_id: JOB01427 + job_name: "H*" + job_id: "JOB*14*" -- name: list the job with a jobname 'IYK3ZNA*' and owner as BROWNAD +- name: Query all jobs names beginning with 'LINK' for owner 'ADMIN'. zos_job_query: - job_name: IYK3ZNA* - owner: BROWNAD + job_name: "LINK*" + owner: ADMIN """ RETURN = r""" From b9d6be1098ff6b291386fbc6a9d682126207fc76 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Tue, 11 Apr 2023 14:25:28 -0700 Subject: [PATCH 084/495] Update to doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> --- plugins/modules/zos_job_query.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/plugins/modules/zos_job_query.py b/plugins/modules/zos_job_query.py index 18ad27072..4ff99a128 100644 --- a/plugins/modules/zos_job_query.py +++ b/plugins/modules/zos_job_query.py @@ -78,7 +78,7 @@ zos_job_query: job_id: JOB*014* -- name: Query all job names beginning wih 'H' that match job id range that include '14'. +- name: Query all job names beginning wih 'H' that match job id that includes '14'. zos_job_query: job_name: "H*" job_id: "JOB*14*" From 28b910473851f6124bc648912a663b87bdb1d43b Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Tue, 11 Apr 2023 14:37:53 -0700 Subject: [PATCH 085/495] Update to doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> --- plugins/modules/zos_job_query.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/plugins/modules/zos_job_query.py b/plugins/modules/zos_job_query.py index 4ff99a128..bbd4f0e77 100644 --- a/plugins/modules/zos_job_query.py +++ b/plugins/modules/zos_job_query.py @@ -177,14 +177,14 @@ sample: [ { - "job_name": "IYK3ZNA1", - "owner": "BROWNAD", + "job_name": "LINKJOB", + "owner": "ADMIN", "job_id": "JOB01427", "ret_code": "null", }, { - "job_name": "IYK3ZNA2", - "owner": "BROWNAD", + "job_name": "LINKCBL", + "owner": "ADMIN", "job_id": "JOB16577", "ret_code": { "msg": "CANCELED", "code": "null" }, }, From a2ca30216a9d82eda22bb3581939e52dbc87441d Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Tue, 11 Apr 2023 14:39:18 -0700 Subject: [PATCH 086/495] update doc, rst Signed-off-by: ddimatos <dimatos@gmail.com> --- docs/source/modules/zos_job_query.rst | 55 +++++++++++++++++++-------- 1 file changed, 39 insertions(+), 16 deletions(-) diff --git a/docs/source/modules/zos_job_query.rst b/docs/source/modules/zos_job_query.rst index 76fccad68..1d94f9047 100644 --- a/docs/source/modules/zos_job_query.rst +++ b/docs/source/modules/zos_job_query.rst @@ -17,9 +17,10 @@ zos_job_query -- Query job status Synopsis -------- - List z/OS job(s) and the current status of the job(s). +- Uses job_name to filter the jobs by the job name. +- Uses job_id to filter the jobs by the job identifier. - Uses owner to filter the jobs by the job owner. - Uses system to filter the jobs by system where the job is running (or ran) on. -- Uses job_id to filter the jobs by the job id. @@ -32,6 +33,12 @@ Parameters job_name The job name to query. + A job name can be up to 8 characters long. + + The *job_name* can contain include multiple wildcards. + + The asterisk (`*`) wildcard will match zero or more specified characters. + | **required**: False | **type**: str | **default**: * @@ -47,7 +54,15 @@ owner job_id - The job number that has been assigned to the job. These normally begin with STC, JOB, TSU and are followed by 5 digits. When job are potentially greater than 99,999, the job number format will begin with S, J, T and are followed by 7 digits. + The job id that has been assigned to the job. + + A job id begins must begin with `STC`, `JOB`, `TSU` and are followed by up to 5 digits. + + When a job id is greater than 99,999, the job id format will begin with `S`, `J`, `T` and are followed by 7 digits. + + The *job_id* can contain include multiple wildcards. + + The asterisk (`*`) wildcard will match zero or more specified characters. | **required**: False | **type**: str @@ -61,23 +76,31 @@ Examples .. code-block:: yaml+jinja - - name: list zos jobs with a jobname 'IYK3ZNA1' + - name: Query a job with a job name of 'JOB12345' + zos_job_query: + job_name: "JOB12345" + + - name: Query jobs using a wildcard to match any job id begging with 'JOB12' + zos_job_query: + job_id: "JOB12*" + + - name: Query jobs using wildcards to match any job name begging with 'H' and ending in 'O'. zos_job_query: - job_name: "IYK3ZNA1" + job_name: "H*O" - - name: list the jobs matching jobname 'IYK3*' + - name: Query jobs using a wildcards to match a range of job id(s) that include 'JOB' and '014'. zos_job_query: - job_name: "IYK3*" + job_id: JOB*014* - - name: list the job with a jobname 'IYK3ZNA*' and jobid as JOB01427 + - name: Query all job names beginning wih 'H' that match job id that includes '14'. zos_job_query: - job_name: IYK3ZNA* - job_id: JOB01427 + job_name: "H*" + job_id: "JOB*14*" - - name: list the job with a jobname 'IYK3ZNA*' and owner as BROWNAD + - name: Query all jobs names beginning with 'LINK' for owner 'ADMIN'. zos_job_query: - job_name: IYK3ZNA* - owner: BROWNAD + job_name: "LINK*" + owner: ADMIN @@ -111,14 +134,14 @@ jobs [ { "job_id": "JOB01427", - "job_name": "IYK3ZNA1", - "owner": "BROWNAD", + "job_name": "LINKJOB", + "owner": "ADMIN", "ret_code": "null" }, { "job_id": "JOB16577", - "job_name": "IYK3ZNA2", - "owner": "BROWNAD", + "job_name": "LINKCBL", + "owner": "ADMIN", "ret_code": { "code": "null", "msg": "CANCELED" From 34ece4c330765f42ae541d9c23b8783e51671bea Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Tue, 11 Apr 2023 14:43:52 -0700 Subject: [PATCH 087/495] Update doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> --- docs/source/modules/zos_job_query.rst | 6 +++--- plugins/modules/zos_job_query.py | 6 +++--- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/docs/source/modules/zos_job_query.rst b/docs/source/modules/zos_job_query.rst index 1d94f9047..d33ca6744 100644 --- a/docs/source/modules/zos_job_query.rst +++ b/docs/source/modules/zos_job_query.rst @@ -153,16 +153,16 @@ jobs The name of the batch job. | **type**: str - | **sample**: IYK3ZNA2 + | **sample**: LINKJOB owner The owner who ran the job. | **type**: str - | **sample**: BROWNAD + | **sample**: ADMIN job_id - Unique job id assigned to the job by JES. + Unique job identifier assigned to the job by JES. | **type**: str | **sample**: JOB01427 diff --git a/plugins/modules/zos_job_query.py b/plugins/modules/zos_job_query.py index bbd4f0e77..ed31f0c0d 100644 --- a/plugins/modules/zos_job_query.py +++ b/plugins/modules/zos_job_query.py @@ -108,15 +108,15 @@ description: The name of the batch job. type: str - sample: IYK3ZNA2 + sample: LINKJOB owner: description: The owner who ran the job. type: str - sample: BROWNAD + sample: ADMIN job_id: description: - Unique job id assigned to the job by JES. + Unique job identifier assigned to the job by JES. type: str sample: JOB01427 ret_code: From 092cfd5e8d983ebc7beaf7a7a6a120220469d043 Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Tue, 11 Apr 2023 18:02:08 -0400 Subject: [PATCH 088/495] Added handler for job not found edge cases (None not iterable errors) --- plugins/modules/zos_job_query.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/plugins/modules/zos_job_query.py b/plugins/modules/zos_job_query.py index bbd4f0e77..efa6305a6 100644 --- a/plugins/modules/zos_job_query.py +++ b/plugins/modules/zos_job_query.py @@ -313,7 +313,13 @@ def parsing_jobs(jobs_raw): for job in jobs_raw: # Easier to see than checking for an empty string, JOB NOT FOUND was # replaced with None in the jobs.py and msg_txt field describes the job query instead - status_raw = job.get("ret_code").get("msg", "JOB NOT FOUND") + if job.get("ret_code") is None: + status_raw = "JOB NOT FOUNDa" + elif job.get("ret_code").get("msg", "JOB NOT FOUND") is None: + status_raw = "JOB NOT FOUNDb" + else: + status_raw = job.get("ret_code").get("msg", "JOB NOT FOUNDc") + if "AC" in status_raw: # the job is active ret_code = None @@ -332,9 +338,11 @@ def parsing_jobs(jobs_raw): elif "ABENDU" in status_raw: # status = 'Ended abnormally' ret_code = {"msg": status_raw, "code": job.get("ret_code").get("code")} + elif "CANCELED" in status_raw or "JCLERR" in status_raw or "JCL ERROR" in status_raw or "JOB NOT FOUND" in status_raw: # status = status_raw ret_code = {"msg": status_raw, "code": None} + else: # status = 'Unknown' ret_code = {"msg": status_raw, "code": job.get("ret_code").get("code")} From a62d76a1a073520b74f71be058137fccb5824c0f Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Tue, 11 Apr 2023 18:12:41 -0400 Subject: [PATCH 089/495] corrected pep8 issue (bad indent) --- plugins/modules/zos_job_query.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/plugins/modules/zos_job_query.py b/plugins/modules/zos_job_query.py index dfce6da0e..7e136a3ba 100644 --- a/plugins/modules/zos_job_query.py +++ b/plugins/modules/zos_job_query.py @@ -318,7 +318,7 @@ def parsing_jobs(jobs_raw): elif job.get("ret_code").get("msg", "JOB NOT FOUND") is None: status_raw = "JOB NOT FOUNDb" else: - status_raw = job.get("ret_code").get("msg", "JOB NOT FOUNDc") + status_raw = job.get("ret_code").get("msg", "JOB NOT FOUNDc") if "AC" in status_raw: # the job is active From f5de722a0bce4cd7b788ea5cf57687a030559dad Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Tue, 11 Apr 2023 18:18:26 -0400 Subject: [PATCH 090/495] removed tracking text from error/not found messages. --- plugins/modules/zos_job_query.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/plugins/modules/zos_job_query.py b/plugins/modules/zos_job_query.py index 7e136a3ba..9c2c7dd86 100644 --- a/plugins/modules/zos_job_query.py +++ b/plugins/modules/zos_job_query.py @@ -314,11 +314,11 @@ def parsing_jobs(jobs_raw): # Easier to see than checking for an empty string, JOB NOT FOUND was # replaced with None in the jobs.py and msg_txt field describes the job query instead if job.get("ret_code") is None: - status_raw = "JOB NOT FOUNDa" + status_raw = "JOB NOT FOUND" elif job.get("ret_code").get("msg", "JOB NOT FOUND") is None: - status_raw = "JOB NOT FOUNDb" + status_raw = "JOB NOT FOUND" else: - status_raw = job.get("ret_code").get("msg", "JOB NOT FOUNDc") + status_raw = job.get("ret_code").get("msg", "JOB NOT FOUND") if "AC" in status_raw: # the job is active From c1126f2e1b52e40752fda28cccaf052ae9f96e7b Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Wed, 12 Apr 2023 09:52:27 -0600 Subject: [PATCH 091/495] Update zos_job_query.py --- plugins/modules/zos_job_query.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/plugins/modules/zos_job_query.py b/plugins/modules/zos_job_query.py index 9c2c7dd86..28d38b727 100644 --- a/plugins/modules/zos_job_query.py +++ b/plugins/modules/zos_job_query.py @@ -51,7 +51,7 @@ job_id: description: - The job id that has been assigned to the job. - - A job id begins must begin with `STC`, `JOB`, `TSU` and are + - A job id must begin with `STC`, `JOB`, `TSU` and are followed by up to 5 digits. - When a job id is greater than 99,999, the job id format will begin with `S`, `J`, `T` and are followed by 7 digits. From 81b35877dcaf76be46680e989803d9d513027c5f Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Thu, 13 Apr 2023 15:39:25 -0600 Subject: [PATCH 092/495] Add and automate a load lib test case for module zos_copy (#640) * Modified if statement * Added changelog fragment * Corrected statements Corrected PR in changelog fix lint issue * Adding test case on the work * Added assertion to loadlib test Changed cobol src Added newlines * Correcting cobol src * Removed unnecessary comments * Removed deprint statements for debug * Update 601-copy-loadlib-member.yml * Reduce space primary * Make sure cobol is executable * corrected name * Corrected names * Adding debug statements * modified name in link step * Changing copy name * Removed print statements for debug --- .../fragments/601-copy-loadlib-member.yml | 3 + .../fragments/734-copy-loadlib-member.yml | 3 + .../functional/modules/test_zos_copy_func.py | 229 ++++++++++++++++++ 3 files changed, 235 insertions(+) create mode 100644 changelogs/fragments/601-copy-loadlib-member.yml create mode 100644 changelogs/fragments/734-copy-loadlib-member.yml diff --git a/changelogs/fragments/601-copy-loadlib-member.yml b/changelogs/fragments/601-copy-loadlib-member.yml new file mode 100644 index 000000000..75b59e654 --- /dev/null +++ b/changelogs/fragments/601-copy-loadlib-member.yml @@ -0,0 +1,3 @@ +bugfixes: +- zos_copy - Copy failed from a loadlib member to another loadlib member. Fix now looks for error in stdout in the if statement to use -X option. + (https://github.com/ansible-collections/ibm_zos_core/pull/640) diff --git a/changelogs/fragments/734-copy-loadlib-member.yml b/changelogs/fragments/734-copy-loadlib-member.yml new file mode 100644 index 000000000..ebbaad48c --- /dev/null +++ b/changelogs/fragments/734-copy-loadlib-member.yml @@ -0,0 +1,3 @@ +bugfixes: +- zos_copy - Copy failed from a loadlib member to another loadlib member. Fix now looks for error in stdout in the if statement to use -X option. + (https://github.com/ansible-collections/ibm_zos_core/pull/734) diff --git a/tests/functional/modules/test_zos_copy_func.py b/tests/functional/modules/test_zos_copy_func.py index 884f0e3d6..cfe8e0ee9 100644 --- a/tests/functional/modules/test_zos_copy_func.py +++ b/tests/functional/modules/test_zos_copy_func.py @@ -56,6 +56,58 @@ TEST_PDSE = "SYS1.NFSLIBE" TEST_PDSE_MEMBER = "SYS1.NFSLIBE(GFSAMAIN)" +COBOL_SRC = """ + IDENTIFICATION DIVISION.\n + PROGRAM-ID. HELLOWRD.\n +\n + PROCEDURE DIVISION.\n + DISPLAY "SIMPLE HELLO WORLD".\n + STOP RUN.\n +""" + +LINK_JCL = """ +//COMPLINK JOB MSGCLASS=H,MSGLEVEL=(1,1),NOTIFY=&SYSUID,REGION=0M +//STEP1 EXEC PGM=IGYCRCTL +//STEPLIB DD DSN=IGYV5R10.SIGYCOMP,DISP=SHR +// DD DSN=IGYV5R10.SIGYMAC,DISP=SHR +//SYSIN DD DISP=SHR,DSN={0} +//SYSPRINT DD SYSOUT=* +//SYSLIN DD UNIT=SYSDA,DISP=(MOD), +// SPACE=(CYL,(1,1)), +// DCB=(RECFM=FB,LRECL=80,BLKSIZE=27920), +// DSN=&&LOADSET +//SYSUT1 DD SPACE=(80,(10,10),,,ROUND),UNIT=SYSDA +//SYSUT2 DD SPACE=(80,(10,10),,,ROUND),UNIT=SYSDA +//SYSUT3 DD SPACE=(80,(10,10),,,ROUND),UNIT=SYSDA +//SYSUT4 DD SPACE=(80,(10,10),,,ROUND),UNIT=SYSDA +//SYSUT5 DD SPACE=(80,(10,10),,,ROUND),UNIT=SYSDA +//SYSUT6 DD SPACE=(80,(10,10),,,ROUND),UNIT=SYSDA +//SYSUT7 DD SPACE=(80,(10,10),,,ROUND),UNIT=SYSDA +//SYSUT8 DD SPACE=(80,(10,10),,,ROUND),UNIT=SYSDA +//SYSUT9 DD SPACE=(80,(10,10),,,ROUND),UNIT=SYSDA +//SYSUT10 DD SPACE=(80,(10,10),,,ROUND),UNIT=SYSDA +//SYSUT11 DD SPACE=(80,(10,10),,,ROUND),UNIT=SYSDA +//SYSUT12 DD SPACE=(80,(10,10),,,ROUND),UNIT=SYSDA +//SYSUT13 DD SPACE=(80,(10,10),,,ROUND),UNIT=SYSDA +//SYSUT14 DD SPACE=(80,(10,10),,,ROUND),UNIT=SYSDA +//SYSUT15 DD SPACE=(80,(10,10),,,ROUND),UNIT=SYSDA +//SYSMDECK DD SPACE=(80,(10,10),,,ROUND),UNIT=SYSDA +//* +//LKED EXEC PGM=IEWL,REGION=0M +//SYSPRINT DD SYSOUT=* +//SYSLIB DD DSN=CEE.SCEELKED,DISP=SHR +// DD DSN=CEE.SCEELKEX,DISP=SHR +//SYSLMOD DD DSN={1}, +// DISP=SHR +//SYSUT1 DD UNIT=SYSDA,DCB=BLKSIZE=1024, +// SPACE=(TRK,(3,3)) +//SYSTERM DD SYSOUT=* +//SYSPRINT DD SYSOUT=* +//SYSLIN DD DSN=&&LOADSET,DISP=(OLD,KEEP) +//SYSIN DD DUMMY +//* + +""" def populate_dir(dir_path): for i in range(5): @@ -145,6 +197,42 @@ def create_vsam_data_set(hosts, name, ds_type, add_data=False, key_length=None, hosts.all.file(path=record_src, state="absent") +def link_loadlib_from_cobol(hosts, ds_name, cobol_pds): + """ + Given a PDSE, links a cobol program making allocated in a temp ds resulting in ds_name + as a loadlib. + + Arguments: + ds_name (str) -- PDS/E to be linked with the cobol program. + cobol_src (str) -- Cobol source code to be used as the program. + + Notes: PDS names are in the format of SOME.PDSNAME(MEMBER) + """ + # Copy the Link program + temp_jcl = "/tmp/link.jcl" + rc = 0 + try: + cp_res = hosts.all.zos_copy( + content=LINK_JCL.format(cobol_pds, ds_name), + dest="/tmp/link.jcl", + force=True, + ) + for res in cp_res.contacted.values(): + print("copy link program result {0}".format(res)) + # Link the temp ds with ds_name + job_result = hosts.all.zos_job_submit( + src="/tmp/link.jcl", + location="USS", + wait_time_s=60 + ) + for result in job_result.contacted.values(): + print("link job submit result {0}".format(result)) + rc = result.get("jobs")[0].get("ret_code").get("code") + finally: + hosts.all.file(path=temp_jcl, state="absent") + return rc + + @pytest.mark.uss @pytest.mark.parametrize("src", [ dict(src="/etc/profile", is_file=True, is_binary=False, is_remote=False), @@ -1679,6 +1767,147 @@ def test_copy_pds_to_existing_pds(ansible_zos_module, args): hosts.all.zos_data_set(name=dest, state="absent") +@pytest.mark.pdse +def test_copy_pds_member_with_system_symbol(ansible_zos_module,): + """This test is for bug #543 in GitHub. In some versions of ZOAU, + datasets.listing can't handle system symbols in volume names and + therefore fails to get details from a dataset. + """ + hosts = ansible_zos_module + # The volume for this dataset should use a system symbol. + # This dataset and member should be available on any z/OS system. + src = "SYS1.SAMPLIB(IZUPRM00)" + dest = "USER.TEST.PDS.DEST" + + try: + hosts.all.zos_data_set( + name=dest, + state="present", + type="pdse", + replace=True + ) + + copy_res = hosts.all.zos_copy(src=src, dest=dest, remote_src=True) + verify_copy = hosts.all.shell( + cmd="mls {0}".format(dest), + executable=SHELL_EXECUTABLE + ) + + for result in copy_res.contacted.values(): + assert result.get("msg") is None + assert result.get("changed") is True + assert result.get("dest") == dest + + for v_cp in verify_copy.contacted.values(): + assert v_cp.get("rc") == 0 + stdout = v_cp.get("stdout") + assert stdout is not None + assert len(stdout.splitlines()) == 1 + + finally: + hosts.all.zos_data_set(name=dest, state="absent") + + +@pytest.mark.pdse +def test_copy_pds_loadlib_member_to_pds_loadlib_member(ansible_zos_module,): + hosts = ansible_zos_module + # The volume for this dataset should use a system symbol. + # This dataset and member should be available on any z/OS system. + src = "USER.LOAD.SRC" + dest = "USER.LOAD.DEST" + cobol_pds = "USER.COBOL.SRC" + try: + hosts.all.zos_data_set( + name=src, + state="present", + type="pdse", + record_format="U", + record_length=0, + block_size=32760, + space_primary=2, + space_type="M", + replace=True + ) + + hosts.all.zos_data_set( + name=dest, + state="present", + type="pdse", + record_format="U", + record_length=0, + block_size=32760, + space_primary=2, + space_type="M", + replace=True + ) + + hosts.all.zos_data_set( + name=cobol_pds, + state="present", + type="pds", + space_primary=2, + record_format="FB", + record_length=80, + block_size=3120, + replace=True, + ) + member = "HELLOSRC" + cobol_pds = "{0}({1})".format(cobol_pds, member) + rc = hosts.all.zos_copy( + content=COBOL_SRC, + dest=cobol_pds, + ) + dest_name = "{0}({1})".format(dest, member) + src_name = "{0}({1})".format(src, member) + + + # both src and dest need to be a loadlib + rc = link_loadlib_from_cobol(hosts, dest_name, cobol_pds) + assert rc == 0 + # make sure is executable + cmd = "mvscmd --pgm={0} --steplib={1} --sysprint=* --stderr=* --stdout=*" + exec_res = hosts.all.shell( + cmd=cmd.format(member, dest) + ) + for result in exec_res.contacted.values(): + assert result.get("rc") == 0 + rc = link_loadlib_from_cobol(hosts, src_name, cobol_pds) + assert rc == 0 + + exec_res = hosts.all.shell( + cmd=cmd.format(member, src) + ) + for result in exec_res.contacted.values(): + assert result.get("rc") == 0 + + copy_res = hosts.all.zos_copy( + src="{0}({1})".format(src, member), + dest="{0}({1})".format(dest, "MEM1"), + remote_src=True) + + verify_copy = hosts.all.shell( + cmd="mls {0}".format(dest), + executable=SHELL_EXECUTABLE + ) + + for result in copy_res.contacted.values(): + assert result.get("msg") is None + assert result.get("changed") is True + assert result.get("dest") == "{0}({1})".format(dest, "MEM1") + + for v_cp in verify_copy.contacted.values(): + assert v_cp.get("rc") == 0 + stdout = v_cp.get("stdout") + assert stdout is not None + # number of members + assert len(stdout.splitlines()) == 2 + + finally: + hosts.all.zos_data_set(name=dest, state="absent") + hosts.all.zos_data_set(name=src, state="absent") + hosts.all.zos_data_set(name=cobol_pds, state="absent") + + @pytest.mark.pdse def test_copy_multiple_data_set_members(ansible_zos_module): hosts = ansible_zos_module From 850f519d28f335d3b43683514966529a67cc0f3b Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Thu, 13 Apr 2023 17:19:48 -0600 Subject: [PATCH 093/495] Removed previous changelog --- changelogs/fragments/601-copy-loadlib-member.yml | 3 --- 1 file changed, 3 deletions(-) delete mode 100644 changelogs/fragments/601-copy-loadlib-member.yml diff --git a/changelogs/fragments/601-copy-loadlib-member.yml b/changelogs/fragments/601-copy-loadlib-member.yml deleted file mode 100644 index 75b59e654..000000000 --- a/changelogs/fragments/601-copy-loadlib-member.yml +++ /dev/null @@ -1,3 +0,0 @@ -bugfixes: -- zos_copy - Copy failed from a loadlib member to another loadlib member. Fix now looks for error in stdout in the if statement to use -X option. - (https://github.com/ansible-collections/ibm_zos_core/pull/640) From e7287920612aa391a0b435eb0664ba8651bcca53 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Thu, 13 Apr 2023 17:38:50 -0600 Subject: [PATCH 094/495] Removed unused fragment --- changelogs/fragments/734-copy-loadlib-member.yml | 3 --- 1 file changed, 3 deletions(-) delete mode 100644 changelogs/fragments/734-copy-loadlib-member.yml diff --git a/changelogs/fragments/734-copy-loadlib-member.yml b/changelogs/fragments/734-copy-loadlib-member.yml deleted file mode 100644 index ebbaad48c..000000000 --- a/changelogs/fragments/734-copy-loadlib-member.yml +++ /dev/null @@ -1,3 +0,0 @@ -bugfixes: -- zos_copy - Copy failed from a loadlib member to another loadlib member. Fix now looks for error in stdout in the if statement to use -X option. - (https://github.com/ansible-collections/ibm_zos_core/pull/734) From f02349c0416b564a29b6d678391a796f8bc8c051 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Thu, 13 Apr 2023 17:45:06 -0600 Subject: [PATCH 095/495] Removed test case --- .../functional/modules/test_zos_copy_func.py | 41 ------------------- 1 file changed, 41 deletions(-) diff --git a/tests/functional/modules/test_zos_copy_func.py b/tests/functional/modules/test_zos_copy_func.py index cfe8e0ee9..85cd6dfbe 100644 --- a/tests/functional/modules/test_zos_copy_func.py +++ b/tests/functional/modules/test_zos_copy_func.py @@ -1767,47 +1767,6 @@ def test_copy_pds_to_existing_pds(ansible_zos_module, args): hosts.all.zos_data_set(name=dest, state="absent") -@pytest.mark.pdse -def test_copy_pds_member_with_system_symbol(ansible_zos_module,): - """This test is for bug #543 in GitHub. In some versions of ZOAU, - datasets.listing can't handle system symbols in volume names and - therefore fails to get details from a dataset. - """ - hosts = ansible_zos_module - # The volume for this dataset should use a system symbol. - # This dataset and member should be available on any z/OS system. - src = "SYS1.SAMPLIB(IZUPRM00)" - dest = "USER.TEST.PDS.DEST" - - try: - hosts.all.zos_data_set( - name=dest, - state="present", - type="pdse", - replace=True - ) - - copy_res = hosts.all.zos_copy(src=src, dest=dest, remote_src=True) - verify_copy = hosts.all.shell( - cmd="mls {0}".format(dest), - executable=SHELL_EXECUTABLE - ) - - for result in copy_res.contacted.values(): - assert result.get("msg") is None - assert result.get("changed") is True - assert result.get("dest") == dest - - for v_cp in verify_copy.contacted.values(): - assert v_cp.get("rc") == 0 - stdout = v_cp.get("stdout") - assert stdout is not None - assert len(stdout.splitlines()) == 1 - - finally: - hosts.all.zos_data_set(name=dest, state="absent") - - @pytest.mark.pdse def test_copy_pds_loadlib_member_to_pds_loadlib_member(ansible_zos_module,): hosts = ansible_zos_module From 9e355bce1e2df299db6bbf6a39f4b3b0313183d3 Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Thu, 13 Apr 2023 19:54:46 -0400 Subject: [PATCH 096/495] Add max_rc support for module zos_tso_command (#666) * rebuilding 565 * fixing pep8 issue * corrected not in code, added functional test to use maxrc * changing maxrc return codes as indicated * updated copy_rexx to incorporate max_rc * pep8 issue on an if statement * changed test to dump intermediate values temporarily * added max_rc to arg params to get it to pass through * removed extra output on test * Added changelog fragment. * Update 565-zos_tsocommand_maxrc.yml * PARTIAL correction based on reviewer's feedback. Will complete later today. * Partial push: change origrc to orig_rc and added documentation. Added experimental loop for multiline results. Changed error case to continue through the commands even after a failure (?!) * Changes based on review feedback: - rc is actual returned value - origrc/orig_rc removed - in a list of commands, first error (considering max_rc) stops the chain - while processing commands, a list of success/failures are kept for error state info - changed listds to a longer, more complete string in example * updated test case for max_rc to use longer, more complete listds command * Minor pep8 and yamllint changes * Updated string builder to use append and join, for efficiency. * Update changelog fragment to correspond to pull request number Signed-off-by: ddimatos <dimatos@gmail.com> * removed old fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Changed output of command-by-command to use max_rc instead of Max RC, for consistency. * clarifying failure message when user hasn't set a max_rc. * corrected date on copyright notice to include current year. * corrected copyright date on test case --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: Demetri <dimatos@gmail.com> --- .../fragments/666-zos_tso_command_maxrc.yml | 4 + plugins/modules/zos_tso_command.py | 83 +++++++++++++++---- .../modules/test_zos_tso_command_func.py | 14 +++- 3 files changed, 85 insertions(+), 16 deletions(-) create mode 100644 changelogs/fragments/666-zos_tso_command_maxrc.yml diff --git a/changelogs/fragments/666-zos_tso_command_maxrc.yml b/changelogs/fragments/666-zos_tso_command_maxrc.yml new file mode 100644 index 000000000..c410c00b5 --- /dev/null +++ b/changelogs/fragments/666-zos_tso_command_maxrc.yml @@ -0,0 +1,4 @@ +minor_changes: + - zos_tso_command - was enhanced to accept `max_rc` as an option. This option + allows a non-zero return code to succeed as a valid return code. + (https://github.com/ansible-collections/ibm_zos_core/pull/666) diff --git a/plugins/modules/zos_tso_command.py b/plugins/modules/zos_tso_command.py index 7b2601d37..e3c4c6f12 100644 --- a/plugins/modules/zos_tso_command.py +++ b/plugins/modules/zos_tso_command.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019, 2020 +# Copyright (c) IBM Corporation 2019, 2020, 2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -20,20 +20,29 @@ DOCUMENTATION = r""" module: zos_tso_command version_added: '1.1.0' -author: "Xiao Yuan Ma (@bjmaxy)" short_description: Execute TSO commands description: - - Execute TSO commands on the target z/OS system with the provided options - and receive a structured response. + - Execute TSO commands on the target z/OS system with the provided options and receive a structured response. +author: + - "Xiao Yuan Ma (@bjmaxy)" + - "Rich Parker (@richp405)" options: commands: description: - One or more TSO commands to execute on the target z/OS system. - Accepts a single string or list of strings as input. + - If a list of strings is provided, processing will stop at the first failure, based on rc. required: true type: raw aliases: - command + max_rc: + description: + - Specifies the maximum return code allowed for a TSO command. + - If more than one TSO command is submitted, the I(max_rc) applies to all TSO commands. + default: 0 + required: false + type: int """ RETURN = r""" @@ -55,6 +64,13 @@ returned: always type: int sample: 0 + max_rc: + description: + - Specifies the maximum return code allowed for a TSO command. + - If more than one TSO command is submitted, the I(max_rc) applies to all TSO commands. + returned: always + type: int + sample: 0 content: description: The response resulting from the execution of the TSO command. @@ -89,6 +105,12 @@ commands: - LU TESTUSER +- name: Execute TSO command to list dataset data (allow 4 for no dataset listed or cert found) + zos_tso_command: + commands: + - LISTDSD DATASET('HLQ.DATA.SET') ALL GENERIC + max_rc: 4 + """ from ansible.module_utils.basic import AnsibleModule @@ -100,7 +122,7 @@ ) -def run_tso_command(commands, module): +def run_tso_command(commands, module, max_rc): script = """/* REXX */ PARSE ARG cmd address tso @@ -113,11 +135,11 @@ def run_tso_command(commands, module): x = outtrap('OFF') exit rc """ - command_detail_json = copy_rexx_and_run_commands(script, commands, module) + command_detail_json = copy_rexx_and_run_commands(script, commands, module, max_rc) return command_detail_json -def copy_rexx_and_run_commands(script, commands, module): +def copy_rexx_and_run_commands(script, commands, module, max_rc): command_detail_json = [] delete_on_close = True tmp_file = NamedTemporaryFile(delete=delete_on_close) @@ -131,7 +153,17 @@ def copy_rexx_and_run_commands(script, commands, module): command_results["rc"] = rc command_results["content"] = stdout.split("\n") command_results["lines"] = len(command_results.get("content", [])) + command_results["stderr"] = stderr + + if rc <= max_rc: + command_results["failed"] = False + else: + command_results["failed"] = True + command_detail_json.append(command_results) + if command_results["failed"]: + break + return command_detail_json @@ -158,15 +190,18 @@ def list_or_str_type(contents, dependencies): def run_module(): module_args = dict( commands=dict(type="raw", required=True, aliases=["command"]), + max_rc=dict(type="int", required=False, default=0), ) module = AnsibleModule(argument_spec=module_args, supports_check_mode=True) result = dict( changed=False, + failed=True, ) arg_defs = dict( commands=dict(type=list_or_str_type, required=True, aliases=["command"]), + max_rc=dict(type="int", required=False, default=0), ) try: parser = BetterArgParser(arg_defs) @@ -175,19 +210,37 @@ def run_module(): module.fail_json(msg=repr(e), **result) commands = parsed_args.get("commands") + max_rc = parsed_args.get("max_rc") + if max_rc is None: + max_rc = 0 try: - result["output"] = run_tso_command(commands, module) + result["output"] = run_tso_command(commands, module, max_rc) + result["max_rc"] = max_rc + errors_found = False + result_list = [] + for cmd in result.get("output"): - if cmd.get("rc") != 0: - module.fail_json( - msg='The TSO command "' - + cmd.get("command", "") - + '" execution failed.', - **result - ) + tmp_string = 'Command "' + cmd.get("command", "") + '" execution' + if cmd.get("rc") > max_rc: + errors_found = True + if max_rc > 0: + result_list.append(tmp_string + "failed. RC was {0}; max_rc was {1}".format(cmd.get("rc"), max_rc)) + else: + result_list.append(tmp_string + "failed. RC was {0}.".format(cmd.get("rc"))) + else: + result_list.append(tmp_string + "succeeded. RC was {0}.".format(cmd.get("rc"))) + + if errors_found: + result_string = "\n".join(result_list) + + module.fail_json( + msg="Some ({0}) command(s) failed:\n{1}".format(errors_found, result_string), + **result + ) result["changed"] = True + result["failed"] = False module.exit_json(**result) except Exception as e: diff --git a/tests/functional/modules/test_zos_tso_command_func.py b/tests/functional/modules/test_zos_tso_command_func.py index eeddd9ef3..dbdf888f4 100644 --- a/tests/functional/modules/test_zos_tso_command_func.py +++ b/tests/functional/modules/test_zos_tso_command_func.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019, 2020 +# Copyright (c) IBM Corporation 2019, 2020, 2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -160,3 +160,15 @@ def test_zos_tso_command_multiple_commands(ansible_zos_module): for item in result.get("output"): assert item.get("rc") == 0 assert result.get("changed") is True + + +# The positive test +# The command that kicks off rc>0 which is allowed +def test_zos_tso_command_maxrc(ansible_zos_module): + hosts = ansible_zos_module + results = hosts.all.zos_tso_command(commands=["LISTDSD DATASET('HLQ.DATA.SET') ALL GENERIC"],max_rc=4) + for result in results.contacted.values(): + for item in result.get("output"): + print( item ) + assert item.get("rc") < 5 + assert result.get("changed") is True From 9e680031948eb2746207df2324ec72bf518b7a66 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Thu, 13 Apr 2023 23:31:56 -0700 Subject: [PATCH 097/495] added changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> --- changelogs/fragments/734-copy-loadlib-member-test-case.yml | 4 ++++ 1 file changed, 4 insertions(+) create mode 100644 changelogs/fragments/734-copy-loadlib-member-test-case.yml diff --git a/changelogs/fragments/734-copy-loadlib-member-test-case.yml b/changelogs/fragments/734-copy-loadlib-member-test-case.yml new file mode 100644 index 000000000..4482c61da --- /dev/null +++ b/changelogs/fragments/734-copy-loadlib-member-test-case.yml @@ -0,0 +1,4 @@ +trivial: +- zos_copy - Adds a test cases to ensure copying from a PDS/E member containing + a loadlib to another PDS/E member loadlib member for issue 601. + (https://github.com/ansible-collections/ibm_zos_core/pull/734) \ No newline at end of file From 192312e9756c24ce8b6e26e8b615258a94e0a3f0 Mon Sep 17 00:00:00 2001 From: Demetri <dimatos@gmail.com> Date: Mon, 17 Apr 2023 09:24:08 -0700 Subject: [PATCH 098/495] zos_copy forward port a test case for symbols on a volume #739 (#740) * added test case for symbols on a volume Signed-off-by: ddimatos <dimatos@gmail.com> * Changelog fragment added Signed-off-by: ddimatos <dimatos@gmail.com> * Update test case with added comments Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> --- .../740-zos_copy-volume-symbol-test.yml | 5 +++ .../functional/modules/test_zos_copy_func.py | 45 +++++++++++++++++++ 2 files changed, 50 insertions(+) create mode 100644 changelogs/fragments/740-zos_copy-volume-symbol-test.yml diff --git a/changelogs/fragments/740-zos_copy-volume-symbol-test.yml b/changelogs/fragments/740-zos_copy-volume-symbol-test.yml new file mode 100644 index 000000000..a30a50869 --- /dev/null +++ b/changelogs/fragments/740-zos_copy-volume-symbol-test.yml @@ -0,0 +1,5 @@ +trivial: +- zos_copy - prior, there was no test case for symbols on a volume. + This change adds a test case to test a volume which has in it symbols, + issue 738. + (https://github.com/ansible-collections/ibm_zos_core/pull/740) \ No newline at end of file diff --git a/tests/functional/modules/test_zos_copy_func.py b/tests/functional/modules/test_zos_copy_func.py index 85cd6dfbe..dbc76eeff 100644 --- a/tests/functional/modules/test_zos_copy_func.py +++ b/tests/functional/modules/test_zos_copy_func.py @@ -1867,6 +1867,51 @@ def test_copy_pds_loadlib_member_to_pds_loadlib_member(ansible_zos_module,): hosts.all.zos_data_set(name=cobol_pds, state="absent") +@pytest.mark.pdse +def test_copy_pds_member_with_system_symbol(ansible_zos_module,): + """This test is for bug #543 in GitHub. In some versions of ZOAU, + datasets.listing can't handle system symbols in volume names and + therefore fails to get details from a dataset. + + Note: `listcat ent('SYS1.SAMPLIB') all` will display 'volser = ******' + and `D SYMBOLS` will show you that `&SYSR2. = "RES80A"` where + the symbols for this volume correspond to volume `RES80A` + """ + hosts = ansible_zos_module + # The volume for this dataset should use a system symbol. + # This dataset and member should be available on any z/OS system. + src = "SYS1.SAMPLIB(IZUPRM00)" + dest = "USER.TEST.PDS.DEST" + + try: + hosts.all.zos_data_set( + name=dest, + state="present", + type="pdse", + replace=True + ) + + copy_res = hosts.all.zos_copy(src=src, dest=dest, remote_src=True) + verify_copy = hosts.all.shell( + cmd="mls {0}".format(dest), + executable=SHELL_EXECUTABLE + ) + + for result in copy_res.contacted.values(): + assert result.get("msg") is None + assert result.get("changed") is True + assert result.get("dest") == dest + + for v_cp in verify_copy.contacted.values(): + assert v_cp.get("rc") == 0 + stdout = v_cp.get("stdout") + assert stdout is not None + assert len(stdout.splitlines()) == 1 + + finally: + hosts.all.zos_data_set(name=dest, state="absent") + + @pytest.mark.pdse def test_copy_multiple_data_set_members(ansible_zos_module): hosts = ansible_zos_module From ee42b6a66017900a891ab57be14de52a637cda87 Mon Sep 17 00:00:00 2001 From: ketankelkar <ktnklkr@gmail.com> Date: Mon, 17 Apr 2023 16:43:56 -0700 Subject: [PATCH 099/495] Volume Initialization Module (#654) * Adding ZOS_INIT plugins The module and action for ZOS_Init command, under zos_ickdsf_command. Designed for version 1.3.1, and example given references removed zos_ssh.py. * Create zos_ickdsf_command.rst Tentative documentation. Does not quite list every return, and program may change to work with overall zos core collection. * Update zos_ickdsf_command.py fixed to pass pep8 sanity test. * Update zos_ickdsf_command.py Edited zos_ickdsf_command.py in action folder to pass pep8 style check. * bugfix - undo hard-coded storagegroup param from command passed to mvs_raw Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update encoding and licensing info Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * edit descriptions, leaving in old to verify messaging with dfsms team Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove references to buildix Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * cleanup examples docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add return docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add some unit tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update dict names, some clean up Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update interface and docs with cleaner descriptions and define default values and required true/false where applicable Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove init param and bring out sub-options one level Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename module from zos_ickdsf_command to zos_ickdsf_init Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove files referring to zos_ickdsf_command; i forgot to untrack these in the previous commit Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove params from intitial definition of results dict which are not always present, remove unused return params, rename message to msg to match ibm_zos_core and ansible style Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add ignore missing gplv3 license warning from sanity testings to existing ignore files Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * style edits to pass sanity tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add in-depth task name to first example docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add minimal params functional test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * convert action plugin to module_util, remove IckdsfCommand and CommandInit class definitions and custom error classes, move convert method to module_util Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename rc param, update func test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add note about disabled verify_offline check Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * removed volume_address checks which can be handled by ansiblemodule parsing validation or ickdsf itself; changed function signature of convert method to include results dicts so it can pass it to fail_json method in the case of module failure Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add invalid volume_address func tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * change param names, organize mvscmd return output Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * consolidate negative func tests, add negative tests for vtoc_tracks param, start list for postive tests to implement Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add func tests for "verify_existing_volid" and "verify_no_data_sets_exist" Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add volid positive and negative func tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add mutual exclusion to no index/sms_managedw with func test, minor edits to comments * add func tests for index param Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add func test for vtoc_tracks param Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove 'addr_range', 'volid_prefix' and 'output_html' params; add example with ansible loop to init multiple serial volumes Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * convert 'content' return param to list for prettier playbook output Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add specific examples to docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add guard rail test with volser check set to ensure target volume is as expected Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * switch target test vol to user02, remove existing data sets in initial test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * start adding author names to docstring, edits of yaml for sanity test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename module to zos_volume_init Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove unit test suite which was not kept up to date with code restructure Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove old/outdated .rst file Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * bring up to speed with v1.5.0-beta.1 which is currently more recent than dev Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add sanity test 2.15 file Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * clean up trailing empty lines/space chars Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * more pep8 style Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * pull in more changes from latest dev branch Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * tweak tests/sanity/ignore-2.10.txt file Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename volume_address to address, rename verify_no_data_sets_exist to verify_volume_empty, rename verify_existing_volid to verify_volid, rename vtoc_tracks to vtoc_size, rename return param command to cmd, make suggested doc changes, switch off check_mode Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * resolve pep8 line too long issue Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add year to copyright, add gh user name to author, restyle true/false with code rather than italics, remove backslashes in docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add changelog fragment Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update zos_fetch test case to create data set instead of using one that gets wiped away by zos_volume_init test cases Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * address PR comments including pep8 styling, docstring changes including add seealso section, add details around authorized usage of ickdsf, some light wordsmithing, and remove cmd from return structure Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add backticks to other special chars in docs for consistency Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * on more formatting tweak to national chars in docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add new param to support tmp_hlq Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * extend tmphlq binding to sysprint(*), disable verbose mode for mvscmd call Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove references to betterargparser - it cannot be used in its current state as it is too restrictive on volume serial and qualifier args Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove soft language ie beware, please; reduce line lengths, fix a spelling error Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * reorganize test case code for more logical grouping and un-nesting from a for loop, also update copyright year that was missed Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update display command check to only check particular lines instead of entire output in test_volid_address_assigned_correctly which would always be positive when tested against the entire output Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: trevor-glassey <Trevor.Glassey@ibm.com> --- .../654-new-module-zos_volume_init.yml | 2 + plugins/module_utils/ickdsf.py | 151 +++++++ plugins/module_utils/zos_mvs_raw.py | 9 +- plugins/modules/zos_volume_init.py | 272 +++++++++++++ .../functional/modules/test_zos_fetch_func.py | 30 +- .../modules/test_zos_volume_init_func.py | 385 ++++++++++++++++++ tests/sanity/ignore-2.10.txt | 5 +- tests/sanity/ignore-2.11.txt | 4 +- tests/sanity/ignore-2.12.txt | 3 + tests/sanity/ignore-2.13.txt | 1 + tests/sanity/ignore-2.14.txt | 1 + tests/sanity/ignore-2.15.txt | 1 + tests/sanity/ignore-2.9.txt | 5 +- 13 files changed, 861 insertions(+), 8 deletions(-) create mode 100644 changelogs/fragments/654-new-module-zos_volume_init.yml create mode 100644 plugins/module_utils/ickdsf.py create mode 100644 plugins/modules/zos_volume_init.py create mode 100644 tests/functional/modules/test_zos_volume_init_func.py diff --git a/changelogs/fragments/654-new-module-zos_volume_init.yml b/changelogs/fragments/654-new-module-zos_volume_init.yml new file mode 100644 index 000000000..41808d718 --- /dev/null +++ b/changelogs/fragments/654-new-module-zos_volume_init.yml @@ -0,0 +1,2 @@ +major_changes: +- zos_volume_init - Introduces new module to handle volume (or minidisk) initialization. (https://github.com/ansible-collections/ibm_zos_core/pull/654) \ No newline at end of file diff --git a/plugins/module_utils/ickdsf.py b/plugins/module_utils/ickdsf.py new file mode 100644 index 000000000..67ddd3d9d --- /dev/null +++ b/plugins/module_utils/ickdsf.py @@ -0,0 +1,151 @@ +# Copyright (c) IBM Corporation 2023 +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import, division, print_function +__metaclass__ = type + +# from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import ( +# MissingZOAUImport, +# MissingImport, +# ) + +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.zos_mvs_raw import MVSCmd # pylint: disable=import-error +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.dd_statement import ( # pylint: disable=import-error + DDStatement, + StdoutDefinition, + StdinDefinition, +) + + +def get_init_command(module, result, args): + + # Get parameters from playbooks + address = args.get('address') + verify_volid = args.get('verify_volid') + verify_offline = args.get('verify_offline') + volid = args.get('volid') + vtoc_size = args.get('vtoc_size') + index = args.get('index') + verify_volume_empty = args.get('verify_volume_empty') + sms_managed = args.get('sms_managed') + + # Let AnsibleModule param parsing handle this check. + # validate parameters + # if address is None: + # msg = 'Volume address must be defined' + # # raise Exception(msg) + # module.fail_json(msg) # TODO - fail with result -- do i want an init class so i can self.fail_json? + + # let ICKDSF handle this check. expect RC=12 + # try: + # int(address, 16) + # except ValueError: + # result['failed'] = True + # msg = 'address must be 3 or 4 64-bit hexadecimal digits' + # # raise Exception(msg) + # module.fail_json(msg, **result) # TODO - fail with result -- do i want an init class so i can self.fail_json? + + # convert playbook args to JCL parameters + cmd_args = { + 'address': 'unit({0})'.format(address) + } + + if vtoc_size: + cmd_args['vtoc_size'] = 'vtoc(0, 1, {0})'.format(vtoc_size) + else: + cmd_args['vtoc_size'] = '' + if volid: + cmd_args['volid'] = 'volid({0})'.format(volid) + else: + cmd_args['volid'] = '' + if not verify_volid: + cmd_args['verify_volid'] = 'noverify' + else: + cmd_args['verify_volid'] = 'verify({0})'.format(verify_volid) + if verify_offline: + cmd_args['verify_offline'] = 'verifyoffline' + else: + cmd_args['verify_offline'] = 'noverifyoffline' + if verify_volume_empty: + cmd_args['verify_volume_empty'] = 'nods' + else: + cmd_args['verify_volume_empty'] = 'ds' + if index: + cmd_args['index'] = '' + else: + cmd_args['index'] = 'noindex' + if sms_managed: + cmd_args['sms_managed'] = 'storagegroup' + else: + cmd_args['sms_managed'] = '' + + # Format into JCL strings for zos_mvs_raw + cmd = [ + ' init {0} {1} {2} {3} - '.format( + cmd_args['address'], + cmd_args['verify_volid'], + cmd_args['verify_offline'], + cmd_args['volid']), + ' {0} {1} {2} {3}'.format( + cmd_args['vtoc_size'], + cmd_args['sms_managed'], + cmd_args['verify_volume_empty'], + cmd_args['index'])] + + return cmd + + +def init(module, result, parsed_args): + # Convert args parsed from module to ickdsf INIT command + cmd = get_init_command(module, result, parsed_args) + + # TODO - add error handling here and in get_init_command() for "bad" cmd + + # define/build DDs to pass into MVS Command + + if parsed_args.get('tmp_hlq'): + sysInDDStatement = DDStatement("SYSIN", StdinDefinition(cmd, tmphlq=parsed_args.get('tmp_hlq'))) + else: + sysInDDStatement = DDStatement("SYSIN", StdinDefinition(cmd)) + + # tmphlq is not currently captured in the construction of the StdoutDefinition DD. + # tmphlq is handled in the mvscmd.execute_authorized call in this case. + sysprintDDStatement = DDStatement("SYSPRINT", StdoutDefinition()) + + dds = [] + dds.append(sysprintDDStatement) + dds.append(sysInDDStatement) + + # invoke MVS Command + if parsed_args.get('tmp_hlq'): + response = MVSCmd.execute_authorized("ICKDSF", dds, parm='NOREPLYU,FORCE', tmp_hlq=parsed_args.get('tmp_hlq')) + # uncomment the following line to see MVSCmd verbose output in stderr. + # response = MVSCmd.execute_authorized("ICKDSF", dds, parm='NOREPLYU,FORCE', verbose=True, tmp_hlq=parsed_args.get('tmp_hlq')) + else: + response = MVSCmd.execute_authorized("ICKDSF", dds, parm='NOREPLYU,FORCE') + # uncomment the following line to see MVSCmd verbose output in stderr. + # response = MVSCmd.execute_authorized("ICKDSF", dds, parm='NOREPLYU,FORCE', verbose=True) + + rc = response.rc + + result['rc'] = rc + result['content'] = response.stdout.strip().split("\n") + if response.stderr: + result['stderr'] = response.stderr + + if rc != 0: + result['failed'] = True + msg = "Non-zero return code. See 'content' for details." + module.fail_json(msg=msg, **result) + else: + result['changed'] = True + + return dict(result) diff --git a/plugins/module_utils/zos_mvs_raw.py b/plugins/module_utils/zos_mvs_raw.py index f8a91ac0d..7c2badf84 100644 --- a/plugins/module_utils/zos_mvs_raw.py +++ b/plugins/module_utils/zos_mvs_raw.py @@ -1,4 +1,4 @@ -# Copyright (c) IBM Corporation 2020 +# Copyright (c) IBM Corporation 2020, 2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -45,23 +45,26 @@ def execute(pgm, dds, parm="", debug=False, verbose=False): return MVSCmdResponse(rc, out, err) @staticmethod - def execute_authorized(pgm, dds, parm="", debug=False, verbose=False): + def execute_authorized(pgm, dds, parm="", debug=False, verbose=False, tmp_hlq=None): """Execute an authorized MVS command. Args: pgm (str): The name of the program to execute. dds (list[DDStatement]): A list of DDStatement objects. parm (str, optional): Argument string if required by the program. Defaults to "". + tmp_hlq (str): The name of the temporary high level qualifier to use for temp data sets. Returns: MVSCmdResponse: The response of the command. """ module = AnsibleModuleHelper(argument_spec={}) - command = "mvscmdauth {0} {1} {2} ".format( + command = "mvscmdauth {0} {1} {2} {3} ".format( "-d" if debug else "", "-v" if verbose else "", + "--tmphlq={0}".format(tmp_hlq.upper()) if tmp_hlq else "", MVSCmd._build_command(pgm, dds, parm), ) + rc, out, err = module.run_command(command) return MVSCmdResponse(rc, out, err) diff --git a/plugins/modules/zos_volume_init.py b/plugins/modules/zos_volume_init.py new file mode 100644 index 000000000..03854a80f --- /dev/null +++ b/plugins/modules/zos_volume_init.py @@ -0,0 +1,272 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- + +# Copyright (c) IBM Corporation 2022, 2023 +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import (absolute_import, division, print_function) + +__metaclass__ = type + +DOCUMENTATION = r""" +module: zos_volume_init +short_description: Initialize volumes or minidisks. +description: + - Initialize a volume or minidisk on z/OS. + - I(zos_volume_init) will create the volume label and entry into the volume + table of contents (VTOC). + - Volumes are used for storing data and executable programs. + - A minidisk is a portion of a disk that is linked to your virtual machine. + - A VTOC lists the data sets that reside on a volume, their location, size, and + other attributes. + - I(zos_volume_init) uses the ICKDSF command INIT to initialize a volume. In some + cases the command could be protected by facility class `STGADMIN.ICK.INIT`. + Protection occurs when the class is active, and the class profile is defined. + Ensure the user executing the Ansible task is permitted to execute + ICKDSF command INIT, otherwise, any user can use the command. + - ICKDSF is an Authorized Program Facility (APF) program on z/OS, + I(zos_volume_init) will run in authorized mode but if the program ICKDSF is + not APF authorized, the task will end. + - Note that defaults set on target z/OS systems may override ICKDSF parameters. + - If is recommended that data on the volume is backed up as the I(zos_volume_init) + module will not perform any backups. You can use the + L(zos_backup_restore,./zos_backup_restore.html) module to backup a volume. + + +version_added: 1.6.0 +author: + - "Austen Stewart (@stewartad)" + - "Almigdad Suliman (@Almigdad-Suliman)" + - "Nicholas Teves (@nktvs)" + - "Nuoya Xie (@nxie13)" + - "Trevor Glassey (@tkglassey)" + - "Tyler Edwards (@TLEdwards-Git)" + - "Ketan Kelkar (@ketankelkar)" + +options: + address: + description: + - I(address) is a 3 or 4 digit hexadecimal number that specifies the + address of the volume or minidisk. + - I(address) can be the number assigned to the device (device number) + when it is installed or the virtual address. + required: true + type: str + verify_volid: + description: + - Verify that the volume serial matches what is on the existing volume or minidisk. + - I(verify_volid) must be 1 to 6 alphanumeric characters or "*NONE*". + - To verify that a volume serial number does not exist, use + I(verify_volid="*NONE*"). + - If I(verify_volid) is specified and the volume serial number does not + match that found on the volume or minidisk, initialization does not complete. + - If I(verify_volid="*NONE*") is specified and a volume serial is found on + the volume or minidisk, initialization does not complete. + - Note, this option is B(not) a boolean, leave it blank to skip the verification. + required: false + type: str + verify_offline: + description: + - Verify that the device is not online to any other systems, initialization + does not complete. + type: bool + required: false + default: true + volid: + description: + - The volume serial number used to initialize a volume or minidisk. + - Expects 1-6 alphanumeric, national ($,#,@) or special characters. + - A I(volid) with less than 6 characters will be padded with spaces. + - A I(volid) can also be referred to as volser or volume serial number. + - When I(volid) is not specified for a previously initialized volume or + minidisk, the volume serial number will remain unchanged. + required: false + type: str + vtoc_size: + description: + - The number of tracks to initialize the volume table of contents (VTOC) with. + - The VTOC will be placed in cylinder 0 head 1. + - If no tracks are specified it will default to the number of tracks in a + cylinder minus 1. Tracks in a cylinder vary based on direct-access storage + device (DASD) models, for 3390 a cylinder is 15 tracks. + required: false + type: int + index: + description: + - Create a volume table of contents (VTOC) index. + - The VTOC index enhances the performance of VTOC access. + - When set to I(false), no index will be created. + required: false + type: bool + default: true + sms_managed: + description: + - Specifies that the volume be managed by Storage Management System (SMS). + - If I(sms_managed) is I(true) then I(index) must also be I(true). + type: bool + required: false + default: true + verify_volume_empty: + description: + - Verify that no data sets other than the volume table of contents (VTOC) + index or the VSAM Volume Data Set(VVDS) exist on the target volume. + required: false + type: bool + default: true + tmp_hlq: + description: + - Override the default high level qualifier (HLQ) for temporary and backup + datasets. + - The default HLQ is the Ansible user used to execute the module and if + that is not available, then the value C(TMPHLQ) is used. + required: false + type: str +seealso: +- module: zos_backup_restore +""" +EXAMPLES = r""" +- name: Initialize target volume with all default options. Target volume address is '1234', set volume name to 'DEMO01'. + Target volume is checked to ensure it is offline and contains no data sets. Volume is SMS managed, has an index + and VTOC size defined by the system. + zos_volume_init: + address: "1234" + volid: "DEMO01" + +- name: Initialize target volume with all default options and additionally check the existing volid + matches the given value 'DEMO02' before re-initializing the volume and renaming it to 'DEMO01'. + zos_volume_init: + address: "1234" + volid: "DEMO01" + verify_volid: "DEMO02" + +- name: Initialize non-SMS managed target volume with all the default options. + zos_volume_init: + address: "1234" + volid: "DEMO01" + sms_managed: no + +- name: Initialize non-SMS managed target volume with all the default options and + override the default high level qualifier (HLQ). + zos_volume_init: + address: 1234 + volid: DEMO01 + sms_managed: no + tmp_hlq: TESTUSR + +- name: Initialize a new SMS managed DASD volume with new volume serial 'e8d8' with 30 track VTOC, an index, as long as + the existing volume serial is 'ine8d8' and there are no pre-existing data sets on the target. The check to see + if volume is online before intialization is skipped. + zos_volume_init: + address: e8d8 + vtoc_size: 30 + index: yes + sms_managed: yes + volid: ine8d8 + verify_volid: ine8d8 + verify_volume_empty: yes + verify_offline: no + +- name: Initialize 3 new DASD volumes (0901, 0902, 0903) for use on a z/OS system as 'DEMO01', 'DEMO02', 'DEMO03' + using Ansible loops. + zos_volume_init: + address: "090{{ item }}" + volid: "DEMO0{{ item }}" + loop: "{{ range(1, 4, 1) }}" +""" +RETURN = r""" +msg: + description: Failure message returned by module. + returned: failure + type: str + sample: "'Index' cannot be False for SMS managed volumes." +rc: + description: + - Return code from ICKDSF init command. + type: dict + returned: when ICKDSF program is run. +content: + description: + - Raw output from ICKDSF. + returned: when ICKDSF program is run. + type: list + elements: str + sample: + [ + "1ICKDSF - MVS/ESA DEVICE SUPPORT FACILITIES 17.0 TIME: 18:32:22 01/17/23 PAGE 1", + "0 ", + "0 INIT UNIT(0903) NOVERIFY NOVERIFYOFFLINE VOLID(KET678) -", + "0 NODS NOINDEX", + "-ICK00700I DEVICE INFORMATION FOR 0903 IS CURRENTLY AS FOLLOWS:", + "- PHYSICAL DEVICE = 3390", + "- STORAGE CONTROLLER = 2107", + "- STORAGE CONTROL DESCRIPTOR = E8", + "- DEVICE DESCRIPTOR = 0C", + "- ADDITIONAL DEVICE INFORMATION = 4A00003C", + "- TRKS/CYL = 15, # PRIMARY CYLS = 100", + "0ICK04000I DEVICE IS IN SIMPLEX STATE", + "0ICK00703I DEVICE IS OPERATED AS A MINIDISK", + " ICK00091I 0903 NED=002107.900.IBM.75.0000000BBA01", + "-ICK03091I EXISTING VOLUME SERIAL READ = KET987", + "-ICK03096I EXISTING VTOC IS LOCATED AT CCHH=X'0000 0001' AND IS 14 TRACKS.", + "0ICK01314I VTOC IS LOCATED AT CCHH=X'0000 0001' AND IS 14 TRACKS.", + "-ICK00001I FUNCTION COMPLETED, HIGHEST CONDITION CODE WAS 0", + "0 18:32:22 01/17/23", + "0 ", + "-ICK00002I ICKDSF PROCESSING COMPLETE. MAXIMUM CONDITION CODE WAS 0", + ] +""" + +from ansible.module_utils.basic import AnsibleModule + +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import ickdsf # pylint: disable=import-error + + +def run_module(): + + module_args = dict( + address=dict(type="str", required=True), + verify_volid=dict(type="str", required=False), + verify_offline=dict(type="bool", required=False, default=True), + volid=dict(type="str", required=False), + vtoc_size=dict(type="int", required=False), + index=dict(type="bool", required=False, default=True), + sms_managed=dict(type="bool", required=False, default=True), + verify_volume_empty=dict(type="bool", required=False, default=True), + tmp_hlq=dict(type='str', required=False, default=None), + ) + + result = dict( + changed=False, + ) + + module = AnsibleModule( + argument_spec=module_args, + supports_check_mode=False + ) + + # sms managed and index are defined by ickdsf init as mutually exclusive. + if module.params['sms_managed'] and not module.params['index']: + module.fail_json(msg="'Index' cannot be False for SMS managed volumes.", **result) + + if module.check_mode: + module.exit_json(**result) + + result.update(ickdsf.init(module, result, module.params)) + + module.exit_json(**result) + + +def main(): + run_module() + + +if __name__ == '__main__': + main() diff --git a/tests/functional/modules/test_zos_fetch_func.py b/tests/functional/modules/test_zos_fetch_func.py index f4c514265..bc1154de2 100644 --- a/tests/functional/modules/test_zos_fetch_func.py +++ b/tests/functional/modules/test_zos_fetch_func.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2020, 2021 +# Copyright (c) IBM Corporation 2020, 2021, 2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -31,8 +31,9 @@ DUMMY DATA == LINE 03 == """ + TEST_PS = "IMSTESTL.IMS01.DDCHKPT" -TEST_PS_VB = "IMSTESTL.IMS01.SPOOL1" +TEST_PS_VB = "USER.PRIV.PSVB" TEST_PDS = "IMSTESTL.COMNUC" TEST_PDS_MEMBER = "IMSTESTL.COMNUC(ATRQUERY)" TEST_VSAM = "FETCH.TEST.VS" @@ -78,6 +79,29 @@ /* """ +def create_and_populate_test_ps_vb(ansible_zos_module): + params=dict( + name=TEST_PS_VB, + type='SEQ', + record_format='VB', + record_length='3180', + block_size='3190' + ) + ansible_zos_module.all.zos_data_set(**params) + params = dict( + src=TEST_PS_VB, + block=TEST_DATA + ) + ansible_zos_module.all.zos_blockinfile(**params) + + +def delete_test_ps_vb(ansible_zos_module): + params=dict( + name=TEST_PS_VB, + state='absent' + ) + ansible_zos_module.all.zos_data_set(**params) + def test_fetch_uss_file_not_present_on_local_machine(ansible_zos_module): hosts = ansible_zos_module @@ -154,6 +178,7 @@ def test_fetch_sequential_data_set_fixed_block(ansible_zos_module): def test_fetch_sequential_data_set_variable_block(ansible_zos_module): hosts = ansible_zos_module + create_and_populate_test_ps_vb(ansible_zos_module) params = dict(src=TEST_PS_VB, dest="/tmp/", flat=True) dest_path = "/tmp/" + TEST_PS_VB try: @@ -167,6 +192,7 @@ def test_fetch_sequential_data_set_variable_block(ansible_zos_module): finally: if os.path.exists(dest_path): os.remove(dest_path) + delete_test_ps_vb(ansible_zos_module) def test_fetch_partitioned_data_set(ansible_zos_module): diff --git a/tests/functional/modules/test_zos_volume_init_func.py b/tests/functional/modules/test_zos_volume_init_func.py new file mode 100644 index 000000000..39952105c --- /dev/null +++ b/tests/functional/modules/test_zos_volume_init_func.py @@ -0,0 +1,385 @@ +# -*- coding: utf-8 -*- + +# Copyright (c) IBM Corporation 2023 +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import, division, print_function +import pytest + +__metaclass__ = type + +# TEST_VOL_ADDR = '0903' +# TEST_VOL_SER = 'KET999' +TEST_VOL_ADDR = '01A2' +TEST_VOL_SER = 'USER02' + +INDEX_CREATION_SUCCESS_MSG = 'VTOC INDEX CREATION SUCCESSFUL' +VTOC_LOC_MSG = "ICK01314I VTOC IS LOCATED AT CCHH=X'0000 0001' AND IS {:4d} TRACKS." + + +# Guard Rail to prevent unintentional initialization of targeted volume. +# If this test fails, either reset target volume serial to match +# verify_volid below or change value to match current volume serial on +# target. + +def test_guard_rail_and_setup(ansible_zos_module): + hosts = ansible_zos_module + + # remove all data sets from target volume. Expected to be the following 3 + hosts.all.zos_data_set(name="IMSTESTL.IMS01.SPOOL1", state="absent") + hosts.all.zos_data_set(name="IMSTESTL.IMS01.SPOOL2", state="absent") + hosts.all.zos_data_set(name="IMSTESTL.IMS01.SPOOL3", state="absent") + + params = dict( + address=TEST_VOL_ADDR, + verify_offline=False, + volid=TEST_VOL_SER, + verify_volid='USER02' + ) + + # take volume offline + hosts.all.zos_operator(cmd=f"vary {TEST_VOL_ADDR},offline") + + results = hosts.all.zos_volume_init( + address=params['address'], + verify_offline=params['verify_offline'], + volid=params['volid'], + verify_volid=params['verify_volid'] + ) + + for result in results.contacted.values(): + # assert result.get('changed') is True + assert result['rc'] == 0 + + # bring volume back online + hosts.all.zos_operator(cmd=f"vary {TEST_VOL_ADDR},online") + + +@pytest.mark.parametrize( + "params", [ + # min params test with index : true + ({ + 'address': TEST_VOL_ADDR, + 'verify_offline': False, + 'volid': TEST_VOL_SER, + 'index' : True + }), + # min params test with index : false + ({ + 'address': TEST_VOL_ADDR, + 'verify_offline': False, + 'volid': TEST_VOL_SER, + 'index' : False, + 'sms_managed' : False # default is True, which cannot be with no index. + }), + ] +) +def test_index_param(ansible_zos_module, params): + hosts = ansible_zos_module + + # take volume offline + hosts.all.zos_operator(cmd=f"vary {TEST_VOL_ADDR},offline") + + results = hosts.all.zos_volume_init(**params) + + for result in results.contacted.values(): + assert result.get("changed") is True + assert result.get('rc') == 0 + content_str = ''.join(result.get("content")) + if params['index']: + assert INDEX_CREATION_SUCCESS_MSG in content_str + else: + assert INDEX_CREATION_SUCCESS_MSG not in content_str + + # bring volume back online + hosts.all.zos_operator(cmd=f"vary {TEST_VOL_ADDR},online") + + +# check that correct volume_addr is assigned to correct volid +def test_volid_address_assigned_correctly(ansible_zos_module): + hosts = ansible_zos_module + + params = { + 'address': TEST_VOL_ADDR, + 'verify_offline': False, + 'volid': TEST_VOL_SER, + } + # take volume offline + hosts.all.zos_operator(cmd=f"vary {TEST_VOL_ADDR},offline") + + results = hosts.all.zos_volume_init(**params) + + # bring volume back online + hosts.all.zos_operator(cmd=f"vary {TEST_VOL_ADDR},online") + + for result in results.contacted.values(): + assert result.get("changed") is True + assert result.get('rc') == 0 + + # The display command issued queries a volume called $TEST_VOL_SER. The + # expected return values are: 'IEE455I UNIT STATUS NO DEVICES WITH REQUESTED + # ATTRIBUTES' or a line with several attributes including unit address + # example output: + # 'UNIT TYPE STATUS VOLSER VOLSTATE SS' + # '0903 3390 O DEMO01 PRIV/RSDNT 0' + # or: + # 'IEE455I UNIT STATUS NO DEVICES WITH REQUESTED ATTRIBUTES' + # (expected value $TEST_VOL_ADDR) and volume serial + # (expected value $TEST_VOL_SER). If those two match, then the 'volid' + # parameter is correctly assigned to the 'address' parameter. + + # Display command to print device status, volser and addr should correspond + display_cmd_output = list(hosts.all.zos_operator(cmd=f"D U,VOL={TEST_VOL_SER}").contacted.values())[0] + + # zos_operator output contains the command as well, only the last line of + # the output is relevant for the needs of this test case. + display_cmd_output = display_cmd_output.get('content')[-1] + + assert TEST_VOL_SER in display_cmd_output + +def test_no_index_sms_managed_mutually_exclusive(ansible_zos_module): + hosts = ansible_zos_module + + params = { + 'address': TEST_VOL_ADDR, + 'verify_offline': False, + 'volid': TEST_VOL_SER, + 'index' : False, + 'sms_managed' : True + } + # take volume offline + hosts.all.zos_operator(cmd=f"vary {TEST_VOL_ADDR},offline") + + results = hosts.all.zos_volume_init(**params) + + # bring volume back online + hosts.all.zos_operator(cmd=f"vary {TEST_VOL_ADDR},online") + + for result in results.contacted.values(): + assert result.get("changed") is False + assert "'Index' cannot be False" in result.get("msg") + +def test_vtoc_size_parm(ansible_zos_module): + hosts = ansible_zos_module + + params = { + 'address': TEST_VOL_ADDR, + 'verify_offline': False, + 'volid': TEST_VOL_SER, + 'vtoc_size' : 8 + # 'vtoc_size' : 11 # test to test that this test handles 2 digit vtoc_index + } + # take volume offline + hosts.all.zos_operator(cmd=f"vary {TEST_VOL_ADDR},offline") + + results = hosts.all.zos_volume_init(**params) + + # bring volume back online + hosts.all.zos_operator(cmd=f"vary {TEST_VOL_ADDR},online") + + for result in results.contacted.values(): + assert result.get("changed") is True + assert result.get('rc') == 0 + content_str = ''.join(result.get("content")) + assert VTOC_LOC_MSG.format(params.get('vtoc_size')) in content_str + +@pytest.mark.parametrize( + "params", [ + # min params test; also sets up with expected attrs (eg existing volid) + ({ + 'address': TEST_VOL_ADDR, + 'verify_offline': False, + 'volid': TEST_VOL_SER, + }), + # verify_volid check - volid is known b/c previous test set it up. + ({ + 'address': TEST_VOL_ADDR, + 'verify_offline': False, + 'volid': TEST_VOL_SER, + 'verify_volid' : TEST_VOL_SER + }), + # verify_volume_empty check - no data sets on vol is known b/c previous test set it up. + ({ + 'address': TEST_VOL_ADDR, + 'verify_offline': False, + 'volid': TEST_VOL_SER, + 'verify_volume_empty' : True + }), + ] +) + + +def test_good_param_values(ansible_zos_module, params): + hosts = ansible_zos_module + + # take volume offline + hosts.all.zos_operator(cmd=f"vary {TEST_VOL_ADDR},offline") + + results = hosts.all.zos_volume_init(**params) + + for result in results.contacted.values(): + assert result.get("changed") is True + assert result.get('rc') == 0 + + # bring volume back online + hosts.all.zos_operator(cmd=f"vary {TEST_VOL_ADDR},online") + + +@pytest.mark.parametrize( + "params,expected_rc", [ + # address not hexadecimal + ({ + 'address': 'XYZ', + 'verify_offline': False, + 'volid': TEST_VOL_SER + }, 12), + # address length too short + ({ + 'address': '01', + 'verify_offline': False, + 'volid': TEST_VOL_SER + }, 12), + # address specified is not accesible to current + ({ + 'address': '0000', + 'verify_offline': False, + 'volid': TEST_VOL_SER + }, 12), + # negative value for vtoc_size + ({ + 'address': TEST_VOL_ADDR, + 'verify_offline': False, + 'volid': TEST_VOL_SER, + 'vtoc_size': -10 + }, 12), + # note - "'vtoc_size': 0" gets treated as vtoc_size wasn't defined and invokes default behavior. + # volid check - incorrect existing volid + ({ + 'address': TEST_VOL_ADDR, + 'verify_offline': False, + 'volid': TEST_VOL_SER, + 'verify_volid': '000000' + }, 12), + # volid value too long + ({ + 'address': 'ABCDEFGHIJK', + 'verify_offline': False, + 'volid': TEST_VOL_SER, + }, 12), + # ({}, 0) + + ] +) + +def test_bad_param_values(ansible_zos_module, params, expected_rc): + hosts = ansible_zos_module + + # take volume offline + hosts.all.zos_operator(cmd=f"vary {TEST_VOL_ADDR},offline") + + results = hosts.all.zos_volume_init(**params) + + for result in results.contacted.values(): + assert result.get("changed") is False + assert result.get('failed') is True + assert result.get('rc') == expected_rc + + # bring volume back online + hosts.all.zos_operator(cmd=f"vary {TEST_VOL_ADDR},online") + + +# Note - volume needs to be sms managed for zos_data_set to work. Possible +# points of failure are: +# unable to init volume first time around +# unable to allocate data set +# unable to bring vol back online to delete data set +# If there is a data set remaining on the volume, that would interfere +# with other tests! + +def test_no_existing_data_sets_check(ansible_zos_module): + hosts = ansible_zos_module + + setup_params = { + 'address': TEST_VOL_ADDR, + 'verify_offline': False, + 'volid': TEST_VOL_SER, + 'sms_managed': False # need non-sms managed to add data set on ECs + } + test_params = { + 'address': TEST_VOL_ADDR, + 'verify_offline': False, + 'volid': TEST_VOL_SER, + 'verify_volume_empty': True, + } + + # take volume offline + hosts.all.zos_operator(cmd=f"vary {TEST_VOL_ADDR},offline") + + try: + # set up/initialize volume properly so a data set can be added + hosts.all.zos_volume_init(**setup_params) + + # bring volume back online + hosts.all.zos_operator(cmd=f"vary {TEST_VOL_ADDR},online") + + # allocate data set to volume + hosts.all.zos_data_set(name="USER.PRIVATE.TESTDS", type='pds', volumes=TEST_VOL_SER) + + # take volume back offline + hosts.all.zos_operator(cmd=f"vary {TEST_VOL_ADDR},offline") + + # run vol_init against vol with data set on it. + results = hosts.all.zos_volume_init(**test_params) + + for result in results.contacted.values(): + assert result.get("changed") is False + assert result.get('failed') is True + assert result.get('rc') == 12 + + # clean up just in case of failures, volume needs to be reset for other + # tests. Not sure what to do for DatasetDeleteError + finally: + # bring volume back online + hosts.all.zos_operator(cmd=f"vary {TEST_VOL_ADDR},online") + + # remove data set + hosts.all.zos_data_set(name="USER.PRIVATE.TESTDS", state='absent') + + +# Note - technically verify_offline is not REQUIRED but it defaults to True +# and the volumes on the EC systems do not seem to go fully offline. +# Therefore, while testing against the EC machines, the verify_offline +# check needs to be skipped in order for ickdsf to be invoked. + +def test_minimal_params(ansible_zos_module): + hosts = ansible_zos_module + + params = dict( + address=TEST_VOL_ADDR, + verify_offline=False, + volid=TEST_VOL_SER + ) + + # take volume offline + hosts.all.zos_operator(cmd=f"vary {TEST_VOL_ADDR},offline") + + results = hosts.all.zos_volume_init( + address=params['address'], + verify_offline=params['verify_offline'], + volid=params['volid'] + ) + + for result in results.contacted.values(): + assert result.get('changed') is True + assert result['rc'] == 0 + + # bring volume back online + hosts.all.zos_operator(cmd=f"vary {TEST_VOL_ADDR},online") diff --git a/tests/sanity/ignore-2.10.txt b/tests/sanity/ignore-2.10.txt index 01b86286b..74db3a282 100644 --- a/tests/sanity/ignore-2.10.txt +++ b/tests/sanity/ignore-2.10.txt @@ -79,4 +79,7 @@ plugins/modules/zos_tso_command.py compile-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_tso_command.py import-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_gather_facts.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_gather_facts.py compile-2.6!skip # Python 2.6 is unsupported -plugins/modules/zos_gather_facts.py import-2.6!skip # Python 2.6 is unsupported \ No newline at end of file +plugins/modules/zos_gather_facts.py import-2.6!skip # Python 2.6 is unsupported +plugins/modules/zos_volume_init.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_volume_init.py compile-2.6!skip # Python 2.6 is unsupported +plugins/modules/zos_volume_init.py import-2.6!skip # Python 2.6 is unsupported \ No newline at end of file diff --git a/tests/sanity/ignore-2.11.txt b/tests/sanity/ignore-2.11.txt index 874e6d4de..420528c74 100644 --- a/tests/sanity/ignore-2.11.txt +++ b/tests/sanity/ignore-2.11.txt @@ -59,7 +59,6 @@ plugins/modules/zos_job_submit.py pylint:catching-non-exception # False positive plugins/modules/zos_lineinfile.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_lineinfile.py compile-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_lineinfile.py import-2.6!skip # Python 2.6 is unsupported -plugins/modules/zos_lineinfile.py compile-2.7!skip # Python 2.7 f string is not supported plugins/modules/zos_mount.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_mount.py compile-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_mount.py import-2.6!skip # Python 2.6 is unsupported @@ -81,3 +80,6 @@ plugins/modules/zos_tso_command.py import-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_gather_facts.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_gather_facts.py compile-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_gather_facts.py import-2.6!skip # Python 2.6 is unsupported +plugins/modules/zos_volume_init.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_volume_init.py compile-2.6!skip # Python 2.6 is unsupported +plugins/modules/zos_volume_init.py import-2.6!skip # Python 2.6 is unsupported diff --git a/tests/sanity/ignore-2.12.txt b/tests/sanity/ignore-2.12.txt index cc80ef80c..420528c74 100644 --- a/tests/sanity/ignore-2.12.txt +++ b/tests/sanity/ignore-2.12.txt @@ -80,3 +80,6 @@ plugins/modules/zos_tso_command.py import-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_gather_facts.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_gather_facts.py compile-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_gather_facts.py import-2.6!skip # Python 2.6 is unsupported +plugins/modules/zos_volume_init.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_volume_init.py compile-2.6!skip # Python 2.6 is unsupported +plugins/modules/zos_volume_init.py import-2.6!skip # Python 2.6 is unsupported diff --git a/tests/sanity/ignore-2.13.txt b/tests/sanity/ignore-2.13.txt index 67be075f7..8b4540038 100644 --- a/tests/sanity/ignore-2.13.txt +++ b/tests/sanity/ignore-2.13.txt @@ -32,3 +32,4 @@ plugins/modules/zos_ping.rexx validate-modules:invalid-extension # Opened issue plugins/modules/zos_ping.rexx validate-modules:python-syntax-error # Opened issue https://github.com/ansible/ansible/issues/79784 plugins/modules/zos_tso_command.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_gather_facts.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_volume_init.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 diff --git a/tests/sanity/ignore-2.14.txt b/tests/sanity/ignore-2.14.txt index 67be075f7..8b4540038 100644 --- a/tests/sanity/ignore-2.14.txt +++ b/tests/sanity/ignore-2.14.txt @@ -32,3 +32,4 @@ plugins/modules/zos_ping.rexx validate-modules:invalid-extension # Opened issue plugins/modules/zos_ping.rexx validate-modules:python-syntax-error # Opened issue https://github.com/ansible/ansible/issues/79784 plugins/modules/zos_tso_command.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_gather_facts.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_volume_init.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 diff --git a/tests/sanity/ignore-2.15.txt b/tests/sanity/ignore-2.15.txt index 67be075f7..8b4540038 100644 --- a/tests/sanity/ignore-2.15.txt +++ b/tests/sanity/ignore-2.15.txt @@ -32,3 +32,4 @@ plugins/modules/zos_ping.rexx validate-modules:invalid-extension # Opened issue plugins/modules/zos_ping.rexx validate-modules:python-syntax-error # Opened issue https://github.com/ansible/ansible/issues/79784 plugins/modules/zos_tso_command.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_gather_facts.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_volume_init.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 diff --git a/tests/sanity/ignore-2.9.txt b/tests/sanity/ignore-2.9.txt index d9b87031d..4a6c8a778 100644 --- a/tests/sanity/ignore-2.9.txt +++ b/tests/sanity/ignore-2.9.txt @@ -78,4 +78,7 @@ plugins/modules/zos_tso_command.py compile-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_tso_command.py import-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_gather_facts.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_gather_facts.py compile-2.6!skip # Python 2.6 is unsupported -plugins/modules/zos_gather_facts.py import-2.6!skip # Python 2.6 is unsupported \ No newline at end of file +plugins/modules/zos_gather_facts.py import-2.6!skip # Python 2.6 is unsupported +plugins/modules/zos_volume_init.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_volume_init.py compile-2.6!skip # Python 2.6 is unsupported +plugins/modules/zos_volume_init.py import-2.6!skip # Python 2.6 is unsupported From a856d18e9797f1d2eacf4ca7ab350b9f4cc3b390 Mon Sep 17 00:00:00 2001 From: Ivan Moreno <iamorenosoto@gmail.com> Date: Fri, 21 Apr 2023 09:55:54 -0700 Subject: [PATCH 100/495] Ported bugfixes for #664 and #678 from 1.5.0 (#743) * Ported bugfixes for #664 and #678 * Updated file normalization for sequential datasets * Fixed encoding when a remote source is used * Added encoding normalization for record length (#664) This is a port and tweak from the normalization found in the L2 support branch. Now the normalization applies for both local and remote source files. There is still work left to validate how this behaves when copying to partitioned datasets, and see if the normalization needs to handle directories as well. * Refactored normalization for USS files * Updated line endings normalization in PDSEs * Updated create_temp_with_lf_endings * Added tests for line endings normalization * Added changelog fragment * Updated changelog fragment * Updated copyright information * Added comments to record length tests * Added more context to the record length normalization --- .../fragments/743-zos_copy-encoding-bugs.yml | 9 + plugins/modules/zos_copy.py | 186 +++++++++++++----- .../functional/modules/test_zos_copy_func.py | 100 +++++++++- 3 files changed, 248 insertions(+), 47 deletions(-) create mode 100644 changelogs/fragments/743-zos_copy-encoding-bugs.yml diff --git a/changelogs/fragments/743-zos_copy-encoding-bugs.yml b/changelogs/fragments/743-zos_copy-encoding-bugs.yml new file mode 100644 index 000000000..1b58ddabe --- /dev/null +++ b/changelogs/fragments/743-zos_copy-encoding-bugs.yml @@ -0,0 +1,9 @@ +bugfixes: +- zos_copy - Fixes a bug where files not encoded in IBM-1047 + would trigger an error while computing the record length + for a new destination dataset. Issue 664. + (https://github.com/ansible-collections/ibm_zos_core/pull/743) +- zos_copy - Fixes a bug where the code for fixing an issue with + newlines in files (issue 599) would use the wrong encoding + for normalization. Issue 678. + (https://github.com/ansible-collections/ibm_zos_core/pull/743) diff --git a/plugins/modules/zos_copy.py b/plugins/modules/zos_copy.py index 2fe9ffd4c..739c0d8d0 100644 --- a/plugins/modules/zos_copy.py +++ b/plugins/modules/zos_copy.py @@ -926,15 +926,20 @@ def file_has_crlf_endings(self, src): {bool} -- True if the file uses CRLF endings, False if it uses LF ones. """ + # Python has to read the file in binary mode to not mask CRLF + # endings or enable universal newlines. If we used encoding="cp037", + # we would get '\n' as the line ending even when the file uses '\r\n'. with open(src, "rb") as src_file: - # readline() will read until it finds a \n. - content = src_file.readline() + # Reading the file in 1024-byte chunks. + content = src_file.read(1024) - # In EBCDIC, \r\n are bytes 0d and 15, respectively. - if content.endswith(b'\x0d\x15'): - return True - else: - return False + while content: + # In EBCDIC, \r\n are bytes 0d and 15, respectively. + if b'\x0d\x15' in content: + return True + content = src_file.read(1024) + + return False def create_temp_with_lf_endings(self, src): """Creates a temporary file with the same content as src but without @@ -955,10 +960,11 @@ def create_temp_with_lf_endings(self, src): with open(converted_src, "wb") as converted_file: with open(src, "rb") as src_file: - current_line = src_file.read() - converted_file.write(current_line.replace(b'\x0d', b'')) + chunk = src_file.read(1024) + # In IBM-037, \r is the byte 0d. + converted_file.write(chunk.replace(b'\x0d', b'')) - self._tag_file_encoding(converted_src, encode.Defaults.DEFAULT_EBCDIC_MVS_CHARSET) + self._tag_file_encoding(converted_src, "IBM-037") return converted_src except Exception as err: @@ -1314,6 +1320,7 @@ def copy_to_pdse( src_ds_type, src_member=None, dest_member=None, + encoding=None, ): """Copy source to a PDS/PDSE or PDS/PDSE member. @@ -1323,12 +1330,13 @@ def copy_to_pdse( Arguments: src {str} -- Path to USS file/directory or data set name. temp_path {str} -- Path to the location where the control node - transferred data to - conv_path {str} -- Path to the converted source file/directory - dest {str} -- Name of destination data set - src_ds_type {str} -- The type of source + transferred data to. + conv_path {str} -- Path to the converted source file/directory. + dest {str} -- Name of destination data set. + src_ds_type {str} -- The type of source. src_member {bool, optional} -- Member of the source data set to copy. - dest_member {str, optional} -- Name of destination member in data set + dest_member {str, optional} -- Name of destination member in data set. + encoding {dict, optional} -- Dictionary with encoding options. """ new_src = conv_path or temp_path or src src_members = [] @@ -1341,7 +1349,11 @@ def copy_to_pdse( else: path, dirs, files = next(os.walk(new_src)) - src_members = [os.path.normpath("{0}/{1}".format(path, file)) for file in files] + src_members = [ + os.path.normpath("{0}/{1}".format(path, file)) if self.is_binary + else normalize_line_endings("{0}/{1}".format(path, file), encoding) + for file in files + ] dest_members = [ dest_member if dest_member else data_set.DataSet.get_member_name_from_file(file) @@ -1453,7 +1465,7 @@ def get_file_record_length(file): """ max_line_length = 0 - with open(file, "r") as src_file: + with open(file, "r", encoding="utf-8") as src_file: current_line = src_file.readline() while current_line: @@ -2078,6 +2090,53 @@ def allocate_destination_data_set( return True +def normalize_line_endings(src, encoding=None): + """ + Normalizes src's encoding to IBM-037 (a dataset's default) and then normalizes + its line endings to LF. + + Arguments: + src (str) -- Path of a USS file. + encoding (dict, optional) -- Encoding options for the module. + + Returns: + str -- Path to the normalized file. + """ + # Before copying into a destination dataset, we'll make sure that + # the source file doesn't contain any carriage returns that would + # result in empty records in the destination. + # Due to the differences between encodings, we'll normalize to IBM-037 + # before checking the EOL sequence. + enc_utils = encode.EncodeUtils() + src_tag = enc_utils.uss_file_tag(src) + copy_handler = CopyHandler(AnsibleModuleHelper(dict())) + + if src_tag == "untagged": + # This should only be true when src is a remote file and no encoding + # was specified by the user. + if not encoding: + encoding = {"from": encode.Defaults.get_default_system_charset()} + src_tag = encoding["from"] + + if src_tag != "IBM-037": + fd, converted_src = tempfile.mkstemp() + os.close(fd) + + enc_utils.uss_convert_encoding( + src, + converted_src, + src_tag, + "IBM-037" + ) + copy_handler._tag_file_encoding(converted_src, "IBM-037") + src = converted_src + + if copy_handler.file_has_crlf_endings(src): + src = copy_handler.create_temp_with_lf_endings(src) + + return src + + def run_module(module, arg_def): # ******************************************************************** # Verify the validity of module args. BetterArgParser raises ValueError @@ -2160,6 +2219,7 @@ def run_module(module, arg_def): # and destination datasets, if needed. # ******************************************************************** dest_member_exists = False + converted_src = None try: # If temp_path, the plugin has copied a file from the controller to USS. if temp_path or "/" in src: @@ -2167,6 +2227,38 @@ def run_module(module, arg_def): if remote_src and os.path.isdir(src): is_src_dir = True + + # When the destination is a dataset, we'll normalize the source + # file to UTF-8 for the record length computation as Python + # generally uses UTF-8 as the default encoding. + if not is_uss: + new_src = temp_path or src + new_src = os.path.normpath(new_src) + # Normalizing encoding when src is a USS file (only). + encode_utils = encode.EncodeUtils() + src_tag = encode_utils.uss_file_tag(new_src) + # Normalizing to UTF-8. + if not is_src_dir and src_tag != "UTF-8": + # If untagged, assuming the encoding/tag is the system's default. + if src_tag == "untagged" or src_tag is None: + if encoding: + src_tag = encoding["from"] + else: + src_tag = encode.Defaults.get_default_system_charset() + + # Converting the original src to a temporary one in UTF-8. + fd, converted_src = tempfile.mkstemp() + os.close(fd) + encode_utils.uss_convert_encoding( + new_src, + converted_src, + src_tag, + "UTF-8" + ) + + # Creating the handler just for tagging, we're not copying yet! + copy_handler = CopyHandler(module, is_binary=is_binary) + copy_handler._tag_file_encoding(converted_src, "UTF-8") else: if data_set.DataSet.data_set_exists(src_name): if src_member and not data_set.DataSet.data_set_member_exists(src): @@ -2344,6 +2436,17 @@ def run_module(module, arg_def): emergency_backup = data_set.DataSet.temp_name() data_set.DataSet.allocate_model_data_set(emergency_backup, dest_name) + # Here we'll use the normalized source file by shadowing the + # original one. This change applies only to the + # allocate_destination_data_set call. + if converted_src: + if remote_src: + original_src = src + src = converted_src + else: + original_temp = temp_path + temp_path = converted_src + try: if not is_uss: res_args["changed"] = allocate_destination_data_set( @@ -2360,11 +2463,22 @@ def run_module(module, arg_def): if dest_exists and not force: restore_backup(dest_name, emergency_backup, dest_ds_type, use_backup) erase_backup(emergency_backup, dest_ds_type) + if converted_src: + if remote_src: + src = original_src + else: + temp_path = original_temp module.fail_json( msg="Unable to allocate destination data set: {0}".format(str(err)), dest_exists=dest_exists ) + if converted_src: + if remote_src: + src = original_src + else: + temp_path = original_temp + # ******************************************************************** # Encoding conversion is only valid if the source is a local file, # local directory or a USS file/directory. @@ -2433,35 +2547,8 @@ def run_module(module, arg_def): # --------------------------------------------------------------------- elif dest_ds_type in data_set.DataSet.MVS_SEQ: if src_ds_type == "USS" and not is_binary: - # Before copying into the destination dataset, we'll make sure that - # the source file doesn't contain any carriage returns that would - # result in empty records in the destination. - # Due to the differences between encodings, we'll normalize to IBM-037 - # before checking the EOL sequence. new_src = conv_path or temp_path or src - enc_utils = encode.EncodeUtils() - src_tag = enc_utils.uss_file_tag(new_src) - - if src_tag == "untagged": - src_tag = encode.Defaults.DEFAULT_EBCDIC_USS_CHARSET - - if src_tag not in encode.Defaults.DEFAULT_EBCDIC_MVS_CHARSET: - fd, converted_src = tempfile.mkstemp() - os.close(fd) - - enc_utils.uss_convert_encoding( - new_src, - converted_src, - src_tag, - encode.Defaults.DEFAULT_EBCDIC_MVS_CHARSET - ) - copy_handler._tag_file_encoding(converted_src, encode.Defaults.DEFAULT_EBCDIC_MVS_CHARSET) - new_src = converted_src - - if copy_handler.file_has_crlf_endings(new_src): - new_src = copy_handler.create_temp_with_lf_endings(new_src) - - conv_path = new_src + conv_path = normalize_line_endings(new_src, encoding) copy_handler.copy_to_seq( src, @@ -2484,7 +2571,14 @@ def run_module(module, arg_def): ) pdse_copy_handler.copy_to_pdse( - src, temp_path, conv_path, dest_name, src_ds_type, src_member=src_member, dest_member=dest_member + src, + temp_path, + conv_path, + dest_name, + src_ds_type, + src_member=src_member, + dest_member=dest_member, + encoding=encoding ) res_args["changed"] = True dest = dest.upper() diff --git a/tests/functional/modules/test_zos_copy_func.py b/tests/functional/modules/test_zos_copy_func.py index dbc76eeff..c5f660a6c 100644 --- a/tests/functional/modules/test_zos_copy_func.py +++ b/tests/functional/modules/test_zos_copy_func.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2020, 2021 +# Copyright (c) IBM Corporation 2020, 2021, 2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -41,6 +41,8 @@ DUMMY DATA ---- LINE 007 ------ """ +DUMMY_DATA_CRLF = b"00000001 DUMMY DATA\r\n00000002 DUMMY DATA\r\n" + VSAM_RECORDS = """00000001A record 00000002A record 00000003A record @@ -115,6 +117,12 @@ def populate_dir(dir_path): infile.write(DUMMY_DATA) +def populate_dir_crlf_endings(dir_path): + for i in range(5): + with open(os.path.join(dir_path, "file{0}".format(i)), "wb") as infile: + infile.write(DUMMY_DATA_CRLF) + + def populate_partitioned_data_set(hosts, name, ds_type, members=None): """Creates a new partitioned data set and inserts records into various members of it. @@ -1056,14 +1064,72 @@ def test_copy_file_record_length_to_sequential_data_set(ansible_zos_module): assert v_recl.get("rc") == 0 stdout = v_recl.get("stdout").split() assert len(stdout) == 5 + # Verifying the dataset type (sequential). assert stdout[1] == "PS" + # Verifying the record format is Fixed Block. assert stdout[2] == "FB" + # Verifying the record length is 31. The dummy data has 31 + # characters per line. assert stdout[3] == "31" finally: hosts.all.zos_data_set(name=dest, state="absent") os.remove(src) +@pytest.mark.uss +@pytest.mark.seq +def test_copy_file_crlf_endings_to_sequential_data_set(ansible_zos_module): + hosts = ansible_zos_module + dest = "USER.TEST.SEQ.FUNCTEST" + + fd, src = tempfile.mkstemp() + os.close(fd) + with open(src, "wb") as infile: + infile.write(DUMMY_DATA_CRLF) + + try: + hosts.all.zos_data_set(name=dest, state="absent") + + copy_result = hosts.all.zos_copy( + src=src, + dest=dest, + remote_src=False, + is_binary=False + ) + + verify_copy = hosts.all.shell( + cmd="cat \"//'{0}'\"".format(dest), + executable=SHELL_EXECUTABLE, + ) + + verify_recl = hosts.all.shell( + cmd="dls -l {0}".format(dest), + executable=SHELL_EXECUTABLE, + ) + + for cp_res in copy_result.contacted.values(): + assert cp_res.get("msg") is None + assert cp_res.get("changed") is True + assert cp_res.get("dest") == dest + for v_cp in verify_copy.contacted.values(): + assert v_cp.get("rc") == 0 + assert len(v_cp.get("stdout_lines")) == 2 + for v_recl in verify_recl.contacted.values(): + assert v_recl.get("rc") == 0 + stdout = v_recl.get("stdout").split() + assert len(stdout) == 5 + # Verifying the dataset type (sequential). + assert stdout[1] == "PS" + # Verifying the record format is Fixed Block. + assert stdout[2] == "FB" + # Verifying the record length is 19. The dummy data has 19 + # characters per line. + assert stdout[3] == "19" + finally: + hosts.all.zos_data_set(name=dest, state="absent") + os.remove(src) + + @pytest.mark.uss @pytest.mark.seq @pytest.mark.parametrize("src", [ @@ -1651,6 +1717,38 @@ def test_copy_dir_to_non_existing_pdse(ansible_zos_module): hosts.all.zos_data_set(name=dest, state="absent") +@pytest.mark.uss +@pytest.mark.pdse +def test_copy_dir_crlf_endings_to_non_existing_pdse(ansible_zos_module): + hosts = ansible_zos_module + dest = "USER.TEST.PDSE.FUNCTEST" + + temp_path = tempfile.mkdtemp() + src_basename = "source/" + source_path = "{0}/{1}".format(temp_path, src_basename) + + try: + os.mkdir(source_path) + populate_dir_crlf_endings(source_path) + + copy_res = hosts.all.zos_copy(src=source_path, dest=dest) + verify_copy = hosts.all.shell( + cmd="cat \"//'{0}({1})'\"".format(dest, "FILE2"), + executable=SHELL_EXECUTABLE, + ) + + for result in copy_res.contacted.values(): + assert result.get("msg") is None + assert result.get("changed") is True + assert result.get("dest") == dest + for result in verify_copy.contacted.values(): + assert result.get("rc") == 0 + assert len(result.get("stdout_lines")) == 2 + finally: + shutil.rmtree(temp_path) + hosts.all.zos_data_set(name=dest, state="absent") + + @pytest.mark.uss @pytest.mark.pdse @pytest.mark.parametrize("src_type", ["pds", "pdse"]) From 673bd70349b3450066917de9d435effc316a4dff Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Mon, 24 Apr 2023 15:14:54 -0600 Subject: [PATCH 101/495] Add force (disp=shr) option to zos_lineinfile and update zos_blockinfile with the same locking test case. (#731) * Add DISP=SHR by function force to the user * Add helper function for force cases only for EOF inserts * Test cases added * Just to check in the change case * All test cases for force in zos_lineinfile working properly * Test cases for force blockinfile * Fixes for style of code * One identation error * One identation error * One identation error * Documentation error * All test cases for line infile and block infile * Comments * Change the stdout to assert with expected of ds * Error of test case * Fragment added * Solve typo errors * Update to remove uncessary comment Signed-off-by: ddimatos <dimatos@gmail.com> * Update changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update module with force comments Signed-off-by: ddimatos <dimatos@gmail.com> * Solve some test problems * Copyright year * Copyright year * Add cases for other ecodes * Add test for all encodings * Cover all encodes test cases with force * Blockinfile force added for other encodes and DS types --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@mbp-de-andre-2.gdl.mex.ibm.com> Co-authored-by: ddimatos <dimatos@gmail.com> --- .../731-zos_linefile-disposition_share.yaml | 6 + plugins/modules/zos_lineinfile.py | 39 +++- .../modules/test_zos_blockinfile_func.py | 35 ++- .../modules/test_zos_lineinfile_func.py | 212 ++++++++++++++++-- tests/helpers/zos_blockinfile_helper.py | 194 +++++++++++++++- tests/helpers/zos_lineinfile_helper.py | 181 ++++++++++++++- 6 files changed, 640 insertions(+), 27 deletions(-) create mode 100644 changelogs/fragments/731-zos_linefile-disposition_share.yaml diff --git a/changelogs/fragments/731-zos_linefile-disposition_share.yaml b/changelogs/fragments/731-zos_linefile-disposition_share.yaml new file mode 100644 index 000000000..da6dbc19b --- /dev/null +++ b/changelogs/fragments/731-zos_linefile-disposition_share.yaml @@ -0,0 +1,6 @@ +minor_changes: +- zos_lineinfile - would access data sets with exclusive access so no other + task can read the data, this enhancement allows for a data set to be opened + with a disposition set to share so that other tasks can access the data when + option `force` is set to `true`. + (https://github.com/ansible-collections/ibm_zos_core/pull/731) \ No newline at end of file diff --git a/plugins/modules/zos_lineinfile.py b/plugins/modules/zos_lineinfile.py index 7a26ce299..c2a7a719c 100644 --- a/plugins/modules/zos_lineinfile.py +++ b/plugins/modules/zos_lineinfile.py @@ -173,6 +173,18 @@ required: false type: str default: IBM-1047 + force: + description: + - Specifies that the data set can be shared with others during an update + which results in the data set you are updating to be simultaneously + updated by others. + - This is helpful when a data set is being used in a long running process + such as a started task and you are wanting to update or read. + - The C(force) option enables sharing of data sets through the disposition + I(DISP=SHR). + required: false + type: bool + default: false notes: - It is the playbook author or user's responsibility to avoid files that should not be encoded, such as binary files. A user is described @@ -218,6 +230,14 @@ regexp: '^(.*)User(\d+)m(.*)$' line: '\1APPUser\3' backrefs: yes + +- name: Add a line to a member while a task is in execution + zos_lineinfile: + src: SOME.PARTITIONED.DATA.SET(DATA) + insertafter: EOF + line: 'Should be a working test now' + force: True + """ RETURN = r""" @@ -271,7 +291,7 @@ DS_TYPE = ['PS', 'PO'] -def present(src, line, regexp, ins_aft, ins_bef, encoding, first_match, backrefs): +def present(src, line, regexp, ins_aft, ins_bef, encoding, first_match, backrefs, force): """Replace a line with the matching regex pattern Insert a line before/after the matching pattern Insert a line at BOF/EOF @@ -292,6 +312,7 @@ def present(src, line, regexp, ins_aft, ins_bef, encoding, first_match, backrefs encoding: {str} -- Encoding of the src. first_match: {bool} -- Take the first matching regex pattern. backrefs: {bool} -- Back reference + force: {bool} -- force for modify a member part of a task in execution Returns: str -- Information in JSON format. keys: @@ -310,10 +331,11 @@ def present(src, line, regexp, ins_aft, ins_bef, encoding, first_match, backrefs backref=backrefs, state=True, debug=True, + force=force, ) -def absent(src, line, regexp, encoding): +def absent(src, line, regexp, encoding, force): """Delete lines with matching regex pattern Arguments: @@ -322,6 +344,7 @@ def absent(src, line, regexp, encoding): regexp will be ignored. regexp: {str} -- The regular expression to look for in every line of the src. encoding: {str} -- Encoding of the src. + force: {bool} -- force for modify a member part of a task in execution Returns: str -- Information in JSON format. keys: @@ -329,7 +352,7 @@ def absent(src, line, regexp, encoding): found: {int} -- Number of matching regex pattern changed: {bool} -- Indicates if the source was modified. """ - return datasets.lineinfile(src, line, regex=regexp, encoding=encoding, state=False, debug=True) + return datasets.lineinfile(src, line, regex=regexp, encoding=encoding, state=False, debug=True, force=force) def quotedString(string): @@ -364,7 +387,8 @@ def main(): backup_name=dict(type='str', required=False, default=None), firstmatch=dict(type='bool', default=False), encoding=dict(type='str', default="IBM-1047"), - tmp_hlq=dict(type='str', required=False, default=None) + tmp_hlq=dict(type='str', required=False, default=None), + force=dict(type='bool', required=False, default=False) ) module = AnsibleModule( argument_spec=module_args, @@ -385,6 +409,7 @@ def main(): firstmatch=dict(arg_type="bool", required=False, default=False), backrefs=dict(arg_type="bool", dependencies=['regexp'], required=False, default=False), tmp_hlq=dict(type='qualifier_or_empty', required=False, default=None), + force=dict(arg_type='bool', required=False, default=False), mutually_exclusive=[["insertbefore", "insertafter"]],) try: @@ -406,6 +431,7 @@ def main(): ins_bef = parsed_args.get('insertbefore') encoding = parsed_args.get('encoding') tmphlq = parsed_args.get('tmp_hlq') + force = parsed_args.get('force') if parsed_args.get('state') == 'present': if backrefs and regexp is None: @@ -453,9 +479,10 @@ def main(): # state=present, insert/replace a line with matching regex pattern # state=absent, delete lines with matching regex pattern if parsed_args.get('state') == 'present': - return_content = present(src, quotedString(line), quotedString(regexp), quotedString(ins_aft), quotedString(ins_bef), encoding, firstmatch, backrefs) + return_content = present(src, quotedString(line), quotedString(regexp), quotedString(ins_aft), quotedString(ins_bef), encoding, firstmatch, + backrefs, force) else: - return_content = absent(src, quotedString(line), quotedString(regexp), encoding) + return_content = absent(src, quotedString(line), quotedString(regexp), encoding, force) stdout = return_content.stdout_response stderr = return_content.stderr_response rc = return_content.rc diff --git a/tests/functional/modules/test_zos_blockinfile_func.py b/tests/functional/modules/test_zos_blockinfile_func.py index f6b735487..7cd92c9e5 100644 --- a/tests/functional/modules/test_zos_blockinfile_func.py +++ b/tests/functional/modules/test_zos_blockinfile_func.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2020, 2022 +# Copyright (c) IBM Corporation 2020, 2022, 2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -17,6 +17,8 @@ DsGeneral, DsNotSupportedHelper, DsGeneralResultKeyMatchesRegex, + DsGeneralForce, + DsGeneralForceFail, ) import os import sys @@ -238,6 +240,14 @@ test_ds_block_insertafter_eof_with_backup_name=dict( block="export ZOAU_ROOT\nexport ZOAU_HOME\nexport ZOAU_DIR", state="present", backup=True, backup_name=MVS_BACKUP_DS), + test_ds_block_insertafter_regex_force=dict( + path="",insertafter="ZOAU_ROOT=", + block="ZOAU_ROOT=/mvsutil-develop_dsed\nZOAU_HOME=\\$ZOAU_ROOT\nZOAU_DIR=\\$ZOAU_ROOT", + state="present", force=True), + test_ds_block_insertafter_regex_force_fail=dict( + path="",insertafter="ZOAU_ROOT=", + block="ZOAU_ROOT=/mvsutil-develop_dsed\nZOAU_HOME=\\$ZOAU_ROOT\nZOAU_DIR=\\$ZOAU_ROOT", + state="present", force=False), expected=dict(test_uss_block_insertafter_regex_defaultmarker="""if [ -z STEPLIB ] && tty -s; then export STEPLIB=none @@ -1498,6 +1508,17 @@ def test_ds_block_insertafter_eof_with_backup(ansible_zos_module, dstype, encodi ansible_zos_module.all.zos_data_set(name=backup_ds_name, state="absent") +@pytest.mark.ds +@pytest.mark.parametrize("dstype", DS_TYPE) +def test_ds_block_insertafter_regex_force(ansible_zos_module, dstype): + TEST_ENV["DS_TYPE"] = dstype + DsGeneralForce( + ansible_zos_module, TEST_ENV, + TEST_INFO["test_ds_block_insertafter_regex_force"], + TEST_INFO["expected"]["test_uss_block_insertafter_regex_defaultmarker"] + ) + + ######################### # Negative tests ######################### @@ -1545,4 +1566,14 @@ def test_ds_not_supported(ansible_zos_module, dstype): DsNotSupportedHelper( TEST_INFO["test_ds_block_insertafter_regex"]["test_name"], ansible_zos_module, TEST_ENV, TEST_INFO["test_uss_block_insertafter_regex"] - ) \ No newline at end of file + ) + + +@pytest.mark.ds +@pytest.mark.parametrize("dstype", DS_TYPE) +def test_ds_block_insertafter_regex_fail(ansible_zos_module, dstype): + TEST_ENV["DS_TYPE"] = dstype + DsGeneralForceFail( + ansible_zos_module, TEST_ENV, + TEST_INFO["test_ds_block_insertafter_regex_force_fail"], + ) diff --git a/tests/functional/modules/test_zos_lineinfile_func.py b/tests/functional/modules/test_zos_lineinfile_func.py index c001ebb0d..7b77c155d 100644 --- a/tests/functional/modules/test_zos_lineinfile_func.py +++ b/tests/functional/modules/test_zos_lineinfile_func.py @@ -17,6 +17,8 @@ DsGeneral, DsNotSupportedHelper, DsGeneralResultKeyMatchesRegex, + DsGeneralForceFail, + DsGeneralForce, ) import os import sys @@ -120,6 +122,23 @@ test_ds_line_replace_nomatch_insertbefore_nomatch=dict(test_name="T11"), test_ds_line_absent=dict(test_name="T12"), test_ds_line_tmp_hlq_option=dict(insertafter="EOF", line="export ZOAU_ROOT", state="present", backup=True, tmp_hlq="TMPHLQ"), + test_ds_line_force=dict(path="",insertafter="EOF", line="export ZOAU_ROOT", force=True), + test_ds_line_force_fail=dict(path="",insertafter="EOF", line="export ZOAU_ROOT", force=False), + test_ds_line_replace_force=dict(path="",regexp="ZOAU_ROOT=", line="ZOAU_ROOT=/mvsutil-develop_dsed", + state="present",force=True), + test_ds_line_insertafter_regex_force=dict(path="",insertafter="ZOAU_ROOT=", line="ZOAU_ROOT=/mvsutil-develop_dsed", + state="present",force=True), + test_ds_line_insertbefore_regex_force=dict(path="",insertbefore="ZOAU_ROOT=", line="unset ZOAU_ROOT", state="present",force=True), + test_ds_line_insertbefore_bof_force=dict(path="",insertbefore="BOF", line="# this is file is for setting env vars", + state="present",force=True), + test_ds_line_replace_match_insertafter_ignore_force=dict(path="",regexp="ZOAU_ROOT=", insertafter="PATH=", + line="ZOAU_ROOT=/mvsutil-develop_dsed", state="present",force=True), + test_ds_line_replace_match_insertbefore_ignore_force=dict(path="",regexp="ZOAU_ROOT=", insertbefore="PATH=", line="unset ZOAU_ROOT", + state="present",force=True), + test_ds_line_replace_nomatch_insertafter_match_force=dict(path="",regexp="abcxyz", insertafter="ZOAU_ROOT=", + line="ZOAU_ROOT=/mvsutil-develop_dsed", state="present",force=True), + test_ds_line_replace_nomatch_insertbefore_match_force=dict(path="",regexp="abcxyz", insertbefore="ZOAU_ROOT=", line="unset ZOAU_ROOT", + state="present",force=True), expected=dict(test_uss_line_replace="""if [ -z STEPLIB ] && tty -s; then export STEPLIB=none @@ -568,7 +587,42 @@ export PYTHONPATH export PKG_CONFIG_PATH export PYTHON_HOME -export _BPXK_AUTOCVT"""), +export _BPXK_AUTOCVT""", + test_ds_line_force="""if [ -z STEPLIB ] && tty -s; +then + export STEPLIB=none + exec -a 0 SHELL +fi +TZ=PST8PDT +export TZ +LANG=C +export LANG +readonly LOGNAME +PATH=/usr/lpp/zoautil/v100/bin:/usr/lpp/rsusr/ported/bin:/bin:/var/bin +export PATH +LIBPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib +export LIBPATH +NLSPATH=/usr/lib/nls/msg/%L/%N +export NLSPATH +MANPATH=/usr/man/%L +export MANPATH +MAIL=/usr/mail/LOGNAME +export MAIL +umask 022 +ZOAU_ROOT=/usr/lpp/zoautil/v100 +ZOAUTIL_DIR=/usr/lpp/zoautil/v100 +PYTHONPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib +PKG_CONFIG_PATH=/usr/lpp/izoda/v110/anaconda/lib/pkgconfig +PYTHON_HOME=/usr/lpp/izoda/v110/anaconda +_BPXK_AUTOCVT=ON +export ZOAU_ROOT +export ZOAUTIL_DIR +export ZOAUTIL_DIR +export PYTHONPATH +export PKG_CONFIG_PATH +export PYTHON_HOME +export _BPXK_AUTOCVT +export ZOAU_ROOT"""), ) ######################### @@ -708,20 +762,6 @@ def test_uss_line_replace_quoted_not_escaped(ansible_zos_module): # Dataset test cases ######################### -@pytest.mark.ds -@pytest.mark.parametrize("dstype", DS_TYPE) -@pytest.mark.parametrize("encoding", ENCODING) -def test_ds_line_replace(ansible_zos_module, dstype, encoding): - TEST_ENV["DS_TYPE"] = dstype - TEST_ENV["ENCODING"] = encoding - TEST_INFO["test_uss_line_replace"]["line"] = 'ZOAU_ROOT=/mvsutil-develop_dsed' - DsGeneral( - TEST_INFO["test_ds_line_replace"]["test_name"], ansible_zos_module, - TEST_ENV, TEST_INFO["test_uss_line_replace"], - TEST_INFO["expected"]["test_uss_line_replace"] - ) - - @pytest.mark.ds @pytest.mark.parametrize("dstype", DS_TYPE) @pytest.mark.parametrize("encoding", ENCODING) @@ -909,3 +949,145 @@ def test_ds_not_supported(ansible_zos_module, dstype): TEST_INFO["test_ds_line_replace"]["test_name"], ansible_zos_module, TEST_ENV, TEST_INFO["test_uss_line_replace"] ) + + +######################### +# Dataset test cases with force +######################### + +@pytest.mark.ds +@pytest.mark.parametrize("encoding", ENCODING) +@pytest.mark.parametrize("dstype", DS_TYPE) +def test_ds_line_force(ansible_zos_module, dstype, encoding): + TEST_ENV["DS_TYPE"] = dstype + TEST_ENV["ENCODING"] = encoding + DsGeneralForce( + ansible_zos_module, TEST_ENV, + TEST_CONTENT, + TEST_INFO["test_ds_line_force"], + TEST_INFO["expected"]["test_ds_line_force"] + ) + + +@pytest.mark.ds +@pytest.mark.parametrize("encoding", ENCODING) +@pytest.mark.parametrize("dstype", DS_TYPE) +def test_ds_line_force_fail(ansible_zos_module, dstype, encoding): + TEST_ENV["DS_TYPE"] = dstype + TEST_ENV["ENCODING"] = encoding + DsGeneralForceFail( + ansible_zos_module, TEST_ENV, + TEST_INFO["test_ds_line_force_fail"] + ) + + +@pytest.mark.ds +@pytest.mark.parametrize("encoding", ENCODING) +@pytest.mark.parametrize("dstype", DS_TYPE) +def test_ds_line_replace_force(ansible_zos_module, dstype, encoding): + TEST_ENV["DS_TYPE"] = dstype + TEST_ENV["ENCODING"] = encoding + DsGeneralForce( + ansible_zos_module, TEST_ENV, + TEST_CONTENT, + TEST_INFO["test_ds_line_replace_force"], + TEST_INFO["expected"]["test_uss_line_replace"] + ) + + +@pytest.mark.ds +@pytest.mark.parametrize("encoding", ENCODING) +@pytest.mark.parametrize("dstype", DS_TYPE) +def test_ds_line_insertafter_regex_force(ansible_zos_module, dstype, encoding): + TEST_ENV["DS_TYPE"] = dstype + TEST_ENV["ENCODING"] = encoding + DsGeneralForce( + ansible_zos_module, TEST_ENV, + TEST_CONTENT, + TEST_INFO["test_ds_line_insertafter_regex_force"], + TEST_INFO["expected"]["test_uss_line_insertafter_regex"] + ) + + +@pytest.mark.ds +@pytest.mark.parametrize("encoding", ENCODING) +@pytest.mark.parametrize("dstype", DS_TYPE) +def test_ds_line_insertbefore_regex_force(ansible_zos_module, dstype, encoding): + TEST_ENV["DS_TYPE"] = dstype + TEST_ENV["ENCODING"] = encoding + DsGeneralForce( + ansible_zos_module, TEST_ENV, + TEST_CONTENT, + TEST_INFO["test_ds_line_insertbefore_regex_force"], + TEST_INFO["expected"]["test_uss_line_insertbefore_regex"] + ) + + +@pytest.mark.ds +@pytest.mark.parametrize("encoding", ENCODING) +@pytest.mark.parametrize("dstype", DS_TYPE) +def test_ds_line_insertbefore_bof_force(ansible_zos_module, dstype, encoding): + TEST_ENV["DS_TYPE"] = dstype + TEST_ENV["ENCODING"] = encoding + DsGeneralForce( + ansible_zos_module, TEST_ENV, + TEST_CONTENT, + TEST_INFO["test_ds_line_insertbefore_bof_force"], + TEST_INFO["expected"]["test_uss_line_insertbefore_bof"] + ) + + +@pytest.mark.ds +@pytest.mark.parametrize("encoding", ENCODING) +@pytest.mark.parametrize("dstype", DS_TYPE) +def test_ds_line_replace_match_insertafter_ignore_force(ansible_zos_module, dstype, encoding): + TEST_ENV["DS_TYPE"] = dstype + TEST_ENV["ENCODING"] = encoding + DsGeneralForce( + ansible_zos_module, TEST_ENV, + TEST_CONTENT, + TEST_INFO["test_ds_line_replace_match_insertafter_ignore_force"], + TEST_INFO["expected"]["test_uss_line_replace_match_insertafter_ignore"] + ) + + +@pytest.mark.ds +@pytest.mark.parametrize("encoding", ENCODING) +@pytest.mark.parametrize("dstype", DS_TYPE) +def test_ds_line_replace_match_insertbefore_ignore_force(ansible_zos_module, dstype, encoding): + TEST_ENV["DS_TYPE"] = dstype + TEST_ENV["ENCODING"] = encoding + DsGeneralForce( + ansible_zos_module, TEST_ENV, + TEST_CONTENT, + TEST_INFO["test_ds_line_replace_match_insertbefore_ignore_force"], + TEST_INFO["expected"]["test_uss_line_replace_match_insertbefore_ignore"] + ) + + +@pytest.mark.ds +@pytest.mark.parametrize("encoding", ENCODING) +@pytest.mark.parametrize("dstype", DS_TYPE) +def test_ds_line_replace_nomatch_insertafter_match_force(ansible_zos_module, dstype, encoding): + TEST_ENV["DS_TYPE"] = dstype + TEST_ENV["ENCODING"] = encoding + DsGeneralForce( + ansible_zos_module, TEST_ENV, + TEST_CONTENT, + TEST_INFO["test_ds_line_replace_nomatch_insertafter_match_force"], + TEST_INFO["expected"]["test_uss_line_replace_nomatch_insertafter_match"] + ) + + +@pytest.mark.ds +@pytest.mark.parametrize("encoding", ENCODING) +@pytest.mark.parametrize("dstype", DS_TYPE) +def test_ds_line_replace_nomatch_insertbefore_match_force(ansible_zos_module, dstype, encoding): + TEST_ENV["DS_TYPE"] = dstype + TEST_ENV["ENCODING"] = encoding + DsGeneralForce( + ansible_zos_module, TEST_ENV, + TEST_CONTENT, + TEST_INFO["test_ds_line_replace_nomatch_insertbefore_match_force"], + TEST_INFO["expected"]["test_uss_line_replace_nomatch_insertbefore_match"] + ) diff --git a/tests/helpers/zos_blockinfile_helper.py b/tests/helpers/zos_blockinfile_helper.py index 0a77e4eda..f5aa178fe 100644 --- a/tests/helpers/zos_blockinfile_helper.py +++ b/tests/helpers/zos_blockinfile_helper.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2020, 2022 +# Copyright (c) IBM Corporation 2020, 2022, 2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -14,12 +14,40 @@ from __future__ import absolute_import, division, print_function from shellescape import quote from pprint import pprint +import time import re __metaclass__ = type +DEFAULT_DATA_SET_NAME = "USER.PRIVATE.TESTDS" + +c_pgm="""#include <stdio.h> +#include <stdlib.h> +#include <string.h> +int main(int argc, char** argv) +{ + char dsname[ strlen(argv[1]) + 4]; + sprintf(dsname, "//'%s'", argv[1]); + FILE* member; + member = fopen(dsname, "rb,type=record"); + sleep(300); + fclose(member); + return 0; +} +""" + +call_c_jcl="""//PDSELOCK JOB MSGCLASS=A,MSGLEVEL=(1,1),NOTIFY=&SYSUID,REGION=0M +//LOCKMEM EXEC PGM=BPXBATCH +//STDPARM DD * +SH /tmp/disp_shr/pdse-lock '{0}({1})' +//STDIN DD DUMMY +//STDOUT DD SYSOUT=* +//STDERR DD SYSOUT=* +//""" + + def set_uss_test_env(test_name, hosts, test_env): test_env["TEST_FILE"] = test_env["TEST_DIR"] + test_name try: @@ -117,8 +145,8 @@ def DsGeneral(test_name, ansible_zos_module, test_env, test_info, expected): results = hosts.all.shell(cmd=cmdStr) for result in results.contacted.values(): pprint(result) - assert result.get("stdout") == expected - # assert result.get("stdout").replace('\n', '').replace(' ', '') == expected.replace('\n', '').replace(' ', '') + #assert result.get("stdout") == expected + assert result.get("stdout").replace('\n', '').replace(' ', '') == expected.replace('\n', '').replace(' ', '') clean_ds_test_env(test_env["DS_NAME"], hosts) return blockinfile_results @@ -155,3 +183,163 @@ def DsGeneralResultKeyMatchesRegex(test_name, ansible_zos_module, test_env, test for key in kwargs: assert re.match(kwargs.get(key), result.get(key)) clean_ds_test_env(test_env["DS_NAME"], hosts) + + +def DsGeneralForce(ansible_zos_module, test_env, test_info, expected): + MEMBER_1, MEMBER_2 = "MEM1", "MEM2" + TEMP_FILE = "/tmp/{0}".format(MEMBER_2) + if test_env["DS_TYPE"] == "SEQ": + test_env["DS_NAME"] = DEFAULT_DATA_SET_NAME+".{0}".format(MEMBER_2) + test_info["path"] = DEFAULT_DATA_SET_NAME+".{0}".format(MEMBER_2) + else: + test_env["DS_NAME"] = DEFAULT_DATA_SET_NAME+"({0})".format(MEMBER_2) + test_info["path"] = DEFAULT_DATA_SET_NAME+"({0})".format(MEMBER_2) + hosts = ansible_zos_module + try: + # set up: + # create pdse + hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="present", type=test_env["DS_TYPE"], replace=True) + hosts.all.shell(cmd="echo \"{0}\" > {1}".format(test_env["TEST_CONT"], TEMP_FILE)) + # add members + hosts.all.zos_data_set( + batch=[ + { + "name": DEFAULT_DATA_SET_NAME + "({0})".format(MEMBER_1), + "type": "member", + "state": "present", + "replace": True, + }, + { + "name": test_env["DS_NAME"], + "type": "member", + "state": "present", + "replace": True, + }, + ] + ) + # write memeber to verify cases + # print(test_env["TEST_CONT"]) + if test_env["DS_TYPE"] in ["PDS", "PDSE"]: + cmdStr = "cp -CM {0} \"//'{1}'\"".format(quote(TEMP_FILE), test_env["DS_NAME"]) + else: + cmdStr = "cp {0} \"//'{1}'\" ".format(quote(TEMP_FILE), test_env["DS_NAME"]) + if test_env["ENCODING"]: + test_info["encoding"] = test_env["ENCODING"] + hosts.all.shell(cmd=cmdStr) + cmdStr = "cat \"//'{0}'\" | wc -l ".format(test_env["DS_NAME"]) + results = hosts.all.shell(cmd=cmdStr) + pprint(vars(results)) + for result in results.contacted.values(): + assert int(result.get("stdout")) != 0 + if test_env["ENCODING"] != 'IBM-1047': + hosts.all.zos_encode( + src=TEMP_FILE, + dest=test_env["DS_NAME"], + encoding={ + "from": "IBM-1047", + "to": test_env["ENCODING"], + }, + ) + # copy/compile c program and copy jcl to hold data set lock for n seconds in background(&) + hosts.all.zos_copy(content=c_pgm, dest='/tmp/disp_shr/pdse-lock.c', force=True) + hosts.all.zos_copy( + content=call_c_jcl.format(DEFAULT_DATA_SET_NAME, MEMBER_1), + dest='/tmp/disp_shr/call_c_pgm.jcl', + force=True + ) + hosts.all.shell(cmd="xlc -o pdse-lock pdse-lock.c", chdir="/tmp/disp_shr/") + + # submit jcl + hosts.all.shell(cmd="submit call_c_pgm.jcl", chdir="/tmp/disp_shr/") + + # pause to ensure c code acquires lock + time.sleep(5) + + blockinfile_results = hosts.all.zos_blockinfile(**test_info) + for result in blockinfile_results.contacted.values(): + assert result.get("changed") == True + + + if test_env["ENCODING"] == 'IBM-1047': + cmdStr = "cat \"//'{0}'\" ".format(test_info["path"]) + results = hosts.all.shell(cmd=cmdStr) + for result in results.contacted.values(): + pprint(result) + assert result.get("stdout").replace('\n', '').replace(' ', '') == expected.replace('\n', '').replace(' ', '') + else: + cmdStr =r"""cat "//'{0}'" """.format(test_info["path"]) + results = hosts.all.shell(cmd=cmdStr) + pprint(vars(results)) + for result in results.contacted.values(): + assert result.get("changed") == True + finally: + hosts.all.shell(cmd="rm -rf " + TEMP_FILE) + # extract pid + ps_list_res = hosts.all.shell(cmd="ps -e | grep -i 'pdse-lock'") + # kill process - release lock - this also seems to end the job + pid = list(ps_list_res.contacted.values())[0].get('stdout').strip().split(' ')[0] + hosts.all.shell(cmd="kill 9 {0}".format(pid.strip())) + # clean up c code/object/executable files, jcl + hosts.all.shell(cmd='rm -r /tmp/disp_shr') + # remove pdse + hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") + return blockinfile_results + + +def DsGeneralForceFail(ansible_zos_module, test_env, test_info): + MEMBER_1, MEMBER_2 = "MEM1", "MEM2" + hosts = ansible_zos_module + test_info["path"] = DEFAULT_DATA_SET_NAME+"({0})".format(MEMBER_2) + try: + # set up: + # create pdse + hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="present", type="pdse", replace=True) + # add members + hosts.all.zos_data_set( + batch=[ + { + "name": DEFAULT_DATA_SET_NAME + "({0})".format(MEMBER_1), + "type": "member", + "state": "present", + "replace": True, + }, + { + "name": DEFAULT_DATA_SET_NAME + "({0})".format(MEMBER_2), + "type": "member", + "state": "present", + "replace": True, + }, + ] + ) + # write memeber to verify cases + hosts.all.shell(cmd="echo \"{0}\" > {1}".format(test_env["TEST_CONT"], test_info["path"])) + # copy/compile c program and copy jcl to hold data set lock for n seconds in background(&) + hosts.all.zos_copy(content=c_pgm, dest='/tmp/disp_shr/pdse-lock.c', force=True) + hosts.all.zos_copy( + content=call_c_jcl.format(DEFAULT_DATA_SET_NAME, MEMBER_1), + dest='/tmp/disp_shr/call_c_pgm.jcl', + force=True + ) + hosts.all.shell(cmd="xlc -o pdse-lock pdse-lock.c", chdir="/tmp/disp_shr/") + + # submit jcl + hosts.all.shell(cmd="submit call_c_pgm.jcl", chdir="/tmp/disp_shr/") + + # pause to ensure c code acquires lock + time.sleep(5) + + blockinfile_results = hosts.all.zos_blockinfile(**test_info) + for result in blockinfile_results.contacted.values(): + pprint(result) + assert result.get("changed") == False + assert result.get("failed") == True + finally: + # extract pid + ps_list_res = hosts.all.shell(cmd="ps -e | grep -i 'pdse-lock'") + # kill process - release lock - this also seems to end the job + pid = list(ps_list_res.contacted.values())[0].get('stdout').strip().split(' ')[0] + hosts.all.shell(cmd="kill 9 {0}".format(pid.strip())) + # clean up c code/object/executable files, jcl + hosts.all.shell(cmd='rm -r /tmp/disp_shr') + # remove pdse + hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") \ No newline at end of file diff --git a/tests/helpers/zos_lineinfile_helper.py b/tests/helpers/zos_lineinfile_helper.py index 2c695364b..bac392e80 100644 --- a/tests/helpers/zos_lineinfile_helper.py +++ b/tests/helpers/zos_lineinfile_helper.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2020, 2022 +# Copyright (c) IBM Corporation 2020, 2022, 2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -13,11 +13,38 @@ from __future__ import absolute_import, division, print_function from shellescape import quote +import time from pprint import pprint import re __metaclass__ = type +DEFAULT_DATA_SET_NAME = "USER.PRIVATE.TESTDS" + +c_pgm="""#include <stdio.h> +#include <stdlib.h> +#include <string.h> +int main(int argc, char** argv) +{ + char dsname[ strlen(argv[1]) + 4]; + sprintf(dsname, "//'%s'", argv[1]); + FILE* member; + member = fopen(dsname, "rb,type=record"); + sleep(300); + fclose(member); + return 0; +} +""" + +call_c_jcl="""//PDSELOCK JOB MSGCLASS=A,MSGLEVEL=(1,1),NOTIFY=&SYSUID,REGION=0M +//LOCKMEM EXEC PGM=BPXBATCH +//STDPARM DD * +SH /tmp/disp_shr/pdse-lock '{0}({1})' +//STDIN DD DUMMY +//STDOUT DD SYSOUT=* +//STDERR DD SYSOUT=* +//""" + def set_uss_test_env(test_name, hosts, test_env): test_env["TEST_FILE"] = test_env["TEST_DIR"] + test_name @@ -159,3 +186,155 @@ def DsGeneralResultKeyMatchesRegex(test_name, ansible_zos_module, test_env, test for key in kwargs: assert re.match(kwargs.get(key), result.get(key)) clean_ds_test_env(test_env["DS_NAME"], hosts) + + +def DsGeneralForce(ansible_zos_module, test_env, test_text, test_info, expected): + MEMBER_1, MEMBER_2 = "MEM1", "MEM2" + TEMP_FILE = "/tmp/{0}".format(MEMBER_2) + if test_env["DS_TYPE"] == "SEQ": + test_env["DS_NAME"] = DEFAULT_DATA_SET_NAME+".{0}".format(MEMBER_2) + test_info["path"] = DEFAULT_DATA_SET_NAME+".{0}".format(MEMBER_2) + else: + test_env["DS_NAME"] = DEFAULT_DATA_SET_NAME+"({0})".format(MEMBER_2) + test_info["path"] = DEFAULT_DATA_SET_NAME+"({0})".format(MEMBER_2) + hosts = ansible_zos_module + try: + # set up: + hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="present", type=test_env["DS_TYPE"], replace=True) + hosts.all.shell(cmd="echo \"{0}\" > {1}".format(test_text, TEMP_FILE)) + # add members + hosts.all.zos_data_set( + batch=[ + { + "name": DEFAULT_DATA_SET_NAME + "({0})".format(MEMBER_1), + "type": "member", + "state": "present", + "replace": True, + }, + { + "name": test_env["DS_NAME"], + "type": "member", + "state": "present", + "replace": True, + }, + ] + ) + # write memeber to verify cases + if test_env["DS_TYPE"] in ["PDS", "PDSE"]: + cmdStr = "cp -CM {0} \"//'{1}'\"".format(quote(TEMP_FILE), test_env["DS_NAME"]) + else: + cmdStr = "cp {0} \"//'{1}'\" ".format(quote(TEMP_FILE), test_env["DS_NAME"]) + if test_env["ENCODING"]: + test_info["encoding"] = test_env["ENCODING"] + hosts.all.shell(cmd=cmdStr) + cmdStr = "cat \"//'{0}'\" | wc -l ".format(test_env["DS_NAME"]) + results = hosts.all.shell(cmd=cmdStr) + pprint(vars(results)) + for result in results.contacted.values(): + assert int(result.get("stdout")) != 0 + if test_env["ENCODING"] != 'IBM-1047': + hosts.all.zos_encode( + src=TEMP_FILE, + dest=test_env["DS_NAME"], + encoding={ + "from": "IBM-1047", + "to": test_env["ENCODING"], + }, + ) + # copy/compile c program and copy jcl to hold data set lock for n seconds in background(&) + hosts.all.zos_copy(content=c_pgm, dest='/tmp/disp_shr/pdse-lock.c', force=True) + hosts.all.zos_copy( + content=call_c_jcl.format(DEFAULT_DATA_SET_NAME, MEMBER_1), + dest='/tmp/disp_shr/call_c_pgm.jcl', + force=True + ) + hosts.all.shell(cmd="xlc -o pdse-lock pdse-lock.c", chdir="/tmp/disp_shr/") + # submit jcl + hosts.all.shell(cmd="submit call_c_pgm.jcl", chdir="/tmp/disp_shr/") + + # pause to ensure c code acquires lock + time.sleep(5) + # call line infile to see results + results = hosts.all.zos_lineinfile(**test_info) + pprint(vars(results)) + + if test_env["ENCODING"] == 'IBM-1047': + cmdStr =r"""cat "//'{0}'" """.format(test_info["path"]) + results = hosts.all.shell(cmd=cmdStr) + pprint(vars(results)) + for result in results.contacted.values(): + assert result.get("stdout") == expected + else: + cmdStr =r"""cat "//'{0}'" """.format(test_info["path"]) + results = hosts.all.shell(cmd=cmdStr) + pprint(vars(results)) + for result in results.contacted.values(): + assert result.get("changed") == True + #assert result.get("stdout") == expected + + finally: + hosts.all.shell(cmd="rm -rf " + TEMP_FILE) + # extract pid + ps_list_res = hosts.all.shell(cmd="ps -e | grep -i 'pdse-lock'") + + # kill process - release lock - this also seems to end the job + pid = list(ps_list_res.contacted.values())[0].get('stdout').strip().split(' ')[0] + hosts.all.shell(cmd="kill 9 {0}".format(pid.strip())) + # clean up c code/object/executable files, jcl + hosts.all.shell(cmd='rm -r /tmp/disp_shr') + # remove pdse + hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") + +def DsGeneralForceFail(ansible_zos_module, test_env, test_info): + MEMBER_1, MEMBER_2 = "MEM1", "MEM2" + hosts = ansible_zos_module + test_info["path"] = DEFAULT_DATA_SET_NAME+"({0})".format(MEMBER_2) + try: + # set up: + # create pdse + hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="present", type="pdse", replace=True) + # add members + hosts.all.zos_data_set( + batch=[ + { + "name": DEFAULT_DATA_SET_NAME + "({0})".format(MEMBER_1), + "type": "member", + "state": "present", + "replace": True, + }, + { + "name": DEFAULT_DATA_SET_NAME + "({0})".format(MEMBER_2), + "type": "member", + "state": "present", + "replace": True, + }, + ] + ) + # copy/compile c program and copy jcl to hold data set lock for n seconds in background(&) + hosts.all.zos_copy(content=c_pgm, dest='/tmp/disp_shr/pdse-lock.c', force=True) + hosts.all.zos_copy( + content=call_c_jcl.format(DEFAULT_DATA_SET_NAME, MEMBER_1), + dest='/tmp/disp_shr/call_c_pgm.jcl', + force=True + ) + hosts.all.shell(cmd="xlc -o pdse-lock pdse-lock.c", chdir="/tmp/disp_shr/") + # submit jcl + hosts.all.shell(cmd="submit call_c_pgm.jcl", chdir="/tmp/disp_shr/") + # pause to ensure c code acquires lock + time.sleep(5) + # call line infile to see results + results = hosts.all.zos_lineinfile(**test_info) + pprint(vars(results)) + for result in results.contacted.values(): + assert result.get("changed") == False + assert result.get("failed") == True + finally: + # extract pid + ps_list_res = hosts.all.shell(cmd="ps -e | grep -i 'pdse-lock'") + # kill process - release lock - this also seems to end the job + pid = list(ps_list_res.contacted.values())[0].get('stdout').strip().split(' ')[0] + hosts.all.shell(cmd="kill 9 {0}".format(pid.strip())) + # clean up c code/object/executable files, jcl + hosts.all.shell(cmd='rm -r /tmp/disp_shr') + # remove pdse + hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") From be97d432bc6af27b289b9a70a9e2c9bcc76f9308 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Mon, 24 Apr 2023 16:19:23 -0600 Subject: [PATCH 102/495] remove duplicate function (#753) --- plugins/module_utils/encode.py | 19 ------------------- 1 file changed, 19 deletions(-) diff --git a/plugins/module_utils/encode.py b/plugins/module_utils/encode.py index fa84c6fb3..cfcfd2bf0 100644 --- a/plugins/module_utils/encode.py +++ b/plugins/module_utils/encode.py @@ -496,25 +496,6 @@ def uss_file_tag(self, file_path): except Exception: return None - def uss_tag_encoding(self, file_path, tag): - """Tag the file/directory specified with the given code set. - If `file_path` is a directory, all of the files and subdirectories will - be tagged recursively. - - Arguments: - file_path {str} -- Absolute file path to tag. - tag {str} -- Code set to tag the file/directory. - - Raises: - TaggingError: When the chtag command fails. - """ - is_dir = os.path.isdir(file_path) - - tag_cmd = "chtag -{0}c {1} {2}".format("R" if is_dir else "t", tag, file_path) - rc, out, err = self.module.run_command(tag_cmd) - if rc != 0: - raise TaggingError(file_path, tag, rc, out, err) - class EncodeError(Exception): def __init__(self, message): From 3878e6bb2138d77684c2b41049f0ce5e4963c3e3 Mon Sep 17 00:00:00 2001 From: Demetri <dimatos@gmail.com> Date: Wed, 26 Apr 2023 09:27:05 -0700 Subject: [PATCH 103/495] Update branch production branch Main with release v1.5.0 content (#756) (#758) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Bug fix to correct job log returining and other edge cases (#688) * Update zos_job_submit to handle some edge cases and return the job log nearly always * Linting correction * Add changelog fragement for pr 683 * Clean up comments in code * Update restructured text for modules and changelog fragment * Add support to check for security exception to job submit module * Add additonal logic for jobs that use typerun=scan * Update chnagelog fragment * Correct run on sentence --------- * Added additional `D SYMBOLS` example to zos_operator documentation (#729) * Update zos_operator with an additional example * Added change fragment --------- * Ported bugfixes for #664 and #678 * Updated file normalization for sequential datasets * Fixed encoding when a remote source is used * Added encoding normalization for record length (#664) This is a port and tweak from the normalization found in the L2 support branch. Now the normalization applies for both local and remote source files. There is still work left to validate how this behaves when copying to partitioned datasets, and see if the normalization needs to handle directories as well. * Refactored normalization for USS files * Updated line endings normalization in PDSEs * Updated create_temp_with_lf_endings * Added tests for line endings normalization * Added changelog fragment * Add copy lib member test case * Corrected changelog fragment * Add conflict resolution * Update 641-copy-loadlib-member.yml * zos_copy forward port a test case for symbols on a volume (#739) * Add a test case for symbols using an existing volume on our test system * Add changelog fragment * Update 739-zos_copy-volume-symbol-test.yml Update description to use symbols overs special chars. * Update test case with added comments --------- * bring in updates to zos_gather_facts RETURN docstring (#744) * Solve problem 619 about set mode on dest file (#746) * Solve problem 619 about set mode on dest file * Add fragment of 746 PR --------- * Merge release tasks for v1.5.0 into release branch (#752) * Updated metadata * Updated copyright information * Updated copyright in README * Generated changelog * Added previous versions to changelog * Changed a minor_change fragment to bugfix * Updated release notes * Update dependency finder to allow for all tests minus skip tests * Update release notes and changelog with full date formats --------- * remove changelog fragments no longer needed (#754) --------- Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: Ivan Moreno <iamorenosoto@gmail.com> Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> Co-authored-by: Rich Parker <richp405@gmail.com> Co-authored-by: ketankelkar <ktnklkr@gmail.com> Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> --- CHANGELOG.rst | 117 +++++++---- README.md | 4 +- changelogs/.plugin-cache.yaml | 2 +- changelogs/changelog.yaml | 183 +++++++++++++++++- docs/source/release_notes.rst | 67 ++++++- galaxy.yml | 4 +- meta/ibm_zos_core_meta.yml | 2 +- meta/runtime.yml | 2 +- plugins/module_utils/encode.py | 2 +- plugins/module_utils/job.py | 2 +- plugins/modules/zos_gather_facts.py | 2 +- tests/dependencyfinder.py | 32 +++ .../modules/test_zos_job_submit_func.py | 2 +- 13 files changed, 366 insertions(+), 55 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 55555d11c..98cab36f3 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -5,19 +5,18 @@ ibm.ibm_zos_core Release Notes .. contents:: Topics -v1.5.0-beta.1 -============= +v1.5.0 +====== Release Summary --------------- -Release Date: '2022-11-17' +Release Date: '2023-04-21' This changelog describes all changes made to the modules and plugins included in this collection. The release date is the date the changelog is created. For additional details such as required dependencies and availability review the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__ - Major Changes ------------- @@ -66,22 +65,35 @@ Deprecated Features Bugfixes -------- +- zos_copy - Copy failed from a loadlib member to another loadlib member. Fix now looks for error in stdout in the if statement to use -X option. (https://github.com/ansible-collections/ibm_zos_core/pull/641) +- zos_copy - Fixed a bug where the module would change the mode for a directory when copying into it the contents of another. (https://github.com/ansible-collections/ibm_zos_core/pull/746) - zos_copy - Fixes a bug such that the module fails when copying files from a directory needing also to be encoded. The failure would also delete the `src` which was not desirable behavior. Fixes deletion of src on encoding error. (https://github.com/ansible-collections/ibm_zos_core/pull/321). -- zos_copy - Fixes wrongful creation of destination backups when module option `force` is true, creating emergency backups meant to restore the system to its initial state in case of a module failure only when force is false. (https://github.com/ansible-collections/ibm_zos_core/pull/590) - zos_copy - Fixes a bug where copying a member from a loadlib to another loadlib fails. (https://github.com/ansible-collections/ibm_zos_core/pull/640) +- zos_copy - Fixes a bug where files not encoded in IBM-1047 would trigger an error while computing the record length for a new destination dataset. Issue 664. (https://github.com/ansible-collections/ibm_zos_core/pull/725) - zos_copy - Fixes a bug where if a destination has accented characters in its content, the module would fail when trying to determine if it is empty. (https://github.com/ansible-collections/ibm_zos_core/pull/634) +- zos_copy - Fixes a bug where the code for fixing an issue with newlines in files (issue 599) would use the wrong encoding for normalization. Issue 678. (https://github.com/ansible-collections/ibm_zos_core/pull/725) - zos_copy - Fixes a bug where the computed record length for a new destination dataset would include newline characters. (https://github.com/ansible-collections/ibm_zos_core/pull/620) +- zos_copy - Fixes wrongful creation of destination backups when module option `force` is true, creating emergency backups meant to restore the system to its initial state in case of a module failure only when force is false. (https://github.com/ansible-collections/ibm_zos_core/pull/590) - zos_copy - module was updated to correct a bug in the case when the destination (dest) is a PDSE and the source (src) is a Unix Systems File (USS). The module would fail in determining if the PDSE actually existed and try to create it when it already existed resulting in an error that would prevent the module from correctly executing. (https://github.com/ansible-collections/ibm_zos_core/pull/327) - zos_data_set - Fixes a bug such that the module will delete a catalogued data set over an uncatalogued data set even though the volume is provided for the uncataloged data set. This is unexpected behavior and does not align to documentation; correct behavior is that when a volume is provided that is the first place the module should look for the data set, whether or not it is cataloged. (https://github.com/ansible-collections/ibm_zos_core/pull/325). -- zos_data_set - Fixes a bug where the default record format FB was actually never enforced and when enforced it would cause VSAM creation to fail with a Dynalloc failure. This also cleans up some of the options that are set by default when they have no bearing for batch. (https://github.com/ansible-collections/ibm_zos_core/pull/647) +- zos_data_set - Fixes a bug where the default record format FB was actually never enforced and when enforced it would cause VSAM creation to fail with a Dynalloc failure. Also cleans up some of the options that are set by default when they have no bearing for batch. (https://github.com/ansible-collections/ibm_zos_core/pull/647) - zos_fetch - Updates the modules behavior when fetching VSAM data sets such that the maximum record length is now determined when creating a temporary data set to copy the VSAM data into and a variable-length (VB) data set is used. (https://github.com/ansible-collections/ibm_zos_core/pull/350) - zos_job_output - Fixes a bug that returned all ddname's when a specific ddnamae was provided. Now a specific ddname can be returned and all others ignored. (https://github.com/ansible-collections/ibm_zos_core/pull/334) - zos_job_query - was updated to correct a boolean condition that always evaluated to "CANCELLED". (https://github.com/ansible-collections/ibm_zos_core/pull/312). +- zos_job_submit - Fixes the issue when `wait_time_s` was set to 0 that would result in a `type` error that a stack trace would result in the response, issue 670. (https://github.com/ansible-collections/ibm_zos_core/pull/683) +- zos_job_submit - Fixes the issue when a job encounters a security exception no job log would would result in the response, issue 684. (https://github.com/ansible-collections/ibm_zos_core/pull/683) +- zos_job_submit - Fixes the issue when a job is configured for a syntax check using TYPRUN=SCAN that it would wait the full duration set by `wait_time_s` to return a response, issue 685. (https://github.com/ansible-collections/ibm_zos_core/pull/683) +- zos_job_submit - Fixes the issue when a job is configured for a syntax check using TYPRUN=SCAN that no job log would result in the response, issue 685. (https://github.com/ansible-collections/ibm_zos_core/pull/683) +- zos_job_submit - Fixes the issue when a job is purged by the system that a stack trace would result in the response, issue 681. (https://github.com/ansible-collections/ibm_zos_core/pull/683) +- zos_job_submit - Fixes the issue when invalid JCL syntax is submitted that a stack trace would result in the response, issue 623. (https://github.com/ansible-collections/ibm_zos_core/pull/683) +- zos_job_submit - Fixes the issue when resources (data sets) identified in JCL did not exist such that a stack trace would result in the response, issue 624. (https://github.com/ansible-collections/ibm_zos_core/pull/683) +- zos_job_submit - Fixes the issue where the response did not include the job log when a non-zero return code would occur, issue 655. (https://github.com/ansible-collections/ibm_zos_core/pull/683) - zos_mount - Fixes option `tag_ccsid` to correctly allow for type int. (https://github.com/ansible-collections/ibm_zos_core/pull/511) - zos_mvs_raw - module was updated to correct a bug when no DD statements were provided. The module when no option was provided for `dds` would error, a default was provided to correct this behavior. (https://github.com/ansible-collections/ibm_zos_core/pull/336) - zos_operator - Fixes case sensitive error checks, invalid, error & unidentifiable (https://github.com/ansible-collections/ibm_zos_core/issues/389). - zos_operator - Fixes such that specifying wait_time_s would throw an error (https://github.com/ansible-collections/ibm_zos_core/issues/389). - zos_operator - Fixes the wait_time_s to default to 1 second (https://github.com/ansible-collections/ibm_zos_core/issues/389). +- zos_operator - fixed incorrect example descriptions and updated the doc to highlight the deprecated option `wait`. (https://github.com/ansible-collections/ibm_zos_core/pull/648) - zos_operator - was updated to correct missing verbosity content when the option verbose was set to True. zos_operator - was updated to correct the trailing lines that would appear in the result content. (https://github.com/ansible-collections/ibm_zos_core/pull/400). New Modules @@ -89,50 +101,41 @@ New Modules - ibm.ibm_zos_core.zos_gather_facts - Gather z/OS system facts. -v1.4.0-beta.2 -============= +v1.4.1 +====== Release Summary --------------- -Release Date: '2022-10-17' +Release Date: '2023-04-18' This changelog describes all changes made to the modules and plugins included in this collection. The release date is the date the changelog is created. For additional details such as required dependencies and availability review the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__ -Minor Changes -------------- - -- zos_copy - enhanced the force option when `force=true` and the remote file or data set `dest` is NOT empty, the `dest` will be deleted and recreated with the `src` data set attributes, otherwise it will be recreated with the `dest` data set attributes. (https://github.com/ansible-collections/ibm_zos_core/pull/306) -- zos_copy - fixes a bug that when a directory is copied from the controller to the managed node and a mode is set, the mode is applied to the directory on the managed node. If the directory being copied contains files and mode is set, mode will only be applied to the files being copied not the pre-existing files. (https://github.com/ansible-collections/ibm_zos_core/pull/306) -- zos_copy - fixes a bug where options were not defined in the module argument spec that will result in error when running `ansible-core` v2.11 and using options `force` or `mode`. (https://github.com/ansible-collections/ibm_zos_core/pull/496) -- zos_copy - introduced an updated creation policy referred to as precedence rules such that if `dest_data_set` is set, this will take precedence. If `dest` is an empty data set, the empty data set will be written with the expectation its attributes satisfy the copy. If no precedent rule has been exercised, `dest` will be created with the same attributes of `src`. (https://github.com/ansible-collections/ibm_zos_core/pull/306) -- zos_copy - introduced new computation capabilities such that if `dest` is a nonexistent data set, the attributes assigned will depend on the type of `src`. If `src` is a USS file, `dest` will have a Fixed Block (FB) record format and the remaining attributes will be computed. If `src` is binary, `dest` will have a Fixed Block (FB) record format with a record length of 80, block size of 32760, and the remaining attributes will be computed. (https://github.com/ansible-collections/ibm_zos_core/pull/306) -- zos_copy - option `dest_dataset` has been deprecated and removed in favor of the new option `dest_data_set`. (https://github.com/ansible-collections/ibm_zos_core/pull/306) -- zos_copy - was enhanced for when `src` is a directory and ends with "/", the contents of it will be copied into the root of `dest`. It it doesn't end with "/", the directory itself will be copied. (https://github.com/ansible-collections/ibm_zos_core/pull/496) - Bugfixes -------- -- zos_copy - fixes a bug that did not create a data set on the specified volume. (https://github.com/ansible-collections/ibm_zos_core/pull/306) -- zos_copy - fixes a bug where a number of attributes were not an option when using `dest_data_set`. (https://github.com/ansible-collections/ibm_zos_core/pull/306) -- zos_job_output - fixes a bug that returned all ddname's when a specific ddname was provided. Now a specific ddname can be returned and all others ignored. (https://github.com/ansible-collections/ibm_zos_core/pull/507) -- zos_mount - fixed option `tag_ccsid` to correctly allow for type int. (https://github.com/ansible-collections/ibm_zos_core/pull/502) -- zos_operator - enhanced to allow for MVS operator `SET` command, `SET` is equivalent to the abbreviated `T` command. (https://github.com/ansible-collections/ibm_zos_core/pull/501) +- zos_copy - Copy failed from a loadlib member to another loadlib member. Fix now looks for error in stdout in the if statement to use -X option. (https://github.com/ansible-collections/ibm_zos_core/pull/640) +- zos_copy - Fixed a bug where the module would change the mode for a directory when copying into it the contents of another. (https://github.com/ansible-collections/ibm_zos_core/pull/742) +- zos_copy - Fixes a bug where files not encoded in IBM-1047 would trigger an error while computing the record length for a new destination dataset. Issue 664. (https://github.com/ansible-collections/ibm_zos_core/pull/732) +- zos_copy - Fixes a bug where the code for fixing an issue with newlines in files (issue 599) would use the wrong encoding for normalization. Issue 678. (https://github.com/ansible-collections/ibm_zos_core/pull/732) +- zos_copy - fixed wrongful creation of destination backups when module option `force` is true, creating emergency backups meant to restore the system to its initial state in case of a module failure only when force is false. (https://github.com/ansible-collections/ibm_zos_core/pull/590) +- zos_copy - fixes a bug where the computed record length for a new destination dataset would include newline characters. (https://github.com/ansible-collections/ibm_zos_core/pull/620) +- zos_job_query - fixes a bug where a boolean was not being properly compared. (https://github.com/ansible-collections/ibm_zos_core/pull/379) -v1.4.0-beta.1 -============= +v1.4.0 +====== Release Summary --------------- -Release Date: '2021-06-23' -This changlelog describes all changes made to the modules and plugins included -in this collection. -For additional details such as required dependencies and availablity review -the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__ +Release Date: '2022-12-07' +This changelog describes all changes made to the modules and plugins included +in this collection. The release date is the date the changelog is created. +For additional details such as required dependencies and availability review +the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__ Major Changes @@ -150,6 +153,20 @@ Major Changes - zos_operator_action_query response messages were improved with more diagnostic information in the event an error is encountered. - zos_ping was updated to remove the need for the zos_ssh connection plugin dependency. +Minor Changes +------------- + +- zos_copy - enhanced the force option when `force=true` and the remote file or data set `dest` is NOT empty, the `dest` will be deleted and recreated with the `src` data set attributes, otherwise it will be recreated with the `dest` data set attributes. (https://github.com/ansible-collections/ibm_zos_core/pull/306) +- zos_copy - enhanced to optimize how it captures the permission bits state for the `dest`. This change now reviews the source files instead of traversing the entire `dest` path. (https://github.com/ansible-collections/ibm_zos_core/pull/561) +- zos_copy - enhanced to support creating a parent directory when it does not exist in the `dest` path. Prior to this change, if a parent directory anywhere in the path did not exist the task would fail as it was stated in documentation. (https://github.com/ansible-collections/ibm_zos_core/pull/561) +- zos_copy - enhanced to support system symbols in PARMLIB. System symbols are elements that allow different z/OS® systems to share PARMLIB definitions while retaining unique values in those definitions. This was fixed in a future release through the use of one of the ZOAU dependency but this version of `ibm_zos_core` does not support that dependency version so this support was added. (https://github.com/ansible-collections/ibm_zos_core/pull/566) +- zos_copy - fixes a bug that when a directory is copied from the controller to the managed node and a mode is set, the mode is applied to the directory on the managed node. If the directory being copied contains files and mode is set, mode will only be applied to the files being copied not the pre-existing files. (https://github.com/ansible-collections/ibm_zos_core/pull/306) +- zos_copy - fixes a bug where options were not defined in the module argument spec that will result in error when running `ansible-core` v2.11 and using options `force` or `mode`. (https://github.com/ansible-collections/ibm_zos_core/pull/496) +- zos_copy - introduced an updated creation policy referred to as precedence rules such that if `dest_data_set` is set, this will take precedence. If `dest` is an empty data set, the empty data set will be written with the expectation its attributes satisfy the copy. If no precedent rule has been exercised, `dest` will be created with the same attributes of `src`. (https://github.com/ansible-collections/ibm_zos_core/pull/306) +- zos_copy - introduced new computation capabilities such that if `dest` is a nonexistent data set, the attributes assigned will depend on the type of `src`. If `src` is a USS file, `dest` will have a Fixed Block (FB) record format and the remaining attributes will be computed. If `src` is binary, `dest` will have a Fixed Block (FB) record format with a record length of 80, block size of 32760, and the remaining attributes will be computed. (https://github.com/ansible-collections/ibm_zos_core/pull/306) +- zos_copy - option `dest_dataset` has been deprecated and removed in favor of the new option `dest_data_set`. (https://github.com/ansible-collections/ibm_zos_core/pull/306) +- zos_copy - was enhanced for when `src` is a directory and ends with "/", the contents of it will be copied into the root of `dest`. It it doesn't end with "/", the directory itself will be copied. (https://github.com/ansible-collections/ibm_zos_core/pull/496) + Deprecated Features ------------------- @@ -160,7 +177,12 @@ Deprecated Features Bugfixes -------- +- zos_copy - fixes a bug that did not create a data set on the specified volume. (https://github.com/ansible-collections/ibm_zos_core/pull/306) +- zos_copy - fixes a bug where a number of attributes were not an option when using `dest_data_set`. (https://github.com/ansible-collections/ibm_zos_core/pull/306) +- zos_job_output - fixes a bug that returned all ddname's when a specific ddname was provided. Now a specific ddname can be returned and all others ignored. (https://github.com/ansible-collections/ibm_zos_core/pull/507) - zos_job_output was updated to correct possible truncated responses for the ddname content. This would occur for jobs with very large amounts of content from a ddname. +- zos_mount - fixed option `tag_ccsid` to correctly allow for type int. (https://github.com/ansible-collections/ibm_zos_core/pull/502) +- zos_operator - enhanced to allow for MVS operator `SET` command, `SET` is equivalent to the abbreviated `T` command. (https://github.com/ansible-collections/ibm_zos_core/pull/501) - zos_ssh - connection plugin was updated to correct a bug in Ansible that would result in playbook task retries overriding the SSH connection retries. This is resolved by renaming the zos_ssh option @@ -175,7 +197,36 @@ New Modules - ibm.ibm_zos_core.zos_mount - Mount a z/OS file system. -v1.3.4 +v1.3.6 +====== + +Release Summary +--------------- + +Release Date: '2022-10-07' +This changelog describes all changes made to the modules and plugins included +in this collection. The release date is the date the changelog is created. +For additional details such as required dependencies and availability review +the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__ + + +Minor Changes +------------- + +- zos_copy - was enhanced for when `src` is a directory and ends with "/", the contents of it will be copied into the root of `dest`. If it doesn't end with "/", the directory itself will be copied. (https://github.com/ansible-collections/ibm_zos_core/pull/515) + +Bugfixes +-------- + +- jobs.py - fixes a utility used by module `zos_job_output` that would truncate the DD content. (https://github.com/ansible-collections/ibm_zos_core/pull/462) +- zos_copy - fixes a bug that when a directory is copied from the controller to the managed node and a mode is set, the mode is now applied to the directory on the controller. If the directory being copied contains files and mode is set, mode will only be applied to the files being copied not the pre-existing files.(https://github.com/ansible-collections/ibm_zos_core/pull/462) +- zos_copy - fixes a bug where options were not defined in the module argument spec that will result in error when running `ansible-core` 2.11 and using options `force` or `mode`. (https://github.com/ansible-collections/ibm_zos_core/pull/462) +- zos_fetch - fixes a bug where an option was not defined in the module argument spec that will result in error when running `ansible-core` 2.11 and using option `encoding`. (https://github.com/ansible-collections/ibm_zos_core/pull/462) +- zos_job_submit - fixes a bug where an option was not defined in the module argument spec that will result in error when running `ansible-core` 2.11 and using option `encoding`. (https://github.com/ansible-collections/ibm_zos_core/pull/462) +- zos_ssh - fixes connection plugin which will error when using `ansible-core` 2.11 with an `AttributeError module 'ansible.constants' has no attribute 'ANSIBLE_SSH_CONTROL_PATH_DIR'`. (https://github.com/ansible-collections/ibm_zos_core/pull/462) +- zos_ssh - fixes connection plugin which will error when using `ansible-core` 2.11 with an `AttributeError module 'ansible.constants' has no attribute 'ANSIBLE_SSH_CONTROL_PATH_DIR'`. (https://github.com/ansible-collections/ibm_zos_core/pull/513) + +v1.3.5 ====== Release Summary diff --git a/README.md b/README.md index 746b27f4d..d6505759b 100644 --- a/README.md +++ b/README.md @@ -49,11 +49,11 @@ and ansible-doc to automate tasks on z/OS. Ansible version compatibility ============================= -This collection has been tested against the following Ansible versions: >=2.9,<=2.14.1. +This collection has been tested against the following Ansible versions: >=2.9,<2.15. Copyright ========= -© Copyright IBM Corporation 2020-2021. +© Copyright IBM Corporation 2020-2023. License ======= diff --git a/changelogs/.plugin-cache.yaml b/changelogs/.plugin-cache.yaml index 1f8b2439a..e2cdc5634 100644 --- a/changelogs/.plugin-cache.yaml +++ b/changelogs/.plugin-cache.yaml @@ -109,4 +109,4 @@ plugins: shell: {} strategy: {} vars: {} -version: 1.5.0-beta.1 +version: 1.5.0 diff --git a/changelogs/changelog.yaml b/changelogs/changelog.yaml index f843ea577..0e5580863 100644 --- a/changelogs/changelog.yaml +++ b/changelogs/changelog.yaml @@ -232,7 +232,7 @@ releases: - v1.3.3_summary.yml - v1.3.3_summary_bugs.yml release_date: '2022-06-07' - 1.3.4: + 1.3.5: changes: bugfixes: - "zos_ssh - connection plugin was updated to correct a bug in Ansible that\n @@ -251,6 +251,79 @@ releases: - 328-rename-retries-to-reconnection_retries.yml - v1.3.4_summary.yml release_date: '2022-06-07' + 1.3.6: + changes: + bugfixes: + - jobs.py - fixes a utility used by module `zos_job_output` that would truncate + the DD content. (https://github.com/ansible-collections/ibm_zos_core/pull/462) + - zos_copy - fixes a bug that when a directory is copied from the controller + to the managed node and a mode is set, the mode is now applied to the directory + on the controller. If the directory being copied contains files and mode is + set, mode will only be applied to the files being copied not the pre-existing + files.(https://github.com/ansible-collections/ibm_zos_core/pull/462) + - zos_copy - fixes a bug where options were not defined in the module argument + spec that will result in error when running `ansible-core` 2.11 and using + options `force` or `mode`. (https://github.com/ansible-collections/ibm_zos_core/pull/462) + - zos_fetch - fixes a bug where an option was not defined in the module argument + spec that will result in error when running `ansible-core` 2.11 and using + option `encoding`. (https://github.com/ansible-collections/ibm_zos_core/pull/462) + - zos_job_submit - fixes a bug where an option was not defined in the module + argument spec that will result in error when running `ansible-core` 2.11 and + using option `encoding`. (https://github.com/ansible-collections/ibm_zos_core/pull/462) + - zos_ssh - fixes connection plugin which will error when using `ansible-core` + 2.11 with an `AttributeError module 'ansible.constants' has no attribute 'ANSIBLE_SSH_CONTROL_PATH_DIR'`. + (https://github.com/ansible-collections/ibm_zos_core/pull/462) + - zos_ssh - fixes connection plugin which will error when using `ansible-core` + 2.11 with an `AttributeError module 'ansible.constants' has no attribute 'ANSIBLE_SSH_CONTROL_PATH_DIR'`. + (https://github.com/ansible-collections/ibm_zos_core/pull/513) + minor_changes: + - zos_copy - was enhanced for when `src` is a directory and ends with "/", the + contents of it will be copied into the root of `dest`. If it doesn't end with + "/", the directory itself will be copied. (https://github.com/ansible-collections/ibm_zos_core/pull/515) + release_summary: "Release Date: '2022-10-07'\nThis changelog describes all changes + made to the modules and plugins included\nin this collection. The release + date is the date the changelog is created.\nFor additional details such as + required dependencies and availability review\nthe collections `release notes + <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__ + \n" + fragments: + - 462-copy-fetch-submit-utils.yml + - 513-zos_ssh-support-ansible-2.11.yml + - 515-copy-support-directories.yml + - v1.3.6_summary.yml + release_date: '2022-10-07' + 1.4.0: + changes: + minor_changes: + - zos_copy - enhanced to optimize how it captures the permission bits state + for the `dest`. This change now reviews the source files instead of traversing + the entire `dest` path. (https://github.com/ansible-collections/ibm_zos_core/pull/561) + - zos_copy - enhanced to support creating a parent directory when it does not + exist in the `dest` path. Prior to this change, if a parent directory anywhere + in the path did not exist the task would fail as it was stated in documentation. + (https://github.com/ansible-collections/ibm_zos_core/pull/561) + - "zos_copy - enhanced to support system symbols in PARMLIB. System symbols + are elements that allow different z/OS\xAE systems to share PARMLIB definitions + while retaining unique values in those definitions. This was fixed in a future + release through the use of one of the ZOAU dependency but this version of + `ibm_zos_core` does not support that dependency version so this support was + added. (https://github.com/ansible-collections/ibm_zos_core/pull/566)" + release_summary: 'Release Date: ''2022-12-07'' + + This changelog describes all changes made to the modules and plugins included + + in this collection. The release date is the date the changelog is created. + + For additional details such as required dependencies and availability review + + the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__ + + ' + fragments: + - 561-update-directory-create.yml + - 566-update-with-symbol-support.yml + - v1.4.0_summary.yml + release_date: '2022-12-07' 1.4.0-beta.1: changes: bugfixes: @@ -378,6 +451,102 @@ releases: - 507-display-specific-ddname.yml - v1.4.0-beta.2_summary.yml release_date: '2022-10-13' + 1.4.1: + changes: + bugfixes: + - zos_copy - Copy failed from a loadlib member to another loadlib member. Fix + now looks for error in stdout in the if statement to use -X option. (https://github.com/ansible-collections/ibm_zos_core/pull/640) + - zos_copy - Fixed a bug where the module would change the mode for a directory + when copying into it the contents of another. (https://github.com/ansible-collections/ibm_zos_core/pull/742) + - zos_copy - Fixes a bug where files not encoded in IBM-1047 would trigger an + error while computing the record length for a new destination dataset. Issue + 664. (https://github.com/ansible-collections/ibm_zos_core/pull/732) + - zos_copy - Fixes a bug where the code for fixing an issue with newlines in + files (issue 599) would use the wrong encoding for normalization. Issue 678. + (https://github.com/ansible-collections/ibm_zos_core/pull/732) + - zos_copy - fixed wrongful creation of destination backups when module option + `force` is true, creating emergency backups meant to restore the system to + its initial state in case of a module failure only when force is false. (https://github.com/ansible-collections/ibm_zos_core/pull/590) + - zos_copy - fixes a bug where the computed record length for a new destination + dataset would include newline characters. (https://github.com/ansible-collections/ibm_zos_core/pull/620) + - zos_job_query - fixes a bug where a boolean was not being properly compared. + (https://github.com/ansible-collections/ibm_zos_core/pull/379) + release_summary: 'Release Date: ''2023-04-18'' + + This changelog describes all changes made to the modules and plugins included + + in this collection. The release date is the date the changelog is created. + + For additional details such as required dependencies and availability review + + the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__ + + ' + fragments: + - 579-zos-query-boolean-correction.yml + - 588-update-emergency-backup.yml + - 599-copy-carriage-return.yml + - 601-copy-loadlib-member.yml + - 728-zos_operator-example-updates.yml + - 732-zos_copy-encoding-bugs.yml + - 742_zos_copy-mode-is-applied-to-the-destination-directory-a-deviation-from-the-communtiy-module-behavior.yaml + - v1.4.1_summary.yml + release_date: '2023-04-18' + 1.5.0: + changes: + bugfixes: + - zos_copy - Copy failed from a loadlib member to another loadlib member. Fix + now looks for error in stdout in the if statement to use -X option. (https://github.com/ansible-collections/ibm_zos_core/pull/641) + - zos_copy - Fixed a bug where the module would change the mode for a directory + when copying into it the contents of another. (https://github.com/ansible-collections/ibm_zos_core/pull/746) + - zos_copy - Fixes a bug where files not encoded in IBM-1047 would trigger an + error while computing the record length for a new destination dataset. Issue + 664. (https://github.com/ansible-collections/ibm_zos_core/pull/725) + - zos_copy - Fixes a bug where the code for fixing an issue with newlines in + files (issue 599) would use the wrong encoding for normalization. Issue 678. + (https://github.com/ansible-collections/ibm_zos_core/pull/725) + - zos_job_submit - Fixes the issue when `wait_time_s` was set to 0 that would + result in a `type` error that a stack trace would result in the response, + issue 670. (https://github.com/ansible-collections/ibm_zos_core/pull/683) + - zos_job_submit - Fixes the issue when a job encounters a security exception + no job log would would result in the response, issue 684. (https://github.com/ansible-collections/ibm_zos_core/pull/683) + - zos_job_submit - Fixes the issue when a job is configured for a syntax check + using TYPRUN=SCAN that it would wait the full duration set by `wait_time_s` + to return a response, issue 685. (https://github.com/ansible-collections/ibm_zos_core/pull/683) + - zos_job_submit - Fixes the issue when a job is configured for a syntax check + using TYPRUN=SCAN that no job log would result in the response, issue 685. + (https://github.com/ansible-collections/ibm_zos_core/pull/683) + - zos_job_submit - Fixes the issue when a job is purged by the system that a + stack trace would result in the response, issue 681. (https://github.com/ansible-collections/ibm_zos_core/pull/683) + - zos_job_submit - Fixes the issue when invalid JCL syntax is submitted that + a stack trace would result in the response, issue 623. (https://github.com/ansible-collections/ibm_zos_core/pull/683) + - zos_job_submit - Fixes the issue when resources (data sets) identified in + JCL did not exist such that a stack trace would result in the response, issue + 624. (https://github.com/ansible-collections/ibm_zos_core/pull/683) + - zos_job_submit - Fixes the issue where the response did not include the job + log when a non-zero return code would occur, issue 655. (https://github.com/ansible-collections/ibm_zos_core/pull/683) + - zos_operator - fixed incorrect example descriptions and updated the doc to + highlight the deprecated option `wait`. (https://github.com/ansible-collections/ibm_zos_core/pull/648) + release_summary: 'Release Date: ''2023-04-21'' + + This changelog describes all changes made to the modules and plugins included + + in this collection. The release date is the date the changelog is created. + + For additional details such as required dependencies and availability review + + the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__' + fragments: + - 641-copy-loadlib-member.yml + - 648-zos_operator-examples.yml + - 663-zos_gather_facts-update-docstring.yml + - 683-zos_job_submit-bugs.yml + - 725-zos_copy-encoding-bugs.yml + - 729-zos_operator-example-added.yml + - 739-zos_copy-volume-symbol-test.yml + - 746--Mode-set-for-files-is-applied-to-destination-directory.yml + - v1.5.0_summary.yml + release_date: '2023-04-21' 1.5.0-beta.1: changes: bugfixes: @@ -385,15 +554,15 @@ releases: a directory needing also to be encoded. The failure would also delete the `src` which was not desirable behavior. Fixes deletion of src on encoding error. (https://github.com/ansible-collections/ibm_zos_core/pull/321). - - zos_copy - Fixes wrongful creation of destination backups when module option - `force` is true, creating emergency backups meant to restore the system to - its initial state in case of a module failure only when force is false. (https://github.com/ansible-collections/ibm_zos_core/pull/590) - zos_copy - Fixes a bug where copying a member from a loadlib to another loadlib fails. (https://github.com/ansible-collections/ibm_zos_core/pull/640) - zos_copy - Fixes a bug where if a destination has accented characters in its content, the module would fail when trying to determine if it is empty. (https://github.com/ansible-collections/ibm_zos_core/pull/634) - zos_copy - Fixes a bug where the computed record length for a new destination dataset would include newline characters. (https://github.com/ansible-collections/ibm_zos_core/pull/620) + - zos_copy - Fixes wrongful creation of destination backups when module option + `force` is true, creating emergency backups meant to restore the system to + its initial state in case of a module failure only when force is false. (https://github.com/ansible-collections/ibm_zos_core/pull/590) - zos_copy - module was updated to correct a bug in the case when the destination (dest) is a PDSE and the source (src) is a Unix Systems File (USS). The module would fail in determining if the PDSE actually existed and try to create it @@ -406,8 +575,8 @@ releases: the module should look for the data set, whether or not it is cataloged. (https://github.com/ansible-collections/ibm_zos_core/pull/325). - zos_data_set - Fixes a bug where the default record format FB was actually never enforced and when enforced it would cause VSAM creation to fail with - a Dynalloc failure. Also cleans up some of the options that are set by - default when they have no bearing for batch. (https://github.com/ansible-collections/ibm_zos_core/pull/647) + a Dynalloc failure. Also cleans up some of the options that are set by default + when they have no bearing for batch. (https://github.com/ansible-collections/ibm_zos_core/pull/647) - zos_fetch - Updates the modules behavior when fetching VSAM data sets such that the maximum record length is now determined when creating a temporary data set to copy the VSAM data into and a variable-length (VB) data set is @@ -515,7 +684,7 @@ releases: behaviors and reduces the possibility to encounter a permissions issue. (https://github.com/ansible-collections/ibm_zos_core/issues/389). - zos_job_submit - was updated to include an additional error code condition JCLERR. (https://github.com/ansible-collections/ibm_zos_core/pull/312) - - zos_lineinfile- updates the module with a new option named tmp_hlq. This allows + - zos_lineinfile - updates the module with a new option named tmp_hlq. This allows for a user to specify the data set high level qualifier (HLQ) used in any temporary data set created by the module. Often, the defaults are not permitted on systems, this provides a way to override the defaults. (https://github.com/ansible-collections/ibm_zos_core/pull/341). diff --git a/docs/source/release_notes.rst b/docs/source/release_notes.rst index 788f96b73..ab1e07e49 100644 --- a/docs/source/release_notes.rst +++ b/docs/source/release_notes.rst @@ -6,7 +6,7 @@ Releases ======== -Version 1.5.0-beta.1 +Version 1.5.0 ==================== New Modules @@ -61,7 +61,7 @@ Minor Changes Bugfixes -------- -- ``ibm_zos_copy`` +- ``zos_copy`` - fixes a bug such that the module fails when copying files from a directory needing also to be encoded. The failure would also delete the `src` which was not desirable behavior. Fixes deletion of src on encoding error. - module was updated to correct a bug in the case when the destination (dest) is a PDSE and the source (src) is a Unix Systems File (USS). The module would fail in determining if the PDSE actually existed and try to create it when it already existed resulting in an error that would prevent the module from correctly executing. @@ -69,6 +69,10 @@ Bugfixes - fixes a bug where if a destination has accented characters in its content, the module would fail when trying to determine if it is empty. - fixes a bug where copying a member from a loadlib to another loadlib fails. - fixed wrongful creation of destination backups when module option `force` is true, creating emergency backups meant to restore the system to its initial state in case of a module failure only when force is false. + - copy failed from a loadlib member to another loadlib member. Fix now looks for an error in stdout while copying to perform a fallback copy for executables. + - fixes a bug where the module would change the mode for a directory when copying into it the contents of another. + - fixes a bug where source files not encoded in IBM-1047 would trigger an encoding error while computing the record length for a new destination dataset. + - fixes a bug where the code for fixing an issue with newlines in files would use the wrong encoding for normalization. - ``zos_data_set`` - Fixes a bug such that the module will delete a catalogued data set over an uncatalogued data set even though the volume is provided for the uncataloged data set. This is unexpected behavior and does not align to documentation; correct behavior is that when a volume is provided that is the first place the module should look for the data set, whether or not it is cataloged. @@ -76,6 +80,16 @@ Bugfixes - ``zos_fetch`` - Updates the modules behavior when fetching VSAM data sets such that the maximum record length is now determined when creating a temporary data set to copy the VSAM data into and a variable-length (VB) data set is used. - ``zos_job_output`` - fixes a bug that returned all ddname's when a specific ddnamae was provided. Now a specific ddname can be returned and all others ignored. - ``zos_job_query`` - was updated to correct a boolean condition that always evaluated to "CANCELLED". +- ``zos_job_submit`` + + - fixes the issue when `wait_time_s` was set to 0 that would result in a `type` error and the response would be a stack trace. + - fixes the issue when a job encounters a security exception, no job log would would result in the response. + - fixes the issue when a job is configured for a syntax check using TYPRUN=SCAN that it would wait the full duration set by `wait_time_s` to return a response. + - fixes the issue when a job is configured for a syntax check using TYPRUN=SCAN that no job log would result in the response. + - fixes the issue when a job is purged by the system that the response would result in a stack trace. + - fixes the issue when invalid JCL syntax is submitted such that the response would result in a stack trace. + - fixes the issue when resources (data sets) identified in JCL did not exist such that a response would result in a stack trace. + - fixes the issue where the response did not include the job log when a non-zero return code would occur. - ``zos_mount`` - fixed option `tag_ccsid` to correctly allow for type int. - ``zos_mvs_raw`` - module was updated to correct a bug when no DD statements were provided. The module when no option was provided for `dds` would error, a default was provided to correct this behavior. - ``zos_operator`` @@ -84,6 +98,7 @@ Bugfixes - fixed such that specifying wait_time_s would throw an error. - fixed the wait_time_s to default to 1 second. - was updated to correct missing verbosity content when the option verbose was set to True. zos_operator - was updated to correct the trailing lines that would appear in the result content. + - fixed incorrect example descriptions and updated the doc to highlight the deprecated option `wait`. Deprecated Features ------------------- @@ -95,6 +110,7 @@ Deprecated Features Availability ------------ +* `Automation Hub`_ * `Galaxy`_ * `GitHub`_ @@ -104,7 +120,48 @@ Reference * Supported by `z/OS V2R3`_ or later * Supported by the `z/OS® shell`_ * Supported by `IBM Open Enterprise SDK for Python`_ `3.9`_ - `3.11`_ -* Supported by IBM `Z Open Automation Utilities 1.2.x`_ +* Supported by IBM `Z Open Automation Utilities 1.2.2`_ (or later) but prior to version 1.3. + +Version 1.4.1 +============= + +Bug fixes + +-------------------------- + +* ``zos_copy`` + + * Copy failed from a loadlib member to another loadlib member. Fix + now looks for error in stdout in the if statement to use -X option. + * Fixes a bug where files not encoded in IBM-1047 would trigger an + error while computing the record length for a new destination dataset. + * Fixes a bug where the code for fixing an issue with newlines in + files. + * fixed wrongful creation of destination backups when module option + `force` is true, creating emergency backups meant to restore the system to + its initial state in case of a module failure only when force is false. + * fixes a bug where the computed record length for a new destination + dataset would include newline characters. + +* ``zos_job_query`` + + * fixes a bug where a boolean was not being properly compared. + +Availability +------------ + +* `Automation Hub`_ +* `Galaxy`_ +* `GitHub`_ + +Reference +--------- + +* Supported by `z/OS V2R3`_ or later +* Supported by the `z/OS® shell`_ +* Supported by `IBM Open Enterprise SDK for Python`_ `3.9`_ +* Supported by IBM `Z Open Automation Utilities 1.1.0`_ and + `Z Open Automation Utilities 1.1.1`_ Version 1.4.0 ============= @@ -744,10 +801,10 @@ Reference .. _3.11: https://www.ibm.com/docs/en/python-zos/3.11 .. _Z Open Automation Utilities 1.1.0: - https://www.ibm.com/docs/en/zoau/1.1.0 + https://www.ibm.com/docs/en/zoau/1.1.x .. _Z Open Automation Utilities 1.1.1: https://www.ibm.com/docs/en/zoau/1.1.1 -.. _Z Open Automation Utilities 1.2.x: +.. _Z Open Automation Utilities 1.2.2: https://www.ibm.com/docs/en/zoau/1.2.x .. _z/OS® shell: https://www.ibm.com/support/knowledgecenter/en/SSLTBW_2.4.0/com.ibm.zos.v2r4.bpxa400/part1.htm diff --git a/galaxy.yml b/galaxy.yml index 14cca831b..8aaf403db 100644 --- a/galaxy.yml +++ b/galaxy.yml @@ -6,7 +6,7 @@ namespace: ibm name: ibm_zos_core # The collection version -version: 1.5.0-beta.1 +version: 1.5.0 # Collection README file readme: README.md @@ -18,6 +18,8 @@ authors: - Ketan Kelkar <ketan.kelkar@ibm.com> - Ivan Moreno <ivan.moreno.soto@ibm.com> - Oscar Fernando Flores Garcia<fernando.flores@ibm.com> + - Jenny Huang <jennyhuang@ibm.com> + - Marcel Guitierrez <andre.marcel.gutierrez@ibm.com> # Description description: The IBM z/OS core collection includes connection plugins, action plugins, modules, filters and ansible-doc to automate tasks on z/OS. diff --git a/meta/ibm_zos_core_meta.yml b/meta/ibm_zos_core_meta.yml index 1459bc478..c2aab577a 100644 --- a/meta/ibm_zos_core_meta.yml +++ b/meta/ibm_zos_core_meta.yml @@ -1,5 +1,5 @@ name: ibm_zos_core -version: "1.5.0-beta.1" +version: "1.5.0" managed_requirements: - name: "IBM Open Enterprise SDK for Python" diff --git a/meta/runtime.yml b/meta/runtime.yml index 43bbe4509..dbba1c7ce 100644 --- a/meta/runtime.yml +++ b/meta/runtime.yml @@ -1,2 +1,2 @@ --- -requires_ansible: '>=2.9' +requires_ansible: '>=2.9,<2.15' diff --git a/plugins/module_utils/encode.py b/plugins/module_utils/encode.py index cfcfd2bf0..a96bf46d5 100644 --- a/plugins/module_utils/encode.py +++ b/plugins/module_utils/encode.py @@ -1,4 +1,4 @@ -# Copyright (c) IBM Corporation 2020, 2022 +# Copyright (c) IBM Corporation 2020, 2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/plugins/module_utils/job.py b/plugins/module_utils/job.py index 6e37d5823..9af6260f4 100644 --- a/plugins/module_utils/job.py +++ b/plugins/module_utils/job.py @@ -1,4 +1,4 @@ -# Copyright (c) IBM Corporation 2019, 2020 +# Copyright (c) IBM Corporation 2019, 2020, 2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/plugins/modules/zos_gather_facts.py b/plugins/modules/zos_gather_facts.py index beff12cd2..a3475be11 100644 --- a/plugins/modules/zos_gather_facts.py +++ b/plugins/modules/zos_gather_facts.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2022 +# Copyright (c) IBM Corporation 2022, 2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/tests/dependencyfinder.py b/tests/dependencyfinder.py index 13f1e4391..fa24811ff 100755 --- a/tests/dependencyfinder.py +++ b/tests/dependencyfinder.py @@ -450,6 +450,28 @@ def build_artifacts_from_collection(collection_root): return artifacts +def get_all_tests(collection_root): + """Build a list of all test cases for when all tests need to be run + Args: + collection_root (str): The path to the root of the collection + Returns: + list[tests]: A list of test cases. + """ + + files = [] + files += get_all_files_in_dir_tree(collection_root + "/tests/unit") + files += get_all_files_in_dir_tree(collection_root + "/tests/functional") + + test_suites = [] + for file in files: + if file.endswith(".py"): + path, filename = os.path.split(file) + if filename.startswith('test'): + test_suites.append(file) + + return test_suites + + def get_all_files_in_dir_tree(base_path): """Recursively search subdirectories for files. @@ -620,6 +642,14 @@ def parse_arguments(): default=False, help="Detect only the changes from the branch request-pull.", ) + parser.add_argument( + "-a", + "--all", + required=False, + action="store_true", + default=False, + help="A list of all test cases minus any skipped tests.", + ) args = parser.parse_args() return args @@ -635,6 +665,8 @@ def parse_arguments(): if args.minimum: changed_files = get_changed_plugins(args.path, args.branch) + elif args.all: + changed_files = get_all_tests(args.path) else: changed_files = get_changed_files(args.path, args.branch) diff --git a/tests/functional/modules/test_zos_job_submit_func.py b/tests/functional/modules/test_zos_job_submit_func.py index 888281712..3364d12da 100644 --- a/tests/functional/modules/test_zos_job_submit_func.py +++ b/tests/functional/modules/test_zos_job_submit_func.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019, 2020 +# Copyright (c) IBM Corporation 2019, 2020, 2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at From 0e552400cf1c44db2e156d0f7b430f813e37546f Mon Sep 17 00:00:00 2001 From: Demetri <dimatos@gmail.com> Date: Fri, 28 Apr 2023 13:45:02 -0700 Subject: [PATCH 104/495] Merge master to dev for 1.6.0 beta.1 (#763) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Update branch production branch Main with release v1.5.0 content (#756) * Bug fix to correct job log returining and other edge cases (#688) * Update zos_job_submit to handle some edge cases and return the job log nearly always Signed-off-by: ddimatos <dimatos@gmail.com> * Linting correction Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragement for pr 683 Signed-off-by: ddimatos <dimatos@gmail.com> * Clean up comments in code Signed-off-by: ddimatos <dimatos@gmail.com> * Update restructured text for modules and changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Add support to check for security exception to job submit module Signed-off-by: ddimatos <dimatos@gmail.com> * Add additonal logic for jobs that use typerun=scan Signed-off-by: ddimatos <dimatos@gmail.com> * Update chnagelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Correct run on sentence Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Added additional `D SYMBOLS` example to zos_operator documentation (#729) * Update zos_operator with an additional example Signed-off-by: ddimatos <dimatos@gmail.com> * Added change fragment Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Ported bugfixes for #664 and #678 * Updated file normalization for sequential datasets * Fixed encoding when a remote source is used * Added encoding normalization for record length (#664) This is a port and tweak from the normalization found in the L2 support branch. Now the normalization applies for both local and remote source files. There is still work left to validate how this behaves when copying to partitioned datasets, and see if the normalization needs to handle directories as well. * Refactored normalization for USS files * Updated line endings normalization in PDSEs * Updated create_temp_with_lf_endings * Added tests for line endings normalization * Added changelog fragment * Add copy lib member test case * Corrected changelog fragment * Add conflict resolution * Update 641-copy-loadlib-member.yml * zos_copy forward port a test case for symbols on a volume (#739) * Add a test case for symbols using an existing volume on our test system Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update 739-zos_copy-volume-symbol-test.yml Update description to use symbols overs special chars. * Update test case with added comments Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * bring in updates to zos_gather_facts RETURN docstring (#744) Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * Solve problem 619 about set mode on dest file (#746) * Solve problem 619 about set mode on dest file * Add fragment of 746 PR --------- Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> * Merge release tasks for v1.5.0 into release branch (#752) * Updated metadata * Updated copyright information * Updated copyright in README * Generated changelog * Added previous versions to changelog * Changed a minor_change fragment to bugfix * Updated release notes * Update dependency finder to allow for all tests minus skip tests Signed-off-by: ddimatos <dimatos@gmail.com> * Update release notes and changelog with full date formats Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: ddimatos <dimatos@gmail.com> * remove changelog fragments no longer needed (#754) Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: Ivan Moreno <iamorenosoto@gmail.com> Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> Co-authored-by: Rich Parker <richp405@gmail.com> Co-authored-by: ketankelkar <ktnklkr@gmail.com> Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> * v1.6.0-beta.1 release (#759) * Added tagging of converted files * Updated tests for zos_encode * Added restoration of PDS/PDSE members when module fails zos_copy can now track which members got overwritten or newly added to a partitioned data set, and restore them accordingly. This commit includes a refactorization of the copy_to_pdse method to simplify the restoration work. * Fixed cleanup of dest when module fails * Removed exception used for debugging * Added pytest markers * solved pep8 issue * Added more information to error when restoration fails * Update make file with new features to manage the nodes (#563) * Update make file with new features to manage the nodes Signed-off-by: ddimatos <dimatos@gmail.com> * Correct typo Signed-off-by: ddimatos <dimatos@gmail.com> * updated makefile to allow for no password in the clear and added a new host Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Fix a bug that failed to copy the cert to the managed node (#577) Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Added apropiate error message in zos_lineinfile when src is not found * Better error message * Added fragment for changelog * Update git issue slection field (#593) * Update git issue slection field * Added some additional future versions * Added rule to ignore python 2.7 compile not supporting f strings * Corrected rule in ignore file * Update 584-zos_lineinfile-error-message.yml * Added missing fragments for issues 309 and 408 * update make file and related artifacts with recent zVM changes (#598) Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Expand what is ingored for venv to all venv's (#613) Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Ansible-core versions with updated ignore file pass (#615) * Validated that ansible-core versions with updated ignore file pass locally Signed-off-by: ddimatos <dimatos@gmail.com> * remove redundant entry on last line Signed-off-by: ddimatos <dimatos@gmail.com> * remove redundant entry on last line Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Update make file to read exported var VENV else default to 'venv' (#614) * Update make file to read exported var VENV else default to 'venv' Signed-off-by: ddimatos <dimatos@gmail.com> * Correct the default from defaultValue to 'venv' Signed-off-by: ddimatos <dimatos@gmail.com> * Bugfix/558/zos copy backup dev (#609) * Create emergency backup only if force is set to False and dest exists * Added Changelog fragment * Updated imports ansible.module_utils._text to ansible.module_utils.common.text.converters (#602) * Updated import in zos_fetch * Updated import in zos_job_submit * Updated import in module_utils/data_set * Updated import in module_utils/System * Updated import in zos_copy * Updated import in zos_fetch * Added changelog fragment * Update enhancement-518-text-converter-import.yml * Updates the makefile and related scripts for the latest mounts (#628) * Update makefile coments and use --ignore for pytest Signed-off-by: ddimatos <dimatos@gmail.com> * lexicographical order targets Signed-off-by: ddimatos <dimatos@gmail.com> * Update makefile and artifacts to support new mounts Signed-off-by: ddimatos <dimatos@gmail.com> * Removing encrypted versions of the scripts, no longer senstive content Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * update profile created for mount points Signed-off-by: ddimatos <dimatos@gmail.com> * Correct f-string usage to remain 2.7 compatible (#659) * Correct f-string usage to remain 2.7 compatible Signed-off-by: ddimatos <dimatos@gmail.com> * Update changelog fragments Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Updated shell scripts for development tooling Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Update collab template Signed-off-by: ddimatos <dimatos@gmail.com> * Update collab template Signed-off-by: ddimatos <dimatos@gmail.com> * Update collab template Signed-off-by: ddimatos <dimatos@gmail.com> * Template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Template updates Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * changed zos_job_query to allow multi/embedded wildcard job_values. job.py/_get_job_status now uses fnmatch to test for wildcarded job_names. added test looking for a wildcard job name. added basic changelog text as 'enhancement'. * expanded wildcard support to job_id as well as job_name corrected error-handling issue in query routine playbook tests complete * cleaned up pep8 issues * Bug fix to correct job log returining and other edge cases (#683) * Update zos_job_submit to handle some edge cases and return the job log nearly always Signed-off-by: ddimatos <dimatos@gmail.com> * Linting correction Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragement for pr 683 Signed-off-by: ddimatos <dimatos@gmail.com> * Clean up comments in code Signed-off-by: ddimatos <dimatos@gmail.com> * Update restructured text for modules and changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Add support to check for security exception to job submit module Signed-off-by: ddimatos <dimatos@gmail.com> * Add additonal logic for jobs that use typerun=scan Signed-off-by: ddimatos <dimatos@gmail.com> * Update chnagelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update zos_job_submit.py Grammar change and correction. --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Added uss_tag_encoding function * Fixing linter issues * removed extraneous comment on query, eliminated unused variable * responding to reviewer comments * Expanded query tests to run a hello jcl file to make sure we are searching for a job id that exists. * Expanded documentation in code to highlight asterisk availability. Added examples showing multi-asterisk search. * Corrected 2 documentation errors * Change to documentation text (indent on multi line string?) * Still trying to get documentation to pass * Looks like '---' was killing documentation block. * Update zos_blockinfile with complex examples (#727) * Update zos_blockinfile with complex examples Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragement Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Added additional D SYMBOLS example to zos_operator documentation (#730) * Update zos_operator with an additional example Signed-off-by: ddimatos <dimatos@gmail.com> * Added changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * zos_gather_facts - add sample output to RETURN docstring (#722) * zos_gather_facts - add sample output to RETURN docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add changelog fragment Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * zos_gather_facts RETURN docstring - add sample sysplex_name and lpar_name values Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * Update generated module doc Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: ddimatos <dimatos@gmail.com> * 574find (#668) * change to allow find loop to continue when one particular data set element is not found This has been validated with a zos-check-find.yaml playbook * added functional test that includes non-existant pds entry, which should be skipped over * added changelog fragment. * zos_blockinfile , can quotes in content can be supported (#680) * Verify coomand of ZOAU support the doble quotes and get better output of message * Verify coomand of ZOAU support the doble quotes and get better output of message * Restore to the one tyme function solving TypeError * Test about cases with quotes supported * Solve comments * Comments in the changelog * Adjust test for working accord the text * A needed space * All in structured * Comments solved * Better order --------- Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> Co-authored-by: Demetri <dimatos@gmail.com> * zos_copy mode is applied to the destination directory, a deviation from the communtiy module behavior. (#723) * Verify coomand of ZOAU support the doble quotes and get better output of message * Verify coomand of ZOAU support the doble quotes and get better output of message * Restore to the one tyme function solving TypeError * Test about cases with quotes supported * Solve comments * Comments in the changelog * Adjust test for working accord the text * Solve the dest functional mode set for the applied to destination directory * Identation and spaces * To work well * To work well * To work well * To work well * Changelogs added * Solved the fragment test and separte the cases --------- Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> * corrected job test case that wanted to extract job id. * changed call to zos_job_query in the functional test. * zos_data_set - add force parameter to enable member delete via disp shr (#718) * add force parameter to enable member delete via disp shr Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update link to PR instead of issue Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * fix minor mistake in func test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update DOCstring for missed items highlighted in PR review Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update copyright years in data set module util Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * Update changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update to doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> * Update to doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> * Update to doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> * update doc, rst Signed-off-by: ddimatos <dimatos@gmail.com> * Update doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> * Added handler for job not found edge cases (None not iterable errors) * corrected pep8 issue (bad indent) * removed tracking text from error/not found messages. * Update zos_job_query.py * Add and automate a load lib test case for module zos_copy (#640) * Modified if statement * Added changelog fragment * Corrected statements Corrected PR in changelog fix lint issue * Adding test case on the work * Added assertion to loadlib test Changed cobol src Added newlines * Correcting cobol src * Removed unnecessary comments * Removed deprint statements for debug * Update 601-copy-loadlib-member.yml * Reduce space primary * Make sure cobol is executable * corrected name * Corrected names * Adding debug statements * modified name in link step * Changing copy name * Removed print statements for debug * Removed previous changelog * Removed unused fragment * Removed test case * Add max_rc support for module zos_tso_command (#666) * rebuilding 565 * fixing pep8 issue * corrected not in code, added functional test to use maxrc * changing maxrc return codes as indicated * updated copy_rexx to incorporate max_rc * pep8 issue on an if statement * changed test to dump intermediate values temporarily * added max_rc to arg params to get it to pass through * removed extra output on test * Added changelog fragment. * Update 565-zos_tsocommand_maxrc.yml * PARTIAL correction based on reviewer's feedback. Will complete later today. * Partial push: change origrc to orig_rc and added documentation. Added experimental loop for multiline results. Changed error case to continue through the commands even after a failure (?!) * Changes based on review feedback: - rc is actual returned value - origrc/orig_rc removed - in a list of commands, first error (considering max_rc) stops the chain - while processing commands, a list of success/failures are kept for error state info - changed listds to a longer, more complete string in example * updated test case for max_rc to use longer, more complete listds command * Minor pep8 and yamllint changes * Updated string builder to use append and join, for efficiency. * Update changelog fragment to correspond to pull request number Signed-off-by: ddimatos <dimatos@gmail.com> * removed old fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Changed output of command-by-command to use max_rc instead of Max RC, for consistency. * clarifying failure message when user hasn't set a max_rc. * corrected date on copyright notice to include current year. * corrected copyright date on test case --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: Demetri <dimatos@gmail.com> * added changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * zos_copy forward port a test case for symbols on a volume #739 (#740) * added test case for symbols on a volume Signed-off-by: ddimatos <dimatos@gmail.com> * Changelog fragment added Signed-off-by: ddimatos <dimatos@gmail.com> * Update test case with added comments Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Volume Initialization Module (#654) * Adding ZOS_INIT plugins The module and action for ZOS_Init command, under zos_ickdsf_command. Designed for version 1.3.1, and example given references removed zos_ssh.py. * Create zos_ickdsf_command.rst Tentative documentation. Does not quite list every return, and program may change to work with overall zos core collection. * Update zos_ickdsf_command.py fixed to pass pep8 sanity test. * Update zos_ickdsf_command.py Edited zos_ickdsf_command.py in action folder to pass pep8 style check. * bugfix - undo hard-coded storagegroup param from command passed to mvs_raw Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update encoding and licensing info Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * edit descriptions, leaving in old to verify messaging with dfsms team Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove references to buildix Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * cleanup examples docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add return docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add some unit tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update dict names, some clean up Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update interface and docs with cleaner descriptions and define default values and required true/false where applicable Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove init param and bring out sub-options one level Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename module from zos_ickdsf_command to zos_ickdsf_init Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove files referring to zos_ickdsf_command; i forgot to untrack these in the previous commit Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove params from intitial definition of results dict which are not always present, remove unused return params, rename message to msg to match ibm_zos_core and ansible style Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add ignore missing gplv3 license warning from sanity testings to existing ignore files Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * style edits to pass sanity tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add in-depth task name to first example docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add minimal params functional test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * convert action plugin to module_util, remove IckdsfCommand and CommandInit class definitions and custom error classes, move convert method to module_util Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename rc param, update func test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add note about disabled verify_offline check Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * removed volume_address checks which can be handled by ansiblemodule parsing validation or ickdsf itself; changed function signature of convert method to include results dicts so it can pass it to fail_json method in the case of module failure Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add invalid volume_address func tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * change param names, organize mvscmd return output Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * consolidate negative func tests, add negative tests for vtoc_tracks param, start list for postive tests to implement Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add func tests for "verify_existing_volid" and "verify_no_data_sets_exist" Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add volid positive and negative func tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add mutual exclusion to no index/sms_managedw with func test, minor edits to comments * add func tests for index param Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add func test for vtoc_tracks param Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove 'addr_range', 'volid_prefix' and 'output_html' params; add example with ansible loop to init multiple serial volumes Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * convert 'content' return param to list for prettier playbook output Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add specific examples to docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add guard rail test with volser check set to ensure target volume is as expected Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * switch target test vol to user02, remove existing data sets in initial test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * start adding author names to docstring, edits of yaml for sanity test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename module to zos_volume_init Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove unit test suite which was not kept up to date with code restructure Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove old/outdated .rst file Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * bring up to speed with v1.5.0-beta.1 which is currently more recent than dev Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add sanity test 2.15 file Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * clean up trailing empty lines/space chars Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * more pep8 style Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * pull in more changes from latest dev branch Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * tweak tests/sanity/ignore-2.10.txt file Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename volume_address to address, rename verify_no_data_sets_exist to verify_volume_empty, rename verify_existing_volid to verify_volid, rename vtoc_tracks to vtoc_size, rename return param command to cmd, make suggested doc changes, switch off check_mode Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * resolve pep8 line too long issue Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add year to copyright, add gh user name to author, restyle true/false with code rather than italics, remove backslashes in docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add changelog fragment Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update zos_fetch test case to create data set instead of using one that gets wiped away by zos_volume_init test cases Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * address PR comments including pep8 styling, docstring changes including add seealso section, add details around authorized usage of ickdsf, some light wordsmithing, and remove cmd from return structure Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add backticks to other special chars in docs for consistency Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * on more formatting tweak to national chars in docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add new param to support tmp_hlq Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * extend tmphlq binding to sysprint(*), disable verbose mode for mvscmd call Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove references to betterargparser - it cannot be used in its current state as it is too restrictive on volume serial and qualifier args Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove soft language ie beware, please; reduce line lengths, fix a spelling error Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * reorganize test case code for more logical grouping and un-nesting from a for loop, also update copyright year that was missed Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update display command check to only check particular lines instead of entire output in test_volid_address_assigned_correctly which would always be positive when tested against the entire output Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: trevor-glassey <Trevor.Glassey@ibm.com> * Ported bugfixes for #664 and #678 from 1.5.0 (#743) * Ported bugfixes for #664 and #678 * Updated file normalization for sequential datasets * Fixed encoding when a remote source is used * Added encoding normalization for record length (#664) This is a port and tweak from the normalization found in the L2 support branch. Now the normalization applies for both local and remote source files. There is still work left to validate how this behaves when copying to partitioned datasets, and see if the normalization needs to handle directories as well. * Refactored normalization for USS files * Updated line endings normalization in PDSEs * Updated create_temp_with_lf_endings * Added tests for line endings normalization * Added changelog fragment * Updated changelog fragment * Updated copyright information * Added comments to record length tests * Added more context to the record length normalization * Add force (disp=shr) option to zos_lineinfile and update zos_blockinfile with the same locking test case. (#731) * Add DISP=SHR by function force to the user * Add helper function for force cases only for EOF inserts * Test cases added * Just to check in the change case * All test cases for force in zos_lineinfile working properly * Test cases for force blockinfile * Fixes for style of code * One identation error * One identation error * One identation error * Documentation error * All test cases for line infile and block infile * Comments * Change the stdout to assert with expected of ds * Error of test case * Fragment added * Solve typo errors * Update to remove uncessary comment Signed-off-by: ddimatos <dimatos@gmail.com> * Update changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update module with force comments Signed-off-by: ddimatos <dimatos@gmail.com> * Solve some test problems * Copyright year * Copyright year * Add cases for other ecodes * Add test for all encodings * Cover all encodes test cases with force * Blockinfile force added for other encodes and DS types --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@mbp-de-andre-2.gdl.mex.ibm.com> Co-authored-by: ddimatos <dimatos@gmail.com> * remove duplicate function (#753) * Update branch production branch Main with release v1.5.0 content (#756) (#758) * Bug fix to correct job log returining and other edge cases (#688) * Update zos_job_submit to handle some edge cases and return the job log nearly always * Linting correction * Add changelog fragement for pr 683 * Clean up comments in code * Update restructured text for modules and changelog fragment * Add support to check for security exception to job submit module * Add additonal logic for jobs that use typerun=scan * Update chnagelog fragment * Correct run on sentence --------- * Added additional `D SYMBOLS` example to zos_operator documentation (#729) * Update zos_operator with an additional example * Added change fragment --------- * Ported bugfixes for #664 and #678 * Updated file normalization for sequential datasets * Fixed encoding when a remote source is used * Added encoding normalization for record length (#664) This is a port and tweak from the normalization found in the L2 support branch. Now the normalization applies for both local and remote source files. There is still work left to validate how this behaves when copying to partitioned datasets, and see if the normalization needs to handle directories as well. * Refactored normalization for USS files * Updated line endings normalization in PDSEs * Updated create_temp_with_lf_endings * Added tests for line endings normalization * Added changelog fragment * Add copy lib member test case * Corrected changelog fragment * Add conflict resolution * Update 641-copy-loadlib-member.yml * zos_copy forward port a test case for symbols on a volume (#739) * Add a test case for symbols using an existing volume on our test system * Add changelog fragment * Update 739-zos_copy-volume-symbol-test.yml Update description to use symbols overs special chars. * Update test case with added comments --------- * bring in updates to zos_gather_facts RETURN docstring (#744) * Solve problem 619 about set mode on dest file (#746) * Solve problem 619 about set mode on dest file * Add fragment of 746 PR --------- * Merge release tasks for v1.5.0 into release branch (#752) * Updated metadata * Updated copyright information * Updated copyright in README * Generated changelog * Added previous versions to changelog * Changed a minor_change fragment to bugfix * Updated release notes * Update dependency finder to allow for all tests minus skip tests * Update release notes and changelog with full date formats --------- * remove changelog fragments no longer needed (#754) --------- Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: Ivan Moreno <iamorenosoto@gmail.com> Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> Co-authored-by: Rich Parker <richp405@gmail.com> Co-authored-by: ketankelkar <ktnklkr@gmail.com> Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> * update galaxy and meta/ files Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update copyright years Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update rst doc files Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add 1.6.0-beta.1 release summary fragment Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * generate changelog Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add release notes Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add additional stylizing Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * Delete fragements once changelog is generated Signed-off-by: ddimatos <dimatos@gmail.com> * update stylizing around *none* in zos_volume_init docs Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: Ivan Moreno <iamorenosoto@gmail.com> Co-authored-by: Oscar Fernando Flores Garcia <fernandofloresdev@gmail.com> Co-authored-by: Demetri <dimatos@gmail.com> Co-authored-by: Rich Parker <richp405@gmail.com> Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> Co-authored-by: trevor-glassey <Trevor.Glassey@ibm.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@mbp-de-andre-2.gdl.mex.ibm.com> * Update ibm_zos_core_meta.yml --------- Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: Ivan Moreno <iamorenosoto@gmail.com> Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> Co-authored-by: Rich Parker <richp405@gmail.com> Co-authored-by: ketankelkar <ktnklkr@gmail.com> Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> Co-authored-by: trevor-glassey <Trevor.Glassey@ibm.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@mbp-de-andre-2.gdl.mex.ibm.com> --- CHANGELOG.rst | 45 +++ changelogs/.plugin-cache.yaml | 9 +- changelogs/changelog.yaml | 85 +++++- docs/source/modules/zos_blockinfile.rst | 14 +- docs/source/modules/zos_data_set.rst | 39 +++ docs/source/modules/zos_job_query.rst | 2 +- docs/source/modules/zos_lineinfile.rst | 19 ++ docs/source/modules/zos_tso_command.rst | 25 ++ docs/source/modules/zos_volume_init.rst | 257 ++++++++++++++++++ docs/source/release_notes.rst | 50 +++- galaxy.yml | 4 +- meta/ibm_zos_core_meta.yml | 2 +- plugins/action/zos_fetch.py | 2 +- plugins/module_utils/system.py | 2 +- plugins/modules/zos_volume_init.py | 6 +- .../functional/modules/test_zos_copy_func.py | 2 - .../modules/test_zos_data_set_func.py | 2 +- .../functional/modules/test_zos_find_func.py | 2 +- .../modules/test_zos_job_query_func.py | 2 +- .../modules/test_zos_lineinfile_func.py | 2 +- 20 files changed, 543 insertions(+), 28 deletions(-) create mode 100644 docs/source/modules/zos_volume_init.rst diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 98cab36f3..c19a39bbc 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -5,6 +5,51 @@ ibm.ibm_zos_core Release Notes .. contents:: Topics +v1.6.0-beta.1 +============= + +Release Summary +--------------- + +Release Date: '2023-04-26' +This changelog describes all changes made to the modules and plugins included +in this collection. The release date is the date the changelog is created. +For additional details such as required dependencies and availability review +the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__ + +Major Changes +------------- + +- zos_volume_init - Introduces new module to handle volume (or minidisk) initialization. (https://github.com/ansible-collections/ibm_zos_core/pull/654) + +Minor Changes +------------- + +- Updated the text converter import from "from ansible.module_utils._text" to "from ansible.module_utils.common.text.converters" to remove warning".. warn Use ansible.module_utils.common.text.converters instead.". (https://github.com/ansible-collections/ibm_zos_core/pull/602) +- module_utils - job.py utility did not support positional wiled card placement, this enhancement uses `fnmatch` logic to support wild cards. +- zos_copy - Fixed a bug where the module would change the mode for a directory when copying into it the contents of another. (https://github.com/ansible-collections/ibm_zos_core/pull/723) +- zos_copy - was enhanced to keep track of modified members in a destination dataset, restoring them to their previous state in case of a failure. (https://github.com/ansible-collections/ibm_zos_core/pull/551) +- zos_data_set - add force parameter to enable member delete while pdse is in use (https://github.com/ansible-collections/ibm_zos_core/pull/718). +- zos_job_query - ansible module does not support positional wild card placement for `job_name1 or `job_id`. This enhancement allows embedded wildcards throughout the `job_name` and `job_id`. (https://github.com/ansible-collections/ibm_zos_core/pull/721) +- zos_lineinfile - would access data sets with exclusive access so no other task can read the data, this enhancement allows for a data set to be opened with a disposition set to share so that other tasks can access the data when option `force` is set to `true`. (https://github.com/ansible-collections/ibm_zos_core/pull/731) +- zos_tso_command - was enhanced to accept `max_rc` as an option. This option allows a non-zero return code to succeed as a valid return code. (https://github.com/ansible-collections/ibm_zos_core/pull/666) + +Bugfixes +-------- + +- Fixed wrong error message when a USS source is not found, aligning with a similar error message from zos_blockinfile "{src} does not exist". +- zos_blockinfile - was unable to use double quotes which prevented some use cases and did not display an approriate message. The fix now allows for double quotes to be used with the module. (https://github.com/ansible-collections/ibm_zos_core/pull/680) +- zos_copy - Fixes a bug where files not encoded in IBM-1047 would trigger an error while computing the record length for a new destination dataset. Issue 664. (https://github.com/ansible-collections/ibm_zos_core/pull/743) +- zos_copy - Fixes a bug where the code for fixing an issue with newlines in files (issue 599) would use the wrong encoding for normalization. Issue 678. (https://github.com/ansible-collections/ibm_zos_core/pull/743) +- zos_encode - fixes a bug where converted files were not tagged afterwards with the new code set. (https://github.com/ansible-collections/ibm_zos_core/pull/534) +- zos_find - fixes a bug where find result values stopped being returned after first value in a list was 'not found'. (https://github.com/ansible-collections/ibm_zos_core/pull/668) +- zos_lineinfile - Fixed a bug where a Python f-string was used and thus removed to ensure support for Python 2.7 on the controller. (https://github.com/ansible-collections/ibm_zos_core/pull/659) + +New Modules +----------- + +- ibm.ibm_zos_core.zos_volume_init - Initialize volumes or minidisks. + v1.5.0 ====== diff --git a/changelogs/.plugin-cache.yaml b/changelogs/.plugin-cache.yaml index e2cdc5634..2c3c67c65 100644 --- a/changelogs/.plugin-cache.yaml +++ b/changelogs/.plugin-cache.yaml @@ -6,6 +6,7 @@ plugins: callback: {} cliconf: {} connection: {} + filter: {} httpapi: {} inventory: {} lookup: {} @@ -105,8 +106,14 @@ plugins: name: zos_tso_command namespace: '' version_added: 1.1.0 + zos_volume_init: + description: Initialize volumes or minidisks. + name: zos_volume_init + namespace: '' + version_added: 1.6.0 netconf: {} shell: {} strategy: {} + test: {} vars: {} -version: 1.5.0 +version: 1.6.0-beta.1 diff --git a/changelogs/changelog.yaml b/changelogs/changelog.yaml index 0e5580863..51bba3c4f 100644 --- a/changelogs/changelog.yaml +++ b/changelogs/changelog.yaml @@ -684,10 +684,10 @@ releases: behaviors and reduces the possibility to encounter a permissions issue. (https://github.com/ansible-collections/ibm_zos_core/issues/389). - zos_job_submit - was updated to include an additional error code condition JCLERR. (https://github.com/ansible-collections/ibm_zos_core/pull/312) - - zos_lineinfile - updates the module with a new option named tmp_hlq. This allows - for a user to specify the data set high level qualifier (HLQ) used in any - temporary data set created by the module. Often, the defaults are not permitted - on systems, this provides a way to override the defaults. (https://github.com/ansible-collections/ibm_zos_core/pull/341). + - zos_lineinfile - updates the module with a new option named tmp_hlq. This + allows for a user to specify the data set high level qualifier (HLQ) used + in any temporary data set created by the module. Often, the defaults are not + permitted on systems, this provides a way to override the defaults. (https://github.com/ansible-collections/ibm_zos_core/pull/341). - zos_mount - updates the module with a new option named tmp_hlq. This allows for a user to specify the data set high level qualifier (HLQ) used in any temporary data set created by the module. Often, the defaults are not permitted @@ -760,3 +760,80 @@ releases: name: zos_gather_facts namespace: '' release_date: '2022-11-02' + 1.6.0-beta.1: + changes: + bugfixes: + - Fixed wrong error message when a USS source is not found, aligning with a + similar error message from zos_blockinfile "{src} does not exist". + - zos_blockinfile - was unable to use double quotes which prevented some use + cases and did not display an approriate message. The fix now allows for double + quotes to be used with the module. (https://github.com/ansible-collections/ibm_zos_core/pull/680) + - zos_copy - Fixes a bug where files not encoded in IBM-1047 would trigger an + error while computing the record length for a new destination dataset. Issue + 664. (https://github.com/ansible-collections/ibm_zos_core/pull/743) + - zos_copy - Fixes a bug where the code for fixing an issue with newlines in + files (issue 599) would use the wrong encoding for normalization. Issue 678. + (https://github.com/ansible-collections/ibm_zos_core/pull/743) + - zos_encode - fixes a bug where converted files were not tagged afterwards + with the new code set. (https://github.com/ansible-collections/ibm_zos_core/pull/534) + - zos_find - fixes a bug where find result values stopped being returned after + first value in a list was 'not found'. (https://github.com/ansible-collections/ibm_zos_core/pull/668) + - zos_lineinfile - Fixed a bug where a Python f-string was used and thus removed + to ensure support for Python 2.7 on the controller. (https://github.com/ansible-collections/ibm_zos_core/pull/659) + major_changes: + - zos_volume_init - Introduces new module to handle volume (or minidisk) initialization. + (https://github.com/ansible-collections/ibm_zos_core/pull/654) + minor_changes: + - Updated the text converter import from "from ansible.module_utils._text" to + "from ansible.module_utils.common.text.converters" to remove warning".. warn + Use ansible.module_utils.common.text.converters instead.". (https://github.com/ansible-collections/ibm_zos_core/pull/602) + - module_utils - job.py utility did not support positional wiled card placement, + this enhancement uses `fnmatch` logic to support wild cards. + - zos_copy - Fixed a bug where the module would change the mode for a directory + when copying into it the contents of another. (https://github.com/ansible-collections/ibm_zos_core/pull/723) + - zos_copy - was enhanced to keep track of modified members in a destination + dataset, restoring them to their previous state in case of a failure. (https://github.com/ansible-collections/ibm_zos_core/pull/551) + - zos_data_set - add force parameter to enable member delete while pdse is in + use (https://github.com/ansible-collections/ibm_zos_core/pull/718). + - zos_job_query - ansible module does not support positional wild card placement + for `job_name1 or `job_id`. This enhancement allows embedded wildcards throughout + the `job_name` and `job_id`. (https://github.com/ansible-collections/ibm_zos_core/pull/721) + - zos_lineinfile - would access data sets with exclusive access so no other + task can read the data, this enhancement allows for a data set to be opened + with a disposition set to share so that other tasks can access the data when + option `force` is set to `true`. (https://github.com/ansible-collections/ibm_zos_core/pull/731) + - zos_tso_command - was enhanced to accept `max_rc` as an option. This option + allows a non-zero return code to succeed as a valid return code. (https://github.com/ansible-collections/ibm_zos_core/pull/666) + release_summary: 'Release Date: ''2023-04-26'' + + This changelog describes all changes made to the modules and plugins included + + in this collection. The release date is the date the changelog is created. + + For additional details such as required dependencies and availability review + + the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__' + fragments: + - 309-replace-text-zos-encode.yml + - 323-zos-job-query-handle-multiple-wildcards.yml + - 358-zos-data-set-support-disposition-shr.yml + - 408-restore-members-on-failure.yml + - 417-can-quotes-in-content-can-be-supported.yml + - 574-zos_find_stoppedonnotfound.yml + - 584-zos_lineinfile-error-message.yml + - 602-text-converter-import.yml + - 619-Mode-set-for-files-is-applied-to-destination-directory.yml + - 654-new-module-zos_volume_init.yml + - 659-zos-lineinfile-f-string.yml + - 666-zos_tso_command_maxrc.yml + - 727-zos-blockinfile-examples.yml + - 731-zos_linefile-disposition_share.yaml + - 734-copy-loadlib-member-test-case.yml + - 740-zos_copy-volume-symbol-test.yml + - 743-zos_copy-encoding-bugs.yml + - v1.6.0-beta.1_summary.yml + modules: + - description: Initialize volumes or minidisks. + name: zos_volume_init + namespace: '' + release_date: '2023-04-26' diff --git a/docs/source/modules/zos_blockinfile.rst b/docs/source/modules/zos_blockinfile.rst index 5608a0ebb..3633620ad 100644 --- a/docs/source/modules/zos_blockinfile.rst +++ b/docs/source/modules/zos_blockinfile.rst @@ -195,13 +195,11 @@ Examples block: | MOUNT FILESYSTEM('SOME.DATA.SET') TYPE(ZFS) MODE(READ) MOUNTPOINT('/tmp/src/somedirectory') - - name: Remove a library as well as surrounding markers zos_blockinfile: state: absent src: SYS1.PARMLIB(PROG00) marker: "/* {mark} ANSIBLE MANAGED BLOCK FOR SOME.DATA.SET */" - - name: Add ZOAU path to PATH in /etc/profile zos_blockinfile: src: /etc/profile @@ -210,7 +208,6 @@ Examples ZOAU=/path/to/zoau_dir/bin export ZOAU PATH=$ZOAU:$PATH - - name: Insert/Update HTML surrounded by custom markers after <body> line zos_blockinfile: path: /var/www/html/index.html @@ -219,13 +216,11 @@ Examples block: | <h1>Welcome to {{ ansible_hostname }}</h1> <p>Last updated on {{ ansible_date_time.iso8601 }}</p> - - name: Remove HTML as well as surrounding markers zos_blockinfile: path: /var/www/html/index.html state: absent marker: "<!-- {mark} ANSIBLE MANAGED BLOCK -->" - - name: Add mappings to /etc/hosts zos_blockinfile: path: /etc/hosts @@ -236,7 +231,6 @@ Examples - { name: host1, ip: 10.10.1.10 } - { name: host2, ip: 10.10.1.11 } - { name: host3, ip: 10.10.1.12 } - - name: Add a code block to a member using a predefined indentation. zos_blockinfile: path: SYS1.PARMLIB(BPXPRM00) @@ -246,6 +240,14 @@ Examples LIB('{{ DB2RUN }}.RUNLIB.LOAD') indentation: 16 + - name: Update a script with commands containing quotes. + zos_blockinfile: + src: "/u/scripts/script.sh" + insertafter: "EOF" + block: | + cat "//'{{ DS_NAME }}'" + cat "//'{{ DS_NAME_2 }}'" + - name: Set facts for the following two tasks. set_fact: HLQ: 'ANSIBLE' diff --git a/docs/source/modules/zos_data_set.rst b/docs/source/modules/zos_data_set.rst index 65f1cc75b..046b8a2f5 100644 --- a/docs/source/modules/zos_data_set.rst +++ b/docs/source/modules/zos_data_set.rst @@ -47,6 +47,9 @@ state If *state=absent* and the data set does exist on the managed node, remove the data set, module completes successfully with *changed=True*. + If *state=absent* and *type=MEMBER* and *force=True*, the data set will be opened with *DISP=SHR* such that the entire data set can be accessed by other processes while the specified member is deleted. + + If *state=absent* and *volumes* is provided, and the data set is not found in the catalog, the module attempts to perform catalog using supplied *name* and *volumes*. If the attempt to catalog the data set catalog is successful, then the data set is removed. Module completes successfully with *changed=True*. @@ -268,6 +271,19 @@ tmp_hlq | **type**: str +force + Specifies that the data set can be shared with others during a member delete operation which results in the data set you are updating to be simultaneously updated by others. + + This is helpful when a data set is being used in a long running process such as a started task and you are wanting to delete a member. + + The *force=True* option enables sharing of data sets through the disposition *DISP=SHR*. + + The *force=True* only applies to data set members when *state=absent* and *type=MEMBER*. + + | **required**: False + | **type**: bool + + batch Batch can be used to perform operations on multiple data sets in a single module call. @@ -296,6 +312,9 @@ batch If *state=absent* and the data set does exist on the managed node, remove the data set, module completes successfully with *changed=True*. + If *state=absent* and *type=MEMBER* and *force=True*, the data set will be opened with *DISP=SHR* such that the entire data set can be accessed by other processes while the specified member is deleted. + + If *state=absent* and *volumes* is provided, and the data set is not found in the catalog, the module attempts to perform catalog using supplied *name* and *volumes*. If the attempt to catalog the data set catalog is successful, then the data set is removed. Module completes successfully with *changed=True*. @@ -508,6 +527,19 @@ batch | **type**: bool + force + Specifies that the data set can be shared with others during a member delete operation which results in the data set you are updating to be simultaneously updated by others. + + This is helpful when a data set is being used in a long running process such as a started task and you are wanting to delete a member. + + The *force=True* option enables sharing of data sets through the disposition *DISP=SHR*. + + The *force=True* only applies to data set members when *state=absent* and *type=MEMBER*. + + | **required**: False + | **type**: bool + + @@ -599,6 +631,13 @@ Examples state: absent type: MEMBER + - name: Remove a member from an existing PDS/E by opening with disposition DISP=SHR + zos_data_set: + name: someds.name.here(mydata) + state: absent + type: MEMBER + force: yes + - name: Create multiple partitioned data sets and add one or more members to each zos_data_set: batch: diff --git a/docs/source/modules/zos_job_query.rst b/docs/source/modules/zos_job_query.rst index d33ca6744..d34098617 100644 --- a/docs/source/modules/zos_job_query.rst +++ b/docs/source/modules/zos_job_query.rst @@ -56,7 +56,7 @@ owner job_id The job id that has been assigned to the job. - A job id begins must begin with `STC`, `JOB`, `TSU` and are followed by up to 5 digits. + A job id must begin with `STC`, `JOB`, `TSU` and are followed by up to 5 digits. When a job id is greater than 99,999, the job id format will begin with `S`, `J`, `T` and are followed by 7 digits. diff --git a/docs/source/modules/zos_lineinfile.rst b/docs/source/modules/zos_lineinfile.rst index bc56cf7b5..89ebcc805 100644 --- a/docs/source/modules/zos_lineinfile.rst +++ b/docs/source/modules/zos_lineinfile.rst @@ -185,6 +185,17 @@ encoding | **default**: IBM-1047 +force + Specifies that the data set can be shared with others during an update which results in the data set you are updating to be simultaneously updated by others. + + This is helpful when a data set is being used in a long running process such as a started task and you are wanting to update or read. + + The ``force`` option enables sharing of data sets through the disposition *DISP=SHR*. + + | **required**: False + | **type**: bool + + Examples @@ -226,6 +237,14 @@ Examples line: '\1APPUser\3' backrefs: yes + - name: Add a line to a member while a task is in execution + zos_lineinfile: + src: SOME.PARTITIONED.DATA.SET(DATA) + insertafter: EOF + line: 'Should be a working test now' + force: True + + diff --git a/docs/source/modules/zos_tso_command.rst b/docs/source/modules/zos_tso_command.rst index da86cf18d..d11cc8a98 100644 --- a/docs/source/modules/zos_tso_command.rst +++ b/docs/source/modules/zos_tso_command.rst @@ -31,10 +31,21 @@ commands Accepts a single string or list of strings as input. + If a list of strings is provided, processing will stop at the first failure, based on rc. + | **required**: True | **type**: raw +max_rc + Specifies the maximum return code allowed for a TSO command. + + If more than one TSO command is submitted, the *max_rc* applies to all TSO commands. + + | **required**: False + | **type**: int + + Examples @@ -54,6 +65,12 @@ Examples commands: - LU TESTUSER + - name: Execute TSO command to list dataset data (allow 4 for no dataset listed or cert found) + zos_tso_command: + commands: + - LISTDSD DATASET('HLQ.DATA.SET') ALL GENERIC + max_rc: 4 + @@ -87,6 +104,14 @@ output | **returned**: always | **type**: int + max_rc + Specifies the maximum return code allowed for a TSO command. + + If more than one TSO command is submitted, the *max_rc* applies to all TSO commands. + + | **returned**: always + | **type**: int + content The response resulting from the execution of the TSO command. diff --git a/docs/source/modules/zos_volume_init.rst b/docs/source/modules/zos_volume_init.rst new file mode 100644 index 000000000..195435924 --- /dev/null +++ b/docs/source/modules/zos_volume_init.rst @@ -0,0 +1,257 @@ + +:github_url: https://github.com/ansible-collections/ibm_zos_core/blob/dev/plugins/modules/zos_volume_init.py + +.. _zos_volume_init_module: + + +zos_volume_init -- Initialize volumes or minidisks. +=================================================== + + + +.. contents:: + :local: + :depth: 1 + + +Synopsis +-------- +- Initialize a volume or minidisk on z/OS. +- *zos_volume_init* will create the volume label and entry into the volume table of contents (VTOC). +- Volumes are used for storing data and executable programs. +- A minidisk is a portion of a disk that is linked to your virtual machine. +- A VTOC lists the data sets that reside on a volume, their location, size, and other attributes. +- *zos_volume_init* uses the ICKDSF command INIT to initialize a volume. In some cases the command could be protected by facility class `STGADMIN.ICK.INIT`. Protection occurs when the class is active, and the class profile is defined. Ensure the user executing the Ansible task is permitted to execute ICKDSF command INIT, otherwise, any user can use the command. +- ICKDSF is an Authorized Program Facility (APF) program on z/OS, *zos_volume_init* will run in authorized mode but if the program ICKDSF is not APF authorized, the task will end. +- Note that defaults set on target z/OS systems may override ICKDSF parameters. +- If is recommended that data on the volume is backed up as the *zos_volume_init* module will not perform any backups. You can use the `zos_backup_restore <./zos_backup_restore.html>`_ module to backup a volume. + + + + + +Parameters +---------- + + +address + *address* is a 3 or 4 digit hexadecimal number that specifies the address of the volume or minidisk. + + *address* can be the number assigned to the device (device number) when it is installed or the virtual address. + + | **required**: True + | **type**: str + + +verify_volid + Verify that the volume serial matches what is on the existing volume or minidisk. + + *verify_volid* must be 1 to 6 alphanumeric characters or ``*NONE*``. + + To verify that a volume serial number does not exist, use *verify_volid=*NONE**. + + If *verify_volid* is specified and the volume serial number does not match that found on the volume or minidisk, initialization does not complete. + + If *verify_volid=*NONE** is specified and a volume serial is found on the volume or minidisk, initialization does not complete. + + Note, this option is **not** a boolean, leave it blank to skip the verification. + + | **required**: False + | **type**: str + + +verify_offline + Verify that the device is not online to any other systems, initialization does not complete. + + | **required**: False + | **type**: bool + | **default**: True + + +volid + The volume serial number used to initialize a volume or minidisk. + + Expects 1-6 alphanumeric, national ($,#,@) or special characters. + + A *volid* with less than 6 characters will be padded with spaces. + + A *volid* can also be referred to as volser or volume serial number. + + When *volid* is not specified for a previously initialized volume or minidisk, the volume serial number will remain unchanged. + + | **required**: False + | **type**: str + + +vtoc_size + The number of tracks to initialize the volume table of contents (VTOC) with. + + The VTOC will be placed in cylinder 0 head 1. + + If no tracks are specified it will default to the number of tracks in a cylinder minus 1. Tracks in a cylinder vary based on direct-access storage device (DASD) models, for 3390 a cylinder is 15 tracks. + + | **required**: False + | **type**: int + + +index + Create a volume table of contents (VTOC) index. + + The VTOC index enhances the performance of VTOC access. + + When set to *false*, no index will be created. + + | **required**: False + | **type**: bool + | **default**: True + + +sms_managed + Specifies that the volume be managed by Storage Management System (SMS). + + If *sms_managed* is *true* then *index* must also be *true*. + + | **required**: False + | **type**: bool + | **default**: True + + +verify_volume_empty + Verify that no data sets other than the volume table of contents (VTOC) index or the VSAM Volume Data Set(VVDS) exist on the target volume. + + | **required**: False + | **type**: bool + | **default**: True + + +tmp_hlq + Override the default high level qualifier (HLQ) for temporary and backup datasets. + + The default HLQ is the Ansible user used to execute the module and if that is not available, then the value ``TMPHLQ`` is used. + + | **required**: False + | **type**: str + + + + +Examples +-------- + +.. code-block:: yaml+jinja + + + - name: Initialize target volume with all default options. Target volume address is '1234', set volume name to 'DEMO01'. + Target volume is checked to ensure it is offline and contains no data sets. Volume is SMS managed, has an index + and VTOC size defined by the system. + zos_volume_init: + address: "1234" + volid: "DEMO01" + + - name: Initialize target volume with all default options and additionally check the existing volid + matches the given value 'DEMO02' before re-initializing the volume and renaming it to 'DEMO01'. + zos_volume_init: + address: "1234" + volid: "DEMO01" + verify_volid: "DEMO02" + + - name: Initialize non-SMS managed target volume with all the default options. + zos_volume_init: + address: "1234" + volid: "DEMO01" + sms_managed: no + + - name: Initialize non-SMS managed target volume with all the default options and + override the default high level qualifier (HLQ). + zos_volume_init: + address: 1234 + volid: DEMO01 + sms_managed: no + tmp_hlq: TESTUSR + + - name: Initialize a new SMS managed DASD volume with new volume serial 'e8d8' with 30 track VTOC, an index, as long as + the existing volume serial is 'ine8d8' and there are no pre-existing data sets on the target. The check to see + if volume is online before intialization is skipped. + zos_volume_init: + address: e8d8 + vtoc_size: 30 + index: yes + sms_managed: yes + volid: ine8d8 + verify_volid: ine8d8 + verify_volume_empty: yes + verify_offline: no + + - name: Initialize 3 new DASD volumes (0901, 0902, 0903) for use on a z/OS system as 'DEMO01', 'DEMO02', 'DEMO03' + using Ansible loops. + zos_volume_init: + address: "090{{ item }}" + volid: "DEMO0{{ item }}" + loop: "{{ range(1, 4, 1) }}" + + + + + + +See Also +-------- + +.. seealso:: + + - :ref:`zos_backup_restore_module` + + + + +Return Values +------------- + + +msg + Failure message returned by module. + + | **returned**: failure + | **type**: str + | **sample**: 'Index' cannot be False for SMS managed volumes. + +rc + Return code from ICKDSF init command. + + | **returned**: when ICKDSF program is run. + | **type**: dict + +content + Raw output from ICKDSF. + + | **returned**: when ICKDSF program is run. + | **type**: list + | **elements**: str + | **sample**: + + .. code-block:: json + + [ + "1ICKDSF - MVS/ESA DEVICE SUPPORT FACILITIES 17.0 TIME: 18:32:22 01/17/23 PAGE 1", + "0 ", + "0 INIT UNIT(0903) NOVERIFY NOVERIFYOFFLINE VOLID(KET678) -", + "0 NODS NOINDEX", + "-ICK00700I DEVICE INFORMATION FOR 0903 IS CURRENTLY AS FOLLOWS:", + "- PHYSICAL DEVICE = 3390", + "- STORAGE CONTROLLER = 2107", + "- STORAGE CONTROL DESCRIPTOR = E8", + "- DEVICE DESCRIPTOR = 0C", + "- ADDITIONAL DEVICE INFORMATION = 4A00003C", + "- TRKS/CYL = 15, # PRIMARY CYLS = 100", + "0ICK04000I DEVICE IS IN SIMPLEX STATE", + "0ICK00703I DEVICE IS OPERATED AS A MINIDISK", + " ICK00091I 0903 NED=002107.900.IBM.75.0000000BBA01", + "-ICK03091I EXISTING VOLUME SERIAL READ = KET987", + "-ICK03096I EXISTING VTOC IS LOCATED AT CCHH=X\u00270000 0001\u0027 AND IS 14 TRACKS.", + "0ICK01314I VTOC IS LOCATED AT CCHH=X\u00270000 0001\u0027 AND IS 14 TRACKS.", + "-ICK00001I FUNCTION COMPLETED, HIGHEST CONDITION CODE WAS 0", + "0 18:32:22 01/17/23", + "0 ", + "-ICK00002I ICKDSF PROCESSING COMPLETE. MAXIMUM CONDITION CODE WAS 0" + ] + diff --git a/docs/source/release_notes.rst b/docs/source/release_notes.rst index ab1e07e49..d897feef4 100644 --- a/docs/source/release_notes.rst +++ b/docs/source/release_notes.rst @@ -1,17 +1,63 @@ .. ........................................................................... -.. © Copyright IBM Corporation 2020, 2021 . +.. © Copyright IBM Corporation 2020, 2021, 2023 . .. ........................................................................... ======== Releases ======== -Version 1.5.0 +Version 1.6.0-beta.1 ==================== New Modules ----------- +- ``zos_volume_init`` - Can initialize volumes or minidisks on target z/OS systems which includes creating a volume label and an entry into the volume table of contents (VTOC). + +Minor Changes +------------- + +- ``zos_blockinfile`` - Adds an enhancement to allow double quotes within a block. +- ``zos_data_set`` - Adds a new option named *force* to enable deletion of a data member in a PDSE that is simultaneously in use by others. +- ``zos_job_query`` - Enables embedded positional wild card placement throughout *job_name* and *job_id* parameters. +- ``zos_lineinfile`` - Adds a new option named *force* to enable modification of a data member in a data set that is simultaneously in use by others. +- ``zos_tso_command`` - Adds a new option named *max_rc* to enable non-zero return codes lower than the specified maximum return as succeeded. + +Bugfixes +-------- + +- ``zos_copy`` + - Fixes a bug where files not encoded in IBM-1047 would trigger an error while computing the record length for a new destination dataset. + - Fixes a bug where the module would change the mode for a directory when copying in the contents of another directory. + - Fixes a bug where the incorrect encoding would be used during normalization, particularly when processing newlines in files. +- ``zos_encode`` - Fixes a bug where converted files were not tagged with the new code set afterwards. +- ``zos_find`` - Fixes a bug where the module would stop searching and exit after the first value in a list was not found. +- ``zos_lineinfile`` + - Removes use of Python f-string to ensure support for Python 2.7 on the controller. + - Fixes a bug where an incorect error message would be raised when a USS source was not found. + +Availability +------------ + +* `Automation Hub`_ +* `Galaxy`_ +* `GitHub`_ + +Reference +--------- + +* Supported by `z/OS V2R3`_ or later +* Supported by the `z/OS® shell`_ +* Supported by `IBM Open Enterprise SDK for Python`_ `3.9`_ - `3.11`_ +* Supported by IBM `Z Open Automation Utilities 1.2.2`_ (or later) but prior to version 1.3. + + +Version 1.5.0 +============= + +New Modules +----------- + - ``zos_gather_facts`` - can retrieve variables from target z/OS systems that are then available to playbooks through the ansible_facts dictionary and managed using filters. Major Changes diff --git a/galaxy.yml b/galaxy.yml index 8aaf403db..cca9297d3 100644 --- a/galaxy.yml +++ b/galaxy.yml @@ -6,7 +6,7 @@ namespace: ibm name: ibm_zos_core # The collection version -version: 1.5.0 +version: 1.6.0-beta.1 # Collection README file readme: README.md @@ -17,7 +17,7 @@ authors: - Rich Parker <richp@ibm.com> - Ketan Kelkar <ketan.kelkar@ibm.com> - Ivan Moreno <ivan.moreno.soto@ibm.com> - - Oscar Fernando Flores Garcia<fernando.flores@ibm.com> + - Oscar Fernando Flores Garcia <fernando.flores@ibm.com> - Jenny Huang <jennyhuang@ibm.com> - Marcel Guitierrez <andre.marcel.gutierrez@ibm.com> diff --git a/meta/ibm_zos_core_meta.yml b/meta/ibm_zos_core_meta.yml index c2aab577a..484ad69fd 100644 --- a/meta/ibm_zos_core_meta.yml +++ b/meta/ibm_zos_core_meta.yml @@ -1,5 +1,5 @@ name: ibm_zos_core -version: "1.5.0" +version: "1.6.0-beta.1" managed_requirements: - name: "IBM Open Enterprise SDK for Python" diff --git a/plugins/action/zos_fetch.py b/plugins/action/zos_fetch.py index dd2172fc8..67bd83981 100644 --- a/plugins/action/zos_fetch.py +++ b/plugins/action/zos_fetch.py @@ -1,4 +1,4 @@ -# Copyright (c) IBM Corporation 2019, 2020, 2021, 2022 +# Copyright (c) IBM Corporation 2019-2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/plugins/module_utils/system.py b/plugins/module_utils/system.py index 90b9d1013..5be6d1944 100644 --- a/plugins/module_utils/system.py +++ b/plugins/module_utils/system.py @@ -1,4 +1,4 @@ -# Copyright (c) IBM Corporation 2020 +# Copyright (c) IBM Corporation 2020, 2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/plugins/modules/zos_volume_init.py b/plugins/modules/zos_volume_init.py index 03854a80f..6dbc9f97e 100644 --- a/plugins/modules/zos_volume_init.py +++ b/plugins/modules/zos_volume_init.py @@ -63,12 +63,12 @@ verify_volid: description: - Verify that the volume serial matches what is on the existing volume or minidisk. - - I(verify_volid) must be 1 to 6 alphanumeric characters or "*NONE*". + - I(verify_volid) must be 1 to 6 alphanumeric characters or C(*NONE*). - To verify that a volume serial number does not exist, use - I(verify_volid="*NONE*"). + I(verify_volid=*NONE*). - If I(verify_volid) is specified and the volume serial number does not match that found on the volume or minidisk, initialization does not complete. - - If I(verify_volid="*NONE*") is specified and a volume serial is found on + - If I(verify_volid=*NONE*) is specified and a volume serial is found on the volume or minidisk, initialization does not complete. - Note, this option is B(not) a boolean, leave it blank to skip the verification. required: false diff --git a/tests/functional/modules/test_zos_copy_func.py b/tests/functional/modules/test_zos_copy_func.py index c5f660a6c..5a575d87c 100644 --- a/tests/functional/modules/test_zos_copy_func.py +++ b/tests/functional/modules/test_zos_copy_func.py @@ -1916,8 +1916,6 @@ def test_copy_pds_loadlib_member_to_pds_loadlib_member(ansible_zos_module,): ) dest_name = "{0}({1})".format(dest, member) src_name = "{0}({1})".format(src, member) - - # both src and dest need to be a loadlib rc = link_loadlib_from_cobol(hosts, dest_name, cobol_pds) assert rc == 0 diff --git a/tests/functional/modules/test_zos_data_set_func.py b/tests/functional/modules/test_zos_data_set_func.py index 37bdcb682..118fdcc18 100644 --- a/tests/functional/modules/test_zos_data_set_func.py +++ b/tests/functional/modules/test_zos_data_set_func.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019, 2020 +# Copyright (c) IBM Corporation 2019, 2020, 2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/tests/functional/modules/test_zos_find_func.py b/tests/functional/modules/test_zos_find_func.py index 04dfb7368..fb1a47179 100644 --- a/tests/functional/modules/test_zos_find_func.py +++ b/tests/functional/modules/test_zos_find_func.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2020 +# Copyright (c) IBM Corporation 2020, 2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/tests/functional/modules/test_zos_job_query_func.py b/tests/functional/modules/test_zos_job_query_func.py index 0231cc874..7128f12a7 100644 --- a/tests/functional/modules/test_zos_job_query_func.py +++ b/tests/functional/modules/test_zos_job_query_func.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019, 2020 +# Copyright (c) IBM Corporation 2019, 2020, 2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/tests/functional/modules/test_zos_lineinfile_func.py b/tests/functional/modules/test_zos_lineinfile_func.py index 7b77c155d..85f4184af 100644 --- a/tests/functional/modules/test_zos_lineinfile_func.py +++ b/tests/functional/modules/test_zos_lineinfile_func.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2020, 2022 +# Copyright (c) IBM Corporation 2020, 2022, 2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at From f4dc3f725a360b5bc59723d15e3461565c46fad3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Thu, 4 May 2023 17:51:00 -0600 Subject: [PATCH 105/495] Bugfix/619/mode set for files applied test case (#757) * Add test case for copy dest file * Add comments * Add test for folders * Adjust spaces * Changes for ensure consistency for all tests * Changes of name and clean creations --------- Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> --- .../functional/modules/test_zos_copy_func.py | 55 +++++++++++++++++++ 1 file changed, 55 insertions(+) diff --git a/tests/functional/modules/test_zos_copy_func.py b/tests/functional/modules/test_zos_copy_func.py index 5a575d87c..97ec099dc 100644 --- a/tests/functional/modules/test_zos_copy_func.py +++ b/tests/functional/modules/test_zos_copy_func.py @@ -1023,6 +1023,60 @@ def test_copy_non_existent_file_fails(ansible_zos_module, is_remote): assert "does not exist" in result.get("msg") +@pytest.mark.uss +@pytest.mark.parametrize("src", [ + dict(src="/etc/profile", is_remote=False), + dict(src="/etc/profile", is_remote=True),]) +def test_ensure_copy_file_does_not_change_permission_on_dest(ansible_zos_module, src): + hosts = ansible_zos_module + dest_path = "/tmp/test/" + try: + hosts.all.file(path=dest_path, state="directory", mode="750") + permissions_before = hosts.all.shell(cmd="ls -la {0}".format(dest_path)) + hosts.all.zos_copy(content=src["src"], dest=dest_path) + permissions = hosts.all.shell(cmd="ls -la {0}".format(dest_path)) + + for before in permissions_before.contacted.values(): + permissions_be_copy = before.get("stdout") + + for after in permissions.contacted.values(): + permissions_af_copy = after.get("stdout") + + permissions_be_copy = permissions_be_copy.splitlines()[1].split()[0] + permissions_af_copy = permissions_af_copy.splitlines()[1].split()[0] + + assert permissions_be_copy == permissions_af_copy + finally: + hosts.all.file(path=dest_path, state="absent") + + +@pytest.mark.uss +@pytest.mark.parametrize("src", [ + dict(src="/etc/", is_remote=False), + dict(src="/etc/", is_remote=True),]) +def test_ensure_copy_directory_does_not_change_permission_on_dest(ansible_zos_module, src): + hosts = ansible_zos_module + dest_path = "/tmp/test/" + try: + hosts.all.file(path=dest_path, state="directory", mode="750") + permissions_before = hosts.all.shell(cmd="ls -la {0}".format(dest_path)) + hosts.all.zos_copy(content=src["src"], dest=dest_path) + permissions = hosts.all.shell(cmd="ls -la {0}".format(dest_path)) + + for before in permissions_before.contacted.values(): + permissions_be_copy = before.get("stdout") + + for after in permissions.contacted.values(): + permissions_af_copy = after.get("stdout") + + permissions_be_copy = permissions_be_copy.splitlines()[1].split()[0] + permissions_af_copy = permissions_af_copy.splitlines()[1].split()[0] + + assert permissions_be_copy == permissions_af_copy + finally: + hosts.all.file(path=dest_path, state="absent") + + @pytest.mark.uss @pytest.mark.seq def test_copy_file_record_length_to_sequential_data_set(ansible_zos_module): @@ -2727,3 +2781,4 @@ def test_copy_uss_file_to_existing_sequential_data_set_twice_with_tmphlq_option( assert v_cp.get("rc") == 0 finally: hosts.all.zos_data_set(name=dest, state="absent") + \ No newline at end of file From d54ac79b1461786fb0c3811d0a181952f332c586 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Thu, 4 May 2023 21:55:30 -0600 Subject: [PATCH 106/495] Bugfix/381/failed when the job name was null or not found (#747) * Add the verbose for failed when job name was null or not found * Adjust message for what we can get * Whitespaces move * Add code from dev * Ecode utility as is in dev * Year for copyright * Case for having both the jod_id and job_name * Ecode utils functions not in my branch * Add final line ecode * Add fragment * Delete encode function two times, adjust job message and change the fragment * Change variable name for one more descriptive * Restore encode and change one word * Encode * bugfixes * Set up as dev * Better fragment --------- Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> --- ...7-failed_when_the_job_name_was_null_or_not_found.yaml | 5 +++++ plugins/module_utils/job.py | 9 +++++++-- tests/functional/modules/test_zos_job_output_func.py | 5 +++-- 3 files changed, 15 insertions(+), 4 deletions(-) create mode 100644 changelogs/fragments/747-failed_when_the_job_name_was_null_or_not_found.yaml diff --git a/changelogs/fragments/747-failed_when_the_job_name_was_null_or_not_found.yaml b/changelogs/fragments/747-failed_when_the_job_name_was_null_or_not_found.yaml new file mode 100644 index 000000000..0830b8fe3 --- /dev/null +++ b/changelogs/fragments/747-failed_when_the_job_name_was_null_or_not_found.yaml @@ -0,0 +1,5 @@ +bugfixes: +- zos_job_output - Error message did not specify the job not found. + Fix now specifies the job_id or job_name being searched to ensure more + information is given back to the user. + (https://github.com/ansible-collections/ibm_zos_core/pull/747) diff --git a/plugins/module_utils/job.py b/plugins/module_utils/job.py index 9af6260f4..94909aba4 100644 --- a/plugins/module_utils/job.py +++ b/plugins/module_utils/job.py @@ -86,7 +86,12 @@ def job_output(job_id=None, owner=None, job_name=None, dd_name=None, duration=0, def _job_not_found(job_id, owner, job_name, dd_name): # Note that the text in the msg_txt is used in test cases thus sensitive to change jobs = [] - + if job_id != '*' and job_name != '*': + job_not_found_msg = "{0} with the job_id {1}".format(job_name.upper(), job_id.upper()) + elif job_id != '*': + job_not_found_msg = "with the job_id {0}".format(job_id.upper()) + else: + job_not_found_msg = "with the name {0}".format(job_name.upper()) job = {} job["job_id"] = job_id @@ -99,7 +104,7 @@ def _job_not_found(job_id, owner, job_name, dd_name): job["ret_code"]["msg"] = None job["ret_code"]["code"] = None job["ret_code"]["msg_code"] = None - job["ret_code"]["msg_txt"] = "The job could not be found." + job["ret_code"]["msg_txt"] = "The job {0} could not be found.".format(job_not_found_msg) job["class"] = "" job["content_type"] = "" diff --git a/tests/functional/modules/test_zos_job_output_func.py b/tests/functional/modules/test_zos_job_output_func.py index 8cd55dd0f..4b3990ab5 100644 --- a/tests/functional/modules/test_zos_job_output_func.py +++ b/tests/functional/modules/test_zos_job_output_func.py @@ -31,7 +31,8 @@ """ TEMP_PATH = "/tmp/jcl" -JOB_NOT_FOUND_MSG_TXT="The job could not be found." +JOB_NOT_FOUND_MSG_TXT="The job with the name * could not be found." +JOB_NOT_FOUND_MSG_TXT_ID="The job with the job_id INVALID could not be found." def test_zos_job_output_no_job_id(ansible_zos_module): hosts = ansible_zos_module @@ -46,7 +47,7 @@ def test_zos_job_output_invalid_job_id(ansible_zos_module): results = hosts.all.zos_job_output(job_id="INVALID") for result in results.contacted.values(): assert result.get("changed") is False - assert result.get("jobs")[0].get("ret_code").get("msg_txt") == JOB_NOT_FOUND_MSG_TXT + assert result.get("jobs")[0].get("ret_code").get("msg_txt") == JOB_NOT_FOUND_MSG_TXT_ID def test_zos_job_output_no_job_name(ansible_zos_module): From 9d886cb9ffa6f13cde3c1598256928d0b49de858 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Fri, 5 May 2023 16:43:09 -0600 Subject: [PATCH 107/495] Bugfix/660/zos operator reported failure caused by unrelated error response messages (#762) * Add options * Add transparency on the response and test cases * Solve spaces * Add validation to append * Fragment Added * Adjust fail_json on non_zero response * Identation mistakes solved * Solve last idenation problem --- ...re-caused-by-unrelated-error-response.yaml | 4 ++ plugins/modules/zos_operator.py | 57 +++++++------------ .../modules/test_zos_operator_func.py | 14 ++++- 3 files changed, 36 insertions(+), 39 deletions(-) create mode 100644 changelogs/fragments/762-zos-operator-reported-failure-caused-by-unrelated-error-response.yaml diff --git a/changelogs/fragments/762-zos-operator-reported-failure-caused-by-unrelated-error-response.yaml b/changelogs/fragments/762-zos-operator-reported-failure-caused-by-unrelated-error-response.yaml new file mode 100644 index 000000000..d7aae1c14 --- /dev/null +++ b/changelogs/fragments/762-zos-operator-reported-failure-caused-by-unrelated-error-response.yaml @@ -0,0 +1,4 @@ +bugfixes: + - zos_operator - Reported a failure caused by unrelated error response. + Fix now gives a transparent response of the operator to avoid false negatives. + (https://github.com/ansible-collections/ibm_zos_core/pull/762). \ No newline at end of file diff --git a/plugins/modules/zos_operator.py b/plugins/modules/zos_operator.py index a0f66c302..5bd04ba50 100644 --- a/plugins/modules/zos_operator.py +++ b/plugins/modules/zos_operator.py @@ -217,26 +217,18 @@ def run_module(): # short_str is local, and just to check for problem response values. # ssctr is a limit variable so we don't pull more than 5 lines of each. result["content"] = [] - short_str = [] - ssctr = 0 - tstr = rc_message.get("stdout") - if tstr is not None: - for s in tstr.split("\n"): - if s: - result["content"].append(s) - if ssctr < 5: - short_str.append(s) - ssctr += 1 - ssctr = 0 - tstr = rc_message.get("stderr") - if tstr is not None: - for s in tstr.split("\n"): - if s: - result["content"].append(s) - if ssctr < 5: - short_str.append(s) - ssctr += 1 - + stdout = rc_message.get("stdout") + if stdout is not None: + for out in stdout.split("\n"): + if out: + result["content"].append(out) + stderr = rc_message.get("stderr") + error = [] + if stderr is not None: + for err in stderr.split("\n"): + if err: + error.append(err) + result["content"].append(err) # call is returned from run_operator_command, specifying what was run. # result["cmd"] = new_params.get("cmd") result["cmd"] = rc_message.get("call") @@ -247,27 +239,18 @@ def run_module(): # but it could still be a bad/invalid command. # As long as there are more than 2 lines, it's worth looking through. if int(result["rc"]) == 0: - if len(short_str) > 2: + if len(result["content"]) > 2: result["changed"] = True - for linetocheck in short_str: - if "invalid" in linetocheck.lower(): - result["exception"] = "Invalid detected: " + linetocheck - result["changed"] = False - module.fail_json(msg=result["exception"], **result) - elif "error" in linetocheck.lower(): - result["exception"] = "Error detected: " + linetocheck - result["changed"] = False - module.fail_json(msg=result["exception"], **result) - elif "unidentifiable" in linetocheck.lower(): - result["exception"] = "Unidentifiable detected: " + linetocheck - result["changed"] = False - module.fail_json(msg=result["exception"], **result) else: module.fail_json(msg="Expected response to be more than 2 lines.", **result) else: - module.fail_json( - msg="Non-zero response received: " + str(result["rc"]), **result - ) + module.fail_json(msg=("A non-zero return code was received : {0}. Review the response for more details.").format(result["rc"]), + cmd=result["cmd"], + elapsed_time=result["elapsed"], + wait_time_s=result["wait_time_s"], + stderr=str(error) if error is not None else result["content"], + stderr_lines=str(error).splitlines() if error is not None else result["content"], + changed=result["changed"],) except Error as e: module.fail_json(msg=repr(e), **result) except Exception as e: diff --git a/tests/functional/modules/test_zos_operator_func.py b/tests/functional/modules/test_zos_operator_func.py index 146896e74..dbdb4f065 100644 --- a/tests/functional/modules/test_zos_operator_func.py +++ b/tests/functional/modules/test_zos_operator_func.py @@ -49,8 +49,18 @@ def test_zos_operator_invalid_command(ansible_zos_module): hosts = ansible_zos_module results = hosts.all.zos_operator(cmd="invalid,command", verbose=False) for result in results.contacted.values(): - assert result.get("changed") is False - assert result.get("exception") is not None + assert result.get("changed") is True + + +def test_zos_operator_invalid_command_to_ensure_transparency(ansible_zos_module): + hosts = ansible_zos_module + results = hosts.all.zos_operator(cmd="DUMP COMM=('ERROR DUMP')", verbose=False) + for result in results.contacted.values(): + assert result.get("changed") is True + transparency = False + if any('DUMP COMMAND' in str for str in result.get("content")): + transparency = True + assert transparency def test_zos_operator_positive_path(ansible_zos_module): From 3095388f87d22c340be5386f17fb71aa7d8ed614 Mon Sep 17 00:00:00 2001 From: Demetri <dimatos@gmail.com> Date: Thu, 25 May 2023 17:08:17 -0700 Subject: [PATCH 108/495] Replace prior tooling (makefile) that aidded the development workflow with a new 'ac' command. (#766) * Make file mount script helper Signed-off-by: ddimatos <dimatos@gmail.com> * Comments to mount script Signed-off-by: ddimatos <dimatos@gmail.com> * Staged updated scripts for makefile usage Signed-off-by: ddimatos <dimatos@gmail.com> * Update mount scripts for use with makefile Signed-off-by: ddimatos <dimatos@gmail.com> * updates to correct mounts and add function to mounts-datasets Signed-off-by: ddimatos <dimatos@gmail.com> * adding completed new ac command files for development Signed-off-by: ddimatos <dimatos@gmail.com> * update ignore to more specific with venv Signed-off-by: ddimatos <dimatos@gmail.com> * Correcting ignore to allow for venv.sh Signed-off-by: ddimatos <dimatos@gmail.com> * moved logic that checks for info.env to venv.sh Signed-off-by: ddimatos <dimatos@gmail.com> * Adding changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Fix a path issue when calling venv.sh Signed-off-by: ddimatos <dimatos@gmail.com> * Fixes issue not being able to run all tests, fixes issue with content being written to collections folder Signed-off-by: ddimatos <dimatos@gmail.com> * Support zSH and update scp to fall back to legacy scp protocal Signed-off-by: ddimatos <dimatos@gmail.com> * Update ac with password usage Signed-off-by: ddimatos <dimatos@gmail.com> * Update ac with password usage Signed-off-by: ddimatos <dimatos@gmail.com> * Fix incorrect message and remove the cd's before and after ac-test Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> --- .gitignore | 145 +++- Makefile | 744 ---------------- ac | 797 ++++++++++++++++++ .../766-ac-command-replace-makefile.yml | 4 + galaxy.yml | 33 +- make.env.encrypt | 287 ------- scripts/hosts.env | 42 + scripts/info.env.axx | 15 + scripts/mount-shr.sh | 92 -- scripts/mounts.env | 75 ++ scripts/mounts.sh | 700 +++++++++++++-- scripts/profile-shr | 230 ----- scripts/profile.sh | 73 +- scripts/requirements-2.11.env | 35 + scripts/requirements-2.12.env | 32 + scripts/requirements-2.13.env | 32 + scripts/requirements-2.14.env | 32 + scripts/requirements-2.9.env | 35 + scripts/requirements-common.env | 133 +++ scripts/requirements-latest.env | 31 + scripts/venv.sh | 585 +++++++++++++ 21 files changed, 2643 insertions(+), 1509 deletions(-) delete mode 100644 Makefile create mode 100755 ac create mode 100644 changelogs/fragments/766-ac-command-replace-makefile.yml delete mode 100644 make.env.encrypt create mode 100644 scripts/hosts.env create mode 100755 scripts/info.env.axx delete mode 100755 scripts/mount-shr.sh create mode 100644 scripts/mounts.env mode change 100644 => 100755 scripts/mounts.sh delete mode 100755 scripts/profile-shr create mode 100644 scripts/requirements-2.11.env create mode 100644 scripts/requirements-2.12.env create mode 100644 scripts/requirements-2.13.env create mode 100644 scripts/requirements-2.14.env create mode 100644 scripts/requirements-2.9.env create mode 100644 scripts/requirements-common.env create mode 100644 scripts/requirements-latest.env create mode 100755 scripts/venv.sh diff --git a/.gitignore b/.gitignore index 8a66463d2..9c4301951 100644 --- a/.gitignore +++ b/.gitignore @@ -21,6 +21,7 @@ *.rar *.tar *.zip +*.tar.gz ############################# # Output Folders # @@ -56,12 +57,16 @@ Thumbs.db *.bak *.swp -# Byte-compiled / optimized / DLL files +######################################### +# Byte-compiled / optimized / DLL files # +######################################### __pycache__/ *.py[cod] *$py.class -# Distribution / packaging +############################# +# Distribution / packaging # +############################# .Python build/ develop-eggs/ @@ -82,17 +87,24 @@ share/python-wheels/ *.egg MANIFEST +################################################################################ # PyInstaller # Usually these files are written by a python script from a template -# before PyInstaller builds the exe, so as to inject date/other infos into it. +# before PyInstaller builds the exe, so as to inject date/other infos +# into it. +################################################################################ *.manifest *.spec -# Installer logs +#################### +# Installer logs # +#################### pip-log.txt pip-delete-this-directory.txt -# Unit test / coverage reports +################################ +# Unit test / coverage reports # +################################ htmlcov/ .tox/ .nox/ @@ -106,86 +118,115 @@ coverage.xml .hypothesis/ .pytest_cache/ -# Translations +################## +# Translations # +################## *.mo *.pot -# Django stuff: +################### +# Django # +################### *.log local_settings.py db.sqlite3 db.sqlite3-journal -# Flask stuff: +################### +# Flask # +################### instance/ .webassets-cache -# Scrapy stuff: +################### +# Scrapy # +################### .scrapy -# Sphinx documentation +########################## +# Sphinx documentation # +########################## docs/_build/ -# PyBuilder +########################## +# PyBuilder # +########################## target/ -# Jupyter Notebook +########################## +# Jupyter Notebook # +########################## .ipynb_checkpoints -# IPython +########################## +# IPython # +########################## profile_default/ ipython_config.py -# pyenv +########################## +# pyenv # +########################## .python-version +################################################################################ # pipenv -# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. -# However, in case of collaboration, if having platform-specific dependencies or dependencies -# having no cross-platform support, pipenv may install dependencies that don't work, or not +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock +# in version control. However, in case of collaboration, if having +# platform-specific dependencies or dependencies having no cross-platform +# support, pipenv may install dependencies that don't work, or not # install all needed dependencies. +################################################################################ #Pipfile.lock -# PEP 582; used by e.g. github.com/David-OConnor/pyflow +#################################################################### +# PEP 582; used by e.g. github.com/David-OConnor/pyflow # +#################################################################### __pypackages__/ -# Celery stuff +############# +# Celery # +############# celerybeat-schedule celerybeat.pid -# SageMath parsed files +########################## +# SageMath parsed files # +########################## *.sage.py -# Environments -.env -.venv -env/ -venv/ -ENV/ -env.bak/ -venv.bak/ - -# Spyder project settings +############################ +# Spyder project settings # +############################ .spyderproject .spyproject -# Rope project settings +############################ +# Rope project settings # +############################ .ropeproject -# mkdocs documentation +############################ +# mkdocs documentation # +############################ /site -# mypy +############ +# mypy # +############ .mypy_cache/ .dmypy.json dmypy.json -# Pyre type checker +##################### +# Pyre type checker # +##################### .pyre/ - *.retry -# Visual Studio Code workspace configuration files +##################################################### +# Visual Studio Code workspace configuration files # +##################################################### .vscode/* .vscode/ !.vscode/tasks.json @@ -194,11 +235,31 @@ dmypy.json *.code-workspace .vscode/settings.json -# Development files -hosts +########################## +# Environments # +########################## +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +################################### +# Ansible z/OS Core Development # +################################### .ansible-test/ -.keep +.cache +.DS_Store +.python-version +.pytest_cache +info.env shell_exploits.txt -test_config.yml -make.env.encrypt -make.env \ No newline at end of file + +################################################################################ +# Debugging .ignore, if you want to know why a particular file is being ignored +# and by which rule, try `git check-ignore -v <file>` +# e.g. `git check-ignore -v venv/` +# .gitignore:244:venv/ venv/ +################################################################################ \ No newline at end of file diff --git a/Makefile b/Makefile deleted file mode 100644 index 4f1f6f58e..000000000 --- a/Makefile +++ /dev/null @@ -1,744 +0,0 @@ -# ============================================================================== -# Copyright (c) IBM Corporation 2022 -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# http://www.apache.org/licenses/LICENSE-2.0 -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Makefile is used to assist with development tasks such as running tests cases -# or setting up a python virtual environment. -# This makefile relies on shell script `make.env` which should not be renamed. -# The contents of the `make.env` are encrypted to adhere to coporate operational -# requiements. If you need to edit the `make.env` be sure to use this makefile -# to access the script: -# (1) make decrypt <enter password at prompt> -# (2) vi/edit script the contents as needed -# (3) make encrypt <enter same password used to decrypt> -# While of some of the targets work without a venv, it's higly recommended you -# instruct make to create you a venv where it will perform operations: -# (1) make vsetup -# Optionally you can override the makefile's env var VENV to instruct it to -# create a `venv` based on your requiements.txt, you can do this by: -# (1) export VENV=venv-2.11 -# (2) make vsetup req=requirements-ac-2.11.12.txt -# Now all make targets will use the venv you assigned to the exported variable -# and also a directory `venv-2.11` will be created and populated with files used -# by make. You may consider pyvenv so that you can change your python versions -# to meet the needs of the various ansible-core versions. -# ============================================================================== - -# ============================================================================== -# GLOBAL VARS -# ============================================================================== - -CURR_DIR := $(shell pwd) -WHO := $(shell whoami) -HOST_PYTHON = python3 -# VENV = venv -# VENV := $(shell echo $$VENV) -VENV := $(shell echo "$${VENV:-venv}") -VENV_BIN=$(VENV)/bin - -ZOS_PYTHON_DEFAULT=3.8 -ZOAU_DEFAULT=1.1.1 -# Test if docker is running -DOCKER_INFO := $(shell docker info> /dev/null 2>&1;echo $$?) - -# Unit test to skip -SKIP = tests/functional/modules/test_module_security.py -divider="====================================================================" - -.PHONY: help Makefile -# ============================================================================== -# Makefile -# ============================================================================== - -# ============================================================================== -# Run a bandit security scan on the plugin directory -# ============================================================================== -## Run a bandit security scan on the plugins directory, set the severity level. -## Options: -## level - choose from 'l', 'll', 'lll' -## - l all low, medium, high severity -## - ll all medium, high severity -## - lll all hight severity -## Example: -## $ make bandit sev=ll -## $ make bandit sev=l -bandit: - ifdef sev - @echo $(divider); - @echo "Running Bandit scan with sev=${sev}"; - @echo $(divider); - @. $(VENV_BIN)/activate && bandit -r plugins/* -${sev} - else - @echo "No bandit sev (severity) has been set." - endif - - -# ============================================================================== -# Build the current collection based on the git branch local to the computer. -# Currently, venv's only manage python packages, colleciton installation is managed -# with paths, if we wwanted to install it in the venv to not dirty the host, we -# could try building a similar command to pythons venv: -# ansible-galaxy -vv collection install --force -p venv/lib/python3.8/site-packages/ansible_collections -# ============================================================================== -## Build and installa collection of the current branch checked out -## Example: -## $ make build -build: - @echo $(divider) - @echo "Building Ansible collection based on local branch and installing." - @echo $(divider) - - @. $(VENV_BIN)/activate && rm -rf ibm-ibm_zos_core-*.tar.gz && \ - ansible-galaxy collection build && \ - ansible-galaxy collection install -f ibm-ibm_zos_core-* - - -## Build the changelog, this should be a release activity otherwise the generated -## files should not be checked in. -## Example: -## $ make buildChglog -buildChglog: - @. $(VENV_BIN)/activate && antsibull-changelog release - - -## Update the documentation for the collection after module doc changes have been -## made. This simply calls the make file in the docs directory, see the make file -## there for additional options. -## Example: -## $ make buildDoc -buildDoc: - @. $(VENV_BIN)/activate && make -C docs clean - @. $(VENV_BIN)/activate && make -C docs module-doc - @. $(VENV_BIN)/activate && make -C docs html - @. $(VENV_BIN)/activate && make -C docs view-html - - -# ============================================================================== -# Cleanup and teardown based on user selection -# ============================================================================== -## Cleanup and teardown the environment based on the level selected. -## Options: -## level - choose from 'min', 'all' -## - 'all' will remove the venv, restore any temporarily located files -## and ensure config is encrypted -## - 'min' will restore any temporarily located files -## and ensure config is encrypted -## Example: -## $ make clean level=all -## $ make clean level=min -clean: - ifdef level - ifeq ($(level),all) - @echo $(divider) - @echo "Complete teardown selected." - @echo $(divider) - - @echo $(divider) - @echo "Deleting python virtual environment 'venv'." - @echo $(divider) - @rm -rf $(VENV) - endif - - ifeq ($(level),min) - @echo $(divider); - @echo "Minimum teardown selected."; - @echo "Deleting files = [make.env, mount-shr.sh, profile-shr]."; - @echo $(divider); - @rm -rf $(VENV)/make.env - @rm -rf $(VENV)/mount-shr.sh - @rm -rf $(VENV)/profile-shr - endif - - @if test -e tests/functional/modules/test_module_security.txt; then \ - echo $(divider); \ - echo "Restoring 'test_module_security.py', previously removed to avoid execution."; \ - echo $(divider); \ - mv -f tests/functional/modules/test_module_security.txt tests/functional/modules/test_module_security.py; \ - fi - - # Unsure really need or even want to do this as part of cleanup - # @if test -e make.env; then \ - # echo $(divider); \ - # echo "Found uncrypted files, encrypting them."; \ - # echo $(divider); \ - # make encrypt; \ - # fi - else - @echo $(divider) - @echo "Default teardown, deleting $(VENV)" - @echo $(divider) - @rm -rf $(VENV) - endif - - -## Cleanup and remove geneated doc for the collection if its not going to be -## checked in -## Example: -## $ make cleanDoc -cleanDoc: - @. $(VENV_BIN)/activate && make -C docs clean - - -## Copy your ssh key to a `host` or the default which is your username. If you are -## copying a key to a production server, a second key will be copied used by the -## jenkins node, this minimizes the number of times you must copy a key. You must -## have set up a venv `venv` as that is where the environment script and configurations -## get written to manage this make file. It avoids continued decryption prompts to -## force users to set up the venv via `vsetup` -## Options: -## host - choose from a known host or don't set a value for the default operation -## which is to user your username to look up your default system -## Example: -## $ make copyKey host=ec33012a -## $ make copyKey -copyKey: - @echo $(divider) - @echo "Copying SSH keys to the managed node authorized_keys." - @echo $(divider) - - ifdef host - @${VENV}/./make.env --cert ${host} - else - @$(eval username := $(shell whoami)) - @${VENV}/./make.env --cert ${username} - endif - - -## Decrypt all scripts used with this Makefile using the user specified password -## Files include: ["mount-shr.sh", "profile-shr", "make.env"] -## If no password is provided, you will be prompted to enter a password for each -## file being decrypted. -## Example: -## $ make encrypt password= -## $ make decrypt -decrypt: - @# -------------------------------------------------------------------------- - @# Check configuration files exit - @# -------------------------------------------------------------------------- - #@if test ! -e scripts/mount-shr.sh.encrypt; then \ - # echo "File 'mount-shr.sh.encrypt' not found in scripts/mount-shr.sh.encrypt"; \ - # exit 1; \ - #fi - - #@if test ! -e scripts/profile-shr.encrypt; then \ - # echo "File 'scripts/profile-shr.encrypt' not found in scripts/profile-shr.encrypt"; \ - # exit 1; \ - #fi - - @if test ! -e make.env.encrypt; then \ - echo "File 'make.env.encrypt' not found in $(CURR_DIR)"; \ - exit 1; \ - fi - - @# ------------------------------------------------------------------------- - @# Decrypt configuration files - @# ------------------------------------------------------------------------- - ifdef password - #@echo "${password}" | openssl bf -d -a -in scripts/mount-shr.sh.encrypt -out scripts/mount-shr.sh -pass stdin - #@chmod 700 scripts/mount-shr.sh - - #@echo "${password}" | openssl bf -d -a -in scripts/profile-shr.encrypt -out scripts/profile-shr -pass stdin - #@chmod 700 scripts/profile-shr - - @echo "${password}" | openssl bf -d -a -in make.env.encrypt -out make.env -pass stdin - @chmod 700 make.env - else - #@openssl bf -d -a -in scripts/mount-shr.sh.encrypt -out scripts/mount-shr.sh - #@chmod 700 scripts/mount-shr.sh - - #@openssl bf -d -a -in scripts/profile-shr.encrypt -out scripts/profile-shr - #@chmod 700 scripts/profile-shr - - @openssl bf -d -a -in make.env.encrypt -out make.env - @chmod 700 make.env - endif - - -## Encrypt the configuration files with a `.encrypt` suffix for files -## [make.env, mount-shr.sh, profile-shr] with user specified password. -## If no password is provided, you will be prompted to enter a password for each -## file being encrypted. -## Example: -## $ make encrypt password= -## $ make encrypt -## Note: This is not a common operation, unless you tend to edit the configuration, avoid using this feature. -encrypt: - @# -------------------------------------------------------------------------- - @# Check to see if there is an unencrypted file(s) to encrypt, you would not - @# want to delete the encrypted version if the unecrypted is not present as - @# there would be no recovery process. Then check to see if there an - @# encrypted version of the file, if so delete it. - @# -------------------------------------------------------------------------- - @if [ -e make.env ] && [ -e make.env.encrypt ]; then \ - echo "Removing encrypted file 'make.env.encrypt' in $(CURR_DIR)."; \ - rm -rf make.env.encrypt; \ - fi - - # @if [ -e scripts/mount-shr.sh ] && [ -e scripts/mount-shr.sh.encrypt ]; then \ - # echo "Removing encrypted file 'scripts/mount-shr.sh.encrypt' in $(CURR_DIR)/scripts."; \ - # rm -rf scripts/mount-shr.sh.encrypt; \ - # fi - - # @if [ -e scripts/profile-shr ] && [ -e scripts/profile-shr.encrypt ]; then \ - # echo "Removing encrypted file 'scripts/profile-shr.encrypt' in $(CURR_DIR)/scripts."; \ - # rm -rf scripts/profile-shr.encrypt; \ - # fi - - @# -------------------------------------------------------------------------- - @# Encrypt the files since we have verified the uncrypted versions exist - @# Note: we should move make.env to scripts as well - @# -------------------------------------------------------------------------- - - ifdef password - - #ifneq ("$(wildcard scripts/mount-shr.sh)","") - # @echo "${password}" | openssl bf -a -in scripts/mount-shr.sh -out scripts/mount-shr.sh.encrypt -pass stdin - # # @openssl bf -a -in scripts/mount-shr.sh > scripts/mount-shr.sh.encrypt - # @rm -f scripts/mount-shr.sh - #endif - - #ifneq ("$(wildcard scripts/profile-shr)","") - # @echo "${password}" | openssl bf -a -in scripts/profile-shr -out scripts/profile-shr.encrypt -pass stdin - # # @openssl bf -a -in scripts/profile-shr > scripts/profile-shr.encrypt - # @rm -f scripts/profile-shr - #endif - - ifneq ("$(wildcard make.env)","") - @echo "${password}" | openssl bf -a -in make.env -out make.env.encrypt -pass stdin - # @openssl bf -a -in make.env > make.env.encrypt - @rm -f make.env - endif - - else - #ifneq ("$(wildcard scripts/mount-shr.sh)","") - # @openssl bf -a -in scripts/mount-shr.sh -out scripts/mount-shr.sh.encrypt - # # @openssl bf -a -in scripts/mount-shr.sh > scripts/mount-shr.sh.encrypt - # @rm -f scripts/mount-shr.sh - #endif - - #ifneq ("$(wildcard scripts/profile-shr)","") - # @openssl bf -a -in scripts/profile-shr -out scripts/profile-shr.encrypt - # # @openssl bf -a -in scripts/profile-shr > scripts/profile-shr.encrypt - # @rm -f scripts/profile-shr - #endif - - ifneq ("$(wildcard make.env)","") - @openssl bf -a -in make.env -out make.env.encrypt - # @openssl bf -a -in make.env > make.env.encrypt - @rm -f make.env - endif - endif - - -# ============================================================================== -# Self documenting code that when comments are created as expected, the help -# is auto generated. Supports multiline comments when comments are prefixed with -# 2 pound signs and a space, see examples in this makefile. -# ============================================================================== -## Help on how how to use this Makefile, options and examples. -help: - @awk '{ \ - if ($$0 ~ /^.PHONY: [a-zA-Z\-\_0-9]+$$/) { \ - helpCommand = substr($$0, index($$0, ":") + 2); \ - if (helpMessage) { \ - printf "\033[36m%-20s\033[0m %s\n", \ - helpCommand, helpMessage; \ - helpMessage = ""; \ - } \ - } else if ($$0 ~ /^[a-zA-Z\-\_0-9.]+:/) { \ - helpCommand = substr($$0, 0, index($$0, ":")); \ - if (helpMessage) { \ - printf "\033[36m%-10s\033[0m %s\n", \ - helpCommand, helpMessage; \ - helpMessage = ""; \ - } \ - } else if ($$0 ~ /^##/) { \ - if (helpMessage) { \ - helpMessage = helpMessage"\n "substr($$0, 3); \ - } else { \ - helpMessage = substr($$0, 3); \ - } \ - } else { \ - if (helpMessage) { \ - print "\n "helpMessage"\n" \ - } \ - helpMessage = ""; \ - } \ - }' \ - $(MAKEFILE_LIST) - - -# ============================================================================== -# Install an ibm_zos_core collection from galaxy (or how you have ansible.cfg configured) -# ============================================================================== -## Install a collection from galaxy and specify the version. -## Options: -## version - any GA and beta versions currently on Galaxy -## Example: -## $ make install 1.4.0-beta.1 -## $ make install -install: - ifdef version - @echo $(divider); - @echo "Installing 'ibm.ibm_zos_core' collection version=${version}."; - @echo $(divider); - @. $(VENV_BIN)/activate && ansible-galaxy collection install -fc ibm.ibm_zos_core:${version} - else - @echo $(divider); - @echo "Installing latest non-beta 'ibm.ibm_zos_core' collection."; - @echo $(divider); - @. $(VENV_BIN)/activate && ansible-galaxy collection install -fc ibm.ibm_zos_core - endif - - -## Copy your ssh key to a `host` or the default which is your username. Then -## copy the super share mount script and profile for the mounts, execute the -## mount script and exit, upon rmote ssh, `profile-shr` will be located -## at `/u/${user} where user is defined in the make.env `host_list`. You must -## have set up a venv `venv` as that is where the environment script and configurations -## get written to manage this make file. It avoids continued decryption prompts to -## force users to set up the venv via `vsetup` -## Options: -## host - choose from a known host or don't set a value for the default operation -## which is to user your username to look up your default system -## Example: -## $ make mountProfile host=ec33012a -## $ make mountProfile -mountProfile: - ifdef host - @make copyKey host=${host} - @echo $(divider) - @echo "Copying mount script to managed node and executing." - @echo "Copying profile-shr to managed node." - @echo $(divider) - @${VENV}/./make.env --files "${host}" "${VENV}/mount-shr.sh" "${VENV}/profile-shr" - else - @make copyKey - @echo $(divider) - @echo "Copying mount script to managed node and executing." - @echo "Copying profile-shr to managed node." - @echo $(divider) - @$(eval username := $(shell whoami)) - @${VENV}/./make.env --files ${username} $(VENV)/mount-shr.sh $(VENV)/profile-shr - endif - - -# ============================================================================== -# Print the configuration used to connect to the managed node for functional tests -# ============================================================================== -## Print the contents of the config file (venv/config.yml) which is used to -## connect to the managed z/OS node to run functional tests on. This will only -## be available if yo have set up a venv using `make vsetup` because a password -## is required to generate the config and is considered sensitive content per -## corporate policy. -## Example: -## $ make printConfig -printConfig: - @if test -e $(VENV)/config.yml; then \ - cat $(VENV)/config.yml; \ - else \ - echo "No configuration was found, consider creating a venv using `make vsetup` first."; \ - fi - - -# ============================================================================== -# Print the make.env contents -# ============================================================================== -## Print the contents of the venv/make.env, this only works if -## you have set up a venv using `make vsetup` because a password is required to -## decrypt and a decrypted copy will be placed in the venv. -## Example: -## $ make printEnv -printEnv: - @if test -e $(VENV)/make.env; then \ - cat $(VENV)/make.env; \ - else \ - echo "No configuration was found, consider creating a venv using `make vsetup` first."; \ - fi - - -# ============================================================================== -# Print the make.env contents -# ============================================================================== -## Print the contents of the venv/mount-shr.sh, this only works if -## you have set up a venv using `make vsetup` because a password is required to -## decrypt and a decrypted copy will be placed in the venv. -## Example: -## $ make printMount -printMount: - @if test -e $(VENV)/mount-shr.sh; then \ - cat $(VENV)/mount-shr.sh; \ - else \ - echo "No configuration was found, consider creating a venv using `make vsetup` first."; \ - fi - - -# ============================================================================== -# Print the make.env contents -# ============================================================================== -## Print the contents of the venv/profile-shr, this only works if -## you have set up a venv using `make vsetup` because a password is required to -## decrypt and a decrypted copy will be placed in the venv. -## Example: -## $ make printEnv -printProfile: - @if test -e $(VENV)/profile-shr; then \ - cat $(VENV)/profile-shr; \ - else \ - echo "No configuration was found, consider creating a venv using `make vsetup` first."; \ - fi - - -## Display the z/OS managed nodes available and configured. This will show which -## systems you can use in the host argument for `make test host<....>` -## Example: -## $ make printTargets -printTargets: - @${VENV}/./make.env --targets - - -# ============================================================================== -# Run the sanity test using docker given python version else default to venv -# ============================================================================== -## Run sanity tests either in the virtual environment (venv) or docker if there is a running docker engine -## Options: -## version - choose from '2.6', '2.7', '3.5', '3.6', '3.7', '3.8', '3.9', no selection will run all available python versions -## Example: -## $ make sanity version=3.8 -## $ make sanity -sanity: - ifeq ($(DOCKER_INFO),0) - ifdef version - @. $(VENV_BIN)/activate && cd ~/.ansible/collections/ansible_collections/ibm/ibm_zos_core && \ - ansible-test sanity --python $(version) --requirements --docker default && \ - cd $(CURR_DIR); - else - @. $(VENV_BIN)/activate && cd ~/.ansible/collections/ansible_collections/ibm/ibm_zos_core && \ - ansible-test sanity --requirements --docker default && \ - cd $(CURR_DIR); - endif - else - ifdef version - @. $(VENV_BIN)/activate && cd ~/.ansible/collections/ansible_collections/ibm/ibm_zos_core && \ - ansible-test sanity --python $(version) --requirements && \ - cd $(CURR_DIR); - else - @. $(VENV_BIN)/activate && cd ~/.ansible/collections/ansible_collections/ibm/ibm_zos_core && \ - ansible-test sanity --requirements && \ - cd $(CURR_DIR); - endif - endif - - -# ============================================================================== -# Run functional tests: -# ============================================================================== -## Run collection functional tests inside the python virtual environment (venv) -## Options: -## host - z/OS managed node to run test cases, no selection will default to -## a system registerd to your user name, see make.env -## python - IBM enterprise python version, choices are 3.8, 3.9, 3.10, 3.11 -## no selection defauls to 3.8 -## zoau - Z Open Automation Utilites to use with the collection, choices are 1.0.3, 1.1.1, 1.2.0, 1.2.1 -## no selection defaults to 1.1.1 -## name - the absoulte path to a particluar test case to run, no selection -## will default to all test cases running. -## debug - enable debug for pytest (-s), any value will result in true enabling -## debug, default is to not define a value so that it evaluates to false -## Example: -## $ make test (runs all tests using default users system and dependencies) -## $ make test name=tests/functional/modules/test_zos_copy_func.py debug=true (run specific test and debug) -## $ make test host=ec33012a python=3.9 zoau=1.1.1 name=tests/functional/modules/test_zos_copy_func.py debug=true -test: - @# -------------------------------------------------------------------------- - @# Expecting the zOS host, python version and zoau version to use with - @# generating a configuration for us with zTest helper. - @# -------------------------------------------------------------------------- - - ifdef host - ifdef python - ifdef zoau - @echo $$(${VENV}/./make.env --config ${host} ${python} ${zoau})>$(VENV)/config.yml - else - @echo "Option 'zoau=<version>' was not set, eg zoau=1.1.1" - @exit 1 - endif - else - @echo "No python version option was set, eg python=3.8" - @exit 1 - endif - else - @# -------------------------------------------------------------------------- - @# When a quick test with no options and defaults are acceptable, a - @# lookup using the users usersname is mapped to a default of known - @# zos targets registered in make.env - @# -------------------------------------------------------------------------- - - $(eval username := $(shell whoami)) - echo $$(${VENV}/./make.env --config ${username} ${ZOS_PYTHON_DEFAULT} ${ZOAU_DEFAULT})>$(VENV)/config.yml - - endif - - @# -------------------------------------------------------------------------- - @# Check configuration was created in venv/config.yml, else error and exit - @# -------------------------------------------------------------------------- - - @if test ! -e $(VENV)/config.yml; then \ - echo "No configuration created in $(VENV)/config.yml "; \ - exit 1; \ - fi - - @# -------------------------------------------------------------------------- - @# Check if name='a specific test' and if debug was set, else run all tests - @# -------------------------------------------------------------------------- - - ifdef name - ifdef debug - @. $(VENV_BIN)/activate && $(VENV_BIN)/pytest --ignore=${SKIP} $(name) --host-pattern=all --zinventory=$(VENV)/config.yml -s - else - @. $(VENV_BIN)/activate && $(VENV_BIN)/pytest --ignore=${SKIP} $(name) --host-pattern=all --zinventory=$(VENV)/config.yml - endif - else - ifdef debug - @. $(VENV_BIN)/activate && $(VENV_BIN)/pytest --ignore=${SKIP} --host-pattern=all --zinventory=$(VENV)/config.yml -s - else - @. $(VENV_BIN)/activate && $(VENV_BIN)/pytest --ignore=${SKIP} --host-pattern=all --zinventory=$(VENV)/config.yml - endif - endif - - -# ============================================================================== -# Check the version of the ibm_zos_core collection installed -# ============================================================================== -## Get the version of the ibm_zos_core collection installed -## Example: -## $ make version -version: - @echo $(divider) - @echo "Obtaining Ansible collection version installed on this controller." - @echo $(divider) - - @cat ~/.ansible/collections/ansible_collections/ibm/ibm_zos_core/MANIFEST.json \ - |grep version|cut -d ':' -f 2 | sed "s/,*$\//g" | tr -d '"'; - -# ============================================================================== -# Setup the python virtual environment, the default name is 'venv'. You can -# override the default name by exporting the variable VENV: -# (1) export VENV=venv-2.11 -# (2) make vsetup req=requirements-ac-2.11.12.txt -# @test -d $(VENV) || $(HOST_PYTHON) -m venv $(VENV) -# ============================================================================== -## Create a python virtual environment (venv) based on the hosts python3 -## Options: -## req - your requirements.txt else a default one will be used -## Example: -## $ make vsetup -## $ make vsetup req=path/to/requirements.txt -## -## Override the default virtual environment name 'venv' by exporting var VENV -## $ export VENV=venv-2.11 -## $ make vsetup req=requirements-ac-2.11.12.txt -vsetup: - - @# ------------------------------------------------------------------------- - @# Create the virtual environment directory if it does not exist - @# ------------------------------------------------------------------------- - @if test ! -d $(VENV); then \ - echo $(divider); \ - echo "Creating python virtual environment directory $(VENV)."; \ - echo $(divider); \ - $(HOST_PYTHON) -m venv $(VENV); \ - else \ - echo "Virtual environment already exists, no changes made."; \ - fi - - @# ------------------------------------------------------------------------- - @# Check if files exist in venv, if they do we should not decrypt/replace - @# them as they could have edits and risk losing them. - @# ------------------------------------------------------------------------- - - @if test ! -e $(VENV)/make.env && \ - test ! -e $(VENV)/mount-shr.sh && \ - test ! -e $(VENV)/profile-shr; then \ - echo $(divider); \ - echo "Decrypting files into $(VENV)."; \ - echo $(divider); \ - make decrypt; \ - mv make.env $(VENV)/; \ - mv scripts/mount-shr.sh $(VENV)/; \ - mv scripts/profile-shr $(VENV)/; \ - else \ - echo "Files $(VENV)/[make.env, mount-shr.sh,profile-shr] already exist, no changes made."; \ - fi - - ifdef req - @if test -f ${req}; then \ - echo $(divider); \ - echo "Installing user provided python requirements into $(VENV)."; \ - echo $(divider); \ - cp ${req} ${VENV}/requirements.txt; \ - . $(VENV_BIN)/activate && pip install -r $(VENV)/requirements.txt; \ - fi - else - @if test ! -e $(VENV)/requirements.txt; then \ - echo $(divider); \ - echo "Installing default python requirements into $(VENV)."; \ - echo $(divider); \ - echo $$(${VENV}/./make.env --req)>${VENV}/requirements.txt; \ - . $(VENV_BIN)/activate && pip install -r $(VENV)/requirements.txt; \ - else \ - echo "Requirements file $(VENV)/requirements.txt already exists, no new packages installed."; \ - fi - endif - - -# ============================================================================== -# You don't need to activate your venv with this Makefile, but should you want -# to, you can with vstart. -# ============================================================================== -## Start the venv if you plan to work in a python virtual environment -## Example: -## $ make vstart -vstart: - @echo $(divider) - @echo "Activating python virtual environment 'venv', use 'vstop' to deactivate." - @echo $(divider) - @. $(VENV_BIN)/activate; exec /bin/sh -i - - -# ============================================================================== -# Deactivate your venv -# ============================================================================== -## Deactivate (stop) the venv -## Example: -## $ make vstop -vstop: - @echo $(divider) - @echo "Deactivate python virtual environment 'venv'." - @echo $(divider) - @. deactivate - - -# ============================================================================== -# Unused but maybe can repurpose code snippets -# ============================================================================== -# Build the command, this is not run initially -# CMD_CONFIG := $(shell $(VENV)/./make.env --config ${host} ${python} ${zoau}) -# Define the executible `GEN_CONFIG` and assign it to CONFIG -# GEN_CONFIG = $(eval CONFIG=$(CMD_CONFIG)) - -# ============================================================================== -# Makefile tip: -# ============================================================================== -# If you have formatting issues; try `cat -e -t -v Makefile`. -# ^I represent tabs and $'s represent end of the line. -# -# If you need to debug your makefile command, use `-nd`, eg `make -nd vstop` diff --git a/ac b/ac new file mode 100755 index 000000000..b01fa8bf8 --- /dev/null +++ b/ac @@ -0,0 +1,797 @@ +#!/bin/sh +# ============================================================================== +# Copyright (c) IBM Corporation 2023 +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== + +# ============================================================================== +# Global Vars +# ============================================================================== + +# Note: using the venv.sh script to find the latest venv puts the current 'ac' +# in the scripts directory because venv.sh performs a 'cd $(dirname $0)' and +# then other scripts can't be found in the managed venv corectly. Although this +# is probably a temporary solution, we now take the same code from venv.sh and +# use it here in 'ac' to find the latst managed venv. What should be +# done is to have some meta-data written out to venv/* that this command 'ac' +# can easily find, might be helpful to have some stats like dates created and +# so on. +# VENV=`scripts/./venv.sh --latest_venv` + +VENV_HOME_MANAGED=${PWD%/venv}/venv + +# Lest normalize the version from 3.10.2 to 3010002000 +# Do we we need that 4th octet? +normalize_version() { + echo "$@" | awk -F. '{ printf("%d%03d%03d%03d\n", $1,$2,$3,$4); }'; +} + +latest_venv(){ + dir_version_latest="0" + test_for_managed_venv=`ls -d "$VENV_HOME_MANAGED"/venv-[0-9].[0-9]* 2>/dev/null` + + if [ ! -z "$test_for_managed_venv" ]; then + for dir_version in `ls -d "$VENV_HOME_MANAGED"/venv-[0-9].[0-9]* | cut -d"-" -f2`; do + if [ $(normalize_version $dir_version) -ge $(normalize_version $dir_version_latest) ]; then + dir_version_latest=$dir_version + fi + done + echo "${VENV_HOME_MANAGED}"/"venv-"$dir_version_latest + fi +} + +VENV=`latest_venv` + +file="" +verbose=0 +DIV="-----------------------------------------------------------------------" +CURRENT_DIR=`pwd` +cd $CURRENT_DIR +# VENV_BIN should equate to same as $VIRTUAL_ENV after the venv activate +if [ ! -z "$VENV" ]; then + VENV_BIN=$VENV/bin + VENV_BASENAME=`basename $VENV` +fi + +CURR_DIR=`pwd` +RED=$'\e[1;31m' +GRN=$'\e[1;32m' +YEL=$'\e[1;33m' +BLU=$'\e[1;34m' +MAG=$'\e[1;35m' +CYN=$'\e[1;36m' +ENDC=$'\e[0m' +# 0 Docker is up, 1 docker is not up +DOCKER_INFO=`docker info> /dev/null 2>&1;echo $?` + +# ============================================================================== +# Arg parsing helpers +# ============================================================================== +terminate() { + printf '%s\n' "$1" >&2 + exit 1 +} + +message(){ + echo $DIV; + echo "$1"; + echo $DIV; +} + +ensure_managed_venv_exists(){ + if [ -z "$VENV" ]; then + echo "Option $1 requires that a managed virtual environment be configured. "\ + "Run $0 -venv-setup to create managed viritual environments. "\ + "For additional optons, use $0 --help." + exit 1 + fi +} + +# ------------------------------------------------------------------------------ +# Generate simple formated but incomplete help +# ------------------------------------------------------------------------------ +# usage_simple(){ +# script="$0" +# base_name_script=`basename "$script"` +# grep '^##' "$script" | sed -e 's/^##//' -e "s/_PROG_/$base_name_script/" 1>&2 +# } + +# ------------------------------------------------------------------------------ +# This method auto generates help based on the comments found in this script. +# ----------------+------------------------------------------------------------- +# Comment style | Description +# ----------------+------------------------------------------------------------- +# '#->command:' | `#->` followed by a keyword is the help command displayed +# ----------------+------------------------------------------------------------- +# '## ' | The first found pattern after a help command will be the +# | help command description. Subsequent such patterns will be +# | right justified and considered options or descriptions +# ----------------+------------------------------------------------------------- +# '# ' | This pattern is ignored and considered script comments +# ----------------+------------------------------------------------------------- +# ------------------------------------------------------------------------------ +#->help: +## Print help message (-h, -? produce short version, otherwise verbose) +## Usage: ac [-h, -?, --help] +## Example: +## $ ac --help +help(){ + if [ "$1" = "verbose" ]; then + awk '{\ + if (($0 ~ /^#->[a-zA-Z\-\_0-9.]+:/)) { \ + helpCommand = substr($0, 4, index($0, ":")); \ + helpMessage ="";\ + } else if ($0 ~ /^##/) { \ + if (helpMessage) { \ + helpMessage =" "substr($0, 3); \ + } else { \ + helpMessage = substr($0, 3); \ + } \ + if (helpCommand && helpMessage) {\ + printf "\033[36m%-16s\033[0m %s\n", helpCommand, helpMessage; \ + helpCommand =""; \ + commandContext=" Supports format: <option> <value> and <option>=<value>";\ + print commandContext;\ + } else {\ + print helpMessage + } + } + }' $0 + else + awk '{\ + if (($0 ~ /^#->[a-zA-Z\-\_0-9.]+:/)) { \ + helpCommand = substr($0, 4, index($0, ":")); \ + helpMessage ="";\ + } else if ($0 ~ /^##[[:space:]][[:space:]]*\$[[:space:]]ac/) { \ + helpMessage = substr($0, 3); \ + if (helpCommand && helpMessage) {\ + printf "\033[36m%-16s\033[0m %s\n", helpCommand, helpMessage; \ + helpCommand =""; \ + } else {\ + helpMessage=" "substr($0, 3); \ + print helpMessage + } + } + }' $0 + fi +} + +# The case stmt sees it this way: +# --foo abc ---> $1 = foo, $2 = abc +# --foo=abc ---> $1 = --foo=abc +option_processor(){ + + opt=$1 + arg=$2 + if [ "$arg" ]; then + echo $arg + elif [ "$opt" ]; then + # Split up to "=" and set the remainder + value=${opt#*=} + # If the value is not the same as the option ($1),then assign it . + if [ "$opt" != "$value" ]; then + echo $value + else + # Don't echo, will return from the function, send to error msg to stderr + ERROR_MSG="${RED}ERROR${ENDC}: option $option requires a non-empty argument." + printf '%s\n' "$ERROR_MSG" >&2 + echo "exit 1" + fi + fi +} + +option_sanitize(){ + option_value=$1 + $option_value 2> /dev/null +} + +# ============================================================================== +# Commands +# ============================================================================== + +# ------------------------------------------------------------------------------ +# Run a bandit security scan on the plugin directory +# ------------------------------------------------------------------------------ +#->ac-bandit: +## Run a bandit security scan on the plugins directory, set the severity level. +## Usage: ac [-s <level>, --bandit <level>] +## Usage: ac [-s <level>, --bandit <level>] +## <level> - choose from 'l', 'll', 'lll' +## - l all low, medium, high severity +## - ll all medium, high severity +## - lll all high severity +## Example: +## $ ac --ac-bandit --level ll +## $ ac --ac-bandit +ac_bandit(){ + option_level=$1 + if [ ! "$option_level" ]; then + option_level="ll" + fi + message "Running Bandit scan with level '$option_level'" + . $VENV_BIN/activate && python3 -m bandit -r plugins/* -"${option_level}" +} + +# ------------------------------------------------------------------------------ +# Build and install collection of the local GH branch. +# To not dirty the host, consider installing in the venv: +# ansible-galaxy -vv collection install --force -p venv/lib/python3.8/site-packages/ansible_collections +# ------------------------------------------------------------------------------ +#->ac-build: +## Build and install collection of the local GH branch. +## Usage: ac [-b, --ac-build] +## Example: +## $ ac --ac-build +ac_build(){ + gh_branch=`git branch |grep "*" | cut -d" " -f2` + message "Build and install collection of the local GH branch: '$gh_branch'." + . $VENV_BIN/activate && rm -rf ibm-ibm_zos_core-*.tar.gz && \ + $VENV_BIN/ansible-galaxy collection build && \ + $VENV_BIN/ansible-galaxy collection install -f ibm-ibm_zos_core-* +} + +# ------------------------------------------------------------------------------ +# Install an ibm_zos_core collection from galaxy (or how you have ansible.cfg configured) +# ------------------------------------------------------------------------------ +#->ac-install: +## Install collection 'ibm_zos_core' from a repository such as Galaxy. If no +## version is specified, latest GA level in repository will be installed. +## Usage: ac [--ac-install] [--version <version>] +## Options: +## version - The collection version +## Example: +## $ ac --ac-install --version 1.5.0-beta.1 +## $ ac --ac-install +ac_install(){ + option_version=$1 + + if [ "$option_version" ];then + message "Installing 'ibm.ibm_zos_core' collection version=${option_version}." + . $VENV_BIN/activate && $VENV_BIN/ansible-galaxy collection install -fc ibm.ibm_zos_core:${option_version} + else + message "Installing 'ibm.ibm_zos_core' lastet GA version." + . $VENV_BIN/activate && $VENV_BIN/ansible-galaxy collection install -fc ibm.ibm_zos_core + fi +} + +# ------------------------------------------------------------------------------ +# Run the sanity test using docker given python version else default to venv +# ------------------------------------------------------------------------------ +#->ac-sanity: +## Run ansible-test in docker if the docker engine is running, else run them in +## a managed virtual environment using the installed python version. +## Usage: ac [--ac-lint] [--version <version>] +## Options: +## <version> - Only applies to when docker is running. +## - No version selection will run all available python versions in docker. +## - choose from '2.6', '2.7', '3.5', '3.6', '3.7', '3.8', '3.9', .... +## Example: +## $ ac --ac-sanity +## $ ac --ac-sanity --version 3.10 +ac_sanity(){ + option_version=$1 + if [ "${DOCKER_INFO}" == "0" ]; then + if [ "${option_version}" ]; then + message "Running ansible-test with docker container and python version ${option_version}." + . $VENV_BIN/activate && cd ~/.ansible/collections/ansible_collections/ibm/ibm_zos_core && \ + ${VENV_BIN}/ansible-test sanity --python ${option_version} --requirements --docker default && \ + cd ${CURR_DIR}; + else + message "Running ansible-test with docker container and all python versions." + . $VENV_BIN/activate && cd ~/.ansible/collections/ansible_collections/ibm/ibm_zos_core && \ + ${VENV_BIN}/ansible-test sanity --requirements --docker default && \ + cd ${CURR_DIR}; + fi + else + if [ "${option_version}" ]; then + message "Docker engine is not running, version ${option_version} will be ignored." + fi + + . $VENV_BIN/activate && VENV_PY_VER=`python3 --version | cut -d" " -f2 | cut -d"." -f1,2` + message "Running ansible-test with managed python virtual environment: ${VENV}." + . $VENV_BIN/activate && cd ~/.ansible/collections/ansible_collections/ibm/ibm_zos_core && \ + ${VENV_BIN}/ansible-test sanity --python ${VENV_PY_VER} --requirements && \ + cd ${CURR_DIR}; + fi +} + +# ------------------------------------------------------------------------------ +# Run functional tests: +# ------------------------------------------------------------------------------ +#->ac-test: +## Run the functional tests inside the managed python virtual environment. +## Usage: ac [--ac-test] [--host <host>] [--python <python>] [--zoau <zoau>] [--file <file>] [--debug <boolean>] +## Options: +## host - z/OS managed node to run test cases, no selection defaults to +## a host registerd to your user id (`whoami`). +## python - IBM enterprise python version, choices are 3.8, 3.9, 3.10, 3.11, +## no selection defauls to 3.8. +## zoau - ZOAU to use in testing, choices are 1.0.3, 1.1.1, 1.2.0, 1.2.1, +## no selection defaults to 1.1.1 . +## file - the absoulte path to a test suite to run, no selection +## defaults to all tests running. +## debug - enable debug for pytest (-s), choices are true and false +## Example: +## $ ac --ac-test --host ec33012a --python 3.10 --zoau 1.2.2 --file tests/functional/modules/test_zos_operator_func.py --debug true +## $ ac --ac-test --file tests/functional/modules/test_zos_operator_func.py --debug true +## $ ac --ac-test +ac_test(){ + host=$1 + python=$2 + zoau=$3 + file=$4 + debug=$5 + skip=$CURR_DIR/tests/functional/modules/test_module_security.py + + # Create the config always overwriting existing + ${VENV}/./venv.sh --config ${host} ${python} ${zoau} ${VENV} + + # Check configuration was created in venv/config.yml, else error and exit + if test ! -e ${VENV}/config.yml; then + echo "No configuration was able to be created in ${VENV}/config.yml " + exit 1 + fi + + #cd ${VENV_BIN} + + if [ "$file" ]; then + if [ "$debug" ]; then + . ${VENV_BIN}/activate && ${VENV_BIN}/pytest --ignore="${skip}" $CURR_DIR/${file} --host-pattern=all --zinventory=${VENV}/config.yml -s + else + . ${VENV_BIN}/activate && ${VENV_BIN}/pytest --ignore="${skip}" $CURR_DIR/${file} --host-pattern=all --zinventory=${VENV}/config.yml + fi + else + for file in `ls tests/functional/modules/*.py`; do + # For some reason '--ignor'e not being honored so injecting a work around + if [ "$file" != "$skip" ]; then + if [ "$debug" ]; then + . ${VENV_BIN}/activate && ${VENV_BIN}/pytest --ignore="${skip}" $CURR_DIR/${file} --host-pattern=all --zinventory=${VENV}/config.yml -s + else + . ${VENV_BIN}/activate && ${VENV_BIN}/pytest --ignore="${skip}" $CURR_DIR/${file} --host-pattern=all --zinventory=${VENV}/config.yml + fi + fi + done + fi + + #cd ${CURR_DIR} +} + +# ------------------------------------------------------------------------------ +# Print the configuration used to connect to the managed node for functional tests +# ------------------------------------------------------------------------------ +#->ac-test-config: +## Disply the contents of configuration file used to run functional tests. +## Usage: ac [--ac-test-config] +## Example: +## $ ac --ac-test-config +ac_test_config(){ + if [ -f "${VENV}/config.yml" ]; then + message "Print test configuration used for functional testing." + cat ${VENV}/config.yml; + else + message "No configuration was found, run '--ac-test' to generate a configuration." + fi +} + +# ------------------------------------------------------------------------------ +# Check the version of the ibm_zos_core collection installed +# ------------------------------------------------------------------------------ +#->ac-version: +## Obtain the version of the collection installed on the controller. +## Usage: ac [--ac-version] +## Example: +## $ ac --ac-version +ac_version(){ + message "Ansible collection version installed on this controller." + cat ~/.ansible/collections/ansible_collections/ibm/ibm_zos_core/MANIFEST.json \ + | grep version|cut -d ':' -f 2 | sed 's/,*$//g' | tr -d '"'; +} + +# ------------------------------------------------------------------------------ +# Encrypt a file, it the users responsiblity to remove the uncrypted file +# afterwards, also ensure it does not end up in a public rep such that it is +# in .gitignore +# Consider adding salt +# # encrypt file.txt to file.enc using 256-bit AES in CBC mode +# openssl enc -aes-256-cbc -salt -in file.txt -out file.enc +# the same, only the output is base64 encoded for, e.g., e-mail +# openssl enc -aes-256-cbc -a -salt -in file.txt -out file.enc +# decrypt binary file.enc +# openssl enc -d -aes-256-cbc -in file.enc -out file.txt +# decrypt base64-encoded version +# openssl enc -d -aes-256-cbc -a -in file.enc -out file.txt +# ------------------------------------------------------------------------------ +#->file-encrypt: +## Encrypt a file as a new file +## Usage: ac [--file-encrypt --file <file> --out-file <file> --password <password>] +## Options: +## file - the file to encrypt. +## out-file - the encrypted output. +## password - the key (password) used to encrypt the file. +## Example: +## $ ac --file-encrypt --file some.txt --out-file some.txt.axx --password 12345678 +file_encrypt(){ + option_file=$1 + option_out_file=$2 + option_pass=$3 + + if [ ! "$option_file" ] || [ ! "$option_out_file" ] || [ ! "$option_pass" ]; then + message "Unable to encrpyt file, missing option values." + exit 1 + fi + message "Encrypting file $option_file as file $option_out_file." + touch $option_out_file + chmod 700 $option_out_file + echo "${option_pass}" | openssl enc -aes-256-cbc -a -salt -in $option_file -out $option_out_file -pass stdin + #echo "${option_pass}" | openssl bf -a -in $option_file -out $option_out_file -pass stdin +} + +# ------------------------------------------------------------------------------ +# decrypt a file, it the users responsiblity to remove the uncrypted file +# afterwards, also ensure it does not end up in a public rep such that it is +# in .gitignore +# ------------------------------------------------------------------------------ +#->file-decrypt: +## Decrypt a file as a new file and apply file permissions 700, RWX only to the owner. +## Usage: ac [--file-decrypt --file <file> --out-file <out-file> --password <password>] +## Options: +## file - the file to decrypt. +## out-file - the decrypted output +## password - the key (password) used when encrypting the file. +## Example: +## $ ac --file-decrypt --file some.txt.axx --out-file some.txt --password 12345678 +file_decrypt(){ + option_file=$1 + option_out_file=$2 + option_pass=$3 + if [ ! "$option_file" ] || [ ! "$option_out_file" ] || [ ! "$option_pass" ]; then + message "Unable to decrpyt file, missing option values." + exit 1 + fi + message "Decrypting the file ${option_file} as ${option_out_file}." + #echo "${option_pass}" | openssl bf -d -a -in $option_file -out $option_out_file -pass stdin + echo "${option_pass}" | openssl enc -d -aes-256-cbc -a -in $option_file -out $option_out_file -pass stdin + chmod 700 $option_out_file +} + + +# Cleanup and remove geneated doc for the collection if its not going to be +# checked in +# Example: +# $ make cleanDoc +clean(){ + echo Todo + # @. $(VENV_BIN)/activate && make -C docs clean +} + +clean_doc(){ + echo Todo + # cleanDoc + # @. $(VENV_BIN)/activate && make -C docs clean +} + +#->host-auth: +## Copy your ssh key to a `host` or the default which is your username. +## Usage: ac [--host-auth] [--host <host>] +## Options: +## host - z/OS managed node, no selection defaults to +## a host registerd to your user id (`whoami`). +## Example: +## $ ac --host-auth --host ec33012a +host_auth(){ + option_host=$1 + if [ ! "$option_host" ]; then + host=`whoami` + fi + message "Copy SSH keys to the managed node $option_host" + $VENV/./venv.sh --cert $option_host +} + +# ------------------------------------------------------------------------------ +# Copy mount & profile scripts to users home directory on the target and execute +# mount script. Should automatically authenticate your ssh key. +# ------------------------------------------------------------------------------ +#->host-mount: +## Copy mount and profile scripts in users home directory and excute the mount. +## Usage: ac [--host-mount --host <host>] +## Options: +## host - z/OS managed node id, no selection defaults to +## a host registerd to your user id (`whoami`). +## Example: +## $ ac --host-mount --host ec33012a +host_mount(){ + option_host=$1 + if [ ! "$option_host" ]; then + option_host=`whoami` + fi + host_auth $option_host + message "Copying mount.env, mount.sh, profile.sh scripts to host $option_host and then mounting shared drive." + #$VENV/./hosts.sh --cert $1 + $VENV/./venv.sh --host-setup-files $option_host $VENV/"mounts.env" $VENV/"mounts.sh" $VENV/"profile.sh" + #$VENV/./hosts.sh --mount $1 $VENV/"mounts.env" $VENV/"mounts.sh" $VENV/"shell-helper.sh" $VENV/"profile.sh" +} + +# ------------------------------------------------------------------------------ +# Print the mount table contents +# ------------------------------------------------------------------------------ +#->host-mounts: +## Print the ZOAU and Python mount tables used by this utility. +## Usage: ac [--host-mounts] +## Example: +## $ ac --host-mounts +host_mounts(){ + message "Print mount tables used by the $0 utility" + $VENV/mounts.sh --print-mount-tables +} + +# ------------------------------------------------------------------------------ +# Print the managed z/OS node IDs +# ------------------------------------------------------------------------------ +#->host-nodes: +## Display the z/OS managed node IDs. +## Usage: ac [--host-nodes] +## Example: +## $ ac --host-nodes +host_nodes(){ + message "Print local managed node IDs." + $VENV/venv.sh --targets +} + +# ------------------------------------------------------------------------------ +# This is the only script that has to be relativly executed from (scripts/) +# because at this point, no managed venv exists. +# TODO: Support -force to replace/update and possible BYO reqs file +# ------------------------------------------------------------------------------ +#->venv-setup: +## Create managed virtual environments using the latest (discovred) python3. +## If the password option is not provided, the info.env.axx file will not be +## associated to the managed venv's, thus you will see messages asking you to +## export some variables such as USER, HOST_SUFFIX, etc. Choosing not to use +## the 'password' option should only an option when the utility can not decrypt. +## Usage: ac [--venv-setup] [--password 123456] +## Example: +## $ ac --venv-setup --passsword 123456 +## $ ac --venv-setup +venv_setup(){ + option_pass=$1 + message "Create managed virtual environments based on hosts latest python3." + scripts/./venv.sh --vsetup --password $option_pass +} + +# ------------------------------------------------------------------------------ +# Allows you to activate the lastet ansible managed virtual enviroments +# TODO: Allow user to specify which venv they can start +# ------------------------------------------------------------------------------ +#->venv-start: +## Activate the lastest ansible managed virtual environment. +## Usage: ac [--venv-start] +## Example: +## $ ac --venv-start +venv_start(){ + message "Starting managed python virtual environment: $VENV_BASENAME" + #. $VENV_BIN/activate; exec /bin/sh -i + /bin/bash -c ". $VENV_BIN/activate; exec /bin/sh -i" +} + +# ------------------------------------------------------------------------------ +# Allows you to deactivate the lastet ansible managed virtual enviroments +# TODO: Allow user to specify which venv they can stop +# ------------------------------------------------------------------------------ +#->venv-stop: +## Deactivate the lastest ansible managed virtual environment. +## Usage: ac [--venv-stop] +## Example: +## $ ac --venv-stop +venv_stop(){ + message "Stopping managed ansible virtual environment located at: $VENV_BASENAME" + message "ac --venv-stop does not actually currently work, use CNTL-D" + . deactivate $VENV_BASENAME; +} + +# ============================================================================== +# Main arg parsing +# ============================================================================== +while true; do + option=$1 + if [ "$option" ]; then + # Check that we see a '-' or '--' in all options, else error and exit. + test_long_input=`echo "$option" | grep "^--?*"` + test_short_input=`echo "$option" | grep "^-?*"` + if [ ! "$test_short_input" ] && [ ! "$test_long_input" ]; then + echo "Please use valid syntax for option $option, it appears to be missing '-' or '--'." + exit 1 + fi + fi + + case $1 in + -h|-\?|--help) + if [ "$1" = "-h" ] || [ "$1" = "-?" ]; then + help + else + help "verbose" + fi + exit + ;; + --ac-bandit) # Command + ensure_managed_venv_exists $1 + option_submitted="--ac-bandit" + ;; + --ac-build) # Command + ensure_managed_venv_exists $1 + option_submitted="--ac-build" + ;; + --ac-install) + ensure_managed_venv_exists $1 # Command + option_submitted="--ac-install" + ;; + --ac-sanity |--ac-sanity=?*) # Command + ensure_managed_venv_exists $1 + option_submitted="--ac-sanity" + ;; + --ac-test|--ac-test=?*) # command + ensure_managed_venv_exists $1 + option_submitted="--ac-test" + ;; + --ac-test-config|--ac-test-config=?*) # command + ensure_managed_venv_exists $1 + option_submitted="--ac-test-config" + ;; + --ac-version) # Command + ensure_managed_venv_exists $1 + option_submitted="--ac-version" + ;; + --file-encrypt) # Command + ensure_managed_venv_exists $1 + option_submitted="--file-encrypt" + ;; + --file-decrypt) # Command + ensure_managed_venv_exists $1 + option_submitted="--file-decrypt" + ;; + --host-auth|--host-auth=?*) # Command + ensure_managed_venv_exists $1 + option_submitted="--host-auth" + ;; + --host-config) # Command + ensure_managed_venv_exists $1 + option_submitted="--host-config" + ;; + --host-mount|--host-mount=?*) # Command + ensure_managed_venv_exists $1 + option_submitted="--host-mount" + ;; + --host-mounts) + ensure_managed_venv_exists $1 # Command + option_submitted="--host-mounts" + ;; + --host-nodes) # Command + ensure_managed_venv_exists $1 + option_submitted="--host-nodes" + ;; + --venv-setup) # Command + option_submitted="--venv-setup" + ;; + --venv-start) # Command + ensure_managed_venv_exists $1 + option_submitted="--venv-start" + ;; + --venv-stop) # Command + ensure_managed_venv_exists $1 + option_submitted="--venv-stop" + ;; + --debug|--debug=?*) # option + debug=`option_processor $1 $2` + option_sanitize $debug + shift + ;; + --file|--file=?*) # option + file=`option_processor $1 $2` + option_sanitize $file + shift + ;; + --host|--host=?*) # option + host=`option_processor $1 $2` + option_sanitize $host + shift + ;; + --level|--level=?*) # option + level=`option_processor $1 $2` + option_sanitize $level + shift + ;; + --out-file|--out-file=?*) # option + out_file=`option_processor $1 $2` + option_sanitize $out_file + shift + ;; + --password|--password=?*) # option + password=`option_processor $1 $2` + option_sanitize $password + shift + ;; + --python|--python=?*) # option + python=`option_processor $1 $2` + option_sanitize $python + shift + ;; + # --tests|--tests=?*) # option + # tests=`option_processor $1 $2` + # option_sanitize $tests + # shift + # ;; + --version|--version=?*) # option + version=`option_processor $1 $2` + option_sanitize $version + shift + ;; + --zoau|--zoau=?*) # option + zoau=`option_processor $1 $2` + option_sanitize $zoau + shift + ;; + --) # End Arg parsing + #shift + break + ;; + -?*) # Warn for invalid but continue parsing for valid + printf "${YEL}WARN${ENDC}: Unknown option (ignored) for $0: %s\n" "$1" >&2 + shift + ;; + *) + if [ ! "$option_submitted" ]; then + # Error when no '-' short or long '--' found and exit + echo "${RED}ERROR${ENDC}: No long or short option has been submitted, use './ac --help to see options." + exit 1 + fi + + # Nothing left to process, drop down into action processing + break + esac + shift +done + +# ============================================================================== +# Action processing +# ============================================================================== + +if [ "$option_submitted" ] && [ "$option_submitted" = "--ac-bandit" ] ; then + ac_bandit $level +elif [ "$option_submitted" ] && [ "$option_submitted" = "--ac-build" ] ; then + ac_build +elif [ "$option_submitted" ] && [ "$option_submitted" = "--ac-install" ] ; then + ac_install $version +elif [ "$option_submitted" ] && [ "$option_submitted" = "--ac-sanity" ] ; then + ac_sanity $version +elif [ "$option_submitted" ] && [ "$option_submitted" = "--ac-test" ] ; then + ac_test ${host:=""} ${python:=""} ${zoau:=""} ${file:=""} ${debug:=""} +elif [ "$option_submitted" ] && [ "$option_submitted" = "--ac-test-config" ] ; then + ac_test_config +elif [ "$option_submitted" ] && [ "$option_submitted" = "--ac-version" ] ; then + ac_version +elif [ "$option_submitted" ] && [ "$option_submitted" = "--file-encrypt" ] ; then + file_encrypt $file $out_file $password +elif [ "$option_submitted" ] && [ "$option_submitted" = "--file-decrypt" ] ; then + file_decrypt $file $out_file $password +elif [ "$option_submitted" ] && [ "$option_submitted" = "--host-auth" ] ; then + host_auth $host +elif [ "$option_submitted" ] && [ "$option_submitted" = "--host-mount" ] ; then + host_mount $host +elif [ "$option_submitted" ] && [ "$option_submitted" = "--host-mounts" ] ; then + host_mounts +elif [ "$option_submitted" ] && [ "$option_submitted" = "--host-nodes" ] ; then + host_nodes +elif [ "$option_submitted" ] && [ "$option_submitted" = "--venv-setup" ] ; then + venv_setup $password +elif [ "$option_submitted" ] && [ "$option_submitted" = "--venv-start" ] ; then + venv_start +elif [ "$option_submitted" ] && [ "$option_submitted" = "--venv-stop" ] ; then + venv_stop +fi diff --git a/changelogs/fragments/766-ac-command-replace-makefile.yml b/changelogs/fragments/766-ac-command-replace-makefile.yml new file mode 100644 index 000000000..ca0d17e0f --- /dev/null +++ b/changelogs/fragments/766-ac-command-replace-makefile.yml @@ -0,0 +1,4 @@ +trivial: +- ac - fixed makefile limitations and monolithic design. Command 'ac' performs + similar function only with greater automation and detection and modularity. + (https://github.com/ansible-collections/ibm_zos_core/pull/766) \ No newline at end of file diff --git a/galaxy.yml b/galaxy.yml index cca9297d3..e4b998278 100644 --- a/galaxy.yml +++ b/galaxy.yml @@ -64,27 +64,30 @@ issues: https://github.com/ansible-collections/ibm_zos_core/issues # Ignore files and directories matching the following patterns build_ignore: + - '*.tar.gz' + - __pycache__ + - .cache + - .DS_Store + - .git + - .github + - .gitignore + - .python-version + - .pytest_cache + - .vscode - Jenkinsfile + - ac - ansible.cfg - - .gitignore - - .github - - '*.tar.gz' - - docs - - collections - changelogs + - collections - docs - - tests/__pycache__ + - scripts + - test_config.yml + - tests/*.ini + - tests/*.py - tests/.pytest_cache + - tests/__pycache__ - tests/functional - tests/helpers - - tests/unit - - tests/*.py - - tests/*.ini - tests/requirements.txt - - test_config.yml - - changelogs + - tests/unit - venv* - - make.env.encrypt - - Makefile - - make.env - - scripts \ No newline at end of file diff --git a/make.env.encrypt b/make.env.encrypt deleted file mode 100644 index d07e7032e..000000000 --- a/make.env.encrypt +++ /dev/null @@ -1,287 +0,0 @@ -U2FsdGVkX1+EjJmnWmQEpgyUNArAe8yFuGLfSiIhYAY/a9bxa+ItpMqm5UnIsAIE -np5moXmd3pj9MXZTVJLJSSBQ/QuePjdEfL+HNZWX0Waw8GXdmIWkBkhdBLeRdXqy -0XHRXTptcXhFXx9fOenjgSxm8oblTy3P85EsbVcUrCP9DWEyvAnrFpLSFFcz2OB7 -6/wn5EgMHGhyjaTjcyfkNU+Ae7rNTCAj6Hcl049UzAFb3hGqaz8F/g0wp/mX/ThI -2qDZCsLlREPZsgfqQkW1zXPQxS08eRmUR9FsDnDH1X0dbp/9eSHu5IT3MrwawEz5 -hS/K3XkuHFpWQWWf3JBPXYl0A/m2WCM39bmg13mfnDBJsxmnEY8510zdNTHTqSUG -n9L/ffp2CpCjEDbKQJuzrfuzQ/h5I37XqKvE6OWdY/T48mDsqajGepPBmZF1WrlQ -y7ZMWIlhzxw8P2ZX1nWwchbTxy9kjKs71tk+ikKUoEU56SY+WmVjKvXZtUB3sY/Q -2Nho49hpGJuPV0tE6x8oAMW3ERZI0BYWMrLOf4bm76LGgs85WrEzz7dnaPkCaEbH -SGPx/U93f7zV0X05sdOPPRoZ4mxLxNOtgA8qAQLeAlFnzbLRlq0q4M2MQ4YZ/9HY -HTE+CW8sj35e6TRxPqfF38yisVv9JmQEhZ83qfMqrTaR4PqLWhCm/Hak3LMUNRTw -UrdGDmvhCk/BHqj/kmRyCY5Ts9Z3gktUD9OSuxBVnPxRo+exabbjGqMK0Eslwzgc -nvgba6ReKkh1xf/3PgI3A1ZOKJzkE3YctTcHV0+o26I7JW4kesTdXYIM30RgVF7X -N/akJFV42ZT8xEmYA0k/v3tM/xpQVzj8FbzAjP/d8L6etIolTJPWS/+rkgTaiZdD -EWN7kLfgzmC/OClSjuGRyLNbRed7rGuDLmKZKY+6Bd7tcLgBWHKJQKnZlJcoSx0C -MbTS29iZHmEkV1WWeTm/mrjqZsQ8/5ky4Ug73RePk+rlFovxCLSypFOxJRHR+nte -nYE7iJmTVwE008IoKTWbwxZaqbROSqKL9sw0r5Kps1Eve7oGSoNdzzCTArlteN5e -yU7pz1lZrOWGlT4pVnYnxuY7ayGkLvxqNWBG+bq6VYsh4W9oMHyW4hmRRTcvfB9B -/1B57RUM6rCxAWZYd+/T2TU0Me19NKxhwHct0zrllNwypWUCZQ4kfvQvVu5Xo2AJ -LawsQ1KUcFfVLUfq9ecQU8OSU4MYYIibPrjUqmQF7Sk22HM9cXghAWY0nPfiHTVd -jdWn1c9z8lVNk/jZVxN6H/MS8mC4BRw0tCJDWKWEkhxX3V8AvRdTOIsKYp+i2U5j -CIcVGMfAtmqg1WpJfVYLZrYI2uskjL6uNmy7XmFxdztbiZyuE37NEd7cw0q8n8d/ -mB+BBQ4x6Jd5qvlWNceaRv5QxNZOs36CxM/ta4bGBaAa47CuJ1EMo4EyoirT2Hh7 -tulT758q0HXYsKcg6EImwAYJvEws7MDGFdpWyWz2c9gQX6biNBRXfaGzoXE+hQ8A -e+qPaxO8s5yCGqiKu47KreUotbP0YePyEE+RlRIJGt8t9JGtMVz6k8MzcEVRhtWK -iX1mwjUR2saocyXATYggy7Xmooy8nLjvKqkYx5FKmL/csuwC1gdlDrLRl0MPIhtP -z5o3SSLDud0TzQdM0vnbaV33ynMbon1MRW1CL5OKMHFMh4Zoilm1ti26JiQZQGNS -1899V1IAjsVUgLtCctP4WiIlFw3BCwYjXHnD2r7gCJc+jSkjwrM+ayhZ9VIGFC9K -J7+Ynx9eon/QqvMT5Ek0pJnVsmjJtWLsmOEWmW57VFWKCgW2wW8s9+/YLdUnRCvd -B54x2MNHcGLWx09C/Or2Ir7U1Gcz0LXkkzbDifabUjaH3LjIfYafnyYcu1BIV9R9 -ucP9f4Kow/oWZHi5t0SFNO8ArqLOumTHxFpd0MtsV64xJf8TuZJkSEZbxrb7VoGM -p0zVVgZTp2Ryf8gN3kpTPQE8CCZInpUgipza59z5rtgpGR2X6y1Qswb7vMez9NrG -ihDA3iQX6v/osxNTVfBsTfEjE5tj1vWlm5JxteGZ44RiRmMk5GuvfO3atccFEmxf -NKtJy14jwmy0IepxSMrs9sa8/cLQGH6OBOPUqmE8OpnVtlu8BUt7y9BrUMtSU7g1 -ej3drUGzyuAMSXDPhRuJjL2KlR7/YPKvLpBvhGa+vn/VkFcaeZmx96Aq0dOA2dQD -v/y5xiW13f1s6/MNfm1pt4vGMoijlYHPPH6e4sFRGhqcgsHkJuJuF0HKCunXbrEU -eSgpZ+zzAEt0C/Bk9iaKYvqG9m8YDL+vG+hZETHucvmGATURoyB9DeyLAgLWSIGb -LCiL893yhbgVc4j7tns98X146xX3naHZcCUKzN7cFAn0Kzdxv4T0TMw10wTuQj7L -pzykhyrlSCatGK9ydrZWP2rg8xIiAxtvCPvhZf3Douk4KuF7VTAwOSM3wlgAnuZG -KqhmaFb2cl/UynMiREMYkAgnSo4oraLSIDT2tKkCm/Kv+Fx4HsUCAH205l5ew+/J -9ftje6gE2evhaycR7sbOWul2N3gWHPxNjq21SjZ61uTVfrdu1Ls0vPQ5VlEapFmL -UeHCCiRL6wDZx572zukaueonghq8PUO4GoPWzY3j8kX3PaObzNxyfC+Nt8n2yklJ -vyuWzEMTym5d2knQwt/FSsyz+KCHEfRwqd6Nj0q0KTyfwU2be92Zf8WPDhn6Gv+e -dNjGLdgjX/bvhsEJb1SarSPM6Va8x85qja5PUxcsWYyEhX2VOxWNnG/9HMwhPDDg -oHiBRUKTx2GQ0MShB+M/yG7yWu9JRcBSj8jk6+z5TfdFXfed1+3+h/4v5FNlvBQw -E5WRadVZRa9FjgpjZ/ABDZbTUTIM05akVyegoRxViX6Q2u93u3HpjA2BYM633uJs -l85czl4l6t19Qaes/wyStS5KXMYR4QbmHAmn95zz5C6UMYa/F1SXsbthooEuYMqp -WFBhceMu1d31POJImVbfAY7GQOjsy9Iv4ftbiG0SI+PvByP1/HOwlkyJi1nWRPTv -7yOclBXc3WRx/zvejsQule/Q2pgpeuvB/vw3LVR6GwZfsPzSWoyHurmIJd/+Mpe+ -NyCaUrTsB7IRwnwCX2eKwb9qzyLPZIW/iveJPL/2YrxK1IpHSvbq7fSMGmgWH4V+ -mYIEIiDavMCuZk+yS6WgEF7DtYT8ZAn8UCNmLvDtlMXMVXKHIF7rflSiv3zm1nul -o0giVpplQljtFdl0oOw7iqhpNzSNvTSzHGo/wxMT4ECMuWJ3/cYttFmaf0gY83W8 -tEKl7JG33jxf1MRQwLe99Tn2XTnZC6Mlc5uATQFZu6hwekhjzgO5HBkaK7yaGO2U -xS8m8QT/MWK2Wkqz9WFXjlerLAHR/p7GiWSuRdB5PaBYut1gZwAJvuE+X935qJ07 -nP6jAwuITvxBC3rUKBRcksthAVbVtfQgBw9OfgzUwdEuGf1a0GhrqjRVyC+bwLwa -SVAzu5nlMK0raxxoxTKJ41mWe0wFft8voz4XXCIg+Z9rnNLhxeeIyVCIFFbbDvjP -taYLmadGlxc7Q3aGphWwlfMqVmkyz+8OYzBpXxF3DH6cUFCuqQopiA+7YaJ0GJ7U -AuUYrFWTIAUXcoXpZaZfpirqpoCx7GTkKr8uxFer2KnJcgBtZaz4nIGypVyxYZw/ -x3yMHoCP3MgYJLeMklb0NQYpFhm+yl8T2emxXddDdDqFWKQJXrk2V2XTCsCLPNnQ -qs98SR/jnirtGctuNT/gHxBQjzQfBZNCMEfHLSbDjGt6Jd/KvwsfDC6Pc7a+RrRN -TBObD2KPs0i4mTn9dYjeesRtC9GLucGZ/uT8CsybvXRY2kZi4GV6uHJuQoCjZ5+b -/9+TMIXoWSw9TqA4kesARL/ieUYjUAvTGFsPnNmvG414exjPMFd1bOO2PZM5PMHb -3bj9csVHmF/+MvbUEzJSIl30f+G/4T/8pd/dvQBWbr5rIEys4mLGc6bQTKDp0kwo -9SXblV71Ff97UNpe0G9/m/RhL9PxwTnSOg9aFhUHT/6oWBNeg0TqZdzHcC+nZQ1P -Jjuw28ym8PFigJ0jIwEDoErN3ycIMbvksw7knPbNnGLzsKRaJlawVhijxE5gEpek -fUyUdqo8VCvKkBv3zZmr/QcUO9wF4Ru5ZpHIL3e7K61bgB436hmWMUkLfAiNAxrg -VW0i5HCo6oIrVTyEp7SoWfM9pKyE45EIcwHWlxfyQumJwP8hXPB335yqfyEWAZ6p -9VScSwvvLbe4ruRmjhTsB0Y3i7P6RmDf+0iVnt1g+BG/PAEeR3c9/Zq0G+CXf2he -EFBHPAA9Q2MLp6cF+FNorJ7uLFqCFK/S+IyvPbcf6RFJyIzG+IJf1V4tvmzbh3Bq -O2qhmfaCSDrtPefgIC/nJxgBzDPfJKBKLsuIBQFeapXS3LeZmgZ5btGPy6xasdic -KScJQdPQpF1/bMYmFkf2gdjBxVcIuJE0YgIH0w1snhR9yWRaSGxUafQisOvdMSFR -rWC+9IYHkxNDfhdX+HO9ZMpb6IrwBnw4vv/bHanpp5KIo96pcX85gxwQ7tEP7GZE -4gm3/8fFigthmOM3yLyUdaqKCgjnDqdVVqVldqnNRp81KsXhE+pMdZQCrTU0F6gP -89bYgTA1yQfoM7TEGUz78zA2JmMo0dOIu2dwu3tz24/l5tDxkUVOF0Tx99RaMCix -kvQKYfMFaaBqeMxJViJkfjPxHelqDqk3WAqXhC5t9m56aBqWbasd5rO+7BdsoraE -00m7R3WpMSFSBEEeYTYMmEqyEORVVEqRa376AMWLaFtKtWLu42XFQvJjYk6vbCoX -ZTvev9C/RKwQkrK+JidNJA20haG403OOACxDiDHn49E09KgrMIl7B62hdT13OCun -1mMe7QzPpN2VdzglWGQsz2cyMMBywCiD+A+bPku2B3w0pGGHjKS4IhKiOLkNeR2u -P1qQ2d5U3RRk5BmLevQVGgeV2bHTZP7lDbO7GqWL0uvgWijayoqW/RYxiniXpCNJ -ZK6QwWKyFyr+yaipB8wRm9gmpJdawo4QHl/0T+05jgTHdg2O7t22OVWNNYF4fzms -TqPYYV7KEp3sGk0jrhtzGwz0FE57NLZwFyz0E94afX0ZmnjBLWoFzX5R+YddoOVN -pikf5RHi7iAO8fVurVkoiH0G6osukNqS7myeylU2VjqmmB8u/b09Up3faAHPjLEH -yZewcVoAc+b9x9xQXU7Vk4RO54+HsyooRZAtp7ZXIyPrNpaGpdJV8p4oBtCuM4SR -2BPncWvmmwqae/NFMQnaLsZcDzmgUoyObzwf/fiiXdCYMvJFyFO0wt8yspUCgyPi -6+mkVOhH2mSZc8PFZKMb8nPEfpLsXN/1k9ePdXx3i43tTVeXnyAfdNU4hIjKFfgD -w4aXAvIPe3oHr+gYgwLFgJqTAlr8Egb4Ae+OmJgSI5DCepA0pPI1fmoN+aWT4lWT -q7xUGgQBzw6xU4rIptQfrpNXU1TpkUqsNYlLL1Uu4o4E8d8rdToziVAGZGgusPcT -LgbgMY6TcZMv01BLzq9L/u5acKQKf+AOMQveVupBsSlpVQda/ufj59Up7tPXq9J4 -w8U7qmsHSjv+DczwgzbYn4oX9au2aC5VtglZdNIoe2DT3QMpolblU+V9rMLOnC+F -19EDLn7U+nq0ZpxeSzH0LXDcslN7e/pGwcPJymdk7KWCXf3R4mtf7+IKup2xkO3c -PmOZBYGmESiiupBGYeB5TjmPBRyW/ESaMO+/kwBqV0v5lKG33ATCSDcZ0gq8DOTI -hJza2b4tF2V1qbbAn7FbUHlgxPKvcITsH7yOA9JtdBTSKpixTR6OyV6UNq4xWuyZ -MgzQUHH2ydLrWSiwc5K6KKclsBznaRcMv8Tjj6Ph3soWGRO9dtiV4Qp52OP1HMPA -lX/yybLm4wCjXUetaRkH+bg0+Cfh6NLylXh3vgs/0BSXAVX/zJriYOhpVFsS4nAX -5GVoM/n/C1oEfw/zF7KA2lTDuQHHXJ46PpOxBNfdC2Bx6mcLpconSrHmwRpfdaGT -dd3aeyDZr7wFgsB/gFcqLuOG8ZFW6m0rmMwNQXa6nHImFgereYE0Qt2uSSTWgFH6 -T7NaCsTfUlUvrXxLoWjx7V4yotbnxCP9g0HClewDeXKE7EBYjI5C4QCPvkVfS+2K -GghKjCROpHGnIrl2sghAtI2twcgKA5+wj1Zz9H1O8t75sPYneixHOZCPwNspO6QJ -GSzVCKM/90Qf4io6l0l919/40Y7RxlDJP2c/HaIYXq9aXb/DTOA4EM7nTFMNuPjC -Gl636cfvYEyV1NPEpjWP80398wAsBWePWAJAwlkedw1+Da5ETnmVxG6oIFIS+O78 -2grviqXNS2nzsY2hPU/6Sbe/FGIP5z++6f5dWFNdS9hRUgw57nwKMNp9emhPMeR3 -mFQEqfGNJ6GG4mttCpwXqxKOiNV5PGEHG6X8fQbkBVTApNar0lvTUVza046B7Prr -1agCfgvdSU7ScInXy/ptBEUzmcurc/Qpq3APhyU+4ekCzbkHOz8p8CaB18xHatV2 -Z7mmYp7m4DAHQjCB4Ax85wI5IzB3bzfgw/uDkSgkyTcfAEdT5WLplfBlqf+SDg68 -eWE1Qe/Oa34gwHAdcCsKpg214I7vmTaUbjmWtgqhfTL/98b/trF9bVoZONjld+rT -FRIAIU0JBGBRWbadlAmTnt/vZLGGlopKehw4MzVIrv7HKDxF9dy27nXtGwWFHnd8 -DgFEN07e5U1yjd5OJ3hs+RLjozjO+3cZGxn+rodXktcOXjduA1nAaSZrFPlwjURM -EwnEHWCNlEw1NxlGmpwmuJetcbyqkCPia6Adx5yhlvFE6J+a6n1jRdrbABDfMJ9K -8sCCLRP5sdmrPfc/EoHDocjgyN+khJzfcGQxe0gTzploiMnZi43nOZISh0LG69zC -ApBdQo5ycR+kHM6YMpR46EUl6MbDF5h50leSatflVOsuauLzCVtPHN/1CjaJ+uij -H0yT4ZPFCynxb0j/Zk6J4PuZmxdDVRNo4K4C3vqF7eYgniMTaGbsD/gSubbCC2qg -bIBDYI9nxB7of7Qt8u8CkfO6F7KrFrCDNT2elbRonKl5Pddw6KVaDLK8sfauGpgb -i/XQaL5fVT31e5uGNQYgi4zvNb/JxvY4Yl2K7qwVWecXvgnDfxdgGCDoy0m0h4iY -ZFKunIIPZq11ljy+PgwON955Nwka9aJSkQDKo/t7HKlZqQxYpYyOFd2gsnQEx1FS -rfj5ZgbOYjkPumuBzI4Thfwf+e/0I5stfUukveXsoVmSGt0ta8euOKPogvPZHlsu -67k2DWMsxce9N8E11AgoSQVboYI6YU+VUudyrRdVhUEkp0HvuqxbSB0uz/E70uWu -xZr2gZGhk6yfMAp+yQcGN6dJv2u1GBslV6yYpr0BfHEuJnePHJSwmL9NT3TBrcyS -D/Zh9NtDnuQsbWZXKB/1r3XRqqTfTetDyz4WvRcvBmkoQ4X5RMVT8ywlGzKFieqC -RTUVCWNdG20rVLeuJcaE5txkVfTgSudIEl5aL7ERK45yK13RqP9W4G4YyQHUUVPu -y/sKOCrpGGJHPJBcG8Q0urqeRPq0DkLP9qD626+By9cmMpXW9v+Cc+ZWV33J6dgA -hbBRxq6ruom4UEbNRASkcfVDjqGHUnIhbHI2zgJhNWFvHYt1cJoYPWHXza5lxt/f -NFw7GOwNZolB1QIxv92PAY/PAE7bU+vAjOc93BkI1V6zvNdl93ehCxB3froP0pTY -o4HxVT+9scFcopUH7yXEOvMGXErS+a83ma+0Lp3uEC5be9TdgcmHlHEDfRigiMoX -y1D2lRhMGC9n1kiXsIA6zMnZD7rqlzK+jJSth5yAqFyJOLzD7KKXmCTL2wBpspCM -kKHHvq+VuiLJC1fjkGdyb0gzA4YjLkGIchI/gFdFF6l8/vzGGIXwV5UiranVu2y/ -2gWUjohF17zeBc3O6PaLZLPaPj3g5nFvKGXSj3E7Qj5gtjXdYq2hU9vfShFFCXzg -vtQS+wJZ/mdZAYrHh0+BQ5v5hggXvyQzTUuoxWyAp1iZx0GmGuqHf3gy7o8jwD0K -KfHSnIepMaBR9e+XYQBmvLfFQLrt2KaoGyc6Js7PXMloPl34AiEu1Gr2wftZU6tE -BKeBJ0hY/dAG8s2zuW5SiyMPvpttLLBmfCY/SvlI9tPCbLHMs2NUof9Kkh6n2pwd -PVyM+HLZiLkfxux7ET/J6QrjU6dVts2RXtHDOu1ZJq0NkCu+lj6hgxGTyoliUnub -RRW/+HIbpFYkOLc1LJvJD/TCylTIe2y4ftuiAIVR5vNR6ZnC9Xfwy9pNkatkeeuD -P0J9gJliBjr7lUT9yEzpzFCntIuU7Z8GRDxuP9dnka8phYMHeUkUpWUfUPtVk3JV -NhuaTllC2zO4cfdgo1pMi47tgC8cOwHzkrf6jA5NeRX9hyoHPpDNaKS1QqYYdXQc -r4MiHwVxg3aUAa0j8EQa3V/XPT4frKAQCJvXs3lzh1TrNcAz3r/IVujvNLo2bXmx -B3BTDT6WTqYXFxBoHJKUV+AkMzE3L63o+SrHlOdcqDXf1aDs/YKhZGCuWNS1GE9z -Fo6kY2LUsnTLuN/dz/fG+FtMuvLtwJKKDGX2LG6vF9Fi+uWEFFnj7+RViXF3EqL+ -qOIs1A5XWQeDmFtAk+079sTii0/AdZPR9myF7rNJQOc344Rx+y1OMr7jV08tFugR -D/N3SldpFOQ9Yrc8Ks+cSgYJwUGvDLSg3awNq1wv89hWY7p5UwRtntlz9Evmbjql -CmZE3sbYKupamNaS6rQmyf+Q8kEy17l9Ld34cSd1n2slrUin1KyJZfMZ2F5f4If4 -iXWxlfrI1pJ94F+Ud2n/nQOOiJ/qN08dX/N7qHqV7qJWs3k8eoE3/rdllfaeyllg -OvFuF2XsylR/FIPjN9MA81FfGWV8tWfX/RtHWcTrPutwrV/G/OERKLBvSsTtUxDF -P2bZewouZqUnjXUiRQoPRLPFHLjnmjwHAaVVSHZaHcePFz7pkSlyl7ANfXcl++KT -yK8SnlXkeZji9e7M/unspiI5DWG9HNnEItgmwL4v/XeurN1uPpX/bqjSrkPPASFF -OvlIYqR+Z8kjPuy8oWKzlapquqaqgGrNjvThX57fbLYvgLCpAkRPk231cVXMJBi0 -rE80nIMB14mt3qM+t6FwsMILYrqb78ZI9f9FwGI5eP4mHmPwL8TUfnXty2ZslPW2 -lswgh8GG13272+U4jXVYQOyhcxPuAscTsspUxZH+Lo8EdqpweinxjcGh+DBrQgFq -W+Cfj253dR+y7u0v3lP859gnQKVZk/UxMebGZ3egRniUAZDp0vjeVoqY9RJxqAQ0 -NY3CU/Yi2AOa5bIaaCBbdJGKzTlQdphwMec7q8hlpJwE8f5j2CUmhHrZ/78FsSg9 -uq1zDf9oeouorQun2P4FTtSAa+Hf3MHCTtRoqCMgtlzdBsS4ZUQzSlnqkzKUVJ/9 -lHUeP9MliDwBba2dkCEU01g+BThz3cRn8qEVo6NTEWvxOCG4VB9bWlIDC5UyX1Fz -MhnJe8wLIQwU4DwUex6UWc4yvWvy+VBZvbokmojiYQtFxpcnbhTuXaRrEuSjv9PM -ucmG2i1E7ACQ7+a5u8Ot/ssip+Ox6p8V16LLSKskDt7mNagTG/QYiCZhC3qFyIGV -SdjatCvVIeKx1rYtdslFGxD/UzivV7xPfQHWPU8n6wHWfAVjpUtxsXBJ8/xok5VT -vVVEz/t5IU1E5PImAF/OVfr1kk3/OvG+0cA1KdVplpXE5UocNZwk9+xsX4oHxoto -W8N9ePxRFdCxp+OyVNQVWEmXziJb5orisDDzbekxGLpIQdTNK/IeMsLzA1xxoSJ8 -VEKtk+Bwi03Mj1CBoR5L7zJQiUuZi7rcalYfG2OeOK6BlRDo1o69a5Z29Mry3Zcq -x5lsU9wMhZJ85m7yBmSGMgKO87kbsgzOB+vDWv56SLAWaIMxgtQksDQeIHrBBHmI -ndhvv46XSzfTIeNkSYXTAzjg49ibqz/ftuTWzXrpeoqehUlV0DGoLfXO04vdpkhW -dsZN4qkbKAISGYWETldWrc4SReKewWfs9CqYNoKhAxDI11efUhTLp5o4GD+LVO5F -Zo9B8wZ6AwxqZZkzY1q+GWKab4MM5K0NhgkvuO2XmNMkHf2Pd5BLG0tf73UAXXsj -iFFkYUapjW76sp8zO9+2um8pmV7HIDxNOfaLQVnJW3E+4Qtx4jVoHiAaR6Kr0JFh -5u1cX41mU888IKunnc8DkdSFu6DoiHwIM913nKfCtE+2tp/kzkN7GYoNBqEmhkP/ -RHqiN8KcbmlYZbo0gMjzSRRgnTdJhfTPctaZOylBGJKAObj7hs8c2LB5jxuHyRMb -wov/cqPMJ1Hqx+dGcCzWtdi+6M10596P6KHgfUIMWJKqxFtwEkG6TWtG3ivQpFX8 -w30mImFWKtZSx7XMs/HeR6OkzFIOjm6EAaZ7PGf6DjGrQNFKo1CuS/EIq1+x2v2E -gnUhPiHEdf4sHhOVuZxmniCvnfWOiWF0hj4XmmdqpWyFIi8kJEqU9o88oDnmSMCI -D9lqLQBrgX+NefvxHNxSul1ZMFLQ4weVCcYSyjLsjokoFwTNqlrWQi7rLPPmMS/Z -4O6PDlTEdm8DS7bVgGrTlNEKMWyAE4TEwPDaPaqBYZ/Rp3cCFJSWohpXLenNXJSK -XF5Xx5z6BkY1zGmSJMd1sUPGb6AwxEceywsxDj/5pd9fnwDvqV6d5+g2YaB6Iq0p -PcURws9MzWpewBYUtBkjn6+FQfubVM5Bkr/u/fw2UP1aJ5+x9GDWyMKY5pzxgS0M -FpnuplbrV/30B95qLVykwal/aC2v/aECUOmJsjwXZtAaqoAyFMR9eZ+W0a64k/Tw -v75b4G7jCa681kKeZp6qSQhc3l+PcIG37SFrfsrwLNF47CWEyNULbCa/8jabLTxm -Swxi2tcE0zAoyUv7tpAkN0GR1knA4W+k2Sg2iMppSQ2lV9lGFhTYbRohF95kGZWp -7oht+oD3Y2nwx4TWDA6JjRPHVKBQs6M4I8T//+fbfZgJiD1lx/ZRPrR7jtm7M80s -GDRQvtZ/aul9R0KHweNZHr9o55ywYELf/BNAuUYbH8qeacg0Lc0y69rep/Dk46Hi -EgVrc/jCSKLXm5Gv033BJL2NEZK5nsyctVYXBrk9rA3iO827jZDk5o1l/znOETYh -Y488IK1wfbSvfllyWLoHgtHsr4I+jVg2cgPWK6+ST87GU12almhMPQ2/7ifO52RE -h+ZyHO9+E9y297aMqeozrqUX/7Tr0u3mnJ7bWB3OVz/yezIjw30N0/8zkEM4aIHU -itVupvRqQUxKPBsld/QJ7j/X3oVDX2AWHRMMsKRpES8fWxttWa+DEBxSFCPluAQP -zt9BFDSboyztzdsIsB2Gtd6WRJH0ETOuqvY6/MMqPwWZy8FYOaiJFAZ/Vt+6T+jE -jFAz3dBQfMcYYW+CYt1943atXCm5V2xB6pSqzfCxmgjT/Sq0LLEooME7lY1Pwgqn -wj7VhDuAEw3KtgRheg/pqD6Sf+0pYNx90ormkU83dk6T9aqi9H/mSkbAP6y3OeHM -B99f/CorcIntsAQo+BT7CFx5Dt6kjcVBgtiEJj2J6JMiNFaL1xNQDw+29d7aH5cl -XuKkO3FjTuyAUk8yyllbExK41yxQ5/97vmKa1hO6jQFrsYmeLj18wX7sBkvN/IHs -FVYrverD4enFrQhRRkEstB53+a6UAUJAB27gUbAJ6o8HdCFVm6UuVAOYJOI+ZrsA -ai7akwRGuAz+e5NTYOY8LwOWwuXpBlMklsIgb7Qf/NzncoZALOONNjZ3WS+EwdvI -c99VIvuTp6CUg6BZ8JwU2Hff2jVbSyduvu+0ZC2FclymJsNaFX2pcudwFHJCd2Om -FUNxQ7oCefgu7jA+ji82hGhLWjEM+bur4GesVQdLXxvx1W7BHRJKbp94gaJpvcFk -PVSeN2fdbwavapm+8S+IbB5ciebPpiK890v5LBsDPxR4yjVcPwd6Ssw7lxf9jaly -8LnNAqRAlVbU5DZmItQDC2NoncBCxgkFIE52w7nq6oZk5THNunApuQrGp9TdB6DH -W7kWj4k2ZUQcoyr8L68grduQT9Pc3QHfefwhasfUfg1Rn+I3JR95qXh5zI+ZKmx4 -bvRrtJpbSUt4mtY1B9pl9smhg0Fj7nvBMAcZ12967FUKQQL2VBRcpdFSQNgFR5ml -F/imKA5ll9DeNDBf2nIMwWLjIQK/j5rlDlaG5cfvmdEjfJAIXf3WrIEsDR9Q4AX/ -9xJM7TKW27TqaRJeeZM8b/GAIWzdpFNbqliKR2JpHwqlb7dNkfV0qSVoXopQvpUL -H7bJMEDzn2ruokLzymy7fk/OldDE0Cq41o8VVXpExxTIfMxUlTVJtn50xS2yByg5 -NQfhwlpffq4xOljWwyLaHbq6VQYmN7OuxhjTboow+M0s3tQqA8sosiKjaAg/t6vZ -6HzlmSNoLgd4GOg6AABqblf2fjbD/4mpSHqsnbIIqtVktFtnzWiEU7AphIsztXSf -0HnB7l7xlJxEM6m5sQcP8FNDfErWMdoowBcJgF16QMS545ZqzPANokeWMSl9D57Q -ZNisoZsMZ3UG/rL6TCPMQnqHv3kH3Co1yl+DNrr8dGUC4Bhn6nVg6BFBuiNSeQUQ -uVvJ3PuYeC3NonjSMiRxwhkeST4XfEg9S/UZ+ApLNCLlM5ql5ZJObui2uKFzTAed -jATO9xv44JZF8S8Uva59mQvfC/+CjDNHcOOrrJy2d9OZPu8t4INZs83UTynUxh9F -2Ln3kYZ39yCTwSvB3+kV30R7/n0p1Cfdl7M1+fTW99jKXJ9m9pqjb9QhunOY7T12 -3yCdyTsdwX/PiTIUwM/pKZjw7xuQixOXR+OZpiHmvMnrzUSeMZsxj629E1mrRMhc -jDZb9ZnQOoLNggbaKTRuaE/DjpACpuMpi6jmnjVPAzMyGDOtQ7eaZx5lrpBGgv7z -pzd7SxXJn7ytfWu201TStZd0FyLjiAoYiSn4wNc1itbAhjirRLQxvHvaUbGmkAiY -kdOjeebjg4duEErxaDcRLcKRWk6MWeLH+xP3GjWSeQdalpfXRr1Vbs+ErAC5yWoO -D5W24qzZbzFeWngKEh/Aykchm42ZY5oxsbg41frjD1eKiAqCs+Is8x9CwHtu1uvN -jwxcA5VxkrxZlrVyorm2+mmnZ6LPwccUk8fSUnR0HtI+g/lEVrr72ho8qGVfUBau -4Muc7tg48iggb8Qs6wTuhG2ykBI8agGMV4aIjV1P1pBniXi3RamcMphUkSSplFPV -nBEx5RzaRyQZrtoBsPfGglpeOIwr61l6zgTtD6gWFwCvFar83mihzMzaSo41hDnb -K0EiIZxK3W9jnqV0QsCdKxb9PMrBM3ztAdtV6HJfsZzXxqD+zCA8Hz8rO8+NFgHa -42G6n4nEfXCUuzZOGgRMFSyCFZ3g5c8l+ayfCSk8PLF2Lx5K8cbu5qZrhepicJI0 -2+sEjhpf72oyNmQ3B1lUkRcRRgKWQGfRU5raris7oSiou5zmZUgScowNSyZHObGI -9cDxBrTWbm5kkS9UlXMBWs1Div//UJEmTnbwp3evZ6kAjunedxIjI420jIGD1VCu -+QQ9OZFak0glvOZ9L+9AbcnznmHrOd18qHazDPTwmpf7bAvDIWMwj3K/m+bXIvjY -Oifh6YR9AQiZuvkvFehZR0lFA/SciFejES7UOyfTqLPYDl0YC8tywGaGtNOrXc+4 -p9kYOeYxeKZO2DuOLr9rWgebSi8OCQ1MsigPeYintUOjwDHbWhLZ6tnnpA0/5+V4 -S9ozLh0Zp3P8PyyYUCmRUGEr7Ly5Ec4DM3iU1JBp++ecGCNy9HY89LCmBcicsMV2 -uqP/umcyi3rCYR49SlvGlUNFeKwFxFGoU/zH7MXSFMBUfulLy2EPS/kX9CdgA7XM -Fu3QGYiuyw+0SbBRAAxKACUPiRy80TUidmxSzSWetRlqdW3T/uOMDMuxZN/UbEsD -nqVJJlH/7VBI50cME1SghRw8Dn2Y0wz+pSIFOR2ybzRZuQnsQ3hMZ/tqzuyBsMde -rtHFl6quRela25mnvKDsOuk0X2f5buDLP5Z8ypshXxOyy/MM8aXbvclq/VvpHLu+ -pmE2cztI6bWlMfEABeev9DZ7zjZ4ADPpm22LULe4ZGZlnkR2IjQthPSlnDZQ0Tiq -s6GQCPZbvxTOfQgtgVIQgHph/j+AuYOGGZDrGTyjAe3jeZyaAPL9mAPvKN0v+egr -eJ7dmFnjojFPYIY1JGQ++d3X4AGG/08HnxagRwEsaKdpF+w2ou70cW1TupdSOg7Q -6as3IlWRmPJLExz75MLKcu6jdX6i9b0LkuSZ81PVmXcnsCFaia2acbNytLyAMtOI -ggFvlhaaOueElE6G4LgfB57Qler3ipEO3D8Phl6bRBob3nZzmMSEJjICvgMWS6A2 -y0klebEHZQ7nAuSQRA73/YKty5HuwKK5Q+u7n+kud/nYuB15bCC7HsD9AnXUZWY5 -Wuv5r3H/TDmmqcACbFLjjgBQk6xhqcJ4n8SiN+wLhCSRudPhuwmNppHqC3akHwzH -b235z+FnKmTcbODzqJdiUDgSR12doVX6vMVhYm8rSWms99gou2dcAEtN/VhNXh7U -ZpYY3fjcp+32aSu/C+8TT6IBnUZuHvHiE+WbRf04xCzkOs9TxAz4v/8op9oirgln -cPnpxYY87jlMcH6P/fKzT3qZlzpCYfMAeTkFWkwtZ800nDhlqWVDqwaaPDiTskdo -MgbdeY9En07i3sQLCpLbTPUuJZAjJF4Xd6veORjRM+VAkTDHDZDNjW/sOYylh7W4 -8v//tJwAnpJkmeqmnfB/m72ezVup3LGLM1uCqk5F3JgHu8Nh7cK5Agl36fx9osva -fZARlXRGZHnLgIOsjTdxJhP6vTcQ5LQoObb8ZVmDwo24/6c+RuGDnYq14paDCqym -TXUoEXXVrNNgr7dlKnEULogAiEvL/KU8FN4e63TaxrE6vPBV2yjQTukEBS7tayBj -8rJzExkVX8DGJYQiE5KSkK3rLnJCtVC10w2JEpvS8MbMRH/pcRHDKWJ4K7g8Svk6 -S9157mRQTxwCUeJ1d5lJs8UZiyG491OJQpdDb9Nvk8+rl68CL9/+ecK5JDVCZEl6 -bz+GHeSNKpxlKVlJBnJ1EnGaoLhGxxeozCWWbSlfrg1MYXhfBGEaz4f0AcLLZUQK -6dDoJIEJgsr+z4IUfKp8/pBT85jHoVJjynpIpJO5ry6QmKevONKpCxIwZIF0hulW -B7DF5jLNXyxoK3rYMm3h6TyPFjMRpFqOWRiikanwqUkr/Wla/pVCg1klxijXYqlF -ppu+uk2u//3IjJTsL7z+3qHAzkz7YNvU9sG/jnW0cdVPKxgz3I8KwpDB5iTWBUs3 -S80gfR0KvyPC/OCORhkA4uEYuEQbLeS7EjHDkqGHvJ9Pbn0zbsMyQXThcVyO3vmn -ZvpvTyIXFoKmEsRHNisDv2+1CBLtUz9jguQp7p/Ivw7iUaTIJnXqFghSj6uX0QIo -9Kd90aKBSMpIsIPEq7QWaaqg7KptvE1iGV82OXFTk85yqiaaciLl48tTFBIhY0E6 -Hs3dDe8E+dEAkJsw+E77TdUSLxII3aMPVAfcHZ6EzCVTpYalGAcI+NQFF929WTNH -lp62ZXwm7dibcSs4IkGt8+H9nkDWI1Q0Rm7ssBwxY8oU2B+GAqCWVGFb7zvU6cO0 -bzNypUiHBAdRBLOL97DRE957pmo59guaUA4NwZ528BpWbhLm9tVr13QUYhkirB8r -VYYHYsjZt2PY2+EdG4jcX3tJ15KVL4Q51a7Xpiad6dKZOntHY6AUupnx3OFGIOHE -Hojn0BwNRdyJMOnAXU4ItrDtOFtDBZb3N311uZPMHbz9xt9UMXCKC8cRF6ThITOz -osALdrnkHTNCABIxpd/Lks8pStif7/wU7pHKEO1K0AP+JNS1ZEHsxUEmUFcqr1or -mopzC/nYcEdzE3GXYfdboA+zT9lRDc/2nIM2A7EAU8Nb4x6Th4P/bhG/psb/WXmv -mH7P8X4dYwHqF1jOM5sYLybVUgzhBEzlsnrL3ueu2Qee/2FegJnAqzoHVaGUYx9w -K2OZ02lVF1MyECYVwO1FhG2L8UgUIfdBrecDMnIYTmRmmHAHC8jcrvhPAMnw8wJR -VuQ73QMpOLpSefj1jpJHwk7Co4kEN4I/VSXt+RwRARbb39jJOGOvo5XiJGg2+sAm -m/RSjkK0NRksoTyB+rnVtS8LbOIT+HAyE6mJBGWM88lJiWcVZj0zT3OXZeN3rcwl -HO0AVuaXLmoiv35QB38t5ACxcNgP39rjqiA/HwOMXasPDnhegITXW1a1AykmQKxW -Fdr9m0f6+iFTZMBL1hT9ucBIKSHv7uTOR8gMR4CC+VLhaBp3JEOjkdsxEuyJGf6L -R/AdoGjmyI7nlm/UI8zPYDW5eY4zaNMRR/R3ahB+s1hqZZhk13vuvDhgB+tqt3Be -ni2vzphiSe/5aXCP5tA+gQ+eQ+mS08tFWqETQXTZ9ppywyli8Bg/5EAmpllE5Yt/ -tF5fhBmsTJn+7OWASQajpNnhDrTeuQE3+055f0Fuceqivu7w2OEioOSFBCASVGjJ -lw5boZ/00YMDL9069qNTzvQF1Lhaw3JbLCdxRzRdGkAkL/cvF8kP5YRr1Ez8dELr -eGI3e+gX9wi/ZrhBwlPa4Gzg24Kk0H8Cx176L3/OsD/xKvFyI8rXkylgOAAMsOjU -GMsBUcclRhz8lAMQzlLnUCyBzHVcIvmW1tosz61+FTYl+PsLa6vmQAUnTu5nL2qs -yaneyN0jXqxAEV1dwPTpelAKG50J54OCEdtLeYRufPtYffTekawLDb/Mvd21OywZ -SlQT+4vlcVVa9rBYXEArOjSSPyAnHl4yj3k7bqxdDOX6TfG00m7rOnEVZui2PZIM -AQRCxLINtm3meZ4LrYr7VM0/zoMh2cpxP0CIiSSv+dxM7HOKiFEJTdH0B5VqnYne -nMRjOq/vpOtsrBU+aTZA83AqlKhAyhh7Y5fMg0ELjb3Tf44iuhj2WYdvSjJa2UzE -PqCvT+KnyAaAcKnCltXMYcFIZvpnlrkkc8Uy67kqkblH1rnUUjTuKZOBXE2Ks7zP -tmvsVqK6ONfUH1jTJYcuxHADOswewXi2r/ZbypPZtplw5tVxNTuJsjGKrkR2rsXF -hu6hCC1DwXvyrwkcyNgoNsu8+NirtxlYJNuTHcDdw0ZPGw4fJO6uDFWv0csX8E5q -/sTodrI0dQLijidtYku8eCeqnUmYBXjVexheqrTnuelIV9TQJQzG1iXeZUphNWgN -ss+NQI5ht4KeWgqDM8bQJG48921O/rR7zy4dcsxn4PuJQ/fM1CWYBpfXGEnB7msW -bSFlsr8IgFukjKkOTqSiMmHBaur3/y8pyWtrJNGJdjqUjgiFr3nOc0uT8XWNThRf -r7eTTANk+8OvS8K82LcpmXMzaUkJ7rtgqfg6+cyMDtqQalEh7ggFqhRI6Kv0n+4h -eI8gX+McyUJo47f3ew9qg0ccO/zadKn6WdTNyzkPDXxy8a1UA/8xZArCTVjhAoRI -gkQem2x3clbdBnUsByBMl7rzIgOhdCqx+dC2ghI8ummmJLI99Kdhq3vy8qlbLA4o -zauw/b96C/GPrGVkOxHDGkhc5MPEadWiJZNuouqt3Qx42lgy97RWRBsjBrgk60F8 -XxlUXQ8bmI/aBe4bBXLDqZEiALgowYWCvS1JkMpmjSl+vcCzXL7XypGqek1ZFrQJ -D3hw+JnDseg/8xsle6RUR9MjrGZZXsgTQ8ULAmI6PC8K7WCJGvpHSIw5FXqGDuJM -v3/yRRWHDDLIwKC+3doaIAVD07ww53URJ7lweZk4fjTLIkRz6U7Q9fEl6hZVH6wU -fUqRQecAXcdc+2ZUKmmo1+yp1RHLuo3GM25bEg2TcNQkuwcfCgSlGprXQEOOScH/ -xj8fzxi+l60xgPNdg+TcbAECfpXbroVNkv3rw+axN5TDmaYa3pNDbiGykDpp+u2S -4ussqfHGXTt3SeXnDlRdqyZp9QNbn7cCfAivqZyR0CzFMOpIZf+6u9aIGpAdQVcS -eop6iEnbUeEP5/+EmcEulIlvqh+xKk5ze2YUqE6W4psxozpcVdv/B4hlOfmVmMBf -IcWUE4qyea0Gcpg2BcgovgmYMomUN+QTCRUYCB0l69k8gwjrUJ1H+yvxGhoZdwkO -7PCYujnZF4MZOqN6FSsT2MrUwlX9W15KljHspARPuL7gcAz9qmEAJg== diff --git a/scripts/hosts.env b/scripts/hosts.env new file mode 100644 index 000000000..8351ba350 --- /dev/null +++ b/scripts/hosts.env @@ -0,0 +1,42 @@ +#!/bin/sh +# ============================================================================== +# Copyright (c) IBM Corporation 2022, 2023 +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +# Source the envrionment file +# if [ ! -f "info.env" ]; then +# . ./info.env +# else # check if the env varas instead have been exported +# if [ -z "$USER" ] || [ -z "$PASS" ] || [ -z "$HOST_SUFFIX" ]; then +# echo "This configuration requires either 'info.env' exist or environment vars for the z/OS host exist and be exported." +# echo "Export and set vars: 'USER', 'PASS' and'HOST_SUFFIX', or place them in a file named info.env." +# exit 1 +# fi +# fi + +host_list_str="ec01150a:ec01150a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec01132a:ec01132a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec01134a:ec01134a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec01135a:ec01135a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec33012a:ec33012a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec33017a:ec33017a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec01140a:ec01140a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec33018a:ec33018a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec33024a:ec33024a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec33025a:ec33025a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec33026a:ec33026a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec01150a:ec01150a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec01151a:ec01151a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ddimatos:ec33017a${HOST_SUFFIX}:${USER}:${PASS} "\ +"richp:ec01132a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ketan:ec33018a${HOST_SUFFIX}:${USER}:${PASS} "\ +"iamorenosoto:ec01134a${HOST_SUFFIX}:${USER}:${PASS} "\ +"fernando:ec01135a${HOST_SUFFIX}:${USER}:${PASS} " diff --git a/scripts/info.env.axx b/scripts/info.env.axx new file mode 100755 index 000000000..b2e6308fb --- /dev/null +++ b/scripts/info.env.axx @@ -0,0 +1,15 @@ +U2FsdGVkX18VoSEji2kkFCFNcDHC1mzJ+hUulvTheU5dL9E/lmDWS6qdk8R1VCPJ +WyRU1Zefxvc1fw/sqvmzliUgBXXN6dOgRv73+ap8vyp+IvUhCVAZl9efFXHZ2Eag +6loROID0Qq28Bd+5Btqk/JuC6az9QvnV1E4MRhmZBtCIJ8P/joXKIigR9KHGvL0N +7PpA20UxvMzSH/vQSFd0zkuuvjAAzxN/AVO3W0Jbw1fmHy0gqp4TxidqXF0JatdC +YtDadHqyGHCid3hDP0+GwS4yCSEL/uNEE1e3Npe5EF52YB1OE5y7WqJFmQT1OdNd +pkpPok73YNyPtetMBzIr6t3BcnXHL1j38lrDcMZvBy9RWQ2LQiSxmRiGanEg+i9L +SBapVYDJJ64eKZd7T7gY4gViytT0/i6IAqgGqoH0Dk9LQnGmQ7bOqi34zOna/iC2 +PFzx8XFZF/BmXQm3/96xJsRv15IMKCRp2t9lha0N/FRVmEYp7n5loi6oj5hCtD5k +CV1nbzO9cvMH1c85LUeWjTfcEmTA0criSCiBY3zLywrBvs6XsV6EkITMjPh1K2ht +AHXVPykPHhG6+F0LPYS4gasc0jLRTCxVyPRrl3tSf5aGvvo7ilsZrUtVh2UKUkuN +bjpUHCsrsV17LZUb5fWbY3B0EB1NxHa2rO3cb0ausUd+Mf+02SlnPsnaxjX7lTna +ymUlYs6oQcfAfhHM1mCf8miS4ES2HBdl9Urk9BiepSRJudoaBjIL/L9IsaInYpdv +BfW04gocwKJOhhGUE5ql4+DBfoCrWbz4bIGlUSfEIdFiRmsHG8723JQXgq19c4il +oFe7inTT14QHNsI7JNWmDDxsBPkItgJJ00JR+WwZd77jDTHJhlXuf8lYevQCRKla +BDZ3DlqvbK2ILBWFz6XTjPdlNu2fYsxlW4R5kLKsTyI= diff --git a/scripts/mount-shr.sh b/scripts/mount-shr.sh deleted file mode 100755 index 7b2048aaa..000000000 --- a/scripts/mount-shr.sh +++ /dev/null @@ -1,92 +0,0 @@ -# ============================================================================== -# Copyright (c) IBM Corporation 2023 -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# http://www.apache.org/licenses/LICENSE-2.0 -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================== - -# ============================================================================== -# Mount data sets to USS mounts -# ============================================================================== - -set -A mount_list "/zoau/v1.2.0:IMSTESTU.ZOAU.V120.ZFS" \ -"/zoau/v1.0.0-ga:IMSTESTU.ZOAU.V100.GA.ZFS" \ -"/zoau/v1.0.1-ga:IMSTESTU.ZOAU.V101.GA.ZFS" \ -"/zoau/v1.0.1-ptf1:IMSTESTU.ZOAU.V101.PTF1.ZFS" \ -"/zoau/v1.0.1-ptf2:IMSTESTU.ZOAU.V101.PTF2.ZFS" \ -"/zoau/v1.0.2-ga:IMSTESTU.ZOAU.V102.GA.ZFS" \ -"/zoau/v1.0.3-ga5:IMSTESTU.ZOAU.V103.GA5.ZFS" \ -"/zoau/v1.0.3-ptf2:IMSTESTU.ZOAU.V103.PTF2.ZFS" \ -"/zoau/v1.1.0-spr:IMSTESTU.ZOAU.V110.SPRINT.ZFS" \ -"/zoau/v1.1.0-spr5:IMSTESTU.ZOAU.V1105.SPRINT.ZFS" \ -"/zoau/v1.1.0-spr7:IMSTESTU.ZOAU.V1107.SPRINT.ZFS" \ -"/zoau/v1.1.0-ga:IMSTESTU.ZOAU.V110.GA.ZFS" \ -"/zoau/v1.1.1-ptf1:IMSTESTU.ZOAU.V111.PTF1.ZFS" \ -"/zoau/v1.2.0f:IMSTESTU.ZOAU.V120F.ZFS" \ -"/zoau/v1.2.1:IMSTESTU.ZOAU.V121.ZFS" \ -"/zoau/v1.2.1-rc1:IMSTESTU.ZOAU.V121.RC1.ZFS" \ -"/zoau/v1.2.1g:IMSTESTU.ZOAU.V121G.ZFS" \ -"/zoau/v1.2.1h:IMSTESTU.ZOAU.V121H.ZFS" \ -"/zoau/v1.2.2:IMSTESTU.ZOAU.V122.ZFS" \ -"/zoau/latest:IMSTESTU.ZOAU.LATEST.ZFS" \ -"/python:IMSTESTU.PYZ.ROCKET.V362B.ZFS" \ -"/python2:IMSTESTU.PYZ.V380.GA.ZFS" \ -"/python3:IMSTESTU.PYZ.V383PLUS.ZFS" \ -"/allpython/3.10:IMSTESTU.PYZ.V3A0.ZFS" \ -"/allpython/3.11:IMSTESTU.PYZ.V3B0.ZFS" \ -"/allpython/3.11-ga:IMSTESTU.PYZ.V311GA.ZFS" - -mount(){ - unset path - unset data_set - for tgt in "${mount_list[@]}" ; do - # TODO: Can use something like the below to find ouf a mount is in place and act on that - # df /zoau/v1.0.0-ga | tail -n +2 |cut -d " " -f 2 | sed 's/(//' | sed 's/.$//' - path=`echo "${tgt}" | cut -d ":" -f 1` - data_set=`echo "${tgt}" | cut -d ":" -f 2` - mkdir -p ${path} - echo "Mouting data set ${data_set} to ${path}." - /usr/sbin/mount -r -t zfs -f ${data_set} ${path} - done -} - -unmount(){ - unset path - unset data_set - for tgt in "${mount_list[@]}" ; do - path=`echo "${tgt}" | cut -d ":" -f 1` - data_set=`echo "${tgt}" | cut -d ":" -f 2` - echo "Unmounting data set ${data_set} from ${path}." - /usr/sbin/unmount ${path} - done -} - -usage () { - echo "" - echo "Usage: $0 --mount, --unmount" - echo " $0 --mount" - echo "Choices:" - echo " - mount: will create paths and mount data sets." - echo " - unmount: will unmount data sets from paths." -} - -################################################################################ -# Main arg parse -################################################################################ -case "$1" in ---mount) - mount - ;; ---unmount) - unmount - ;; -*) - usage - ;; -esac diff --git a/scripts/mounts.env b/scripts/mounts.env new file mode 100644 index 000000000..8f944d971 --- /dev/null +++ b/scripts/mounts.env @@ -0,0 +1,75 @@ +# ============================================================================== +# Copyright (c) IBM Corporation 2023 +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +# ============================================================================== +# KSH (Korn Shell) Array of mounts index delimited by " ", entries delimited by ":" +# More on ksh arrays: https://docstore.mik.ua/orelly/unix3/korn/ch06_04.htm +# This `mounts.env` is sourced by several other files, only these lists needs to +# be maintained. +# ============================================================================== + +# ------------------------------------------------------------------------------ +# ZOAU MOUNT TABLE +# spec: zoau_mount_list[0]="<index>:<version>:<mount>:<data_set><space>" +# example: zoau_mount_list[0]="1:v1.2.0:/zoau/v1.2.0:IMSTESTU.ZOAU.V120.ZFS " +# Format: +# index - used by the generated profile so a user can select an option +# version - describes the option a user can select +# mount - the mount point path the data set will be mounted to +# data_set - the z/OS data set containing the binaries to mount +# space - must be a space before the closing quote +# ------------------------------------------------------------------------------ +zoau_mount_list_str="1:1.2.0:/zoau/v1.2.0:IMSTESTU.ZOAU.V120.ZFS "\ +"2:1.0.0-ga:/zoau/v1.0.0-ga:IMSTESTU.ZOAU.V100.GA.ZFS "\ +"3:1.0.1-ga:/zoau/v1.0.1-ga:IMSTESTU.ZOAU.V101.GA.ZFS "\ +"6:1.0.2-ga:/zoau/v1.0.2-ga:IMSTESTU.ZOAU.V102.GA.ZFS "\ +"7:1.0.3-ga5:/zoau/v1.0.3-ga5:IMSTESTU.ZOAU.V103.GA5.ZFS "\ +"8:1.0.3-ptf2:/zoau/v1.0.3-ptf2:IMSTESTU.ZOAU.V103.PTF2.ZFS "\ +"9:1.1.0-ga:/zoau/v1.1.0-ga:IMSTESTU.ZOAU.V110.GA.ZFS "\ +"10:1.1.1-ptf1:/zoau/v1.1.1-ptf1:IMSTESTU.ZOAU.V111.PTF1.ZFS "\ +"11:1.2.1:/zoau/v1.2.1:IMSTESTU.ZOAU.V121.ZFS "\ +"12:1.2.2:/zoau/v1.2.2:IMSTESTU.ZOAU.V122.ZFS "\ +"13:latest:/zoau/latest:IMSTESTU.ZOAU.LATEST.ZFS " + +# ------------------------------------------------------------------------------ +# PYTHON MOUNT TABLE +# spec: python_mount_list[0]="<index>:<version>:<home>:<mount>:<data_set><space>" +# example: python_mount_list[0]="/python2:IMSTESTU.PYZ.ROCKET.V362B.ZFS " +# Format: +# mount - the mount point path the data set will be mounted to +# data_set - the z/OS data set containing the binaries to mount +# space - must be a space before the closing quote +# Mismarked: "/allpython/3.8.5:IMSTESTU.PYZ.V380.GA.ZFS "\ +# ------------------------------------------------------------------------------ +python_mount_list_str="1:3.8.2:/allpython/3.8.2/usr/lpp/IBM/cyp/v3r8/pyz:/allpython/3.8.2:IMSTESTU.PYZ.ROCKET.V362B.ZFS "\ +"2:3.8.3:/allpython/3.8.3/usr/lpp/IBM/cyp/v3r8/pyz:/allpython/3.8.3:IMSTESTU.PYZ.V383PLUS.ZFS "\ +"3:3.9:/allpython/3.9/usr/lpp/IBM/cyp/v3r9/pyz:/allpython/3.9:IMSTESTU.PYZ.V39016.ZFS "\ +"4:3.10:/allpython/3.10/usr/lpp/IBM/cyp/v3r10/pyz:/allpython/3.10:IMSTESTU.PYZ.V3A09.ZFS "\ +"5:3.11:/allpython/3.11/usr/lpp/IBM/cyp/v3r11/pyz:/allpython/3.11:IMSTESTU.PYZ.V3B02.ZFS "\ +"6:3.11-ga:/allpython/3.11-ga/usr/lpp/IBM/cyp/v3r11/pyz:/allpython/3.11-ga:IMSTESTU.PYZ.V311GA.ZFS " + +# ------------------------------------------------------------------------------ +# PYTHON PATH POINTS +# spec: python_path_list[0]="<index>:<version>:<path><space>" +# example: python_path_list[0]="1:3.8:/python3/usr/lpp/IBM/cyp/v3r8/pyz " +# Format: +# index - used by the generated profile so a user can select an option +# version - describes the option a user can select +# path - the path where a particular python can be found +# space - must be a space before the closing quote +# ------------------------------------------------------------------------------ +python_path_list_str="1:3.8.2:/allpython/3.8.2/usr/lpp/IBM/cyp/v3r8/pyz "\ +"2:3.8.3:/allpython/3.8.3/usr/lpp/IBM/cyp/v3r8/pyz "\ +"3:3.9:/allpython/3.9/usr/lpp/IBM/cyp/v3r9/pyz "\ +"4:3.10:/allpython/3.10/usr/lpp/IBM/cyp/v3r10/pyz "\ +"5:3.11:/allpython/3.11/usr/lpp/IBM/cyp/v3r11/pyz "\ +"6:3.11:/allpython/3.11-ga/usr/lpp/IBM/cyp/v3r11/pyz " \ No newline at end of file diff --git a/scripts/mounts.sh b/scripts/mounts.sh old mode 100644 new mode 100755 index 0fcfecb38..7ce7252ca --- a/scripts/mounts.sh +++ b/scripts/mounts.sh @@ -1,70 +1,632 @@ - # ============================================================================== - # Copyright (c) IBM Corporation 2023 - # Licensed under the Apache License, Version 2.0 (the "License"); - # you may not use this file except in compliance with the License. - # You may obtain a copy of the License at - # http://www.apache.org/licenses/LICENSE-2.0 - # Unless required by applicable law or agreed to in writing, software - # distributed under the License is distributed on an "AS IS" BASIS, - # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - # See the License for the specific language governing permissions and - # limitations under the License. - # ============================================================================== - - # ============================================================================== - # KSH (Korn Shell) Array of mounts index delimited by " ", etries delimited by ":" - # More on ksh arrays: https://docstore.mik.ua/orelly/unix3/korn/ch06_04.htm - # This `mounts.sh` is sourced by serveral other files, only these lists needs to - # be maintained. - # ============================================================================== - - # ------------------------------------------------------------------------------ - # zoau_mount_list[0]="<index>:<version>:<mount>:<data_set>" - # e.g: zoau_mount_list[0]="1:v1.2.0:/zoau/v1.2.0:IMSTESTU.ZOAU.V120.ZFS" - # Format: - # index - used by the generated profile so a user can select an option - # version - describes the option a user can select - # mount - the mount point path the data set will be mounted to - # data_set - the z/OS data set containing the binaries to mount - # ------------------------------------------------------------------------------ - set -A zoau_mount_list "1:1.2.0:/zoau/v1.2.0:IMSTESTU.ZOAU.V120.ZFS" \ - "2:1.0.0-ga:/zoau/v1.0.0-ga:IMSTESTU.ZOAU.V100.GA.ZFS" \ - "3:1.0.1-ga:/zoau/v1.0.1-ga:IMSTESTU.ZOAU.V101.GA.ZFS" \ - "4:1.0.1-ptf1:/zoau/v1.0.1-ptf1:IMSTESTU.ZOAU.V101.PTF1.ZFS" \ - "5:1.0.1-ptf2:/zoau/v1.0.1-ptf2:IMSTESTU.ZOAU.V101.PTF2.ZFS" \ - "6:1.0.2-ga:/zoau/v1.0.2-ga:IMSTESTU.ZOAU.V102.GA.ZFS" \ - "7:1.0.3-ga5:/zoau/v1.0.3-ga5:IMSTESTU.ZOAU.V103.GA5.ZFS" \ - "8:1.0.3-ptf2:/zoau/v1.0.3-ptf2:IMSTESTU.ZOAU.V103.PTF2.ZFS" \ - "9:1.1.0-spr:/zoau/v1.1.0-spr:IMSTESTU.ZOAU.V110.SPRINT.ZFS" \ - "10:1.1.0-spr5:/zoau/v1.1.0-spr5:IMSTESTU.ZOAU.V1105.SPRINT.ZFS" \ - "11:1.1.0-spr7:/zoau/v1.1.0-spr7:IMSTESTU.ZOAU.V1107.SPRINT.ZFS" \ - "12:1.1.0-ga:/zoau/v1.1.0-ga:IMSTESTU.ZOAU.V110.GA.ZFS" \ - "13:1.1.1-ptf1:/zoau/v1.1.1-ptf1:IMSTESTU.ZOAU.V111.PTF1.ZFS" \ - "14:1.2.0f:/zoau/v1.2.0f:IMSTESTU.ZOAU.V120F.ZFS" \ - "15:1.2.1:/zoau/v1.2.1:IMSTESTU.ZOAU.V121.ZFS" \ - "16:1.2.1-rc1:/zoau/v1.2.1-rc1:IMSTESTU.ZOAU.V121.RC1.ZFS" \ - "17:1.2.1g:/zoau/v1.2.1g:IMSTESTU.ZOAU.V121G.ZFS" \ - "18:1.2.1h:/zoau/v1.2.1h:IMSTESTU.ZOAU.V121H.ZFS" \ - "19:1.2.2:/zoau/v1.2.2:IMSTESTU.ZOAU.V122.ZFS" \ - "20:latest:/zoau/latest:IMSTESTU.ZOAU.LATEST.ZFS" - - # ------------------------------------------------------------------------------ - # python_mount_list[0]="<mount>:<data_set>" - # python_mount_list[0]="/python2:IMSTESTU.PYZ.ROCKET.V362B.ZFS" - # ------------------------------------------------------------------------------ - set -A python_mount_list "/python:IMSTESTU.PYZ.ROCKET.V362B.ZFS" \ - "/python2:IMSTESTU.PYZ.V380.GA.ZFS" \ - "/python3:IMSTESTU.PYZ.V383PLUS.ZFS" \ - "/allpython/3.10:IMSTESTU.PYZ.V3A0.ZFS" \ - "/allpython/3.11:IMSTESTU.PYZ.V3B0.ZFS" \ - "/allpython/3.11-ga:IMSTESTU.PYZ.V311GA.ZFS" - - # ------------------------------------------------------------------------------ - # python_path_list[0]="<index>:<version>:<path>" - # python_path_list[0]="1:3.8:/python3/usr/lpp/IBM/cyp/v3r8/pyz" - # ------------------------------------------------------------------------------ - set -A python_path_list "1:3.8:/python3/usr/lpp/IBM/cyp/v3r8/pyz" \ - "2:3.9:/python2/usr/lpp/IBM/cyp/v3r9/pyz" \ - "3:3.10:/allpython/3.10/usr/lpp/IBM/cyp/v3r10/pyz" \ - "4:3.11:/allpython/3.11-ga/usr/lpp/IBM/cyp/v3r11/pyz" +#!/bin/sh +# ============================================================================== +# Copyright (c) IBM Corporation 2022, 2023 +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== + +# ------------------------------------------------------------------------------ +# Description: +# TODO... +# Maintain: +# zoau_mount_list_str - zoau mount points +# python_mount_list_str - python mount points +# python_path_list_str - python executable paths +# ------------------------------------------------------------------------------ + +# ------------------------------------------------------------------------------ +# Globals +# ------------------------------------------------------------------------------ +cd $(dirname $0) + +# Current shell, bash returns 'bash' +CURR_SHELL=`echo $$ $SHELL | cut -d " " -f 2 | sed 's|.*/||'` + +# System script is running on at the momement +SYSTEM=`uname` + +# Array where each entry is: "<index>:<version>:<mount>:<data_set>" +ZOAU_MOUNTS="" + +# Array where each entry is: "<mount>:<data_set>" +PYTHON_MOUNTS="" + +# Array where each entry is: "<index>:<version>:<path>" +PYTHON_MOUNT_PATHS="" + +# ZOAU matching an ZOAU ID (first column in mount table) +ZOAU_HOME="" + +# PYZ matching an PYZ ID (first column in mount table) +PYZ_HOME="" + +# Cosmetic divider +DIV="-----------------------------------------------------------------------" + +# Supporting bash will take added testing, the port on z/OS has enough +# differences to warrnat temporarily disabliing the function on z/OS. More +# specifically, when using `vi` in Bash, editing becomes a problem. +if [ "$CURR_SHELL" = "bash" ]; then + if [ "$SYSTEM" = "OS/390" ]; then + echo "Script $0 can not run in 'bash', please execute in another shell." + exit 1 + fi +fi + +# ============================================================================== +# ********************* Helper functions ********************* +# ============================================================================== +message(){ + echo $DIV; + echo "$1"; + echo $DIV; +} + +# ------------------------------------------------------------------------------ +# Private function that initializes an array ($1) from a properly delimited +# string. Array types supported are either Korn Shell (ksh) (more precisely, +# ksh88 and ksh93 variants) or Bash style. +# More on ksh arrays: https://docstore.mik.ua/orelly/unix3/korn/ch06_04.htm +# Other shells may need to be supported in the future. +# GLOBAL: See arguments $1 +# ARGUMENTS: +# - $1 (variable) a global var that will be unset and initialized as an array +# - $2 (string) a string delimited by spaces (' ') and entries delimited by a +# colon (':'). This string is used to create set an array. +# OUTPUTS: None +# RETURN: None +# USAGE: _set_shell_array <var> <string> +# ------------------------------------------------------------------------------ +_set_shell_array(){ + # Notes: + # ksh is hard to detect on z/OS, for now comparing to `sh` works else we can + # add in the results for `echo $PS1; echo $PS2; echo $PS3; echo $PS4` + # which returns in this order ('#', '>', '#?', '+') to detect `sh` + unset $1 + if [ "$CURR_SHELL" = "sh" ]; then + # set -A $1 "${@:2}" # parens `{` don't work in z/OS ksh, work on mac + set -A $1 $2 + else + #eval $1='("${@:2}")' + eval $1='(${@:2})' + fi +} + +# ------------------------------------------------------------------------------ +# Source scripts needed by this script. +# ------------------------------------------------------------------------------ + +if [ -f "mounts.env" ]; then + . ./mounts.env +else + echo "Unable to source file: 'mounts.env', exiting." + exit 1 +fi + +# ------------------------------------------------------------------------------ +# Private function that initializes a variable as an global array for either +# Korn Shell (ksh) or other shells where other at this point is following +# bash style arrays. Other shells may need to be supported in the future. +# +# GLOBAL: See arguments $1 +# ARGUMENTS: +# - $1 (variable) a global var that will be unset and initialized as an array +# - $2 (string) a string delimited by spaces used to create a global array +# OUTPUTS: None +# RETURN: None +# USAGE: _set_shell_array <var> <string> +# ------------------------------------------------------------------------------ +# _set_shell_array(){ +# # ksh is hard to detect, for now comparing to `sh` works else we can +# # add in the results for `echo $PS1; echo $PS2; echo $PS3; echo $PS4` +# # which returns in this order ('#', '>', '#?', '+') to detect `sh` +# if [ "$CURR_SHELL" = "sh" ]; then +# # set -A $1 "${@:2}" # parens `{` don't work in z/OS ksh, work on mac +# set -A $1 $2 +# else +# eval $1='(${@:2})' +# fi +# } + +# ------------------------------------------------------------------------------ +# Normalize the array for the shell use, creates an array capatible for `ksh` +# or `bash` from the mount tables; this allows a single source of data to be +# used in various shells. +# Creats a normalized array `PYTHON_MOUNTS`, `ZOAU_MOUNTS` +# ------------------------------------------------------------------------------ +# set_python_to_array(){ +# unset PYTHON_MOUNTS +# _set_shell_array PYTHON_MOUNTS "$(echo $python_mount_list_str)" +# } + +# set_zoau_to_array(){ +# unset ZOAU_MOUNTS +# _set_shell_array ZOAU_MOUNTS "$(echo $zoau_mount_list_str)" +# } + +# ------------------------------------------------------------------------------ +# Normalize an array for the shell use, create an array capatible for `ksh` +# or `bash` from the mount tables; this allows a single source of data to be +# used in various shells. Initializes a global array `ZOAU_MOUNTS` where each +# index contains a clolon `:` delimited values about ZOAU mounts. For example +# ZOAU_MOUNTS[0] has in it <index>:<version>:<mount>:<data_set> where that may +# look like "1:v1.2.0:/zoau/v1.2.0:IMSTESTU.ZOAU.V120.ZFS", see sourced script +# `mounts.env` for more information. +# GLOBAL: ZOAU_MOUNTS +# ARGUMENTS: None +# OUTPUTS: None +# RETURN: None +# USAGE: set_zoau_mounts +# ------------------------------------------------------------------------------ +set_zoau_mounts(){ + unset ZOAU_MOUNTS + _set_shell_array ZOAU_MOUNTS "$(echo $zoau_mount_list_str)" +} + +# ------------------------------------------------------------------------------ +# Normalize an array for the shell use, create an array capatible for `ksh` +# or `bash` from the mount tables; this allows a single source of data to be +# used in various shells. Initializes a global array `PYTHON_MOUNTS` where each +# index contains clolon `:` delimited values about PYTHON mounts. For example +# PYTHON_MOUNTS[0] has in it <index>:<version>:<mount>:<data_set> where that may +# look like "4:3.10:/allpython/3.10/usr/lpp/IBM/cyp/v3r10:IMSTESTU.PYZ.V3A0.ZFS ", +# see sourced script `mounts.env` for more information. +# GLOBAL: PYTHON_MOUNTS +# ARGUMENTS: None +# OUTPUTS: None +# RETURN: None +# USAGE: set_python_mounts +# ------------------------------------------------------------------------------ +set_python_mounts(){ + unset PYTHON_MOUNTS + _set_shell_array PYTHON_MOUNTS "$(echo $python_mount_list_str)" +} + +# ------------------------------------------------------------------------------ +# Normalize an array for the shell use, create an array capatible for `ksh` +# or `bash` from the mount tables; this allows a single source of data to be +# used in various shells. Initializes a global array `PYTHON_MOUNT_PATHS` where each +# index contains clolon `:` delimited values about PYTHON paths. For example +# PYTHON_MOUNT_PATHS[0] has in it <index>:<version>:<path><space> where that may +# look like "1:3.8:/python3/usr/lpp/IBM/cyp/v3r8/pyz ", +# see sourced script `mounts.env` for more information. +# GLOBAL: +# ZOAU_MOUNTS +# ARGUMENTS: None +# OUTPUTS: None +# RETURN: None +# USAGE: set_python_mount_paths +# ------------------------------------------------------------------------------ +set_python_mount_paths(){ + unset PYTHON_MOUNT_PATHS + _set_shell_array PYTHON_MOUNT_PATHS "$(echo $python_path_list_str)" +} + +# ============================================================================== +# ********************* Mount functions ********************* +# ============================================================================== + +# ------------------------------------------------------------------------------ +# Mount all data sets in the sourced mount table, check if the entries are +# already mounted, compare that to the data set being mounted, if they don't +# match, umount and mount the correct one else skip over it. +# +# GLOBAL: See arguments $1 +# ARGUMENTS: +# - $1 (variable) a global var that will be unset and initialized as an array +# - $2 (string) a string delimited by spaces used to create a global array +# OUTPUTS: None +# RETURN: None +# USAGE: _set_shell_array <var> <string> +# ------------------------------------------------------------------------------ +mount(){ + unset zoau_index + unset zoau_version + unset zoau_mount + unset zoau_data_set + + # Call helper script to have ZOAU_MOUNTS generated + set_zoau_mounts + for tgt in "${ZOAU_MOUNTS[@]}" ; do + zoau_index=`echo "${tgt}" | cut -d ":" -f 1` + zoau_version=`echo "${tgt}" | cut -d ":" -f 2` + zoau_mount=`echo "${tgt}" | cut -d ":" -f 3` + zoau_data_set=`echo "${tgt}" | cut -d ":" -f 4` + + # zoau_mounted_data_set can be empty so perform added validation + zoau_mounted_data_set=`df ${zoau_mount} 2>/dev/null | tr -s [:blank:] | tail -n +2 |cut -d' ' -f 2 | sed 's/(//' | sed 's/.$//'` + + # If zoau_mounted_data_set is empty or does not match expected, it means we should perform the mount + if [ "$zoau_mounted_data_set" != "$zoau_data_set" ]; then + echo "Mouting ZOAU ${zoau_version} on data set ${zoau_data_set} to path ${zoau_mount}." + + # If zoau_mounted_data_set not empty, compare the mount points and if they match, then unmount. + # Note, the mount point could be root (/) waitng for children so lets compare before unmounting. + if [ ! -z "${zoau_mounted_data_set}" ]; then + temp_mount=`df ${zoau_mount} 2>/dev/null | tr -s [:blank:] | tail -n +2 |cut -d' ' -f 1` + if [ "${zoau_mount}" = "${temp_mount}" ]; then + /usr/sbin/unmount ${zoau_mount} + fi + fi + mkdir -p ${zoau_mount} + /usr/sbin/mount ${1} ${zoau_data_set} ${zoau_mount} + else + echo "ZOAU ${zoau_version} is already mounted on data set ${zoau_data_set} to path ${zoau_mount}." + fi + done + + unset python_mount + unset python_data_set + # Call helper script to have PYTHON_MOUNTS generated + set_python_mounts + for tgt in "${PYTHON_MOUNTS[@]}" ; do + python_index=`echo "${tgt}" | cut -d ":" -f 1` + python_version=`echo "${tgt}" | cut -d ":" -f 2` + python_home=`echo "${tgt}" | cut -d ":" -f 3` + python_mount=`echo "${tgt}" | cut -d ":" -f 4` + python_data_set=`echo "${tgt}" | cut -d ":" -f 5` + + # python_mounted_data_set can be empty so perform added validation + python_mounted_data_set=`df ${python_mount} 2>/dev/null | tr -s [:blank:] | tail -n +2 |cut -d' ' -f 2 | sed 's/(//' | sed 's/.$//'` + + # If python_mounted_data_set is empty or not, we will perform a mount + if [ "$python_mounted_data_set" != "$python_data_set" ]; then + echo "Mouting Python ${python_mount} on data set ${python_data_set}." + + # If python_mounted_data_set not empty, compare the mount points and if they match, then unmount. + # Note, the mount point could be root (/) waitng for children so lets compare before unmounting. + if [ ! -z "${python_mounted_data_set}" ]; then + temp_mount=`df ${python_mount} 2>/dev/null | tr -s [:blank:] | tail -n +2 |cut -d' ' -f 1` + if [ "${python_mount}" = "${temp_mount}" ]; then + /usr/sbin/unmount ${python_mount} + fi + fi + + mkdir -p ${python_mount} + /usr/sbin/mount ${1} ${python_data_set} ${python_mount} + else + echo "Python ${python_mount} is already mounted on data set ${python_data_set}." + fi + done +} + +# ------------------------------------------------------------------------------ +# Unmount all data sets in the sourced mount table. +# ------------------------------------------------------------------------------ +unmount(){ + unset zoau_index + unset zoau_version + unset zoau_mount + unset zoau_data_set + # Call helper script to have ZOAU_MOUNTS generated + set_zoau_mounts + for tgt in "${ZOAU_MOUNTS[@]}" ; do + zoau_index=`echo "${tgt}" | cut -d ":" -f 1` + zoau_version=`echo "${tgt}" | cut -d ":" -f 2` + zoau_mount=`echo "${tgt}" | cut -d ":" -f 3` + zoau_data_set=`echo "${tgt}" | cut -d ":" -f 4` + + zoau_mounted_data_set=`df ${zoau_mount} 2>/dev/null | tr -s [:blank:] | tail -n +2 |cut -d' ' -f 2 | sed 's/(//' | sed 's/.$//'` + if [ "$zoau_mounted_data_set" = "$zoau_data_set" ]; then + echo "Unmouting ZOAU ${zoau_version} on data set ${zoau_data_set} from path ${zoau_mount}." + /usr/sbin/unmount ${zoau_mount} + else + echo "ZOAU ${zoau_version} is not currently mounted on data set ${zoau_data_set} to path ${zoau_mount}." + fi + done + + unset python_mount + unset python_data_set + # Call helper script to have PYTHON_MOUNTS generated + set_python_to_array + for tgt in "${PYTHON_MOUNTS[@]}" ; do + python_index=`echo "${tgt}" | cut -d ":" -f 1` + python_version=`echo "${tgt}" | cut -d ":" -f 2` + python_home=`echo "${tgt}" | cut -d ":" -f 3` + python_mount=`echo "${tgt}" | cut -d ":" -f 4` + python_data_set=`echo "${tgt}" | cut -d ":" -f 5` + + python_mounted_data_set=`df ${python_mount} 2>/dev/null | tr -s [:blank:] | tail -n +2 |cut -d' ' -f 2 | sed 's/(//' | sed 's/.$//'` + if [ "$python_mounted_data_set" = "$python_data_set" ]; then + echo "Unmouting Python ${python_mount} on data set ${python_data_set}." + /usr/sbin/unmount ${python_mount} + else + echo "Python ${python_mount} is not currently mounted on data set ${python_data_set}." + fi + done +} + +# ------------------------------------------------------------------------------ +# Remount all data sets sourced in the mount table, check if there is something +# already mounted, compare that to the data set being mounted, if they don't +# match, umount and mount the correct one else skip over it. +# ------------------------------------------------------------------------------ +remount(){ + unset zoau_index + unset zoau_version + unset zoau_mount + unset zoau_data_set + # Call helper script to have ZOAU_MOUNTS generated + set_zoau_mounts + for tgt in "${ZOAU_MOUNTS[@]}" ; do + zoau_index=`echo "${tgt}" | cut -d ":" -f 1` + zoau_version=`echo "${tgt}" | cut -d ":" -f 2` + zoau_mount=`echo "${tgt}" | cut -d ":" -f 3` + zoau_data_set=`echo "${tgt}" | cut -d ":" -f 4` + + zoau_mounted_data_set=`df ${zoau_mount} 2>/dev/null | tr -s [:blank:] | tail -n +2 |cut -d' ' -f 2 | sed 's/(//' | sed 's/.$//'` + # ZOAU is not mounted, perform mount + if [ ! -n "$zoau_mounted_data_set" ]; then + echo "Nothing to unmount, mouting ZOAU ${zoau_version} on data set ${zoau_data_set} to path ${zoau_mount}." + mkdir -p ${zoau_mount} + /usr/sbin/mount -r -t zfs -f ${zoau_data_set} ${zoau_mount} + # ZOAU is currently mounted and matches what we expect + elif [ "$zoau_mounted_data_set" = "$zoau_data_set" ]; then + echo "Unmounting ZOAU ${zoau_version} from path ${zoau_mount} on data set ${zoau_data_set}." + /usr/sbin/unmount ${zoau_mount} + echo "Mouting ZOAU ${zoau_version} on data set ${zoau_data_set} to path ${zoau_mount}." + mkdir -p ${zoau_mount} + /usr/sbin/mount -r -t zfs -f ${zoau_data_set} ${zoau_mount} + # What is mounted does not match our expected value, perform unmount and mount + elif [ "$zoau_mounted_data_set" != "$zoau_data_set" ]; then + echo "WARNING: Overriding existing mount ${python_mount}." + echo "Unmounting data set ${zoau_mounted_data_set} from path ${zoau_mount}." + /usr/sbin/unmount ${zoau_mount} + echo "Mouting ZOAU ${zoau_version} on data set ${zoau_data_set} to path ${zoau_mount}." + mkdir -p ${zoau_mount} + /usr/sbin/mount -r -t zfs -f ${zoau_data_set} ${zoau_mount} + else + echo "Unable to determine the existing mounts to remount." + fi + done + + unset python_mount + unset python_data_set + # Call helper script to have PYTHON_MOUNTS generated + set_python_to_array + for tgt in "${PYTHON_MOUNTS[@]}" ; do + python_index=`echo "${tgt}" | cut -d ":" -f 1` + python_version=`echo "${tgt}" | cut -d ":" -f 2` + python_home=`echo "${tgt}" | cut -d ":" -f 3` + python_mount=`echo "${tgt}" | cut -d ":" -f 4` + python_data_set=`echo "${tgt}" | cut -d ":" -f 5` + + python_mounted_data_set=`df ${python_mount} 2>/dev/null | tr -s [:blank:] | tail -n +2 |cut -d' ' -f 2 | sed 's/(//' | sed 's/.$//'` + # Pythion is not mounted, perform mount + if [ ! -n "$python_mounted_data_set" ]; then + echo "Nothing to unmount, mouting Python ${python_version} on data set ${python_data_set} to path ${python_mount}." + mkdir -p ${python_mount} + /usr/sbin/mount -r -t zfs -f ${python_data_set} ${python_mount} + #Python is currently mounted and matches what we expect + elif [ "$python_mounted_data_set" = "$python_data_set" ]; then + echo "Unmounting Python ${python_version} from path ${python_mount} on data set ${python_data_set}." + /usr/sbin/unmount ${python_mount} + echo "Mouting Python ${python_version} on data set ${python_data_set} to path ${python_mount}." + mkdir -p ${python_mount} + /usr/sbin/mount -r -t zfs -f ${python_data_set} ${python_mount} + # What is mounted does not match our expected value, perform unmount and mount + elif [ "$python_mounted_data_set" != "$python_data_set" ]; then + echo "WARNING: Overriding existing mount ${python_mount}." + echo "Unmounting data set ${python_mounted_data_set} from path ${python_mount}." + /usr/sbin/unmount ${python_mount} + echo "Mouting Python ${python_version} on data set ${python_data_set} to path ${python_mount}." + mkdir -p ${python_mount} + /usr/sbin/mount -r -t zfs -f ${python_data_set} ${python_mount} + else + echo "Unable to determine the existing mounts to remount." + fi + done +} + + +# ============================================================================== +# ********************* Getter functions ********************* +# ============================================================================== + +get_python_mount(){ + + arg=$1 + unset PYZ_HOME + unset python_version + unset python_home + + # Set PYZ mount table to shell array types + set_python_mounts + + for tgt in "${PYTHON_MOUNTS[@]}" ; do + python_version=`echo "${tgt}" | cut -d ":" -f 2` + python_home=`echo "${tgt}" | cut -d ":" -f 3` + + if [ "$arg" = "$python_version" ]; then + PYZ_HOME=$python_home + fi + + done + + if [ ! "$PYZ_HOME" ]; then + echo "PYZ vesion [$arg] was not found in the mount table." + exit 1 + fi +} + + +# Get the zoau home/path given $1/arg else error +get_zoau_mount(){ + arg=$1 + unset ZOAU_HOME + unset zoau_version + unset zoau_mount + + # Set ZOAU mount table to shell array types + set_zoau_mounts + + for tgt in "${ZOAU_MOUNTS[@]}" ; do + zoau_version=`echo "${tgt}" | cut -d ":" -f 2` + zoau_mount=`echo "${tgt}" | cut -d ":" -f 3` + + if [ "$arg" = "$zoau_version" ]; then + ZOAU_HOME=$zoau_mount + fi + + done + + if [ ! "$ZOAU_HOME" ]; then + echo "ZOAU vesion [$arg] was not found in the mount table." + exit 1 + fi +} + +# ============================================================================== +# ********************* Print functions ********************* +# ============================================================================== + +# ------------------------------------------------------------------------------ +# Print python and zoau mount tables +# ------------------------------------------------------------------------------ +print_mount_tables(){ + unset zoau_index + unset zoau_version + unset zoau_mount + unset zoau_data_set + + set_zoau_mounts + + message "Displaying z/OS Python ZOAU table." + for tgt in "${ZOAU_MOUNTS[@]}" ; do + zoau_index=`echo "${tgt}" | cut -d ":" -f 1` + zoau_version=`echo "${tgt}" | cut -d ":" -f 2` + zoau_mount=`echo "${tgt}" | cut -d ":" -f 3` + zoau_data_set=`echo "${tgt}" | cut -d ":" -f 4` + + echo "ID:" $zoau_index + echo " Version:" $zoau_version + echo " Home:" $zoau_mount + echo " Mount:" $zoau_data_set + + done + + unset python_index + unset python_version + unset python_home + unset python_mount + unset python_data_set + + set_python_mounts + + message "Displaying z/OS Python mount table." + for tgt in "${PYTHON_MOUNTS[@]}" ; do + python_index=`echo "${tgt}" | cut -d ":" -f 1` + python_version=`echo "${tgt}" | cut -d ":" -f 2` + python_home=`echo "${tgt}" | cut -d ":" -f 3` + python_mount=`echo "${tgt}" | cut -d ":" -f 4` + python_data_set=`echo "${tgt}" | cut -d ":" -f 5` + + echo "ID:" $python_index + echo " Version:" $python_version + echo " Home:" $python_home + echo " Mount:" $python_mount + echo " Data Set:" $python_data_set + done + + unset python_index + unset python_version + unset python_path + set_python_mount_paths + message "Displaying z/OS Python path for 'pyz'" + for tgt in "${PYTHON_MOUNTS[@]}" ; do + python_index=`echo "${tgt}" | cut -d ":" -f 1` + python_version=`echo "${tgt}" | cut -d ":" -f 2` + python_path=`echo "${tgt}" | cut -d ":" -f 3` + + echo "ID:" $python_index + echo " Version:" $python_version + echo " Path:" $python_path + done + +} + + +# ============================================================================== +# ********************* Test functions ********************* +# ============================================================================== + +# ============================================================================== +# Simple method to test arrays, test automation should be designed but this +# serves as a lightweight verification test +# GLOBAL: None +# ARGUMENTS: None +# OUTPUTS: None +# RETURN: None +# USAGE: _test_arrays +# ============================================================================== +_test_arrays(){ + echo "Current shell is: $CURR_SHELL" + + set_zoau_mounts + echo "" + echo "All ZOAU mounts are:" + echo ${ZOAU_MOUNTS[@]} + echo "ZOAU mount 3 is:" + echo ${ZOAU_MOUNTS[3]} + + set_python_mounts + echo "" + echo "All Python mounts are:" + echo ${PYTHON_MOUNTS[@]} + echo "Python mount 3 is:" + echo ${PYTHON_MOUNTS[3]} + + set_python_mount_paths + echo "" + echo "All Python paths are:" + echo ${PYTHON_MOUNT_PATHS[@]} + echo "Python path 3:" + echo ${PYTHON_MOUNT_PATHS[3]} +} + +################################################################################ +# Main arg parser +################################################################################ +case "$1" in + --mount) + mount "-r -t zfs -f" + ;; + --mount-rw) + unmount + mount "-t zfs -f" + ;; + --unmount) + unmount + ;; + --remount) + remount + ;; + --print-mount-tables) + print_mount_tables + ;; + --perform-unit-test) + _test_arrays + ;; + --val) + get_zoau_mount "1.2.1" + get_python_mount "3.10" + echo $ZOAU_HOME + echo $PYZ_HOME + ;; + *) + # If $1 exists and the script matches to $0 because when sourced this would + # thrown error and the added check is to prevent the errors when sourced. + if [ -n "$1" ]; then + if [ "$0" = "mounts-datasets.sh" ]; then + echo "ERROR: unknown parameter $1 for script $0" + fi + fi +esac diff --git a/scripts/profile-shr b/scripts/profile-shr deleted file mode 100755 index c827f3037..000000000 --- a/scripts/profile-shr +++ /dev/null @@ -1,230 +0,0 @@ -#!/bin/sh -# ============================================================================== -# Copyright (c) IBM Corporation 2022, 2023 -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# http://www.apache.org/licenses/LICENSE-2.0 -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================== - -################################################################################ -# Global vars -################################################################################ -zoau_version="" -python_version="" -python_path="" -bash_enabled=false - -################################################################################ -# zoau case match -################################################################################ -zoau_choice () { - case "$1" in - [a]* ) zoau_version="v1.2.0";; - [b]* ) zoau_version="v1.0.0-ga";; - [c]* ) zoau_version="v1.0.1-ga";; - [d]* ) zoau_version="v1.0.1-ptf1";; - [e]* ) zoau_version="v1.0.1-ptf2";; - [f]* ) zoau_version="v1.0.2-ga";; - [g]* ) zoau_version="v1.0.3-ga5";; - [h]* ) zoau_version="v1.0.3-ptf2";; - [i]* ) zoau_version="v1.1.0-spr";; - [j]* ) zoau_version="v1.1.0-spr5";; - [k]* ) zoau_version="v1.1.0-spr7";; - [l]* ) zoau_version="v1.1.0-ga";; - [m]* ) zoau_version="v1.1.1-ptf1";; - [n]* ) zoau_version="v1.2.0f";; - [o]* ) zoau_version="v1.2.1";; - [p]* ) zoau_version="v1.2.1-rc1";; - [q]* ) zoau_version="v1.2.1g";; - [r]* ) zoau_version="v1.2.1h";; - [s]* ) zoau_version="v1.2.2";; - [t]* ) zoau_version="latest";; - * ) echo "" - usage - ;; - esac -} - -################################################################################ -# zoau case match -################################################################################ -python_choice () { - case $1 in - [1]* ) python_version="3.8"; - python_path="/python3/usr/lpp/IBM/cyp/v3r8/pyz";; - [2]* ) python_version="3.9"; - python_path="/python2/usr/lpp/IBM/cyp/v3r9/pyz";; - [3]* ) python_version="3.10"; - python_path="/allpython/3.10/usr/lpp/IBM/cyp/v3r10/pyz";; - [4]* ) python_version="3.11"; - python_path="/allpython/3.11-ga/usr/lpp/IBM/cyp/v3r11/pyz";; - *) echo "";usage;; - esac -} - -################################################################################ -# zoau case match -################################################################################ -bash_choice() { - case "$1" in - [b]* ) bash_enabled=true;; - * ) echo "";usage;; - esac -} -################################################################################ -# User input for Python -################################################################################ -usage () { - echo "" - echo "Usage: $0 [abcdefghijklmn] [123] b" - echo "ZOAU Choices:" - echo "\ta) ZOAU /zoau/v1.2.0" - echo "\tb) ZOAU /zoau/v1.0.0-ga" - echo "\tc) ZOAU /zoau/v1.0.1-ga" - echo "\td) ZOAU /zoau/v1.0.1-ptf1" - echo "\te) ZOAU /zoau/v1.0.1-ptf2" - echo "\tf) ZOAU /zoau/v1.0.2-ga" - echo "\tg) ZOAU /zoau/v1.0.3-ga5" - echo "\th) ZOAU /zoau/v1.0.3-ptf2" - echo "\ti) ZOAU /zoau/v1.1.0-spr" - echo "\tj) ZOAU /zoau/v1.1.0-spr5" - echo "\tk) ZOAU /zoau/v1.1.0-spr7" - echo "\tl) ZOAU /zoau/v1.1.0-ga" - echo "\tm) ZOAU /zoau/v1.1.1-ptf1" - echo "\tn) ZOAU /zoau/v1.2.0f" - echo "\to) ZOAU /zoau/v1.2.1" - echo "\tp) ZOAU /zoau/v1.2.1-rc1" - echo "\tq) ZOAU /zoau/v1.2.1g" - echo "\tr) ZOAU /zoau/v1.2.1h" - echo "\ts) ZOAU /zoau/v1.2.2" - echo "\tt) ZOAU /zoau/latest" - echo "" - echo "Python Choices:" - echo "\t1) Python 3.8" - echo "\t2) Python 3.9" - echo "\t3) Python 3.10" - echo "\t4) Python 3.11" - echo "" - echo "Bash shell" - echo "\tb) 'b' to enable bash shell" -} - -################################################################################ -# Message to user -################################################################################ -print_choices () { - echo "Using ZOAU version="$zoau_version - echo "Using python version="$python_version - echo "Bash = ${bash_enabled}" -} - -################################################################################ -# Configure all exports -################################################################################ -set_exports (){ - - export PATH=/bin:. - - ################################################################################ - # Set the ported tools directory on the EC, see the tools you can use, eg: - # vim, bash, etc - ################################################################################ - export TOOLS_DIR=/usr/lpp/rsusr/ported - export PATH=$PATH:$TOOLS_DIR/bin - - ################################################################################ - # Set the editor to VI - ################################################################################ - export TERM=xterm - - ################################################################################ - # Standard exports used in EBCDIC/ASCII conversion needed by tools like pyz/zoau - ################################################################################ - export _BPXK_AUTOCVT='ON' - export _CEE_RUNOPTS='FILETAG(AUTOCVT,AUTOTAG) POSIX(ON)' - export _TAG_REDIR_ERR=txt - export _TAG_REDIR_IN=txt - export _TAG_REDIR_OUT=txt - export LANG=C - - ################################################################################ - # Set Java - ################################################################################ - export JAVA_HOME=/usr/lpp/java170/J7.0 - - ################################################################################ - # Configure Python - ################################################################################ - export PYTHON_HOME=$python_path - export PYTHON=$PYTHON_HOME/bin - export LIBPATH=$PYTHON_HOME/lib:$LIBPATH - - ################################################################################ - # ZOAU 1.0.2 or or earlier ueses ZOAU_ROOT and not ZOAU_HOME - ################################################################################ - export ZOAU_HOME=/zoau/${zoau_version} - export PATH=$ZOAU_HOME/bin:$PATH:$PYTHON:$JAVA_HOME/bin:$TOOLS_DIR/bin - export MANPATH=$MANPATH:$TOOLS_DIR/man - export ZOAU_ROOT=${ZOAU_HOME} - export PYTHONPATH=${ZOAU_HOME}/lib/:${PYTHONPATH} - export LIBPATH=${ZOAU_HOME}/lib:${LIBPATH} - - ################################################################################ - # Custom terminal configurations - ################################################################################ - # Append home directory to the current path - export PATH=$PATH:$HOME: - - # Set the prompt to display your login name & current directory - export PS1='[ $LOGNAME':'$PWD':' ]' - - ################################################################################ - # Run bash shell: - # I have have seen many issues using this version of bash to edit files on the - # EC, for example of you edit your .profile with VI under BASH, it will render - # unreable, for times I have to edit, I type exit it defaults be back into - # the zos_ssh shell which does not have any issues with VI or editing files. - # I generally use bash only for history and running commands. - ################################################################################ - if [ "{$bash_enabled}" = true ]; then - bash; - fi - - alias python="python3" - alias pip="pip3" -} -################################################################################ -# Main -################################################################################ -# User enters choices for zoau, python and bash -if [ $# -eq 3 ];then - zoau_choice $1 - python_choice $2 - bash_choice $3 - print_choices - set_exports - -# User enters choices for zoau and python, bash defaults to false -elif [ $# -eq 2 ];then - bash_enabled=false - zoau_choice $1 - python_choice $2 - print_choices - set_exports - -# User enters choice for zoau, python defaults to 3.8 and bash to false -elif [ $# -eq 1 ]; then - zoau_choice $1 - python_choice 1 - bash_enabled=false - print_choices - set_exports -else - usage -fi diff --git a/scripts/profile.sh b/scripts/profile.sh index 4a10fd3bd..a426ab868 100755 --- a/scripts/profile.sh +++ b/scripts/profile.sh @@ -18,17 +18,36 @@ # ------------------------------------------------------------------------------ CURR_SHELL=`echo $0` +# Supporting bash will take added testing, the port on z/OS has enough +# differences to warrnat temporarily disabliing the function on z/OS. More +# specifically, when using `vi` in Bash, editing becomes a problem. if [ "$CURR_SHELL" = "bash" ]; then - # Have not found a good way to exit the bash shell without ending the profile - echo "This script can not run in a bash emulator, exiting bash and and thus"\ - "you must exit this profile again." - exit 1 + if [ "$SYSTEM" = "OS/390" ]; then + echo "Script $0 can not run in 'bash', please execute in another shell." + exit 1 + fi fi # ------------------------------------------------------------------------------ # Source the known mount points # ------------------------------------------------------------------------------ -. ./mounts.sh +mounts_env="mounts.env" + +if [ -f "$mounts_env" ]; then + . ./$mounts_env +else + echo "Unable to source file: $mounts_env, exiting." + exit 1 +fi + +mount_sh="mounts.sh" + +if [ -f "$mount_sh" ]; then + . ./$mount_sh +else + echo "Unable to source file: $mount_sh, exiting." + exit 1 +fi ################################################################################ # Global vars - since ksh is the default shell and local ksh vars are defined @@ -46,6 +65,14 @@ PYTHON_PATH="" BASH_SELECTED=false +# Array where each entry is: "<index>:<version>:<mount>:<data_set>" +ZOAU_MOUNTS="" + +# Array where each entry is: "<mount>:<data_set>" +PYTHON_MOUNTS="" + +# Array where each entry is: "<index>:<version>:<path>" +PYTHON_MOUNT_PATHS="" # ****************************************************************************** # Search the array `zoau_mount_list` for a matching arg, if it matches set the # global zoau_version var to the zoau version. @@ -58,7 +85,8 @@ get_option_zoau(){ unset zoau_version unset zoau_mount unset zoau_data_set - for tgt in "${zoau_mount_list[@]}" ; do + set_zoau_mounts + for tgt in "${ZOAU_MOUNTS[@]}" ; do zoau_index=`echo "${tgt}" | cut -d ":" -f 1` zoau_version=`echo "${tgt}" | cut -d ":" -f 2` zoau_mount=`echo "${tgt}" | cut -d ":" -f 3` @@ -79,7 +107,8 @@ get_option_python(){ unset python_index unset python_version unset python_path - for tgt in "${python_path_list[@]}" ; do + set_python_mount_paths + for tgt in "${PYTHON_MOUNT_PATHS[@]}" ; do python_index=`echo "${tgt}" | cut -d ":" -f 1` python_version=`echo "${tgt}" | cut -d ":" -f 2` python_path=`echo "${tgt}" | cut -d ":" -f 3` @@ -110,7 +139,8 @@ help_option_zoau(){ unset zoau_data_set echo "" echo "ZOAU Options:" - for tgt in "${zoau_mount_list[@]}" ; do + set_zoau_mounts + for tgt in "${ZOAU_MOUNTS[@]}" ; do zoau_index=`echo "${tgt}" | cut -d ":" -f 1` zoau_version=`echo "${tgt}" | cut -d ":" -f 2` zoau_mount=`echo "${tgt}" | cut -d ":" -f 3` @@ -123,8 +153,9 @@ help_option_python(){ unset python_index unset python_version unset python_path + set_python_mount_paths echo "Python Options:" - for tgt in "${python_path_list[@]}" ; do + for tgt in "${PYTHON_MOUNT_PATHS[@]}" ; do python_index=`echo "${tgt}" | cut -d ":" -f 1` python_version=`echo "${tgt}" | cut -d ":" -f 2` python_path=`echo "${tgt}" | cut -d ":" -f 3` @@ -151,8 +182,8 @@ usage () { # Message to user ################################################################################ selected_option () { - echo "Using ZOAU version `zoaversion`" - echo "Using python version `python --version`" + echo "Using ZOAU version $ZOAU_VERSION" + echo "Using python version $PYTHON_VERSION" if [ "${BASH_SELECTED}" = true ]; then echo "Bash is enabled." fi @@ -256,7 +287,7 @@ elif [ $# -eq 2 ];then set_bash # Default zoau 1.2.2 and python 3.9 elif [ $# -eq 0 ]; then - get_option_zoau 19 + get_option_zoau 12 get_option_python 2 get_option_shell false set_exports @@ -267,21 +298,3 @@ elif [ "$1" = help]; then else usage fi - - -# Source should have array mount_list -xxxx(){ - unset index - unset name - unset mount_point - unset data_set - for tgt in "${zoau_mount_list[@]}" ; do - index=`echo "${tgt}" | cut -d ":" -f 1` - name=`echo "${tgt}" | cut -d ":" -f 2` - mount_point=`echo "${tgt}" | cut -d ":" -f 3` - data_set=`echo "${tgt}" | cut -d ":" -f 4` - mkdir -p ${mount_point} - echo "Mouting ZOAU ${name} on data set ${data_set} to path ${mount_point}." - /usr/sbin/mount -r -t zfs -f ${data_set} ${mount_point} - done -} \ No newline at end of file diff --git a/scripts/requirements-2.11.env b/scripts/requirements-2.11.env new file mode 100644 index 000000000..e7defb9fc --- /dev/null +++ b/scripts/requirements-2.11.env @@ -0,0 +1,35 @@ +#!/bin/sh +# ============================================================================== +# Copyright (c) IBM Corporation 2022, 2023 +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== + +# ============================================================================== +# File name must adhere to reqs-<ansible-major-minor>.sh, supporting concurrent +# patches could come and to do that you would want to not use unique files for +# each ansible version but instead have this file provide the meta-data such +# the sourcing scrips know to create additional ansible venvs and maybe even +# clean up if they are removed from this file. +# eg venvs = [requirements, requirements2, requirments3] +# ============================================================================== + +requirements=( +"ansible-core:2.11.12" +"pylint" +"six" +"voluptuous" +"yamllint" +"rstcheck" +) + +python=( +"python:3.8" +) \ No newline at end of file diff --git a/scripts/requirements-2.12.env b/scripts/requirements-2.12.env new file mode 100644 index 000000000..5052447da --- /dev/null +++ b/scripts/requirements-2.12.env @@ -0,0 +1,32 @@ +#!/bin/sh +# ============================================================================== +# Copyright (c) IBM Corporation 2022, 2023 +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== + +# ============================================================================== +# File name must adhere to reqs-<ansible-major-minor>.sh, supporting concurrent +# patches could come and to do that you would want to not use unique files for +# each ansible version but instead have this file provide the meta-data such +# the sourcing scrips know to create additional ansible venvs and maybe even +# clean up if they are removed from this file. +# eg venvs = [requirements, requirements2, requirments3] +# ============================================================================== + +requirements=( +"ansible-core:2.12.10" +"pylint" +"rstcheck" +) + +python=( +"python:3.8" +) \ No newline at end of file diff --git a/scripts/requirements-2.13.env b/scripts/requirements-2.13.env new file mode 100644 index 000000000..c08a7c7e9 --- /dev/null +++ b/scripts/requirements-2.13.env @@ -0,0 +1,32 @@ +#!/bin/sh +# ============================================================================== +# Copyright (c) IBM Corporation 2022, 2023 +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== + +# ============================================================================== +# File name must adhere to reqs-<ansible-major-minor>.sh, supporting concurrent +# patches could come and to do that you would want to not use unique files for +# each ansible version but instead have this file provide the meta-data such +# the sourcing scrips know to create additional ansible venvs and maybe even +# clean up if they are removed from this file. +# eg venvs = [requirements, requirements2, requirments3] +# ============================================================================== + +requirements=( +"ansible-core:2.13.7" +"pylint" +"rstcheck" +) + +python=( +"python:3.8" +) \ No newline at end of file diff --git a/scripts/requirements-2.14.env b/scripts/requirements-2.14.env new file mode 100644 index 000000000..9d15b3dab --- /dev/null +++ b/scripts/requirements-2.14.env @@ -0,0 +1,32 @@ +#!/bin/sh +# ============================================================================== +# Copyright (c) IBM Corporation 2022, 2023 +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== + +# ============================================================================== +# File name must adhere to reqs-<ansible-major-minor>.sh, supporting concurrent +# patches could come and to do that you would want to not use unique files for +# each ansible version but instead have this file provide the meta-data such +# the sourcing scrips know to create additional ansible venvs and maybe even +# clean up if they are removed from this file. +# eg venvs = [requirements, requirements2, requirments3] +# ============================================================================== + +requirements=( +"ansible-core:2.14.1" +"pylint" +"rstcheck" +) + +python=( +"python:3.9" +) \ No newline at end of file diff --git a/scripts/requirements-2.9.env b/scripts/requirements-2.9.env new file mode 100644 index 000000000..2d7d9e11b --- /dev/null +++ b/scripts/requirements-2.9.env @@ -0,0 +1,35 @@ + +#!/bin/sh +# ============================================================================== +# Copyright (c) IBM Corporation 2022, 2023 +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== + +# ============================================================================== +# File name must adhere to reqs-<ansible-major-minor>.sh, supporting concurrent +# patches could come and to do that you would want to not use unique files for +# each ansible version but instead have this file provide the meta-data such +# the sourcing scrips know to create additional ansible venvs and maybe even +# clean up if they are removed from this file. +# eg venvs = [requirements, requirements2, requirments3] +# brew install python@3.8 +# ============================================================================== + +requirements=( +"ansible:2.9.27" +"pylint:2.3.1" +"rstcheck:3.3.1" +) + +python=( +"python:3.8" +) + diff --git a/scripts/requirements-common.env b/scripts/requirements-common.env new file mode 100644 index 000000000..365b8aa4f --- /dev/null +++ b/scripts/requirements-common.env @@ -0,0 +1,133 @@ +#!/bin/sh +# ============================================================================== +# Copyright (c) IBM Corporation 2022, 2023 +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== + +# ============================================================================== +# File name must adhere to reqs-common.sh and not change. This supplies the +# venv's with additional packages for use by the developement work flow. +# ============================================================================== + +# Notes, "pylint", "rstcheck", "six", "voluptuous", "yamllint" is common but +# various requirements.txt have it frozen so it becomes a double requement +# error if present here as well. +requirements=( +"alabaster" +"ansible-builder" +"ansible-lint" +"antsibull-changelog" +"astroid" +"attrs" +"Babel" +"bandit" +"bcrypt" +"bindep" +"black" +"bleach" +"bleach-allowlist" +"bracex" +"certifi" +"cffi" +"charset-normalizer" +"click" +"cryptography" +"dill" +"distlib" +"distro" +"docutils" +"filelock" +"flake8" +"GitPython" +"galaxy-importer" +"gitdb" +"idna" +"imagesize" +"importlib-metadata" +"isort" +"Jinja2" +"jsonschema" +"lazy-object-proxy" +"Markdown" +"MarkupSafe" +"markdown-it-py" +"mccabe" +"mdurl" +"mock" +"more-itertools" +"mypy-extensions" +"oyaml" +"Parsley" +"PyNaCl" +"PyYAML" +"Pygments" +"packaging" +"paramiko" +"pathspec" +"pbr" +"platformdirs" +"pluggy" +"py" +"pycodestyle" +"pycparser" +"pyflakes" +"pyparsing" +"pyrsistent" +"pytest" +"pytest-ansible" +"pytest-mock" +"pytz" +"requests" +"requirements-parser" +"resolvelib" +"rich" +"ruamel.yaml" +"ruamel.yaml.clib" +"Sphinx" +"semantic-version" +"shellescape" +"smmap" +"snowballstemmer" +"sphinx-rtd-theme" +"sphinxcontrib-devhelp" +"sphinxcontrib-htmlhelp" +"sphinxcontrib-jsmath" +"sphinxcontrib-qthelp" +"sphinxcontrib-serializinghtml" +"sphinxcontrib.applehelp" +"stevedore" +"subprocess-tee" +"tomli" +"tomlkit" +"types-setuptools" +"typing_extensions" +"urllib3" +"virtualenv" +"wcmatch" +"wcwidth" +"webencodings" +"wrapt" +"zipp" +) + +# This original list caused some issues with pytest seeing our conftest plugin +# as already registered, the only time senstive solution I could come up with +# was to pip freeze a working venv and use that as the common base for now, over +# time, using pip show <package> on each of these packages to figure out why +# this occurs or maybe using pipdeptree will visually help. +# requirements=( +# "bandit" +# "pipdeptree" +# "pytest" +# "oyaml" +# "mock" +# "pytest-ansible" +# ) \ No newline at end of file diff --git a/scripts/requirements-latest.env b/scripts/requirements-latest.env new file mode 100644 index 000000000..505ef1261 --- /dev/null +++ b/scripts/requirements-latest.env @@ -0,0 +1,31 @@ +#!/bin/sh +# ============================================================================== +# Copyright (c) IBM Corporation 2022, 2023 +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== + +# ============================================================================== +# File name must adhere to reqs-<ansible-major-minor>.sh, supporting concurrent +# patches could come and to do that you would want to not use unique files for +# each ansible version but instead have this file provide the meta-data such +# the sourcing scrips know to create additional ansible venvs and maybe even +# clean up if they are removed from this file. +# eg venvs = [requirements, requirements2, requirments3] +# ============================================================================== + +requirements=( +"ansible-core:latest" +) + + +python=( +"python:3.9" +) \ No newline at end of file diff --git a/scripts/venv.sh b/scripts/venv.sh new file mode 100755 index 000000000..5ec946c49 --- /dev/null +++ b/scripts/venv.sh @@ -0,0 +1,585 @@ + +#!/bin/sh +# ============================================================================== +# Copyright (c) IBM Corporation 2022, 2023 +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== + +# TODO: Need to add more global vars as some are scoped to fucntions and hidden +# from view. +# ------------------------------------------------------------------------------ +# Source +# ------------------------------------------------------------------------------ +cd $(dirname $0) +VERSION_PYTHON_MIN=3.9 +VERSION_PYTHON="0" +VERSION_PYTHON_PATH="" +DIVIDER="====================================================================" +VENV_HOME_MANAGED=${PWD%/*}/venv +# Array where each entry is: "<index>:<version>:<mount>:<data_set>" +HOSTS_ALL="" + +# hosts_env="hosts.env" + +# if [ -f "$hosts_env" ]; then +# . ./$hosts_env +# else +# echo "Unable to source file: $hosts_env, exiting." +# exit 1 +# fi + +mount_sh="mounts.sh" + +if [ -f "$mount_sh" ]; then + . ./$mount_sh +else + echo "Unable to source file: $mount_sh, exiting." + exit 1 +fi + +################################################################################ +# Converts the requirements array into a exported single line delimited with +# '\\\n' so that it can be echo'd into a requirements.txt. For example in the make +# file `echo "${REQ}">$(VENV)/requirements.txt` returns a string: +# "ansible-core==2.11.12;\\\nastroid==2.12.11;\nattrs==22.1.0;.." +# If you want echo this to a file you will need to do something like: +# X=$(./make.env --req) +# echo -e $X>requirements.txt +# Or a one-iner: echo -e $(./make.env --req)>requirements.txt +################################################################################ +export_requirements(){ + unset REQ + export REQ + for pkg in "${requirements[@]}" ; do + key=${pkg%%:*} + value=${pkg#*:} + REQ=${REQ}"$key==$value;\n" + done +} + +################################################################################ +# Converts the requirements array into a single line delimited with '\\\n' +# so that it can be echo'd into a file. For example in the make +# file `echo "${REQ}">$(VENV)/requirements.txt`. +################################################################################ +echo_requirements(){ + + unset requirements_common + unset requirements + requirements_common="requirements-common.env" + unset REQ_COMMON + + if [ -f "$requirements_common" ]; then + . ./$requirements_common + else + echo "Unable to source file: $requirements_common, exiting." + exit 1 + fi + + for pkg in "${requirements[@]}" ; do + key=${pkg%%:*} + value=${pkg#*:} + if [ "$key" = "$value" ]; then + REQ_COMMON=${REQ_COMMON}"$key;\\n" + elif [ -z "$value" ]; then + REQ_COMMON=${REQ_COMMON}"$key;\\n" + else + REQ_COMMON=${REQ_COMMON}"$key==$value;\\n" + fi + done + + #for file in `ls requirements-*.sh`; do + for file in `ls requirements-[0-9].[0-9]*.env`; do + # Unset the vars from any prior sourced files + unset REQ + unset requirements + unset venv + # Soure the file + if [ -f "$file" ]; then + . ./$file + else + echo "Unable to source file: $file." + fi + + ansible_version=`echo $file | cut -d"-" -f2|cut -d"." -f1,2` + venv_name="venv"-$ansible_version + echo $venv_name + + for pkg in "${requirements[@]}" ; do + key=${pkg%%:*} + value=${pkg#*:} + if [ "$key" = "$value" ]; then + REQ=${REQ}"$key;\\n" + elif [ -z "$value" ]; then + REQ=${REQ}"$key;\\n" + else + REQ=${REQ}"$key==$value;\\n" + fi + done + echo "${REQ}""${REQ_COMMON}" + + py_req="0" + for ver in "${python[@]}" ; do + key=${ver%%:*} + value=${ver#*:} + py_req="${value}" + done + echo "${py_req}" + done +} + + +# Lest normalize the version from 3.10.2 to 3010002000 +# Do we we need that 4th octet? +normalize_version() { + echo "$@" | awk -F. '{ printf("%d%03d%03d%03d\n", $1,$2,$3,$4); }'; +} + +make_venv_dirs(){ + # VENV's control are under this script which is to create them the GitHub + # project root (../venv/), this is because we want this to be managed such + # that direcotry `../venv` is defined in .gitignore and galaxy.yml + # (build_ignore) to avoid having them pulled in by any build process. + + # We should think about the idea of allowing: + # --force, --synch, --update thus not sure we need this method and better to + # manage this logic inline to write_req + for file in `ls requirements-[0-9].[0-9]*.env`; do + # eg extract 2.14 from requirements-2.14.sh file name + ansible_version=`echo $file | cut -d"-" -f2|cut -d"." -f1,2` + venv_name="venv"-$ansible_version + #echo $venv_name + mkdir -p "${VENV_HOME_MANAGED}"/"${venv_name}" + done +} + +write_requirements(){ + option_pass=$1 + unset requirements_common + unset requirements + unset REQ + unset REQ_COMMON + requirements_common_file="requirements-common.env" + + # Source the requirements file for now, easy way to do this. Exit may not + # not be needed but leave it for now. + if [ -f "$requirements_common_file" ]; then + . ./$requirements_common_file + else + echo "Unable to source file: $requirements_common_file, exiting." + exit 1 + fi + + for pkg in "${requirements[@]}" ; do + key=${pkg%%:*} + value=${pkg#*:} + if [ "$key" = "$value" ]; then + REQ_COMMON=${REQ_COMMON}"$key;\\n" + elif [ -z "$value" ]; then + REQ_COMMON=${REQ_COMMON}"$key;\\n" + else + REQ_COMMON=${REQ_COMMON}"$key==$value;\\n" + fi + done + + #for file in `ls requirements-*.sh`; do + for file in `ls requirements-[0-9].[0-9]*.env`; do + # Unset the vars from any prior sourced files + unset REQ + unset requirements + unset venv + # Soure the file + if [ -f "$file" ]; then + . ./$file + else + echo "Unable to source file: $file." + fi + + ansible_version=`echo $file | cut -d"-" -f2|cut -d"." -f1,2` + venv_name="venv"-$ansible_version + echo $venv_name + + for pkg in "${requirements[@]}" ; do + key=${pkg%%:*} + value=${pkg#*:} + #REQ=${REQ}"$key==$value;\\n" + if [ "$key" = "$value" ]; then + REQ=${REQ}"$key;\\n" + elif [ -z "$value" ]; then + REQ=${REQ}"$key;\\n" + else + REQ=${REQ}"$key==$value;\\n" + fi + done + + py_req="0" + for ver in "${python[@]}" ; do + key=${ver%%:*} + value=${ver#*:} + py_req="${value}" + done + + # Is the discoverd python >= what the requirements.txt requires? + if [ $(normalize_version $VERSION_PYTHON) -ge $(normalize_version $py_req) ]; then + echo "${REQ}${REQ_COMMON}">"${VENV_HOME_MANAGED}"/"${venv_name}"/requirements.txt + cp mounts.env "${VENV_HOME_MANAGED}"/"${venv_name}"/ + #cp info.env "${VENV_HOME_MANAGED}"/"${venv_name}"/ + #cp info.env.axx "${VENV_HOME_MANAGED}"/"${venv_name}"/ + cp mounts.sh "${VENV_HOME_MANAGED}"/"${venv_name}"/ + cp hosts.env "${VENV_HOME_MANAGED}"/"${venv_name}"/ + cp venv.sh "${VENV_HOME_MANAGED}"/"${venv_name}"/ + cp profile.sh "${VENV_HOME_MANAGED}"/"${venv_name}"/ + + # Decrypt file + if [ "$option_pass" ]; then + touch "${VENV_HOME_MANAGED}"/"${venv_name}"/info.env + # Probably can be a 600 - needs testing + chmod 700 "${VENV_HOME_MANAGED}"/"${venv_name}"/info.env + #echo "${option_pass}" | openssl bf -d -a -in info.env.axx -out "${VENV_HOME_MANAGED}"/"${venv_name}"/info.env -pass stdin + echo "${option_pass}" | openssl enc -d -aes-256-cbc -a -in info.env.axx -out "${VENV_HOME_MANAGED}"/"${venv_name}"/info.env -pass stdin + fi + else + echo "Not able to create managed venv path: ${VENV_HOME_MANAGED}/${venv_name} , min python required is ${py_req}, found version $VERSION_PYTHON" + echo "Consider installing another Python for your system, if on Mac 'brew install python@3.10', otherwise review your package manager" + rm -rf "${VENV_HOME_MANAGED}"/"${venv_name}"/ + fi + done +} + + +create_venv_and_pip_install_req(){ + + for file in `ls requirements-[0-9].[0-9]*.env`; do + unset venv + ansible_version=`echo $file | cut -d"-" -f2|cut -d"." -f1,2` + venv_name="venv"-$ansible_version + echo $venv_name + + if [ -f $VENV_HOME_MANAGED/$venv_name/requirements.txt ]; then + echo ${DIVIDER} + echo "Creating python virtual environment: ${VENV_HOME_MANAGED}/${venv_name}." + echo ${DIVIDER} + ${VERSION_PYTHON_PATH} -m venv "${VENV_HOME_MANAGED}"/"${venv_name}"/ + ${VENV_HOME_MANAGED}/${venv_name}/bin/pip3 install --upgrade pip + ${VENV_HOME_MANAGED}/${venv_name}/bin/pip install --upgrade pip + "${VENV_HOME_MANAGED}"/"${venv_name}"/bin/pip3 install -r "${VENV_HOME_MANAGED}"/"${venv_name}"/requirements.txt + else + echo "Virtual environment "${VENV_HOME_MANAGED}"/"${venv_name}" already exists, no changes made."; \ + fi + done +} + + +find_in_path() { + result="" + IFS=: + for x in $PATH; do + if [ -x "$x/$1" ]; then + result=${result}" $x/$1" + fi + done + echo $result +} + + + +# Find the most recent python in a users path +discover_python(){ + # Don't use which, it only will find first in path within script + # for python_found in `which python3 | cut -d" " -f3`; do + pys=("python3.8" "python3.9" "python3.10" "python3.11" "python3.12" "python3.13" "python3.14") + #pys=("python3.8" "python3.9") + for py in "${pys[@]}"; do + for python_found in `find_in_path $py`; do + ver=`${python_found} --version | cut -d" " -f2` + ver_path="$python_found" + echo "Found $ver_path" + done + + + if [ $(normalize_version $ver) -ge $(normalize_version $VERSION_PYTHON) ]; then + VERSION_PYTHON="$ver" + VERSION_PYTHON_PATH="$ver_path" + fi + + done + + echo ${DIVIDER} + echo "Discovered Python version: ${VERSION_PYTHON}." + echo "Discovered Python path: ${VERSION_PYTHON_PATH}." + echo ${DIVIDER} +} +################################################################################ +# Return Python HOME path when given a key that is contained in the zoau array. +################################################################################ + +get_pyz(){ + set_python_mount_paths + arg=$1 + unset PYZ + echo ${PYTHON_MOUNT_PATHS[@]} + for py in "${PYTHON_MOUNT_PATHS[@]}" ; do + key=${py%%:*} + value=${py#*:} + if [ "$key" = "$arg" ]; then + PYZ="$value" + fi + done +} + +################################################################################ +# Echo Python HOME path when given a key that is contained in the zoau array. +################################################################################ +echo_pyz(){ + get_pyz $1 + echo "${PYZ}" +} + +################################################################################ +# Return ZOAU HOME path when given a key that is contained in the zoau array. +################################################################################ +get_zoau(){ + arg=$1 + unset ZOAU + for zo in "${zoau[@]}" ; do + key=${zo%%:*} + value=${zo#*:} + if [ "$key" = "$arg" ]; then + ZOAU="$value" + fi + done +} + +################################################################################ +# Echo ZOAU HOME path when given a key that is contained in the zoau array. +################################################################################ +echo_zoau(){ + get_zoau $1 + echo "${ZOAU}" +} + +latest_venv(){ + dir_version_latest="0" + test_for_managed_venv=`ls -d "$VENV_HOME_MANAGED"/venv-[0-9].[0-9]* 2>/dev/null` + + if [ ! -z "$test_for_managed_venv" ]; then + for dir_version in `ls -d "$VENV_HOME_MANAGED"/venv-[0-9].[0-9]* | cut -d"-" -f2`; do + if [ $(normalize_version $dir_version) -ge $(normalize_version $dir_version_latest) ]; then + dir_version_latest=$dir_version + fi + done + echo "${VENV_HOME_MANAGED}"/"venv-"$dir_version_latest + fi +} + + + +# ============================================================================== +# Public function that initializes a global array `ZOAU_MOUNTS` where each index +# contains clolon `:` delimited values about ZOAU mounts. For example +# ZOAU_MOUNTS[0] has in it <index>:<version>:<mount>:<data_set> where that may +# look like "1:v1.2.0:/zoau/v1.2.0:IMSTESTU.ZOAU.V120.ZFS", see sourced script +# `mounts.env` for more information. +# GLOBAL: ZOAU_MOUNTS +# ARGUMENTS: None +# OUTPUTS: None +# RETURN: None +# USAGE: set_zoau_mounts +# ============================================================================== +set_hosts_to_array(){ + + # Source the envrionment file here rather than at the top of this script. + # If you source it to early it will trigger the condtion below that was + # removed from info.env. + if [ -f "info.env" ]; then + . ./info.env + else # check if the env varas instead have been exported + if [ -z "$USER" ] || [ -z "$PASS" ] || [ -z "$HOST_SUFFIX" ]; then + echo "This configuration requires either 'info.env' exist or environment vars for the z/OS host exist and be exported." + echo "Export and set vars: 'USER', 'PASS' and'HOST_SUFFIX', or place them in a file named info.env." + exit 1 + fi + fi + + hosts_env="hosts.env" + + if [ -f "$hosts_env" ]; then + . ./$hosts_env + else + echo "Unable to source file: $hosts_env, exiting." + exit 1 + fi + + _set_shell_array HOSTS_ALL "$(echo $host_list_str)" +} + + +################################################################################ +# Host list details used by the function `get_config` to generate +# a collections configuration. Keys can be an ECs hostname or a users laptop +# user name which is the same as what `whoami` returns. +# Using word spliting to split the values into an array, for example +# temp_array=(${tgt//:/ }) translates to ${string//substring/replacement}, thus +# all ':' are matched and replaced with a ' ' and then you have +# (element1 element2 ... elementN) to initialize the array. +################################################################################ + +get_host_ids(){ + set_hosts_to_array + unset host_index + unset host_prefix + for tgt in "${HOSTS_ALL[@]}" ; do + host_index=`echo "${tgt}" | cut -d ":" -f 1` + host_prefix=`echo "${tgt}" | cut -d ":" -f 2` + + echo "ID: $host_index Host: $host_prefix" + done +} + +# Should renane this with a prefix of set_ to make it more readable +ssh_host_credentials(){ + arg=$1 + unset host + unset user + unset pass + + # Call helper script to have ZOAU_MOUNTS generated + set_hosts_to_array + for tgt in "${HOSTS_ALL[@]}" ; do + key=`echo "${tgt}" | cut -d ":" -f 1` + if [ "$key" = "$arg" ]; then + host=`echo "${tgt}" | cut -d ":" -f 2` + user=`echo "${tgt}" | cut -d ":" -f 3` + pass=`echo "${tgt}" | cut -d ":" -f 4` + fi + done +} + +################################################################################ +# Copy a users key to a remote target to be a known host, if the host has a cert +# field in the host_list not equal to none, it will also be copied for jenkins +################################################################################ +ssh_copy_key(){ + sshpass -p "${pass}" ssh-copy-id -o StrictHostKeyChecking=no -i ~/.ssh/id_rsa.pub "${user}"@"${host}" &> /dev/null + + if [ ! -z "$SSH_KEY_PIPELINE" ]; then + echo "${SSH_KEY_PIPELINE}" | ssh "${user}"@"${host}" "mkdir -p ~/.ssh && cat >> ~/.ssh/authorized_keys" + else + echo "This is optional, if you define and export 'SSH_KEY_PIPELINE', the z/OS host can be authenticated with additonal keys such as a pipeline." + fi +} + +################################################################################ +# Scp some scripts to the remote host and execute them. +################################################################################ +ssh_copy_files_and_mount(){ + scp -O "$1" "$2" "$3" "${user}"@"${host}":/u/"${user}" + ssh "${user}"@"${host}" "cd /u/"${user}"; chmod 755 *.sh; ./mounts.sh --mount; exit;" +} + +################################################################################ +# Echo the configuration used by the ansible core python test framework +################################################################################ +echo_config(){ +unset CONFIG + +CONFIG=${CONFIG}"host: ${host}\\\n" +CONFIG=${CONFIG}"user: ${user}\\\n" +CONFIG=${CONFIG}"python_path: ${PYZ_HOME}/bin/python3\\\n" +CONFIG=${CONFIG}"\\\n" +CONFIG=${CONFIG}"environment:\\\n" +CONFIG=${CONFIG}" _BPXK_AUTOCVT: \"ON\"\\\n" +CONFIG=${CONFIG}" _CEE_RUNOPTS: \"'FILETAG(AUTOCVT,AUTOTAG) POSIX(ON)'\"\\\n" +CONFIG=${CONFIG}" _TAG_REDIR_ERR: txt\\\n" +CONFIG=${CONFIG}" _TAG_REDIR_IN: txt\\\n" +CONFIG=${CONFIG}" _TAG_REDIR_OUT: txt\\\n" +CONFIG=${CONFIG}" LANG: C\\\n" +CONFIG=${CONFIG}" ZOAU_HOME: ${ZOAU_HOME}\\\n" +CONFIG=${CONFIG}" LIBPATH: ${ZOAU_HOME}/lib:${PYZ_HOME}/lib:/lib:/usr/lib:.\\\n" +CONFIG=${CONFIG}" PYTHONPATH: ${ZOAU_HOME}/lib\\\n" +CONFIG=${CONFIG}" PATH: ${ZOAU_HOME}/bin:${PYZ_HOME}/bin:/bin:/usr/sbin:/var/bin\\\n" +CONFIG=${CONFIG}" PYTHONSTDINENCODING: \"cp1047\"\\n" + +echo ${CONFIG} +} + +write_test_config(){ +unset CONFIG +host_zvm=$1 +pyz_version=$2 +zoau_version=$3 +managed_venv_path=$4 + +ssh_host_credentials "$host_zvm" +get_python_mount "$pyz_version" +get_zoau_mount "$zoau_version" + +CONFIG=${CONFIG}"host: ${host}\\n" +CONFIG=${CONFIG}"user: ${user}\\n" +CONFIG=${CONFIG}"python_path: ${PYZ_HOME}/bin/python3\\n" +CONFIG=${CONFIG}"\\n" +CONFIG=${CONFIG}"environment:\\n" +CONFIG=${CONFIG}" _BPXK_AUTOCVT: \"ON\"\\n" +CONFIG=${CONFIG}" _CEE_RUNOPTS: \"'FILETAG(AUTOCVT,AUTOTAG) POSIX(ON)'\"\\n" +CONFIG=${CONFIG}" _TAG_REDIR_IN: txt\\n" +CONFIG=${CONFIG}" _TAG_REDIR_OUT: txt\\n" +CONFIG=${CONFIG}" LANG: C\\n" +CONFIG=${CONFIG}" ZOAU_HOME: ${ZOAU_HOME}\\n" +CONFIG=${CONFIG}" LIBPATH: ${ZOAU_HOME}/lib:${PYZ_HOME}/lib:/lib:/usr/lib:.\\n" +CONFIG=${CONFIG}" PYTHONPATH: ${ZOAU_HOME}/lib\\n" +CONFIG=${CONFIG}" PATH: ${ZOAU_HOME}/bin:${PYZ_HOME}/bin:/bin:/usr/sbin:/var/bin\\n" +CONFIG=${CONFIG}" PYTHONSTDINENCODING: \"cp1047\"\\n" + +echo $CONFIG>$managed_venv_path/config.yml +} + +################################################################################ +# Main arg parser +################################################################################ + +case "$1" in +--cert) + ssh_host_credentials $2 + ssh_copy_key + ;; +--host-setup-files) #ec33017a "mounts.env" "mounts.sh" "shell-helper.sh" "profile.sh" + ssh_host_credentials $2 + ssh_copy_files_and_mount $3 $4 $5 + ;; +--targets) + get_host_ids + ;; +--config) + write_test_config $2 $3 $4 $5 + ;; +--disc) + discover_python + ;; +--vsetup) + discover_python + make_venv_dirs + #echo_requirements + write_requirements $3 + create_venv_and_pip_install_req + ;; +--latest_venv) + latest_venv + ;; +--perform-unit-test) + discover_python + #make_venv_dirs + echo_requirements + #write_requirements $3 + ;; +*) + echo "ERROR: unknown parameter $1" + ;; +esac From 42a805aea1d70c40c4b395f7a1a56dd3e6c2d379 Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Fri, 26 May 2023 19:11:58 -0400 Subject: [PATCH 109/495] 347 new query fields (#778) * changing job.py to return 7 more fields, and for zos_job_query to pass them through * corrected testing to pull all new values through this assumes zoau 1.2.3 and z/OS at least 2.4 need to test older zoau to make sure this will still work * Added zoau version testing import to job.py so it won't reach for non-existent members. * pep8 and lint required changes * changed test to see if it will pass unit testing * Modified test_zos_data_set_func to skip HFS test if zOS > 02.04 * changed OS test for hfs usage * corrected usage of 'hosts'... removed the definition in prior edit. * changing OS version checker * corrected string extraction for OS version checker * added delete shell to 196/197 (finally of cat/uncat test) removed success message from 830 (version test logic) * removed the mvscmdauth call, as it coincides with some new test failures. * added changed=false back into testing of job_query * correction of zos->zoau name in comments. --- plugins/module_utils/job.py | 22 +++++- plugins/modules/zos_job_query.py | 48 +++++++++++ plugins/modules/zos_job_submit.py | 36 +++++++++ .../modules/test_zos_data_set_func.py | 79 +++++++++++-------- 4 files changed, 153 insertions(+), 32 deletions(-) diff --git a/plugins/module_utils/job.py b/plugins/module_utils/job.py index 94909aba4..d987d5a52 100644 --- a/plugins/module_utils/job.py +++ b/plugins/module_utils/job.py @@ -31,6 +31,11 @@ list_dds = MissingZOAUImport() listing = MissingZOAUImport() +try: + from zoautil_py import ZOAU_API_VERSION +except Exception: + ZOAU_API_VERSION = "1.2.0" + def job_output(job_id=None, owner=None, job_name=None, dd_name=None, duration=0, timeout=0, start_time=timer()): """Get the output from a z/OS job based on various search criteria. @@ -200,6 +205,9 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, duration= # jls output: owner=job[0], name=job[1], id=job[2], status=job[3], rc=job[4] # e.g.: OMVSADM HELLO JOB00126 JCLERR ? # listing(job_id, owner) in 1.2.0 has owner param, 1.1 does not + # jls output has expanded in zoau 1.2.3 and later: jls -l -v shows headers + # jobclass=job[5] serviceclass=job[6] priority=job[7] asid=job[8] + # creationdate=job[9] creationtime=job[10] queueposition=job[11] final_entries = [] entries = listing(job_id=job_id_temp) @@ -232,13 +240,25 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, duration= job["ret_code"] = {} job["ret_code"]["msg"] = entry.status + " " + entry.rc job["ret_code"]["msg_code"] = entry.rc - # Why was this set to an empty string? + job["ret_code"]["code"] = None if len(entry.rc) > 0: if entry.rc.isdigit(): job["ret_code"]["code"] = int(entry.rc) job["ret_code"]["msg_text"] = entry.status + # this section only works on zoau 1.2.3 vvv + + if ZOAU_API_VERSION > "1.2.2": + job["job_class"] = entry.job_class + job["svc_class"] = entry.svc_class + job["priority"] = entry.priority + job["asid"] = entry.asid + job["creation_datetime"] = entry.creation_datetime + job["queue_position"] = entry.queue_position + + # this section only works on zoau 1.2.3 ^^^ + job["class"] = "" job["content_type"] = "" job["ret_code"]["steps"] = [] diff --git a/plugins/modules/zos_job_query.py b/plugins/modules/zos_job_query.py index 28d38b727..cb9a28a53 100644 --- a/plugins/modules/zos_job_query.py +++ b/plugins/modules/zos_job_query.py @@ -174,6 +174,36 @@ } ] } + job_class: + description: + Letter indicating job class for this job. + type: str + sample: A + svc_class: + description: + Character indicating service class for this job. + type: str + sample: C + priority: + description: + A numeric indicator of the job priority assigned through JES. + type: int + sample: 4 + asid: + description: + An identifier created by JES. + type: int + sample: 0 + creation_datetime: + description: + Date and time, local to the target system, when the job was created. + type: str + sample: 20230504T141500 + queue_position: + description: + Integer of the position within the job queue where this jobs resided. + type: int + sample: 3 sample: [ { @@ -181,12 +211,24 @@ "owner": "ADMIN", "job_id": "JOB01427", "ret_code": "null", + "job_class": "K", + "svc_class": "?", + "priority": 1, + "asid": 0, + "creation_datetime": "20230503T121300", + "queue_position": 3, }, { "job_name": "LINKCBL", "owner": "ADMIN", "job_id": "JOB16577", "ret_code": { "msg": "CANCELED", "code": "null" }, + "job_class": "A", + "svc_class": "E", + "priority": 0, + "asid": 4, + "creation_datetime": "20230503T121400", + "queue_position": 0, }, ] message: @@ -354,6 +396,12 @@ def parsing_jobs(jobs_raw): "system": job.get("system"), "subsystem": job.get("subsystem"), "ret_code": ret_code, + "job_class": job.get("job_class"), + "svc_class": job.get("svc_class"), + "priority": job.get("priority"), + "asid": job.get("asid"), + "creation_datetime": job.get("creation_datetime"), + "queue_position": job.get("queue_position"), } jobs.append(job_dict) return jobs diff --git a/plugins/modules/zos_job_submit.py b/plugins/modules/zos_job_submit.py index a58e138a1..97cbbc4a7 100644 --- a/plugins/modules/zos_job_submit.py +++ b/plugins/modules/zos_job_submit.py @@ -276,6 +276,36 @@ }, ] } + job_class: + description: + Letter indicating job class for this job. + type: str + sample: A + svc_class: + description: + Character indicating service class for this job. + type: str + sample: C + priority: + description: + A numeric indicator of the job priority assigned through JES. + type: int + sample: 4 + asid: + description: + An identifier created by JES. + type: int + sample: 0 + creation_datetime: + description: + Date and time, local to the target system, when the job was created. + type: str + sample: 20230504T141500 + queue_position: + description: + Integer of the position within the job queue where this jobs resided. + type: int + sample: 3 sample: [ { @@ -489,6 +519,12 @@ } ] }, + "job_class": "K", + "svc_class": "?", + "priority": 1, + "asid": 0, + "creation_datetime": "20230503T121300", + "queue_position": 3, "subsystem": "STL1" } ] diff --git a/tests/functional/modules/test_zos_data_set_func.py b/tests/functional/modules/test_zos_data_set_func.py index 118fdcc18..c4833aa56 100644 --- a/tests/functional/modules/test_zos_data_set_func.py +++ b/tests/functional/modules/test_zos_data_set_func.py @@ -152,6 +152,7 @@ def test_data_set_catalog_and_uncatalog(ansible_zos_module, jcl): name=DEFAULT_DATA_SET_NAME, state="cataloged", volumes=DEFAULT_VOLUME ) hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") + hosts.all.file(path=TEMP_PATH, state="directory") hosts.all.shell(cmd=ECHO_COMMAND.format(quote(jcl), TEMP_PATH)) results = hosts.all.zos_job_submit( @@ -809,42 +810,58 @@ def test_data_set_temp_data_set_name_batch(ansible_zos_module): ["HFS", "ZFS"], ) def test_filesystem_create_and_mount(ansible_zos_module, filesystem): + fulltest = True + hosts = ansible_zos_module + try: - hosts = ansible_zos_module hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") - results = hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, type=filesystem) - temp_dir_name = make_tempfile(hosts, directory=True) - results2 = hosts.all.command( - cmd="mount -t {0} -f {1} {2}".format( - filesystem, DEFAULT_DATA_SET_NAME, temp_dir_name + + if filesystem == "HFS": + result0 = hosts.all.shell(cmd="zinfo -t sys") + for result in result0.contacted.values(): + sys_info = result.get("stdout_lines") + product_version = sys_info[4].split()[1].strip("'") + product_release = sys_info[5].split()[1].strip("'") + if product_release >= "05" or product_version > "02": + fulltest = False + print( "skipping HFS test: zOS > 02.04" ) + + if fulltest: + hosts = ansible_zos_module + hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") + results = hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, type=filesystem) + temp_dir_name = make_tempfile(hosts, directory=True) + results2 = hosts.all.command( + cmd="mount -t {0} -f {1} {2}".format( + filesystem, DEFAULT_DATA_SET_NAME, temp_dir_name + ) ) - ) - results3 = hosts.all.shell(cmd="cd {0} ; df .".format(temp_dir_name)) + results3 = hosts.all.shell(cmd="cd {0} ; df .".format(temp_dir_name)) - # clean up - results4 = hosts.all.command(cmd="unmount {0}".format(temp_dir_name)) - results5 = hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") - results6 = hosts.all.file(path=temp_dir_name, state="absent") + # clean up + results4 = hosts.all.command(cmd="unmount {0}".format(temp_dir_name)) + results5 = hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") + results6 = hosts.all.file(path=temp_dir_name, state="absent") - for result in results.contacted.values(): - assert result.get("changed") is True - assert result.get("module_stderr") is None - for result in results2.contacted.values(): - assert result.get("changed") is True - assert result.get("stderr") == "" - for result in results3.contacted.values(): - assert result.get("changed") is True - assert result.get("stderr") == "" - assert DEFAULT_DATA_SET_NAME.upper() in result.get("stdout", "") - for result in results4.contacted.values(): - assert result.get("changed") is True - assert result.get("stderr") == "" - for result in results5.contacted.values(): - assert result.get("changed") is True - assert result.get("module_stderr") is None - for result in results6.contacted.values(): - assert result.get("changed") is True - assert result.get("module_stderr") is None + for result in results.contacted.values(): + assert result.get("changed") is True + assert result.get("module_stderr") is None + for result in results2.contacted.values(): + assert result.get("changed") is True + assert result.get("stderr") == "" + for result in results3.contacted.values(): + assert result.get("changed") is True + assert result.get("stderr") == "" + assert DEFAULT_DATA_SET_NAME.upper() in result.get("stdout", "") + for result in results4.contacted.values(): + assert result.get("changed") is True + assert result.get("stderr") == "" + for result in results5.contacted.values(): + assert result.get("changed") is True + assert result.get("module_stderr") is None + for result in results6.contacted.values(): + assert result.get("changed") is True + assert result.get("module_stderr") is None finally: hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") From eb1ef33ba4cdc7513afedb5ef0f5d931e481554b Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Mon, 29 May 2023 10:21:14 -0600 Subject: [PATCH 110/495] Missing fragment in PR 778 New query fields (#780) * added fragment for pr 778 * Added changelog fragment query new fields Added changelog fragment query new fields * Update 778-query-new-fields.yml --- changelogs/fragments/778-query-new-fields.yml | 5 +++++ 1 file changed, 5 insertions(+) create mode 100644 changelogs/fragments/778-query-new-fields.yml diff --git a/changelogs/fragments/778-query-new-fields.yml b/changelogs/fragments/778-query-new-fields.yml new file mode 100644 index 000000000..9f2c71579 --- /dev/null +++ b/changelogs/fragments/778-query-new-fields.yml @@ -0,0 +1,5 @@ +minor_changes: +- zos_job_query - Adds new fields job_class, svc_class, priority, asid, + creation_datetime, and queue_position to the return output when querying + or submitting a job. Available when using ZOAU v1.2.3 or greater. + (https://github.com/ansible-collections/ibm_zos_core/pull/778) From 40dab5ed63fd64ee0bc062a168d403cf997b4f6c Mon Sep 17 00:00:00 2001 From: Demetri <dimatos@gmail.com> Date: Wed, 31 May 2023 12:05:26 -0700 Subject: [PATCH 111/495] Update docs with ansible/ansible-core version, AAP and fix the dated git issue templates (#771) * Doc vesion updates Signed-off-by: ddimatos <dimatos@gmail.com> * Repository template updates and future proofing Signed-off-by: ddimatos <dimatos@gmail.com> * Changelog fragment added Signed-off-by: ddimatos <dimatos@gmail.com> * Formatting corrections for release notes Signed-off-by: ddimatos <dimatos@gmail.com> * Upate issue templates with newer version of software Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> --- .github/ISSUE_TEMPLATE/bug_issue.yml | 23 ++++++++--- .../ISSUE_TEMPLATE/collaboration_issue.yml | 23 ++++++++--- .github/ISSUE_TEMPLATE/doc_issue.yml | 40 ++++++++----------- .github/ISSUE_TEMPLATE/enabler_issue.yml | 2 +- .../enhancement_feature.issue.yml | 2 +- .github/ISSUE_TEMPLATE/module_issue.yml | 2 +- README.md | 11 ++++- changelogs/771-update-ansible-version.yaml | 7 ++++ docs/source/release_notes.rst | 11 +++-- 9 files changed, 80 insertions(+), 41 deletions(-) create mode 100644 changelogs/771-update-ansible-version.yaml diff --git a/.github/ISSUE_TEMPLATE/bug_issue.yml b/.github/ISSUE_TEMPLATE/bug_issue.yml index 359add494..8a1cd3ccd 100644 --- a/.github/ISSUE_TEMPLATE/bug_issue.yml +++ b/.github/ISSUE_TEMPLATE/bug_issue.yml @@ -28,6 +28,9 @@ body: description: Which version of ZOAU are you using? multiple: false options: + - v1.2.5 + - v1.2.4 + - v1.2.3 - v1.2.2 - v1.2.1 - v1.2.0 @@ -42,6 +45,9 @@ body: description: Which version of IBM Enterprise Python are you using? multiple: false options: + - v3.14.x + - v3.13.x + - v3.12.x - v3.11.x - v3.10.x - v3.9.x @@ -55,13 +61,17 @@ body: description: Which version of z/OS Ansible core collection are you using. If you are unsure, review the [documentation](https://ibm.github.io/z_ansible_collections_doc/faqs/faqs.html#how-do-i-update-a-collection-to-the-latest-version). multiple: false options: + - v1.9.0 + - v1.9.0-beta.1 + - v1.8.0 + - v1.8.0-beta.1 + - v1.7.0 + - v1.7.0-beta.1 + - v1.6.0 + - v1.6.0-beta.1 - v1.5.0 - - v1.4.0 + - v1.4.1 - v1.3.6 - - v1.3.5 - - v1.3.3 - - v1.3.1 - - v1.3.0 - v1.2.1 - v1.1.0 - v1.0.0 @@ -75,6 +85,8 @@ body: multiple: false options: - latest + - v2.16.x + - v2.15.x - v2.14.x - v2.13.x - v2.12.x @@ -89,6 +101,7 @@ body: description: What is the version of z/OS on the managed node? multiple: false options: + - v3.1 - v2.5 - v2.4 - v2.3 diff --git a/.github/ISSUE_TEMPLATE/collaboration_issue.yml b/.github/ISSUE_TEMPLATE/collaboration_issue.yml index bf6db4778..c9ac9f151 100644 --- a/.github/ISSUE_TEMPLATE/collaboration_issue.yml +++ b/.github/ISSUE_TEMPLATE/collaboration_issue.yml @@ -40,6 +40,9 @@ body: description: Which version of ZOAU are you using? multiple: false options: + - v1.2.5 + - v1.2.4 + - v1.2.3 - v1.2.2 - v1.2.1 - v1.2.0 @@ -54,6 +57,9 @@ body: description: Which version of IBM Enterprise Python are you using? multiple: true options: + - v3.14.x + - v3.13.x + - v3.12.x - v3.11.x - v3.10.x - v3.9.x @@ -67,13 +73,17 @@ body: description: Which version of z/OS Ansible core collection are you using. If you are unsure, review the [documentation](https://ibm.github.io/z_ansible_collections_doc/faqs/faqs.html#how-do-i-update-a-collection-to-the-latest-version). multiple: false options: + - v1.9.0 + - v1.9.0-beta.1 + - v1.8.0 + - v1.8.0-beta.1 + - v1.7.0 + - v1.7.0-beta.1 + - v1.6.0 + - v1.6.0-beta.1 - v1.5.0 - - v1.4.0 + - v1.4.1 - v1.3.6 - - v1.3.5 - - v1.3.3 - - v1.3.1 - - v1.3.0 - v1.2.1 - v1.1.0 - v1.0.0 @@ -87,6 +97,8 @@ body: multiple: false options: - latest + - v2.16.x + - v2.15.x - v2.14.x - v2.13.x - v2.12.x @@ -101,6 +113,7 @@ body: description: What is the version of z/OS on the managed node? multiple: false options: + - v3.1 - v2.5 - v2.4 - v2.3 diff --git a/.github/ISSUE_TEMPLATE/doc_issue.yml b/.github/ISSUE_TEMPLATE/doc_issue.yml index 07ddbc40e..5583ce5c1 100644 --- a/.github/ISSUE_TEMPLATE/doc_issue.yml +++ b/.github/ISSUE_TEMPLATE/doc_issue.yml @@ -1,5 +1,5 @@ name: Report a documentation issue -description: Request that documentation be reviewed. Complete all required fields. +description: Request that documentation be reviewed. Complete all required fields. title: "[Documentation] <title> " labels: [Documentation] assignees: @@ -27,35 +27,29 @@ body: 5. Include browser or shell if applicable validations: required: true - - type: textarea - id: ansible-version - attributes: - label: Ansible version - description: What is the version of Ansible on the controller if applicable. - placeholder: Paste verbatim output from `ansible --version`. - render: SHELL - validations: - required: false - type: dropdown id: collection-version attributes: label: IBM z/OS Ansible core Version - description: | - Which version of the z/OS Ansible core collection are you using. If you are unsure, run the command: `cat ~/.ansible/collections/ansible_collections/ibm/ibm_zos_core/MANIFEST.json | grep version` - multiple: true + description: Which version of z/OS Ansible core collection are you reporting a documentation bug. If you are unsure, review the [documentation](https://ibm.github.io/z_ansible_collections_doc/faqs/faqs.html#how-do-i-update-a-collection-to-the-latest-version). + multiple: false options: - - v1.0.0 - - v1.1.0 - - v1.2.1 - - v1.3.0 - - v1.3.1 - - v1.3.3 - - v1.3.5 + - v1.9.0 + - v1.9.0-beta.1 + - v1.8.0 + - v1.8.0-beta.1 + - v1.7.0 + - v1.7.0-beta.1 + - v1.6.0 + - v1.6.0-beta.1 + - v1.5.0 + - v1.4.1 - v1.3.6 - - v1.4.0-beta.1 - - v1.4.0-beta.2 + - v1.2.1 + - v1.1.0 + - v1.0.0 validations: - required: true + required: false - type: dropdown id: modules attributes: diff --git a/.github/ISSUE_TEMPLATE/enabler_issue.yml b/.github/ISSUE_TEMPLATE/enabler_issue.yml index 37131e500..abc9f16c2 100644 --- a/.github/ISSUE_TEMPLATE/enabler_issue.yml +++ b/.github/ISSUE_TEMPLATE/enabler_issue.yml @@ -1,7 +1,7 @@ name: Enabler task description: | Identify a development task that does not correspond to other git issue types, eg this could be a pipeline task. - Complete all required fields. + Complete all required fields. title: "[Enabler] <title> " labels: [Enabler] assignees: diff --git a/.github/ISSUE_TEMPLATE/enhancement_feature.issue.yml b/.github/ISSUE_TEMPLATE/enhancement_feature.issue.yml index d39840872..f5bc9325f 100644 --- a/.github/ISSUE_TEMPLATE/enhancement_feature.issue.yml +++ b/.github/ISSUE_TEMPLATE/enhancement_feature.issue.yml @@ -1,5 +1,5 @@ name: Request an enhancement or new feature -description: Request a new feature or an enhancement. Complete all required fields. +description: Request a new feature or an enhancement. Complete all required fields. title: "[Enhancement] <title> " labels: [Enhancement] assignees: diff --git a/.github/ISSUE_TEMPLATE/module_issue.yml b/.github/ISSUE_TEMPLATE/module_issue.yml index beea537e9..a7e7dcfa1 100644 --- a/.github/ISSUE_TEMPLATE/module_issue.yml +++ b/.github/ISSUE_TEMPLATE/module_issue.yml @@ -1,5 +1,5 @@ name: Request a new module -description: Request a new module be added to the collection. Complete all required fields. +description: Request a new module be added to the collection. Complete all required fields. title: "[Module] <title> " labels: [Module] assignees: diff --git a/README.md b/README.md index d6505759b..756f06d92 100644 --- a/README.md +++ b/README.md @@ -49,7 +49,16 @@ and ansible-doc to automate tasks on z/OS. Ansible version compatibility ============================= -This collection has been tested against the following Ansible versions: >=2.9,<2.15. +This collection has been tested against **Ansible** and **Ansible Core** versions >=2.9,<2.15. +The Ansible and Ansible Core versions supported for this collection align to the +[ansible-core support matrix](https://docs.ansible.com/ansible/latest/reference_appendices/release_and_maintenance.html#ansible-core-support-matrix). Review the +[Ansible community changelogs](https://docs.ansible.com/ansible/latest/reference_appendices/release_and_maintenance.html#ansible-community-changelogs) for corresponding **Ansible community packages** +and **ansible-core**. + +For **Ansible Automation Platform** (AAP) users, review the +[Ansible Automation Platform Certified Content](https://access.redhat.com/articles/3642632) +and [AAP Life Cycle](https://access.redhat.com/support/policy/updates/ansible-automation-platform) +for more more information on supported versions of Ansible. Copyright ========= diff --git a/changelogs/771-update-ansible-version.yaml b/changelogs/771-update-ansible-version.yaml new file mode 100644 index 000000000..92354841b --- /dev/null +++ b/changelogs/771-update-ansible-version.yaml @@ -0,0 +1,7 @@ +trivial: +- doc - Updated the documentation in the README and release_notes.rst to reflect + ansible, ansible-core, Automation Hub and z/OS version. + (https://github.com/ansible-collections/ibm_zos_core/pull/771) +- templates - Update the git issue templates with current and + future product versions. + (https://github.com/ansible-collections/ibm_zos_core/pull/771) \ No newline at end of file diff --git a/docs/source/release_notes.rst b/docs/source/release_notes.rst index d897feef4..1e211ec89 100644 --- a/docs/source/release_notes.rst +++ b/docs/source/release_notes.rst @@ -27,12 +27,14 @@ Bugfixes -------- - ``zos_copy`` + - Fixes a bug where files not encoded in IBM-1047 would trigger an error while computing the record length for a new destination dataset. - Fixes a bug where the module would change the mode for a directory when copying in the contents of another directory. - Fixes a bug where the incorrect encoding would be used during normalization, particularly when processing newlines in files. - ``zos_encode`` - Fixes a bug where converted files were not tagged with the new code set afterwards. - ``zos_find`` - Fixes a bug where the module would stop searching and exit after the first value in a list was not found. - ``zos_lineinfile`` + - Removes use of Python f-string to ensure support for Python 2.7 on the controller. - Fixes a bug where an incorect error message would be raised when a USS source was not found. @@ -46,7 +48,7 @@ Availability Reference --------- -* Supported by `z/OS V2R3`_ or later +* Supported by `z/OS Version`_ V2R4 or later * Supported by the `z/OS® shell`_ * Supported by `IBM Open Enterprise SDK for Python`_ `3.9`_ - `3.11`_ * Supported by IBM `Z Open Automation Utilities 1.2.2`_ (or later) but prior to version 1.3. @@ -163,7 +165,7 @@ Availability Reference --------- -* Supported by `z/OS V2R3`_ or later +* Supported by `z/OS Version`_ V2R4 or later * Supported by the `z/OS® shell`_ * Supported by `IBM Open Enterprise SDK for Python`_ `3.9`_ - `3.11`_ * Supported by IBM `Z Open Automation Utilities 1.2.2`_ (or later) but prior to version 1.3. @@ -172,8 +174,7 @@ Version 1.4.1 ============= Bug fixes - --------------------------- +--------- * ``zos_copy`` @@ -856,6 +857,8 @@ Reference https://www.ibm.com/support/knowledgecenter/en/SSLTBW_2.4.0/com.ibm.zos.v2r4.bpxa400/part1.htm .. _z/OS V2R3: https://www.ibm.com/support/knowledgecenter/SSLTBW_2.3.0/com.ibm.zos.v2r3/en/homepage.html +.. _z/OS Version: + https://www.ibm.com/docs/en/zos .. _FAQs: https://ibm.github.io/z_ansible_collections_doc/faqs/faqs.html From ca6edd2d983004ee2bca32824da814b427864473 Mon Sep 17 00:00:00 2001 From: Demetri <dimatos@gmail.com> Date: Fri, 2 Jun 2023 20:32:05 -0700 Subject: [PATCH 112/495] Update ac command supporting files (#789) * Update ac command supporting files Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> --- .../fragments/789-ac-command-updates.yml | 3 + scripts/hosts.env | 65 ++++++++++++++++--- scripts/mounts.env | 3 +- scripts/requirements-2.15.env | 32 +++++++++ 4 files changed, 93 insertions(+), 10 deletions(-) create mode 100644 changelogs/fragments/789-ac-command-updates.yml create mode 100644 scripts/requirements-2.15.env diff --git a/changelogs/fragments/789-ac-command-updates.yml b/changelogs/fragments/789-ac-command-updates.yml new file mode 100644 index 000000000..c0c60dcf1 --- /dev/null +++ b/changelogs/fragments/789-ac-command-updates.yml @@ -0,0 +1,3 @@ +trivial: +- ac - Adds new mounts, targets and ansible 2.15 requirements.env. + (https://github.com/ansible-collections/ibm_zos_core/pull/789) \ No newline at end of file diff --git a/scripts/hosts.env b/scripts/hosts.env index 8351ba350..58075263d 100644 --- a/scripts/hosts.env +++ b/scripts/hosts.env @@ -22,21 +22,68 @@ # fi # fi -host_list_str="ec01150a:ec01150a${HOST_SUFFIX}:${USER}:${PASS} "\ +host_list_str="ddimatos:ec33017a${HOST_SUFFIX}:${USER}:${PASS} "\ +"richp:ec01132a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ketan:ec33018a${HOST_SUFFIX}:${USER}:${PASS} "\ +"iamorenosoto:ec01134a${HOST_SUFFIX}:${USER}:${PASS} "\ +"fernando:ec01135a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec01105a:ec01105a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec01129a:ec01129a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec01130a:ec01130a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec01131a:ec01131a${HOST_SUFFIX}:${USER}:${PASS} "\ "ec01132a:ec01132a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec01133a:ec01133a${HOST_SUFFIX}:${USER}:${PASS} "\ "ec01134a:ec01134a${HOST_SUFFIX}:${USER}:${PASS} "\ "ec01135a:ec01135a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec01136a:ec01136a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec01137a:ec01137a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec01138a:ec01138a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec01139a:ec01139a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec01140a:ec01140a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec01145a:ec01145a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec01146a:ec01146a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec01147a:ec01147a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec01148a:ec01148a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec01149a:ec01149a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec01150a:ec01150a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec01151a:ec01151a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec01152a:ec01152a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec01153a:ec01153a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec01154a:ec01154a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec03071a:ec03071a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec03102a:ec03102a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec03127a:ec03127a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec03129a:ec03129a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec03173a:ec03173a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec03175a:ec03175a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec32016a:ec32016a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec32024a:ec32024a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec32051a:ec32051a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec33002a:ec33002a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec33003a:ec33003a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec33004a:ec33004a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec33005a:ec33005a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec33006a:ec33006a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec33006a:ec33006a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec33007a:ec33007a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec33008a:ec33008a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec33009a:ec33009a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec33010a:ec33010a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec33011a:ec33011a${HOST_SUFFIX}:${USER}:${PASS} "\ "ec33012a:ec33012a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec33013a:ec33013a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec33013a:ec33013a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec33014a:ec33014a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec33015a:ec33015a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec33016a:ec33016a${HOST_SUFFIX}:${USER}:${PASS} "\ "ec33017a:ec33017a${HOST_SUFFIX}:${USER}:${PASS} "\ -"ec01140a:ec01140a${HOST_SUFFIX}:${USER}:${PASS} "\ "ec33018a:ec33018a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec33019a:ec33019a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec33020a:ec33020a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec33021a:ec33021a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec33022a:ec33022a${HOST_SUFFIX}:${USER}:${PASS} "\ +"ec33023a:ec33023a${HOST_SUFFIX}:${USER}:${PASS} "\ "ec33024a:ec33024a${HOST_SUFFIX}:${USER}:${PASS} "\ "ec33025a:ec33025a${HOST_SUFFIX}:${USER}:${PASS} "\ "ec33026a:ec33026a${HOST_SUFFIX}:${USER}:${PASS} "\ -"ec01150a:ec01150a${HOST_SUFFIX}:${USER}:${PASS} "\ -"ec01151a:ec01151a${HOST_SUFFIX}:${USER}:${PASS} "\ -"ddimatos:ec33017a${HOST_SUFFIX}:${USER}:${PASS} "\ -"richp:ec01132a${HOST_SUFFIX}:${USER}:${PASS} "\ -"ketan:ec33018a${HOST_SUFFIX}:${USER}:${PASS} "\ -"iamorenosoto:ec01134a${HOST_SUFFIX}:${USER}:${PASS} "\ -"fernando:ec01135a${HOST_SUFFIX}:${USER}:${PASS} " +"ec33027a:ec33027a${HOST_SUFFIX}:${USER}:${PASS} " diff --git a/scripts/mounts.env b/scripts/mounts.env index 8f944d971..876876cd3 100644 --- a/scripts/mounts.env +++ b/scripts/mounts.env @@ -38,7 +38,8 @@ zoau_mount_list_str="1:1.2.0:/zoau/v1.2.0:IMSTESTU.ZOAU.V120.ZFS "\ "10:1.1.1-ptf1:/zoau/v1.1.1-ptf1:IMSTESTU.ZOAU.V111.PTF1.ZFS "\ "11:1.2.1:/zoau/v1.2.1:IMSTESTU.ZOAU.V121.ZFS "\ "12:1.2.2:/zoau/v1.2.2:IMSTESTU.ZOAU.V122.ZFS "\ -"13:latest:/zoau/latest:IMSTESTU.ZOAU.LATEST.ZFS " +"13:1.2.3:/zoau/v1.2.3:IMSTESTU.ZOAU.V123.ZFS "\ +"14:latest:/zoau/latest:IMSTESTU.ZOAU.LATEST.ZFS " # ------------------------------------------------------------------------------ # PYTHON MOUNT TABLE diff --git a/scripts/requirements-2.15.env b/scripts/requirements-2.15.env new file mode 100644 index 000000000..5f8b36260 --- /dev/null +++ b/scripts/requirements-2.15.env @@ -0,0 +1,32 @@ +#!/bin/sh +# ============================================================================== +# Copyright (c) IBM Corporation 2022, 2023 +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== + +# ============================================================================== +# File name must adhere to reqs-<ansible-major-minor>.sh, supporting concurrent +# patches could come and to do that you would want to not use unique files for +# each ansible version but instead have this file provide the meta-data such +# the sourcing scrips know to create additional ansible venvs and maybe even +# clean up if they are removed from this file. +# eg venvs = [requirements, requirements2, requirments3] +# ============================================================================== + +requirements=( +"ansible-core:2.15.0" +"pylint" +"rstcheck" +) + +python=( +"python:3.9" +) From 558ef8b41c83d8e0f69a6bdb1b04fd4529e69729 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Fri, 2 Jun 2023 21:57:03 -0700 Subject: [PATCH 113/495] Update zos_data_set module member description Signed-off-by: ddimatos <dimatos@gmail.com> --- docs/source/modules/zos_data_set.rst | 12 ++++++++--- plugins/modules/zos_data_set.py | 30 +++++++++++++++++++--------- 2 files changed, 30 insertions(+), 12 deletions(-) diff --git a/docs/source/modules/zos_data_set.rst b/docs/source/modules/zos_data_set.rst index 046b8a2f5..c310069e8 100644 --- a/docs/source/modules/zos_data_set.rst +++ b/docs/source/modules/zos_data_set.rst @@ -65,6 +65,9 @@ state If *state=present* and *replace=False* and the data set is present on the managed node, no action taken, module completes successfully with *changed=False*. + If *state=present* and *type=MEMBER* and the member does not exist in the data set, create a member formatted to store data, module completes successfully with *changed=True*. Note, a PDSE does not allow a mixture of formats such that there is executables (program objects) and data. The member created is formatted to store data, not an executable. + + If *state=cataloged* and *volumes* is provided and the data set is already cataloged, no action taken, module completes successfully with *changed=False*. @@ -74,7 +77,7 @@ state If *state=cataloged* and *volumes* is provided and the data set is not cataloged, module attempts to perform catalog using supplied *name* and *volumes*. If the attempt to catalog the data set catalog fails, returns failure with *changed=False*. - If *state=uncataloged* and the data set is not found, no action taken , module completes successfully with *changed=False*. + If *state=uncataloged* and the data set is not found, no action taken, module completes successfully with *changed=False*. If *state=uncataloged* and the data set is found, the data set is uncataloged, module completes successfully with *changed=True*. @@ -330,6 +333,9 @@ batch If *state=present* and *replace=False* and the data set is present on the managed node, no action taken, module completes successfully with *changed=False*. + If *state=present* and *type=MEMBER* and the member does not exist in the data set, create a member formatted to store data, module completes successfully with *changed=True*. Note, a PDSE does not allow a mixture of formats such that there is executables (program objects) and data. The member created is formatted to store data, not an executable. + + If *state=cataloged* and *volumes* is provided and the data set is already cataloged, no action taken, module completes successfully with *changed=False*. @@ -339,7 +345,7 @@ batch If *state=cataloged* and *volumes* is provided and the data set is not cataloged, module attempts to perform catalog using supplied *name* and *volumes*. If the attempt to catalog the data set catalog fails, returns failure with *changed=False*. - If *state=uncataloged* and the data set is not found, no action taken , module completes successfully with *changed=False*. + If *state=uncataloged* and the data set is not found, no action taken, module completes successfully with *changed=False*. If *state=uncataloged* and the data set is found, the data set is uncataloged, module completes successfully with *changed=True*. @@ -352,7 +358,7 @@ batch type - The data set type to be used when creating a data set. (e.g ``pdse``) + The data set type to be used when creating a data set. (e.g ``PDSE``) ``MEMBER`` expects to be used with an existing partitioned data set. diff --git a/plugins/modules/zos_data_set.py b/plugins/modules/zos_data_set.py index 3e7ee1700..dde8f3488 100644 --- a/plugins/modules/zos_data_set.py +++ b/plugins/modules/zos_data_set.py @@ -67,6 +67,12 @@ - > If I(state=present) and I(replace=False) and the data set is present on the managed node, no action taken, module completes successfully with I(changed=False). + - > + If I(state=present) and I(type=MEMBER) and the member does not exist in the data set, + create a member formatted to store data, module completes successfully with I(changed=True). + Note, a PDSE does not allow a mixture of formats such that there is + executables (program objects) and data. The member created is formatted to store data, + not an executable. - > If I(state=cataloged) and I(volumes) is provided and the data set is already cataloged, no action taken, module completes successfully with I(changed=False). @@ -79,11 +85,11 @@ module attempts to perform catalog using supplied I(name) and I(volumes). If the attempt to catalog the data set catalog fails, returns failure with I(changed=False). - > - If I(state=uncataloged) and the data set is not found, - no action taken , module completes successfully with I(changed=False). + If I(state=uncataloged) and the data set is not found, no action taken, + module completes successfully with I(changed=False). - > - If I(state=uncataloged) and the data set is found, - the data set is uncataloged, module completes successfully with I(changed=True). + If I(state=uncataloged) and the data set is found, the data set is uncataloged, + module completes successfully with I(changed=True). required: false type: str default: present @@ -314,6 +320,12 @@ - > If I(state=present) and I(replace=False) and the data set is present on the managed node, no action taken, module completes successfully with I(changed=False). + - > + If I(state=present) and I(type=MEMBER) and the member does not exist in the data set, + create a member formatted to store data, module completes successfully with I(changed=True). + Note, a PDSE does not allow a mixture of formats such that there is + executables (program objects) and data. The member created is formatted to store data, + not an executable. - > If I(state=cataloged) and I(volumes) is provided and the data set is already cataloged, no action taken, module completes successfully with I(changed=False). @@ -326,11 +338,11 @@ module attempts to perform catalog using supplied I(name) and I(volumes). If the attempt to catalog the data set catalog fails, returns failure with I(changed=False). - > - If I(state=uncataloged) and the data set is not found, - no action taken , module completes successfully with I(changed=False). + If I(state=uncataloged) and the data set is not found, no action taken, + module completes successfully with I(changed=False). - > - If I(state=uncataloged) and the data set is found, - the data set is uncataloged, module completes successfully with I(changed=True). + If I(state=uncataloged) and the data set is found, the data set is uncataloged, + module completes successfully with I(changed=True). required: false type: str default: present @@ -341,7 +353,7 @@ - uncataloged type: description: - - The data set type to be used when creating a data set. (e.g C(pdse)) + - The data set type to be used when creating a data set. (e.g C(PDSE)) - C(MEMBER) expects to be used with an existing partitioned data set. - Choices are case-insensitive. required: false From ac8559ae4f7d83b2f380332fd4b71ba219b26a19 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Fri, 2 Jun 2023 21:57:47 -0700 Subject: [PATCH 114/495] Add recently changed module doc from prior commits Signed-off-by: ddimatos <dimatos@gmail.com> --- docs/source/modules/zos_job_query.rst | 51 +++++++++++++++++++++++++- docs/source/modules/zos_job_submit.rst | 43 +++++++++++++++++++++- 2 files changed, 91 insertions(+), 3 deletions(-) diff --git a/docs/source/modules/zos_job_query.rst b/docs/source/modules/zos_job_query.rst index d34098617..40bd7b353 100644 --- a/docs/source/modules/zos_job_query.rst +++ b/docs/source/modules/zos_job_query.rst @@ -133,19 +133,31 @@ jobs [ { + "asid": 0, + "creation_datetime": "20230503T121300", + "job_class": "K", "job_id": "JOB01427", "job_name": "LINKJOB", "owner": "ADMIN", - "ret_code": "null" + "priority": 1, + "queue_position": 3, + "ret_code": "null", + "svc_class": "?" }, { + "asid": 4, + "creation_datetime": "20230503T121400", + "job_class": "A", "job_id": "JOB16577", "job_name": "LINKCBL", "owner": "ADMIN", + "priority": 0, + "queue_position": 0, "ret_code": { "code": "null", "msg": "CANCELED" - } + }, + "svc_class": "E" } ] @@ -232,6 +244,41 @@ jobs + job_class + Letter indicating job class for this job. + + | **type**: str + | **sample**: A + + svc_class + Character indicating service class for this job. + + | **type**: str + | **sample**: C + + priority + A numeric indicator of the job priority assigned through JES. + + | **type**: int + | **sample**: 4 + + asid + An identifier created by JES. + + | **type**: int + + creation_datetime + Date and time, local to the target system, when the job was created. + + | **type**: str + | **sample**: 20230504T141500 + + queue_position + Integer of the position within the job queue where this jobs resided. + + | **type**: int + | **sample**: 3 + message Message returned on failure. diff --git a/docs/source/modules/zos_job_submit.rst b/docs/source/modules/zos_job_submit.rst index bb438f8a5..4375564bb 100644 --- a/docs/source/modules/zos_job_submit.rst +++ b/docs/source/modules/zos_job_submit.rst @@ -223,8 +223,10 @@ jobs [ { + "asid": 0, "class": "K", "content_type": "JOB", + "creation_datetime": "20230503T121300", "ddnames": [ { "byte_count": "677", @@ -419,9 +421,12 @@ jobs "stepname": "DLORD6" } ], + "job_class": "K", "job_id": "JOB00361", "job_name": "DBDGEN00", "owner": "OMVSADM", + "priority": 1, + "queue_position": 3, "ret_code": { "code": 0, "msg": "CC 0000", @@ -434,7 +439,8 @@ jobs } ] }, - "subsystem": "STL1" + "subsystem": "STL1", + "svc_class": "?" } ] @@ -588,6 +594,41 @@ jobs + job_class + Letter indicating job class for this job. + + | **type**: str + | **sample**: A + + svc_class + Character indicating service class for this job. + + | **type**: str + | **sample**: C + + priority + A numeric indicator of the job priority assigned through JES. + + | **type**: int + | **sample**: 4 + + asid + An identifier created by JES. + + | **type**: int + + creation_datetime + Date and time, local to the target system, when the job was created. + + | **type**: str + | **sample**: 20230504T141500 + + queue_position + Integer of the position within the job queue where this jobs resided. + + | **type**: int + | **sample**: 3 + message This option is being deprecated From 84060ffa374312aa2395cdcc5725c4b74c77ee3b Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Fri, 2 Jun 2023 22:07:57 -0700 Subject: [PATCH 115/495] Add changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> --- changelogs/fragments/791-doc-zos_data_set-member-update.yml | 5 +++++ 1 file changed, 5 insertions(+) create mode 100644 changelogs/fragments/791-doc-zos_data_set-member-update.yml diff --git a/changelogs/fragments/791-doc-zos_data_set-member-update.yml b/changelogs/fragments/791-doc-zos_data_set-member-update.yml new file mode 100644 index 000000000..4ab0eee03 --- /dev/null +++ b/changelogs/fragments/791-doc-zos_data_set-member-update.yml @@ -0,0 +1,5 @@ +trivial: +- zos_data_set - when a member is created by the module, the format is type + data which is not suitable for executables. This change describes the + format used when creating member. + (https://github.com/ansible-collections/ibm_zos_core/pull/791) \ No newline at end of file From 3ab9d7821b3510967683fc18a4141b816e343b0d Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Tue, 6 Jun 2023 01:04:21 -0700 Subject: [PATCH 116/495] Adding fix for uncataloged vsam and non-vsam data sets Signed-off-by: ddimatos <dimatos@gmail.com> --- plugins/module_utils/data_set.py | 72 ++++++++++++++++++++++++++------ 1 file changed, 60 insertions(+), 12 deletions(-) diff --git a/plugins/module_utils/data_set.py b/plugins/module_utils/data_set.py index 8295a6541..d2781c0d5 100644 --- a/plugins/module_utils/data_set.py +++ b/plugins/module_utils/data_set.py @@ -429,17 +429,60 @@ def delete_uncataloged_dataset(name, volumes): Returns: bool -- Return code from the mvs_cmd, if 0 then it was successful. """ - # if is VSAM is_vsam(name, volumes) - vsam_code = 'NVR' - vsam_name_extension = '' + + # NVR specifies that the object to be deleted is an SMS-managed non-VSAM + # volume record (NVR) entry. This parameter must be specified to delete + # an NVR from a VSAM volume data set (VVDS) and its corresponding record + # from the VTOC. The NVR/VTOC entries are deleted only if the related + # non-VSAM object catalog entry does not exist. + + # VVR specifies that the objects to be deleted are one or more unrelated + # VSAM volume record (VVR) entries. To delete a VVR from both the VSAM + # volume data set (VVDS) and from the VTOC, you must specify this parameter. + + # To delete a VSAM DS that is not cataloged you must delete each VSAM record + # for that VSAM type and use VVR and FILE. You can simulate a uncataloged DS + # with commands: + # - echo " DELETE IBMUSER.VSAM.KSDS CLUSTER NOSCRATCH NOPURGE" | + # mvscmdauth --pgm=IDCAMS --sysprint=* --sysin=stdin + # echo " DELETE IBMUSER.DATASET NOSCRATCH" | mvscmdauth --pgm=IDCAMS + # --sysprint=* --sysin=stdin + if DataSet.is_vsam(name, volumes): - vsam_code = 'VVR' + vol_record_entry = 'VVR' + # Delete the DATA record of a VSAM, applies to KSDS, RRDS, ESDS, LDS vsam_name_extension = '.DATA' - command = " DELETE {0} FILE(DD1) {1}".format(name + vsam_name_extension, vsam_code) - dds = dict(DD1=',vol,'.join(volumes) + ',vol') - rc, stdout, stderr = mvs_cmd.idcams(cmd=command, dds=dds, authorized=True) - if rc > 0: - raise DatasetDeleteError(name, rc) + command = " DELETE {0} FILE(DD1) {1}".format(name + vsam_name_extension, vol_record_entry) + dds = dict(DD1=',vol,'.join(volumes) + ',vol') + rc, stdout, stderr = mvs_cmd.idcams(cmd=command, dds=dds, authorized=True) + # RC 8 occurs when the VSAM Record does not exist, thus acceptable + if rc > 8: + raise DatasetDeleteError(name, rc) + + # Delete the INDEX record of a VSAM, this does NOT apply to RRDS, ESDS, LDS but + # the VASAM is not in catalog so we can't detect the type of VSAM so we + # can expect an RC 8 to appear for non KSDS types. + vsam_name_extension = '.INDEX' + command = " DELETE {0} FILE(DD1) {1}".format(name + vsam_name_extension, vol_record_entry) + dds = dict(DD1=',vol,'.join(volumes) + ',vol') + rc, stdout, stderr = mvs_cmd.idcams(cmd=command, dds=dds, authorized=True) + # RC 8 occurs when the VSAM Record does exist, thus acceptable + if rc > 8: + raise DatasetDeleteError(name, rc) + else: + vol_record_entry = 'NVR' + command = " DELETE {0} FILE(DD1) {1}".format(name, vol_record_entry) + dds = dict(DD1=',vol,'.join(volumes) + ',vol') + rc, stdout, stderr = mvs_cmd.idcams(cmd=command, dds=dds, authorized=True) + # RC 8 occurs when the VSAM Record does not exist, thus acceptable + if rc > 8: + raise DatasetDeleteError(name, rc) + + # Callers expect a RC 0 to evaluate if there was a change, so normalize + # to rc 0 + if rc <= 8: + rc = 0 + return rc @staticmethod @@ -464,7 +507,8 @@ def data_set_shared_members(src, dest): @staticmethod def attempt_to_delete_uncataloged_data_set_if_necessary(name, volumes): - """Attempt to delete any uncataloged dataset if exists on any volume and there is a cataloged dataset with the same name. + """Attempt to delete any uncataloged dataset if exists on any user provided volumes + and there is a cataloged dataset with the same name. Arguments: name (str) -- The data set name to check if cataloged. volumes (list[str]) -- The volumes the data set may reside on. @@ -481,9 +525,13 @@ def attempt_to_delete_uncataloged_data_set_if_necessary(name, volumes): cataloged_volume_list = DataSet.get_volume_list_for_cataloged_data_set(name) if len(cataloged_volume_list) == 0: return changed, present, True - # If any volume provided is not in the list, means we need to delete it from uncataloged dataset. + + # If a volume provided (volumes) is not in the list cataloged_volume_list, we need to + # delete them from the cataloged_volume_list, this leaves us with with uncataloged data sets that + # correspond to the volumes argument. volumes_for_uncataloged_dataset = list(filter(lambda vol: vol not in cataloged_volume_list, volumes)) - # If any volume provided is in the list we will delete from the catalog as normal. + + # If any volume provided (volumes) is in the list we will delete from catalog as normal. pending_to_delete_cataloged_dataset = any(vol in volumes for vol in cataloged_volume_list) if len(volumes_for_uncataloged_dataset) > 0: From 6b02d2a82a062458fddd0f5ffcbbb25961c86792 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Tue, 6 Jun 2023 09:00:23 -0600 Subject: [PATCH 117/495] Encode files recursively and test case for keep behavior. (#772) * Bring the jinja2 solution to dev and add test case * Add fragment * Solve problem z/OS 2.5 HFS * Declaration error solve * Need to check the validation with HFS * Ensure validating z/OS work with HFS * Change inecesary changes and fragments q * Return all test cases to normal * Return all test cases to normal * Create the local test case * Add local test case and change test case to be acurate * Get better cleanup of test-case * Update test_zos_data_set_func.py Equalize test mount func --- ...sively-and-test-case-for-keep-behavior.yml | 5 ++ plugins/modules/zos_copy.py | 18 ++-- .../functional/modules/test_zos_copy_func.py | 84 ++++++++++++++++++- 3 files changed, 96 insertions(+), 11 deletions(-) create mode 100644 changelogs/fragments/772-Encode-files-recursively-and-test-case-for-keep-behavior.yml diff --git a/changelogs/fragments/772-Encode-files-recursively-and-test-case-for-keep-behavior.yml b/changelogs/fragments/772-Encode-files-recursively-and-test-case-for-keep-behavior.yml new file mode 100644 index 000000000..672c454b7 --- /dev/null +++ b/changelogs/fragments/772-Encode-files-recursively-and-test-case-for-keep-behavior.yml @@ -0,0 +1,5 @@ +bugfixes: + - zos_copy - Zos_copy did not encode inner content inside subdirectories once the source was copied to the destination. + Fix now encodes all content in a source directory, including + subdirectories. + (https://github.com/ansible-collections/ibm_zos_core/pull/772). \ No newline at end of file diff --git a/plugins/modules/zos_copy.py b/plugins/modules/zos_copy.py index 739c0d8d0..e5df77787 100644 --- a/plugins/modules/zos_copy.py +++ b/plugins/modules/zos_copy.py @@ -866,17 +866,17 @@ def _convert_encoding_dir(self, dir_path, from_code_set, to_code_set): EncodingConversionError -- When the encoding of a USS file is not able to be converted """ - path, dirs, files = next(os.walk(dir_path)) enc_utils = encode.EncodeUtils() - for file in files: - full_file_path = path + "/" + file - rc = enc_utils.uss_convert_encoding( - full_file_path, full_file_path, from_code_set, to_code_set - ) - if not rc: - raise EncodingConversionError( - full_file_path, from_code_set, to_code_set + for path, dirs, files in os.walk(dir_path): + for file_path in files: + full_file_path = os.path.join(path, file_path) + rc = enc_utils.uss_convert_encoding( + full_file_path, full_file_path, from_code_set, to_code_set ) + if not rc: + raise EncodingConversionError( + full_file_path, from_code_set, to_code_set + ) def _tag_file_encoding(self, file_path, tag, is_dir=False): """Tag the file specified by 'file_path' with the given code set. diff --git a/tests/functional/modules/test_zos_copy_func.py b/tests/functional/modules/test_zos_copy_func.py index 97ec099dc..781ec80bc 100644 --- a/tests/functional/modules/test_zos_copy_func.py +++ b/tests/functional/modules/test_zos_copy_func.py @@ -29,8 +29,7 @@ DUMMY DATA ---- LINE 004 ------ DUMMY DATA ---- LINE 005 ------ DUMMY DATA ---- LINE 006 ------ -DUMMY DATA ---- LINE 007 ------ -""" +DUMMY DATA ---- LINE 007 ------""" DUMMY_DATA_SPECIAL_CHARS = """DUMMY DATA ---- LINE 001 ------ DUMMY DATA ---- LINE ÁÁÁ------ @@ -468,6 +467,87 @@ def test_copy_dir_to_existing_uss_dir_not_forced(ansible_zos_module): hosts.all.file(path=dest_dir, state="absent") +@pytest.mark.uss +def test_copy_subdirs_folders_and_validate_recursive_encoding(ansible_zos_module): + hosts = ansible_zos_module + dest_path = "/tmp/test/" + text_outer_file = "Hi I am point A" + text_inner_file = "Hi I am point B" + src_path = "/tmp/level_1/" + outer_file = "/tmp/level_1/text_A.txt" + inner_src_path = "/tmp/level_1/level_2/" + inner_file = "/tmp/level_1/level_2/text_B.txt" + + try: + hosts.all.file(path=inner_src_path, state="directory") + hosts.all.file(path=inner_file, state = "touch") + hosts.all.file(path=outer_file, state = "touch") + hosts.all.shell(cmd="echo '{0}' > '{1}'".format(text_outer_file, outer_file)) + hosts.all.shell(cmd="echo '{0}' > '{1}'".format(text_inner_file, inner_file)) + + copy_res = hosts.all.zos_copy(src=src_path, dest=dest_path, encoding={"from": "ISO8859-1", "to": "IBM-1047"}, remote_src=True) + + for result in copy_res.contacted.values(): + assert result.get("msg") is None + assert result.get("changed") is True + + stat_res = hosts.all.stat(path="/tmp/test/level_2/") + for st in stat_res.contacted.values(): + assert st.get("stat").get("exists") is True + + full_inner_path = dest_path + "/level_2/text_B.txt" + full_outer_path = dest_path + "/text_A.txt" + inner_file_text_aft_encoding = hosts.all.shell(cmd="cat {0}".format(full_inner_path)) + outer_file_text_aft_encoding = hosts.all.shell(cmd="cat {0}".format(full_outer_path)) + for text in outer_file_text_aft_encoding.contacted.values(): + text_outer = text.get("stdout") + for text in inner_file_text_aft_encoding.contacted.values(): + text_inner = text.get("stdout") + + assert text_inner == text_inner_file + assert text_outer == text_outer_file + finally: + hosts.all.file(path=src_path, state="absent") + hosts.all.file(path=dest_path, state="absent") + + +@pytest.mark.uss +def test_copy_subdirs_folders_and_validate_recursive_encoding_local(ansible_zos_module): + hosts = ansible_zos_module + dest_path = "/tmp/test/" + + try: + source_1 = tempfile.TemporaryDirectory(prefix="level_", suffix="_1") + source = source_1.name + source_2 = tempfile.TemporaryDirectory(dir = source, prefix="level_", suffix="_2") + full_source = source_2.name + populate_dir(source) + populate_dir(full_source) + level_1 = os.path.basename(source) + level_2 = os.path.basename(full_source) + + copy_res = hosts.all.zos_copy(src=source, dest=dest_path, encoding={"from": "ISO8859-1", "to": "IBM-1047"}) + + for result in copy_res.contacted.values(): + assert result.get("msg") is None + assert result.get("changed") is True + + full_outer_file= "{0}/{1}/file3".format(dest_path, level_1) + full_iner_file= "{0}/{1}/{2}/file3".format(dest_path, level_1, level_2) + verify_copy_1 = hosts.all.shell(cmd="cat {0}".format(full_outer_file)) + verify_copy_2 = hosts.all.shell(cmd="cat {0}".format(full_iner_file)) + + for result in verify_copy_1.contacted.values(): + print(result) + assert result.get("stdout") == DUMMY_DATA + for result in verify_copy_2.contacted.values(): + print(result) + assert result.get("stdout") == DUMMY_DATA + finally: + hosts.all.file(name=dest_path, state="absent") + source_1.cleanup(ignore_cleanup_errors = True) + + @pytest.mark.uss @pytest.mark.parametrize("copy_directory", [False, True]) def test_copy_local_dir_to_non_existing_dir(ansible_zos_module, copy_directory): From 10ab418addd70167da31559d3b0aa592660bad8a Mon Sep 17 00:00:00 2001 From: Demetri <dimatos@gmail.com> Date: Tue, 6 Jun 2023 13:41:46 -0700 Subject: [PATCH 118/495] Update ac to support a single test (#793) * Update ac to support a single test Signed-off-by: ddimatos <dimatos@gmail.com> * Changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update test description Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> --- ac | 42 ++++++++++++------- .../fragments/789-ac-command-add-test.yml | 3 ++ 2 files changed, 31 insertions(+), 14 deletions(-) create mode 100644 changelogs/fragments/789-ac-command-add-test.yml diff --git a/ac b/ac index b01fa8bf8..aeb122423 100755 --- a/ac +++ b/ac @@ -188,6 +188,9 @@ option_processor(){ fi } +# If option_processor echo's an exit , the sanitize will execute it else it will +# just be a echo, might be worth seeing if this can just be called or embedded +# into the option_processor to simplify the calls option_sanitize(){ option_value=$1 $option_value 2> /dev/null @@ -317,9 +320,12 @@ ac_sanity(){ ## zoau - ZOAU to use in testing, choices are 1.0.3, 1.1.1, 1.2.0, 1.2.1, ## no selection defaults to 1.1.1 . ## file - the absoulte path to a test suite to run, no selection -## defaults to all tests running. +## defaults to all test suite running. +## test - a test case to run found in 'file', no selection +## defaults to all tests in file running. ## debug - enable debug for pytest (-s), choices are true and false ## Example: +## $ ac --ac-test --host ec01150a --python 3.10 --zoau 1.2.2 --file tests/functional/modules/test_zos_operator_func.py --test test_zos_operator_positive_path --debug true ## $ ac --ac-test --host ec33012a --python 3.10 --zoau 1.2.2 --file tests/functional/modules/test_zos_operator_func.py --debug true ## $ ac --ac-test --file tests/functional/modules/test_zos_operator_func.py --debug true ## $ ac --ac-test @@ -328,7 +334,18 @@ ac_test(){ python=$2 zoau=$3 file=$4 - debug=$5 + test=$5 + debug=$6 + + # Run test by node IDs, eg pytest -v tests/my-directory/test_demo.py::test_specific_function + if [ "$file" ] && [ "$test" ]; then + file="${file}::${test}" + fi + + if [ "$debug" ]; then + debug="-s" + fi + skip=$CURR_DIR/tests/functional/modules/test_module_security.py # Create the config always overwriting existing @@ -343,20 +360,12 @@ ac_test(){ #cd ${VENV_BIN} if [ "$file" ]; then - if [ "$debug" ]; then - . ${VENV_BIN}/activate && ${VENV_BIN}/pytest --ignore="${skip}" $CURR_DIR/${file} --host-pattern=all --zinventory=${VENV}/config.yml -s - else - . ${VENV_BIN}/activate && ${VENV_BIN}/pytest --ignore="${skip}" $CURR_DIR/${file} --host-pattern=all --zinventory=${VENV}/config.yml - fi + . ${VENV_BIN}/activate && ${VENV_BIN}/pytest --ignore="${skip}" $CURR_DIR/${file} --host-pattern=all --zinventory=${VENV}/config.yml "${debug}" else for file in `ls tests/functional/modules/*.py`; do - # For some reason '--ignor'e not being honored so injecting a work around + # For some reason '--ignore not being honored so injecting a work around if [ "$file" != "$skip" ]; then - if [ "$debug" ]; then - . ${VENV_BIN}/activate && ${VENV_BIN}/pytest --ignore="${skip}" $CURR_DIR/${file} --host-pattern=all --zinventory=${VENV}/config.yml -s - else - . ${VENV_BIN}/activate && ${VENV_BIN}/pytest --ignore="${skip}" $CURR_DIR/${file} --host-pattern=all --zinventory=${VENV}/config.yml - fi + . ${VENV_BIN}/activate && ${VENV_BIN}/pytest --ignore="${skip}" $CURR_DIR/${file} --host-pattern=all --zinventory=${VENV}/config.yml "${debug}" fi done fi @@ -722,6 +731,11 @@ while true; do option_sanitize $python shift ;; + --test|--test=?*) # option + test=`option_processor $1 $2` + option_sanitize $test + shift + ;; # --tests|--tests=?*) # option # tests=`option_processor $1 $2` # option_sanitize $tests @@ -771,7 +785,7 @@ elif [ "$option_submitted" ] && [ "$option_submitted" = "--ac-install" ] ; then elif [ "$option_submitted" ] && [ "$option_submitted" = "--ac-sanity" ] ; then ac_sanity $version elif [ "$option_submitted" ] && [ "$option_submitted" = "--ac-test" ] ; then - ac_test ${host:=""} ${python:=""} ${zoau:=""} ${file:=""} ${debug:=""} + ac_test ${host:=""} ${python:=""} ${zoau:=""} ${file:=""} ${test:=""} ${debug:=""} elif [ "$option_submitted" ] && [ "$option_submitted" = "--ac-test-config" ] ; then ac_test_config elif [ "$option_submitted" ] && [ "$option_submitted" = "--ac-version" ] ; then diff --git a/changelogs/fragments/789-ac-command-add-test.yml b/changelogs/fragments/789-ac-command-add-test.yml new file mode 100644 index 000000000..56cae6936 --- /dev/null +++ b/changelogs/fragments/789-ac-command-add-test.yml @@ -0,0 +1,3 @@ +trivial: +- ac - Adds support to run single test from test suite. + (https://github.com/ansible-collections/ibm_zos_core/pull/793) \ No newline at end of file From 98c70475ae959caae753bde9282500982a7340d6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Thu, 8 Jun 2023 09:25:05 -0600 Subject: [PATCH 119/495] Return the dynamically created destination attributes (#773) * First iteration to get dynamic values * Spaces and lines rectified * Add validation and extra variable to ensure consistency * Whitespaces * Change imports in test_zos_mount_func * Update test_zos_fetch_func imports * Update all imports for pipelines runs * Revert "Update all imports for pipelines runs" This reverts commit 1b370a2ba3c0001c316e0121ddab82ae7cc6d75d. Return one commit * Update data_set.py imports * Revert "Update data_set.py imports" This reverts commit 37561b0a12e04faaee8307a5541b71469dbe721d. * Update data_set imports * Update data_set imports * Update data_set imports * Restore import * Restore the imports * Add fragment * Solve a typo * Solve z/OS 2.5 HFS * Solve declaration error * Solve HFS and solution by now * Ensure HFS working with HFS * Better working on HFS testing problems * Change to cover many cases and add test * Modified changelog, corrected typos and shortemed file name * Delete 773-Return-the-dynamically-created-destintation-attributres.yaml * Update test_zos_data_set_func.py * Add documentation * Adjust spaces * Solve spaces in documentation * Solve problems on spaces in documentation * Adjust fragment and add validation for vsams * Better redaction to documentation * Solve spaces * Change documentation of code and collection * Change words in documentation --------- Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> --- ...return-dynamically-created-dest-attrs.yaml | 6 + plugins/action/zos_copy.py | 7 +- plugins/modules/zos_copy.py | 109 +++++++++++++++++- .../functional/modules/test_zos_copy_func.py | 7 ++ 4 files changed, 124 insertions(+), 5 deletions(-) create mode 100644 changelogs/fragments/773-return-dynamically-created-dest-attrs.yaml diff --git a/changelogs/fragments/773-return-dynamically-created-dest-attrs.yaml b/changelogs/fragments/773-return-dynamically-created-dest-attrs.yaml new file mode 100644 index 000000000..0a8ce0adb --- /dev/null +++ b/changelogs/fragments/773-return-dynamically-created-dest-attrs.yaml @@ -0,0 +1,6 @@ +minor_changes: +- zos_copy - Adds block_size, record_format, record_length, space_primary, + space_secondary, space_type and type in the return output when + the destination data set does not exist and has to be created + by the module. + (https://github.com/ansible-collections/ibm_zos_core/pull/773) \ No newline at end of file diff --git a/plugins/action/zos_copy.py b/plugins/action/zos_copy.py index 6847b9ac5..5fa861b61 100644 --- a/plugins/action/zos_copy.py +++ b/plugins/action/zos_copy.py @@ -344,6 +344,7 @@ def _update_result(is_binary, copy_res, original_args): src = copy_res.get("src") note = copy_res.get("note") backup_name = copy_res.get("backup_name") + dest_data_set_attrs = copy_res.get("dest_data_set_attrs") updated_result = dict( dest=copy_res.get("dest"), is_binary=is_binary, @@ -356,7 +357,6 @@ def _update_result(is_binary, copy_res, original_args): updated_result["note"] = note if backup_name: updated_result["backup_name"] = backup_name - if ds_type == "USS": updated_result.update( dict( @@ -372,6 +372,11 @@ def _update_result(is_binary, copy_res, original_args): checksum = copy_res.get("checksum") if checksum: updated_result["checksum"] = checksum + if dest_data_set_attrs is not None: + if len(dest_data_set_attrs) > 0: + dest_data_set_attrs.pop("name") + updated_result["dest_created"] = True + updated_result["destination_attributes"] = dest_data_set_attrs return updated_result diff --git a/plugins/modules/zos_copy.py b/plugins/modules/zos_copy.py index e5df77787..ffb9ccbe4 100644 --- a/plugins/modules/zos_copy.py +++ b/plugins/modules/zos_copy.py @@ -563,6 +563,61 @@ returned: success type: str sample: SAMPLE.SEQ.DATA.SET +dest_created: + description: Indicates whether the module created the destination. + returned: success and if dest was created by the module. + type: bool + sample: true +destination_attributes: + description: Attributes of a dest created by the module. + returned: success and destination was created by the module. + type: dict + contains: + block_size: + description: + Block size of the dataset. + type: int + sample: 32760 + record_format: + description: + Record format of the dataset. + type: str + sample: FB + record_length: + description: + Record length of the dataset. + type: int + sample: 45 + space_primary: + description: + Allocated primary space for the dataset. + type: int + sample: 2 + space_secondary: + description: + Allocated secondary space for the dataset. + type: int + sample: 1 + space_type: + description: + Unit of measurement for space. + type: str + sample: K + type: + description: + Type of dataset allocated. + type: str + sample: PDSE + sample: + { + "block_size": 32760, + "record_format": "FB", + "record_length": 45, + "space_primary": 2, + "space_secondary": 1, + "space_type": "K", + "type": "PDSE" + } checksum: description: SHA256 checksum of the file after running zos_copy. returned: C(validate) is C(true) and if dest is USS @@ -1977,6 +2032,45 @@ def is_member_wildcard(src): ) +def get_attributes_of_any_dataset_created( + dest, + src_ds_type, + src, + src_name, + is_binary, + volume=None +): + """ + Get the attributes of dataset created by the function allocate_destination_data_set + except for VSAM. + + Arguments: + dest (str) -- Name of the destination data set. + src_ds_type (str) -- Source of the destination data set. + src (str) -- Name of the source data set, used as a model when appropiate. + src_name (str) -- Extraction of the source name without the member pattern. + is_binary (bool) -- Whether the data set will contain binary data. + volume (str, optional) -- Volume where the data set should be allocated into. + + Returns: + params (dict) -- Parameters used for the dataset created as name, type, + space_primary, space_secondary, record_format, record_length, block_size and space_type + """ + params = {} + if src_ds_type == "USS": + if os.path.isfile(src): + size = os.stat(src).st_size + params = get_data_set_attributes(dest, size=size, is_binary=is_binary, volume=volume) + else: + size = os.path.getsize(src) + params = get_data_set_attributes(dest, size=size, is_binary=is_binary, volume=volume) + else: + src_attributes = datasets.listing(src_name)[0] + size = int(src_attributes.total_space) + params = get_data_set_attributes(dest, size=size, is_binary=is_binary, volume=volume) + return params + + def allocate_destination_data_set( src, dest, @@ -2006,6 +2100,9 @@ def allocate_destination_data_set( Returns: bool -- True if the data set was created, False otherwise. + dest_params (dict) -- Parameters used for the dataset created as name, + block_size, record_format, record_length, space_primary, space_secondary, + space_type, type. """ src_name = data_set.extract_dsname(src) is_dest_empty = data_set.DataSet.is_empty(dest) if dest_exists else True @@ -2014,8 +2111,11 @@ def allocate_destination_data_set( # empty dataset was created for the user by an admin/operator, and they don't have permissions # to create new datasets. # These rules assume that source and destination types are compatible. + # Create the dict that will contains the values created by the module if it's empty action module will + # not display the content. + dest_params = {} if dest_exists and is_dest_empty: - return False + return False, dest_params # Giving more priority to the parameters given by the user. if dest_data_set: @@ -2086,8 +2186,9 @@ def allocate_destination_data_set( volumes = [volume] if volume else None data_set.DataSet.ensure_absent(dest, volumes=volumes) data_set.DataSet.allocate_model_data_set(ds_name=dest, model=src_name, vol=volume) - - return True + if dest_ds_type not in data_set.DataSet.MVS_VSAM: + dest_params = get_attributes_of_any_dataset_created(dest, src_ds_type, src, src_name, is_binary, volume) + return True, dest_params def normalize_line_endings(src, encoding=None): @@ -2449,7 +2550,7 @@ def run_module(module, arg_def): try: if not is_uss: - res_args["changed"] = allocate_destination_data_set( + res_args["changed"], res_args["dest_data_set_attrs"] = allocate_destination_data_set( temp_path or src, dest_name, src_ds_type, dest_ds_type, diff --git a/tests/functional/modules/test_zos_copy_func.py b/tests/functional/modules/test_zos_copy_func.py index 781ec80bc..374bf2b47 100644 --- a/tests/functional/modules/test_zos_copy_func.py +++ b/tests/functional/modules/test_zos_copy_func.py @@ -1295,6 +1295,7 @@ def test_copy_file_to_non_existing_sequential_data_set(ansible_zos_module, src): assert cp_res.get("msg") is None assert cp_res.get("changed") is True assert cp_res.get("dest") == dest + assert cp_res.get("dest_created") is True assert cp_res.get("is_binary") == src["is_binary"] for v_cp in verify_copy.contacted.values(): assert v_cp.get("rc") == 0 @@ -1467,6 +1468,7 @@ def test_copy_ps_to_non_existing_ps(ansible_zos_module): assert result.get("msg") is None assert result.get("changed") is True assert result.get("dest") == dest + assert result.get("dest_created") is True for result in verify_copy.contacted.values(): assert result.get("rc") == 0 assert result.get("stdout") != "" @@ -1816,6 +1818,7 @@ def test_copy_file_to_non_existing_pdse(ansible_zos_module, is_remote): assert cp_res.get("msg") is None assert cp_res.get("changed") is True assert cp_res.get("dest") == dest_path + assert cp_res.get("dest_created") is True for v_cp in verify_copy.contacted.values(): assert v_cp.get("rc") == 0 finally: @@ -1844,6 +1847,7 @@ def test_copy_dir_to_non_existing_pdse(ansible_zos_module): assert result.get("msg") is None assert result.get("changed") is True assert result.get("dest") == dest + assert result.get("dest_created") is True for result in verify_copy.contacted.values(): assert result.get("rc") == 0 finally: @@ -1875,6 +1879,7 @@ def test_copy_dir_crlf_endings_to_non_existing_pdse(ansible_zos_module): assert result.get("msg") is None assert result.get("changed") is True assert result.get("dest") == dest + assert result.get("dest_created") is True for result in verify_copy.contacted.values(): assert result.get("rc") == 0 assert len(result.get("stdout_lines")) == 2 @@ -1954,6 +1959,7 @@ def test_copy_data_set_to_non_existing_pdse(ansible_zos_module, src_type): assert cp_res.get("msg") is None assert cp_res.get("changed") is True assert cp_res.get("dest") == dest + assert cp_res.get("dest_created") is True for v_cp in verify_copy.contacted.values(): assert v_cp.get("rc") == 0 assert v_cp.get("stdout") != "" @@ -2415,6 +2421,7 @@ def test_copy_member_to_non_existing_seq_data_set(ansible_zos_module, src_type): assert result.get("msg") is None assert result.get("changed") is True assert result.get("dest") == dest + assert result.get("dest_created") is True for result in verify_copy.contacted.values(): assert result.get("rc") == 0 assert result.get("stdout") != "" From b121c38e09e6016b48fd1ec652c1c27476c5c4ad Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Fri, 9 Jun 2023 15:34:03 -0700 Subject: [PATCH 120/495] Updated ac command to clean up the collections directory Signed-off-by: ddimatos <dimatos@gmail.com> --- ac | 3 +++ 1 file changed, 3 insertions(+) diff --git a/ac b/ac index b01fa8bf8..45e0cf1ec 100755 --- a/ac +++ b/ac @@ -361,6 +361,9 @@ ac_test(){ done fi + # Clean up the collections folder after running the tests, temporary work around. + rm -rf collections/ansible_collections + #cd ${CURR_DIR} } From f94eb0c23850027328e4d20f07a224ab76b07c0b Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Fri, 9 Jun 2023 15:48:40 -0700 Subject: [PATCH 121/495] Fixes the issue of parts of a vsam cluster remaining behind and allows user to correctly delete DS not in cat Signed-off-by: ddimatos <dimatos@gmail.com> --- plugins/module_utils/data_set.py | 338 ++++++++++-------- .../modules/test_zos_data_set_func.py | 83 +++-- 2 files changed, 227 insertions(+), 194 deletions(-) diff --git a/plugins/module_utils/data_set.py b/plugins/module_utils/data_set.py index d2781c0d5..f1aa1ee22 100644 --- a/plugins/module_utils/data_set.py +++ b/plugins/module_utils/data_set.py @@ -13,6 +13,7 @@ __metaclass__ = type +import pprint import re import tempfile from os import path, walk @@ -68,25 +69,18 @@ class DataSet(object): } _VSAM_CATALOG_COMMAND_NOT_INDEXED = """ DEFINE CLUSTER - - (NAME('{0}') - - VOLUMES({1} - - ) - - RECATALOG - - {2}) - - DATA( - - NAME('{0}.DATA')) + (NAME('{0}') - + VOLUMES({1}) - + RECATALOG {2}) - + DATA(NAME('{0}.DATA')) """ _VSAM_CATALOG_COMMAND_INDEXED = """ DEFINE CLUSTER - - (NAME('{0}') - - VOLUMES({1} - - ) - - RECATALOG - - {2}) - - DATA( - - NAME('{0}.DATA')) - - INDEX( - - NAME('{0}.INDEX')) + (NAME('{0}') - + VOLUMES({1}) - + RECATALOG {2}) - + DATA(NAME('{0}.DATA')) - + INDEX(NAME('{0}.INDEX')) """ _NON_VSAM_UNCATALOG_COMMAND = " UNCATLG DSNAME={0}" @@ -214,22 +208,14 @@ def ensure_absent(name, volumes=None): name (str) -- The name of the data set to ensure is absent. volumes (list[str]) -- The volumes the data set may reside on. Returns: - bool -- Indicates if changes were made. + changed (bool) -- Indicates if changes were made. """ - if volumes: - changed, present, pending_to_catalog_and_delete = DataSet.attempt_to_delete_uncataloged_data_set_if_necessary( - name, volumes) - if not pending_to_catalog_and_delete: - return changed - present, changed = DataSet.attempt_catalog_if_necessary(name, volumes) - if present: - DataSet.delete(name) - return True - return False + changed, present = DataSet.attempt_catalog_if_necessary_and_delete(name, volumes) - # ? should we do additional check to ensure member was actually created? + return changed + # ? should we do additional check to ensure member was actually created? @staticmethod def ensure_member_present(name, replace=False): """Creates data set member if it does not already exist. @@ -270,7 +256,7 @@ def ensure_cataloged(name, volumes): Returns: bool -- If changes were made. """ - if DataSet.data_set_cataloged(name): + if DataSet.data_set_cataloged(name, None): return False try: DataSet.catalog(name, volumes) @@ -345,7 +331,7 @@ def allocate_model_data_set(ds_name, model, vol=None): raise MVSCmdExecError(rc, out, err) @staticmethod - def data_set_cataloged(name): + def data_set_cataloged(name, volumes=None): """Determine if a data set is in catalog. Arguments: @@ -354,18 +340,26 @@ def data_set_cataloged(name): Returns: bool -- If data is is cataloged. """ + name = name.upper() module = AnsibleModuleHelper(argument_spec={}) stdin = " LISTCAT ENTRIES('{0}')".format(name) rc, stdout, stderr = module.run_command( "mvscmdauth --pgm=idcams --sysprint=* --sysin=stdin", data=stdin ) - if re.search(r"-\s" + name + r"\s*\n\s+IN-CAT", stdout): - return True + + if volumes: + cataloged_volume_list = DataSet.data_set_cataloged_volume_list(name) or [] + if bool(set(volumes) & set (cataloged_volume_list)): + return True + else: + if re.search(r"-\s" + name + r"\s*\n\s+IN-CAT", stdout): + return True + return False @staticmethod - def get_volume_list_for_cataloged_data_set(name): + def data_set_cataloged_volume_list(name): """Get the volume list for a cataloged dataset name. Arguments: name (str) -- The data set name to check if cataloged. @@ -380,8 +374,8 @@ def get_volume_list_for_cataloged_data_set(name): ) delimiter = 'VOLSER------------' arr = stdout.split(delimiter) - # If a volume serial is not always of lenght 6 we could use ":x.find(' ')" here instead of that index. - volume_list = [x[:x.find(' ')] for x in arr[1:]] + # A volume serial (VOLSER) is not always of fixed length, use ":x.find(' ')" here instead of arr[index]. + volume_list = list(set([x[:x.find(' ')] for x in arr[1:]])) return volume_list @staticmethod @@ -420,71 +414,6 @@ def data_set_member_exists(name): return False return True - @staticmethod - def delete_uncataloged_dataset(name, volumes): - """Delete an uncataloged dataset by specifying volumes. - Arguments: - name (str) -- The data set name to check if cataloged. - volumes (list[str]) -- The volumes the data set may reside on. - Returns: - bool -- Return code from the mvs_cmd, if 0 then it was successful. - """ - - # NVR specifies that the object to be deleted is an SMS-managed non-VSAM - # volume record (NVR) entry. This parameter must be specified to delete - # an NVR from a VSAM volume data set (VVDS) and its corresponding record - # from the VTOC. The NVR/VTOC entries are deleted only if the related - # non-VSAM object catalog entry does not exist. - - # VVR specifies that the objects to be deleted are one or more unrelated - # VSAM volume record (VVR) entries. To delete a VVR from both the VSAM - # volume data set (VVDS) and from the VTOC, you must specify this parameter. - - # To delete a VSAM DS that is not cataloged you must delete each VSAM record - # for that VSAM type and use VVR and FILE. You can simulate a uncataloged DS - # with commands: - # - echo " DELETE IBMUSER.VSAM.KSDS CLUSTER NOSCRATCH NOPURGE" | - # mvscmdauth --pgm=IDCAMS --sysprint=* --sysin=stdin - # echo " DELETE IBMUSER.DATASET NOSCRATCH" | mvscmdauth --pgm=IDCAMS - # --sysprint=* --sysin=stdin - - if DataSet.is_vsam(name, volumes): - vol_record_entry = 'VVR' - # Delete the DATA record of a VSAM, applies to KSDS, RRDS, ESDS, LDS - vsam_name_extension = '.DATA' - command = " DELETE {0} FILE(DD1) {1}".format(name + vsam_name_extension, vol_record_entry) - dds = dict(DD1=',vol,'.join(volumes) + ',vol') - rc, stdout, stderr = mvs_cmd.idcams(cmd=command, dds=dds, authorized=True) - # RC 8 occurs when the VSAM Record does not exist, thus acceptable - if rc > 8: - raise DatasetDeleteError(name, rc) - - # Delete the INDEX record of a VSAM, this does NOT apply to RRDS, ESDS, LDS but - # the VASAM is not in catalog so we can't detect the type of VSAM so we - # can expect an RC 8 to appear for non KSDS types. - vsam_name_extension = '.INDEX' - command = " DELETE {0} FILE(DD1) {1}".format(name + vsam_name_extension, vol_record_entry) - dds = dict(DD1=',vol,'.join(volumes) + ',vol') - rc, stdout, stderr = mvs_cmd.idcams(cmd=command, dds=dds, authorized=True) - # RC 8 occurs when the VSAM Record does exist, thus acceptable - if rc > 8: - raise DatasetDeleteError(name, rc) - else: - vol_record_entry = 'NVR' - command = " DELETE {0} FILE(DD1) {1}".format(name, vol_record_entry) - dds = dict(DD1=',vol,'.join(volumes) + ',vol') - rc, stdout, stderr = mvs_cmd.idcams(cmd=command, dds=dds, authorized=True) - # RC 8 occurs when the VSAM Record does not exist, thus acceptable - if rc > 8: - raise DatasetDeleteError(name, rc) - - # Callers expect a RC 0 to evaluate if there was a change, so normalize - # to rc 0 - if rc <= 8: - rc = 0 - - return rc - @staticmethod def data_set_shared_members(src, dest): """Checks for the existence of members from a source data set in @@ -505,45 +434,6 @@ def data_set_shared_members(src, dest): return False - @staticmethod - def attempt_to_delete_uncataloged_data_set_if_necessary(name, volumes): - """Attempt to delete any uncataloged dataset if exists on any user provided volumes - and there is a cataloged dataset with the same name. - Arguments: - name (str) -- The data set name to check if cataloged. - volumes (list[str]) -- The volumes the data set may reside on. - Returns: - bool -- If any action was performed on the data. - bool -- If the dataset is still present. - bool -- If given the volumes list and dataset name we need to continue with deleting the dataset as usual, - either by cataloging it and deleting or deleting a cataloged dataset. - """ - changed = False - present = True - pending_to_delete_cataloged_dataset = False - # Get the list of volumes that the dataset is catalogued in. - cataloged_volume_list = DataSet.get_volume_list_for_cataloged_data_set(name) - if len(cataloged_volume_list) == 0: - return changed, present, True - - # If a volume provided (volumes) is not in the list cataloged_volume_list, we need to - # delete them from the cataloged_volume_list, this leaves us with with uncataloged data sets that - # correspond to the volumes argument. - volumes_for_uncataloged_dataset = list(filter(lambda vol: vol not in cataloged_volume_list, volumes)) - - # If any volume provided (volumes) is in the list we will delete from catalog as normal. - pending_to_delete_cataloged_dataset = any(vol in volumes for vol in cataloged_volume_list) - - if len(volumes_for_uncataloged_dataset) > 0: - volumes = list(filter(lambda vol: DataSet._is_in_vtoc(name, vol), volumes)) - if len(volumes) > 0: - present = DataSet.delete_uncataloged_dataset(name, volumes) - changed = present == 0 - else: - changed = False - - return changed, present, pending_to_delete_cataloged_dataset - @staticmethod def get_member_name_from_file(file_name): """Creates a member name for a partitioned data set by taking up to the @@ -623,7 +513,7 @@ def data_set_volume(name): @staticmethod def data_set_type(name, volume=None): - """Checks the type of a data set. + """Checks the type of a data set, data sets must be cataloged. Arguments: name (str) -- The name of the data set. @@ -783,6 +673,114 @@ def attempt_catalog_if_necessary(name, volumes): present = True return present, changed + @staticmethod + def attempt_catalog_if_necessary_and_delete(name, volumes): + """Attempts to catalog a data set if not already cataloged, then deletes + the data set. + This is helpful when a data set currently cataloged is not the data + set needing to be deleted, meaning the one in the provided volumes + is needing to be deleted.. Recall, you can have a data set in + two different volumes, and only one cataloged. + + Arguments: + name (str) -- The name of the data set. + volumes (list[str]) -- The volumes the data set may reside on. + + Returns: + changed (bool) -- Whether changes were made. + present (bool) -- Whether the data set is now present. + """ + + changed = False + present = True + + if volumes: + # Check if the data set is cataloged + present = DataSet.data_set_cataloged(name) + + if present: + # Data set is cataloged, now check it its cataloged on the provided volumes + # If it is, we just delete because the DS is the right one wanting deletion. + present = DataSet.data_set_cataloged(name, volumes) + + if present: + DataSet.delete(name) + changed = True + present = False + else: + # It appears that what is in catalog does not match the provided + # volumes, therefore the user wishes we delete a data set on a + # particular volue, NOT what is in catalog. + # for the provided volumes + + # We need to identify the volumes where the current cataloged data set + # is located for use later when we recatalog. Code is strategically + # placed before the uncatalog. + cataloged_volume_list_original = DataSet.data_set_cataloged_volume_list(name) + + try: + DataSet.uncatalog(name) + except DatasetUncatalogError: + return changed, present + + # Catalog the data set for the provided volumes + try: + DataSet.catalog(name, volumes) + except DatasetCatalogError: + try: + # A failure, so recatalog the original data set on the original volumes + DataSet.catalog(name,cataloged_volume_list_original) + except DatasetCatalogError: + pass + return changed, present + + # Check the recatalog, ensure it cataloged before we try to remove + present = DataSet.data_set_cataloged(name, volumes) + + if present: + try: + DataSet.delete(name) + except DatasetDeleteError: + try: + DataSet.uncatalog(name) + except DatasetUncatalogError: + try: + DataSet.catalog(name,cataloged_volume_list_original) + except DatasetCatalogError: + pass + return changed, present + try: + DataSet.catalog(name,cataloged_volume_list_original) + changed = True + present = False + except DatasetCatalogError: + changed = True + present = False + return changed, present + else: + try: + DataSet.catalog(name, volumes) + except DatasetCatalogError: + return changed, present + + present = DataSet.data_set_cataloged(name, volumes) + + if present: + DataSet.delete(name) + changed = True + present = False + else: + present = DataSet.data_set_cataloged(name, None) + if present: + try: + DataSet.delete(name) + changed = True + present = False + except DatasetDeleteError: + return changed, present + + return changed, present + @staticmethod def _is_in_vtoc(name, volume): """Determines if data set is in a volume's table of contents. @@ -1115,25 +1113,53 @@ def _catalog_vsam(name, volumes): data_set_name = name.upper() success = False command_rc = 0 - for data_set_type in ["", "LINEAR", "INDEXED", "NONINDEXED", "NUMBERED"]: - if data_set_type != "INDEXED": - command = DataSet._VSAM_CATALOG_COMMAND_NOT_INDEXED.format( - data_set_name, - DataSet._build_volume_string_idcams(volumes), - data_set_type, - ) - else: - command = DataSet._VSAM_CATALOG_COMMAND_INDEXED.format( - data_set_name, - DataSet._build_volume_string_idcams(volumes), - data_set_type, - ) - command_rc, stdout, stderr = module.run_command( - "mvscmdauth --pgm=idcams --sysprint=* --sysin=stdin", data=command + command ="" + + # In order to catalog a uncataloged data set, we can't rely on LISTCAT + # so using the VTOC entries we can make some assumptions of if the data set + # is indexed, linear etc. + ds_vtoc_data_entry = vtoc.get_data_set_entry(name+".DATA", volumes[0]) + ds_vtoc_index_entry = vtoc.get_data_set_entry(name+".INDEX", volumes[0]) + + if ds_vtoc_data_entry and ds_vtoc_index_entry: + data_set_type_vsam = "INDEXED" + else: + data_set_type_vsam = "NONINDEXED" + + if data_set_type_vsam != "INDEXED": + command = DataSet._VSAM_CATALOG_COMMAND_NOT_INDEXED.format( + data_set_name, + DataSet._build_volume_string_idcams(volumes), + data_set_type_vsam, + ) + else: + command = DataSet._VSAM_CATALOG_COMMAND_INDEXED.format( + data_set_name, + DataSet._build_volume_string_idcams(volumes), + data_set_type_vsam, + ) + + command_rc, stdout, stderr = module.run_command( + "mvscmdauth --pgm=idcams --sysprint=* --sysin=stdin", data=command) + + if command_rc == 0: + success = True + # break + + if not success: + # Liberty taken such that here we can assume its a LINEAR VSAM + command = DataSet._VSAM_CATALOG_COMMAND_NOT_INDEXED.format( + data_set_name, + DataSet._build_volume_string_idcams(volumes), + "LINEAR", ) + + command_rc, stdout, stderr = module.run_command( + "mvscmdauth --pgm=idcams --sysprint=* --sysin=stdin", data=command) + if command_rc == 0: success = True - break + if not success: raise DatasetCatalogError( name, diff --git a/tests/functional/modules/test_zos_data_set_func.py b/tests/functional/modules/test_zos_data_set_func.py index c4833aa56..2eae7de27 100644 --- a/tests/functional/modules/test_zos_data_set_func.py +++ b/tests/functional/modules/test_zos_data_set_func.py @@ -34,8 +34,8 @@ ("lds"), ] -DEFAULT_VOLUME = "000000" -DEFAULT_VOLUME2 = "222222" +VOLUME_000000 = "000000" +VOLUME_222222 = "222222" DEFAULT_DATA_SET_NAME = "USER.PRIVATE.TESTDS" DEFAULT_DATA_SET_NAME_WITH_MEMBER = "USER.PRIVATE.TESTDS(TESTME)" TEMP_PATH = "/tmp/jcl" @@ -140,6 +140,9 @@ def retrieve_data_set_names(results): data_set_names.append(name) return data_set_names +def print_results(results): + for result in results.contacted.values(): + pprint(result) @pytest.mark.parametrize( "jcl", @@ -149,7 +152,7 @@ def test_data_set_catalog_and_uncatalog(ansible_zos_module, jcl): try: hosts = ansible_zos_module hosts.all.zos_data_set( - name=DEFAULT_DATA_SET_NAME, state="cataloged", volumes=DEFAULT_VOLUME + name=DEFAULT_DATA_SET_NAME, state="cataloged", volumes=VOLUME_000000 ) hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") @@ -160,13 +163,10 @@ def test_data_set_catalog_and_uncatalog(ansible_zos_module, jcl): ) # verify data set creation was successful for result in results.contacted.values(): - pprint(result) if(result.get("jobs")[0].get("ret_code") is None): submitted_job_id = result.get("jobs")[0].get("job_id") assert submitted_job_id is not None results = hosts.all.zos_job_output(job_id=submitted_job_id) - print("Getting failed JOB") - pprint(vars(results)) assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" # verify first uncatalog was performed results = hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="uncataloged") @@ -178,13 +178,13 @@ def test_data_set_catalog_and_uncatalog(ansible_zos_module, jcl): assert result.get("changed") is False # recatalog the data set results = hosts.all.zos_data_set( - name=DEFAULT_DATA_SET_NAME, state="cataloged", volumes=DEFAULT_VOLUME + name=DEFAULT_DATA_SET_NAME, state="cataloged", volumes=VOLUME_000000 ) for result in results.contacted.values(): assert result.get("changed") is True # verify second catalog shows catalog already performed results = hosts.all.zos_data_set( - name=DEFAULT_DATA_SET_NAME, state="cataloged", volumes=DEFAULT_VOLUME + name=DEFAULT_DATA_SET_NAME, state="cataloged", volumes=VOLUME_000000 ) for result in results.contacted.values(): assert result.get("changed") is False @@ -192,7 +192,7 @@ def test_data_set_catalog_and_uncatalog(ansible_zos_module, jcl): # clean up hosts.all.file(path=TEMP_PATH, state="absent") # Added volumes to force a catalog in case they were somehow uncataloged to avoid an duplicate on volume error - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent", volumes=[DEFAULT_VOLUME, DEFAULT_VOLUME2]) + hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent", volumes=[VOLUME_000000, VOLUME_222222]) @pytest.mark.parametrize( @@ -203,9 +203,10 @@ def test_data_set_present_when_uncataloged(ansible_zos_module, jcl): try: hosts = ansible_zos_module hosts.all.zos_data_set( - name=DEFAULT_DATA_SET_NAME, state="cataloged", volumes=DEFAULT_VOLUME + name=DEFAULT_DATA_SET_NAME, state="cataloged", volumes=VOLUME_000000 ) hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") + hosts.all.file(path=TEMP_PATH, state="directory") hosts.all.shell(cmd=ECHO_COMMAND.format(quote(jcl), TEMP_PATH)) results = hosts.all.zos_job_submit( @@ -216,7 +217,7 @@ def test_data_set_present_when_uncataloged(ansible_zos_module, jcl): assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" # ensure data set present results = hosts.all.zos_data_set( - name=DEFAULT_DATA_SET_NAME, state="present", volumes=DEFAULT_VOLUME + name=DEFAULT_DATA_SET_NAME, state="present", volumes=VOLUME_000000 ) for result in results.contacted.values(): assert result.get("changed") is False @@ -226,13 +227,13 @@ def test_data_set_present_when_uncataloged(ansible_zos_module, jcl): assert result.get("changed") is True # ensure data set present results = hosts.all.zos_data_set( - name=DEFAULT_DATA_SET_NAME, state="present", volumes=DEFAULT_VOLUME + name=DEFAULT_DATA_SET_NAME, state="present", volumes=VOLUME_000000 ) for result in results.contacted.values(): assert result.get("changed") is True finally: hosts.all.file(path=TEMP_PATH, state="absent") - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent", volumes=DEFAULT_VOLUME) + hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent", volumes=VOLUME_000000) @pytest.mark.parametrize( @@ -243,9 +244,10 @@ def test_data_set_replacement_when_uncataloged(ansible_zos_module, jcl): try: hosts = ansible_zos_module hosts.all.zos_data_set( - name=DEFAULT_DATA_SET_NAME, state="cataloged", volumes=DEFAULT_VOLUME + name=DEFAULT_DATA_SET_NAME, state="cataloged", volumes=VOLUME_000000 ) hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") + hosts.all.file(path=TEMP_PATH, state="directory") hosts.all.shell(cmd=ECHO_COMMAND.format(quote(jcl), TEMP_PATH)) results = hosts.all.zos_job_submit( @@ -256,7 +258,7 @@ def test_data_set_replacement_when_uncataloged(ansible_zos_module, jcl): assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" # ensure data set present results = hosts.all.zos_data_set( - name=DEFAULT_DATA_SET_NAME, state="present", volumes=DEFAULT_VOLUME + name=DEFAULT_DATA_SET_NAME, state="present", volumes=VOLUME_000000 ) for result in results.contacted.values(): assert result.get("changed") is False @@ -268,7 +270,7 @@ def test_data_set_replacement_when_uncataloged(ansible_zos_module, jcl): results = hosts.all.zos_data_set( name=DEFAULT_DATA_SET_NAME, state="present", - volumes=DEFAULT_VOLUME, + volumes=VOLUME_000000, replace=True, ) for result in results.contacted.values(): @@ -286,9 +288,10 @@ def test_data_set_absent_when_uncataloged(ansible_zos_module, jcl): try: hosts = ansible_zos_module hosts.all.zos_data_set( - name=DEFAULT_DATA_SET_NAME, state="cataloged", volumes=DEFAULT_VOLUME + name=DEFAULT_DATA_SET_NAME, state="cataloged", volumes=VOLUME_000000 ) hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") + hosts.all.file(path=TEMP_PATH, state="directory") hosts.all.shell(cmd=ECHO_COMMAND.format(quote(jcl), TEMP_PATH)) results = hosts.all.zos_job_submit( @@ -303,7 +306,7 @@ def test_data_set_absent_when_uncataloged(ansible_zos_module, jcl): assert result.get("changed") is True # ensure data set absent results = hosts.all.zos_data_set( - name=DEFAULT_DATA_SET_NAME, state="absent", volumes=DEFAULT_VOLUME + name=DEFAULT_DATA_SET_NAME, state="absent", volumes=VOLUME_000000 ) for result in results.contacted.values(): assert result.get("changed") is True @@ -318,40 +321,44 @@ def test_data_set_absent_when_uncataloged(ansible_zos_module, jcl): ) def test_data_set_absent_when_uncataloged_and_same_name_cataloged_is_present(ansible_zos_module, jcl): hosts = ansible_zos_module - hosts.all.zos_data_set( - name=DEFAULT_DATA_SET_NAME, state="cataloged", volumes=DEFAULT_VOLUME - ) + hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="cataloged", volumes=VOLUME_000000) + hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") + hosts.all.file(path=TEMP_PATH, state="directory") hosts.all.shell(cmd=ECHO_COMMAND.format(quote(jcl), TEMP_PATH)) - results = hosts.all.zos_job_submit( - src=TEMP_PATH + "/SAMPLE", location="USS", wait=True - ) + hosts.all.zos_job_submit(src=TEMP_PATH + "/SAMPLE", location="USS", wait=True) + # verify data set creation was successful for result in results.contacted.values(): assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" + # uncatalog the data set results = hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="uncataloged") for result in results.contacted.values(): assert result.get("changed") is True + # Create the same dataset name in different volume - jcl = jcl.replace(DEFAULT_VOLUME, DEFAULT_VOLUME2) + jcl = jcl.replace(VOLUME_000000, VOLUME_222222) + hosts.all.file(path=TEMP_PATH + "/SAMPLE", state="absent") hosts.all.shell(cmd=ECHO_COMMAND.format(quote(jcl), TEMP_PATH)) - results = hosts.all.zos_job_submit( - src=TEMP_PATH + "/SAMPLE", location="USS", wait=True - ) + results = hosts.all.zos_job_submit(src=TEMP_PATH + "/SAMPLE", location="USS", wait=True) + # verify data set creation was successful for result in results.contacted.values(): assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" + hosts.all.file(path=TEMP_PATH, state="absent") + # ensure data set absent - results = hosts.all.zos_data_set( - name=DEFAULT_DATA_SET_NAME, state="absent", volumes=DEFAULT_VOLUME - ) + results = hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent", volumes=VOLUME_000000) + for result in results.contacted.values(): + assert result.get("changed") is True + + results = hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") for result in results.contacted.values(): assert result.get("changed") is True - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") @pytest.mark.parametrize("dstype", data_set_types) @@ -499,7 +506,7 @@ def test_data_member_force_delete(ansible_zos_module): for result in results.contacted.values(): assert result.get("changed") is True - # add members + #add members results = hosts.all.zos_data_set( batch=[ { @@ -672,7 +679,7 @@ def test_multi_volume_creation_uncatalog_and_catalog_nonvsam(ansible_zos_module) space_primary=5, space_type="CYL", record_length=15, - volumes=[DEFAULT_VOLUME, DEFAULT_VOLUME2], + volumes=[VOLUME_000000, VOLUME_222222], ) for result in results.contacted.values(): assert result.get("changed") is True @@ -686,7 +693,7 @@ def test_multi_volume_creation_uncatalog_and_catalog_nonvsam(ansible_zos_module) results = hosts.all.zos_data_set( name=DEFAULT_DATA_SET_NAME, state="cataloged", - volumes=[DEFAULT_VOLUME, DEFAULT_VOLUME2], + volumes=[VOLUME_000000, VOLUME_222222], ) for result in results.contacted.values(): assert result.get("changed") is True @@ -706,7 +713,7 @@ def test_multi_volume_creation_uncatalog_and_catalog_vsam(ansible_zos_module): key_offset=0, space_primary=5, space_type="CYL", - volumes=[DEFAULT_VOLUME, DEFAULT_VOLUME2], + volumes=[VOLUME_000000, VOLUME_222222], ) for result in results.contacted.values(): assert result.get("changed") is True @@ -720,7 +727,7 @@ def test_multi_volume_creation_uncatalog_and_catalog_vsam(ansible_zos_module): results = hosts.all.zos_data_set( name=DEFAULT_DATA_SET_NAME, state="cataloged", - volumes=[DEFAULT_VOLUME, DEFAULT_VOLUME2], + volumes=[VOLUME_000000, VOLUME_222222], ) for result in results.contacted.values(): assert result.get("changed") is True @@ -738,7 +745,7 @@ def test_data_set_old_aliases(ansible_zos_module): state="present", format="fb", size="5m", - volume=DEFAULT_VOLUME, + volume=VOLUME_000000, ) for result in results.contacted.values(): assert result.get("changed") is True From 9ce6eefd182dc28ebd1b16566fd3786d98bb4b20 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Fri, 9 Jun 2023 16:21:14 -0700 Subject: [PATCH 122/495] Update module doc to explain data set deltion for given volume Signed-off-by: ddimatos <dimatos@gmail.com> --- plugins/modules/zos_data_set.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/plugins/modules/zos_data_set.py b/plugins/modules/zos_data_set.py index dde8f3488..bcf468599 100644 --- a/plugins/modules/zos_data_set.py +++ b/plugins/modules/zos_data_set.py @@ -310,6 +310,13 @@ found in the catalog, the module attempts to perform catalog using supplied I(name) and I(volumes). If the attempt to catalog the data set catalog fails, then no action is taken. Module completes successfully with I(changed=False). + - > + If I(state=absent) and I(volumes) is provided, and the data set is found in + the catalog, the module compares the catalog volume attributes to the provided + I(volumes). If they volume attributes are different, the cataloged data set + will be uncataloged temporarily while the requested data set be deleted is cataloged. + The module will catalog the original data set on completion, if the attempts to + catalog fail, no action is taken. Module completes successfully with I(changed=False). - > If I(state=present) and the data set does not exist on the managed node, create and catalog the data set, module completes successfully with I(changed=True). From 6c2669f23d3c7ecbd84ae54590ec6ba8802d6cbd Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Fri, 9 Jun 2023 16:24:50 -0700 Subject: [PATCH 123/495] Update module doc to explain data set deltion for given volumegit Signed-off-by: ddimatos <dimatos@gmail.com> --- docs/source/modules/zos_data_set.rst | 6 ++++++ plugins/modules/zos_data_set.py | 7 +++++++ 2 files changed, 13 insertions(+) diff --git a/docs/source/modules/zos_data_set.rst b/docs/source/modules/zos_data_set.rst index c310069e8..8415694b0 100644 --- a/docs/source/modules/zos_data_set.rst +++ b/docs/source/modules/zos_data_set.rst @@ -56,6 +56,9 @@ state If *state=absent* and *volumes* is provided, and the data set is not found in the catalog, the module attempts to perform catalog using supplied *name* and *volumes*. If the attempt to catalog the data set catalog fails, then no action is taken. Module completes successfully with *changed=False*. + If *state=absent* and *volumes* is provided, and the data set is found in the catalog, the module compares the catalog volume attributes to the provided *volumes*. If they volume attributes are different, the cataloged data set will be uncataloged temporarily while the requested data set be deleted is cataloged. The module will catalog the original data set on completion, if the attempts to catalog fail, no action is taken. Module completes successfully with *changed=False*. + + If *state=present* and the data set does not exist on the managed node, create and catalog the data set, module completes successfully with *changed=True*. @@ -324,6 +327,9 @@ batch If *state=absent* and *volumes* is provided, and the data set is not found in the catalog, the module attempts to perform catalog using supplied *name* and *volumes*. If the attempt to catalog the data set catalog fails, then no action is taken. Module completes successfully with *changed=False*. + If *state=absent* and *volumes* is provided, and the data set is found in the catalog, the module compares the catalog volume attributes to the provided *volumes*. If they volume attributes are different, the cataloged data set will be uncataloged temporarily while the requested data set be deleted is cataloged. The module will catalog the original data set on completion, if the attempts to catalog fail, no action is taken. Module completes successfully with *changed=False*. + + If *state=present* and the data set does not exist on the managed node, create and catalog the data set, module completes successfully with *changed=True*. diff --git a/plugins/modules/zos_data_set.py b/plugins/modules/zos_data_set.py index bcf468599..b279d40d2 100644 --- a/plugins/modules/zos_data_set.py +++ b/plugins/modules/zos_data_set.py @@ -57,6 +57,13 @@ found in the catalog, the module attempts to perform catalog using supplied I(name) and I(volumes). If the attempt to catalog the data set catalog fails, then no action is taken. Module completes successfully with I(changed=False). + - > + If I(state=absent) and I(volumes) is provided, and the data set is found in + the catalog, the module compares the catalog volume attributes to the provided + I(volumes). If they volume attributes are different, the cataloged data set + will be uncataloged temporarily while the requested data set be deleted is cataloged. + The module will catalog the original data set on completion, if the attempts to + catalog fail, no action is taken. Module completes successfully with I(changed=False). - > If I(state=present) and the data set does not exist on the managed node, create and catalog the data set, module completes successfully with I(changed=True). From 328dd16bb687fe042bf2981376914ad33b20a39e Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Fri, 9 Jun 2023 16:29:40 -0700 Subject: [PATCH 124/495] Unbound local var fix Signed-off-by: ddimatos <dimatos@gmail.com> --- tests/functional/modules/test_zos_data_set_func.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/functional/modules/test_zos_data_set_func.py b/tests/functional/modules/test_zos_data_set_func.py index 2eae7de27..63301d2e8 100644 --- a/tests/functional/modules/test_zos_data_set_func.py +++ b/tests/functional/modules/test_zos_data_set_func.py @@ -327,7 +327,7 @@ def test_data_set_absent_when_uncataloged_and_same_name_cataloged_is_present(ans hosts.all.file(path=TEMP_PATH, state="directory") hosts.all.shell(cmd=ECHO_COMMAND.format(quote(jcl), TEMP_PATH)) - hosts.all.zos_job_submit(src=TEMP_PATH + "/SAMPLE", location="USS", wait=True) + results =hosts.all.zos_job_submit(src=TEMP_PATH + "/SAMPLE", location="USS", wait=True) # verify data set creation was successful for result in results.contacted.values(): From d52c2a8996a5043c38a030568fd030eb8d75fa45 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Fri, 9 Jun 2023 16:50:03 -0700 Subject: [PATCH 125/495] added changelog fragments Signed-off-by: ddimatos <dimatos@gmail.com> --- .../791-doc-zos_data_set-member-update.yml | 5 ----- .../fragments/791-zos_data_set-update-vsam.yml | 17 +++++++++++++++++ 2 files changed, 17 insertions(+), 5 deletions(-) delete mode 100644 changelogs/fragments/791-doc-zos_data_set-member-update.yml create mode 100644 changelogs/fragments/791-zos_data_set-update-vsam.yml diff --git a/changelogs/fragments/791-doc-zos_data_set-member-update.yml b/changelogs/fragments/791-doc-zos_data_set-member-update.yml deleted file mode 100644 index 4ab0eee03..000000000 --- a/changelogs/fragments/791-doc-zos_data_set-member-update.yml +++ /dev/null @@ -1,5 +0,0 @@ -trivial: -- zos_data_set - when a member is created by the module, the format is type - data which is not suitable for executables. This change describes the - format used when creating member. - (https://github.com/ansible-collections/ibm_zos_core/pull/791) \ No newline at end of file diff --git a/changelogs/fragments/791-zos_data_set-update-vsam.yml b/changelogs/fragments/791-zos_data_set-update-vsam.yml new file mode 100644 index 000000000..00ec67495 --- /dev/null +++ b/changelogs/fragments/791-zos_data_set-update-vsam.yml @@ -0,0 +1,17 @@ +trivial: +- zos_data_set - when a member is created by the module, the format is type + data which is not suitable for executables. This change describes the + format used when creating member. + (https://github.com/ansible-collections/ibm_zos_core/pull/791) +- ac - Reported an issue when functional tests ran leaving behind files. Fix + now removes the unwanted files. + (https://github.com/ansible-collections/ibm_zos_core/pull/791) +bugfixes: +- zos_copy - Reported a failure caused when `present=absent` for a VSAM + data set leaving behind cluster components. Fix introduces a new logical + flow that will evaluate the volumes, compare it to the provided value and + if necessary catalog and delete. + (https://github.com/ansible-collections/ibm_zos_core/pull/762). +- module_utils - data_set.py - Reported a failure caused when cataloging a + VSAM data set. Fix now corrects how VSAM data sets are cataloged. + (https://github.com/ansible-collections/ibm_zos_core/pull/762). \ No newline at end of file From acdd1bbce04593ae9a61387d38e256de972c9ad8 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Fri, 9 Jun 2023 17:08:06 -0700 Subject: [PATCH 126/495] Lint corrections Signed-off-by: ddimatos <dimatos@gmail.com> --- plugins/module_utils/data_set.py | 18 ++++++++---------- 1 file changed, 8 insertions(+), 10 deletions(-) diff --git a/plugins/module_utils/data_set.py b/plugins/module_utils/data_set.py index f1aa1ee22..f1d8152a8 100644 --- a/plugins/module_utils/data_set.py +++ b/plugins/module_utils/data_set.py @@ -210,9 +210,7 @@ def ensure_absent(name, volumes=None): Returns: changed (bool) -- Indicates if changes were made. """ - - changed, present = DataSet.attempt_catalog_if_necessary_and_delete(name, volumes) - + changed, present = DataSet.attempt_catalog_if_necessary_and_delete(name, volumes) return changed # ? should we do additional check to ensure member was actually created? @@ -350,7 +348,7 @@ def data_set_cataloged(name, volumes=None): if volumes: cataloged_volume_list = DataSet.data_set_cataloged_volume_list(name) or [] - if bool(set(volumes) & set (cataloged_volume_list)): + if bool(set(volumes) & set(cataloged_volume_list)): return True else: if re.search(r"-\s" + name + r"\s*\n\s+IN-CAT", stdout): @@ -729,7 +727,7 @@ def attempt_catalog_if_necessary_and_delete(name, volumes): except DatasetCatalogError: try: # A failure, so recatalog the original data set on the original volumes - DataSet.catalog(name,cataloged_volume_list_original) + DataSet.catalog(name, cataloged_volume_list_original) except DatasetCatalogError: pass return changed, present @@ -745,12 +743,12 @@ def attempt_catalog_if_necessary_and_delete(name, volumes): DataSet.uncatalog(name) except DatasetUncatalogError: try: - DataSet.catalog(name,cataloged_volume_list_original) + DataSet.catalog(name, cataloged_volume_list_original) except DatasetCatalogError: pass return changed, present try: - DataSet.catalog(name,cataloged_volume_list_original) + DataSet.catalog(name, cataloged_volume_list_original) changed = True present = False except DatasetCatalogError: @@ -1113,13 +1111,13 @@ def _catalog_vsam(name, volumes): data_set_name = name.upper() success = False command_rc = 0 - command ="" + command = "" # In order to catalog a uncataloged data set, we can't rely on LISTCAT # so using the VTOC entries we can make some assumptions of if the data set # is indexed, linear etc. - ds_vtoc_data_entry = vtoc.get_data_set_entry(name+".DATA", volumes[0]) - ds_vtoc_index_entry = vtoc.get_data_set_entry(name+".INDEX", volumes[0]) + ds_vtoc_data_entry = vtoc.get_data_set_entry(name + ".DATA", volumes[0]) + ds_vtoc_index_entry = vtoc.get_data_set_entry(name + ".INDEX", volumes[0]) if ds_vtoc_data_entry and ds_vtoc_index_entry: data_set_type_vsam = "INDEXED" From 55dd8710a8bea1bc6a6a0b9a218bb4be21c2e451 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Sat, 10 Jun 2023 22:44:42 -0700 Subject: [PATCH 127/495] Update changelog based on PR feedback Signed-off-by: ddimatos <dimatos@gmail.com> --- changelogs/fragments/791-zos_data_set-update-vsam.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/changelogs/fragments/791-zos_data_set-update-vsam.yml b/changelogs/fragments/791-zos_data_set-update-vsam.yml index 00ec67495..c4ad9901c 100644 --- a/changelogs/fragments/791-zos_data_set-update-vsam.yml +++ b/changelogs/fragments/791-zos_data_set-update-vsam.yml @@ -7,11 +7,11 @@ trivial: now removes the unwanted files. (https://github.com/ansible-collections/ibm_zos_core/pull/791) bugfixes: -- zos_copy - Reported a failure caused when `present=absent` for a VSAM +- zos_data_set - Reported a failure caused when `present=absent` for a VSAM data set leaving behind cluster components. Fix introduces a new logical flow that will evaluate the volumes, compare it to the provided value and if necessary catalog and delete. - (https://github.com/ansible-collections/ibm_zos_core/pull/762). + (https://github.com/ansible-collections/ibm_zos_core/pull/791). - module_utils - data_set.py - Reported a failure caused when cataloging a VSAM data set. Fix now corrects how VSAM data sets are cataloged. - (https://github.com/ansible-collections/ibm_zos_core/pull/762). \ No newline at end of file + (https://github.com/ansible-collections/ibm_zos_core/pull/791). \ No newline at end of file From 6c704574d5da8191234b507cca25dddef4ce68b8 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Sun, 11 Jun 2023 12:44:54 -0700 Subject: [PATCH 128/495] Increase ansible supported version to 2.15 Signed-off-by: ddimatos <dimatos@gmail.com> --- meta/runtime.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/meta/runtime.yml b/meta/runtime.yml index dbba1c7ce..37ae81b1f 100644 --- a/meta/runtime.yml +++ b/meta/runtime.yml @@ -1,2 +1,2 @@ --- -requires_ansible: '>=2.9,<2.15' +requires_ansible: '>=2.9,<2.16' From 923553931c51b0fca14a4a2c585a31991e9a52cf Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Sun, 11 Jun 2023 12:45:26 -0700 Subject: [PATCH 129/495] remove unused imports Signed-off-by: ddimatos <dimatos@gmail.com> --- plugins/module_utils/data_set.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/plugins/module_utils/data_set.py b/plugins/module_utils/data_set.py index f1d8152a8..647f26871 100644 --- a/plugins/module_utils/data_set.py +++ b/plugins/module_utils/data_set.py @@ -13,7 +13,6 @@ __metaclass__ = type -import pprint import re import tempfile from os import path, walk @@ -40,11 +39,9 @@ vtoc = MissingImport("vtoc") try: - from zoautil_py import datasets, mvscmd, types + from zoautil_py import datasets except ImportError: datasets = MissingZOAUImport() - mvscmd = MissingZOAUImport() - types = MissingZOAUImport() class DataSet(object): From 5f9ef2352967c07869eaaea9efde4e32a71086bd Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Sun, 11 Jun 2023 12:50:44 -0700 Subject: [PATCH 130/495] Added 2.16 ignore since our pipeline supports devel which is at this time 2.16 Signed-off-by: ddimatos <dimatos@gmail.com> --- tests/sanity/ignore-2.16.txt | 35 +++++++++++++++++++++++++++++++++++ 1 file changed, 35 insertions(+) create mode 100644 tests/sanity/ignore-2.16.txt diff --git a/tests/sanity/ignore-2.16.txt b/tests/sanity/ignore-2.16.txt new file mode 100644 index 000000000..8b4540038 --- /dev/null +++ b/tests/sanity/ignore-2.16.txt @@ -0,0 +1,35 @@ +plugins/modules/zos_apf.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_backup_restore.py validate-modules:doc-choices-do-not-match-spec # We use our own argument parser for advanced conditional and dependent arguments. +plugins/modules/zos_backup_restore.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_blockinfile.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_copy.py validate-modules:doc-default-does-not-match-spec # We use our own argument parser for advanced conditional and dependent arguments. +plugins/modules/zos_copy.py validate-modules:doc-type-does-not-match-spec # doc type should be str, while spec type is path to allow user path expansion +plugins/modules/zos_copy.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_copy.py validate-modules:parameter-type-not-in-doc # Passing args from action plugin +plugins/modules/zos_copy.py validate-modules:undocumented-parameter # Passing args from action plugin +plugins/modules/zos_data_set.py validate-modules:doc-choices-do-not-match-spec # We use our own argument parser for advanced conditional and dependent arguments. +plugins/modules/zos_data_set.py validate-modules:doc-type-does-not-match-spec # Have to use raw here for backwards compatibility with old module args, but would confuse current users if exposed. +plugins/modules/zos_data_set.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_data_set.py validate-modules:undocumented-parameter # Keep aliases to match behavior of old module spec, but some aliases were functionally inaccurate, and detailing in docs would only confuse user. +plugins/modules/zos_encode.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_fetch.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_fetch.py validate-modules:parameter-type-not-in-doc # Passing args from action plugin +plugins/modules/zos_fetch.py validate-modules:undocumented-parameter # Passing args from action plugin +plugins/modules/zos_find.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_job_output.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_job_query.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_job_submit.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_job_submit.py validate-modules:parameter-type-not-in-doc # Passing args from action plugin +plugins/modules/zos_job_submit.py validate-modules:undocumented-parameter # Passing args from action plugin +plugins/modules/zos_job_submit.py pylint:catching-non-exception # False positive, Exception is inherited +plugins/modules/zos_lineinfile.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_mount.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_mvs_raw.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_operator.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_operator_action_query.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_ping.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_ping.rexx validate-modules:invalid-extension # Opened issue https://github.com/ansible/ansible/issues/79784 +plugins/modules/zos_ping.rexx validate-modules:python-syntax-error # Opened issue https://github.com/ansible/ansible/issues/79784 +plugins/modules/zos_tso_command.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_gather_facts.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_volume_init.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 From 494832e7df2458c11cc4800e9db43ddf57cee8ae Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Mon, 12 Jun 2023 16:36:02 -0600 Subject: [PATCH 131/495] Change the line for the functional one (#805) --- tests/functional/modules/test_zos_copy_func.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/functional/modules/test_zos_copy_func.py b/tests/functional/modules/test_zos_copy_func.py index 374bf2b47..86d5e7564 100644 --- a/tests/functional/modules/test_zos_copy_func.py +++ b/tests/functional/modules/test_zos_copy_func.py @@ -545,7 +545,7 @@ def test_copy_subdirs_folders_and_validate_recursive_encoding_local(ansible_zos_ assert result.get("stdout") == DUMMY_DATA finally: hosts.all.file(name=dest_path, state="absent") - source_1.cleanup(ignore_cleanup_errors = True) + source_1.cleanup() @pytest.mark.uss From a9507f6e24655cf191c8442da5abe5348afafb3a Mon Sep 17 00:00:00 2001 From: Demetri <dimatos@gmail.com> Date: Fri, 16 Jun 2023 13:50:27 -0700 Subject: [PATCH 132/495] Add ansible-lint tooling added (#812) * Add ansible-lint tooling Signed-off-by: ddimatos <dimatos@gmail.com> * add changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * missing hyphen from command doc Signed-off-by: ddimatos <dimatos@gmail.com> * Update ac command with doc corrections Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> --- .ansible-lint | 35 +++++++++++++++++++++++ ac | 28 +++++++++++++++--- changelogs/fragments/812-ansible-lint.yml | 4 +++ galaxy.yml | 3 ++ meta/runtime.yml | 2 +- 5 files changed, 67 insertions(+), 5 deletions(-) create mode 100644 .ansible-lint create mode 100644 changelogs/fragments/812-ansible-lint.yml diff --git a/.ansible-lint b/.ansible-lint new file mode 100644 index 000000000..a83129368 --- /dev/null +++ b/.ansible-lint @@ -0,0 +1,35 @@ +exclude_paths: + - .tar.gz + - __pycache__/ + - .cache/ + - .DS_Store + - .git/ + - .github/ + - .gitignore + - .python-version + - .pytest_cache/ + - .vscode/ + - Jenkinsfile + - ac + - ansible.cfg + - changelogs/ + - collections/ + - docs/ + - scripts/ + - test_config.yml + - tests/*.ini + - tests/*.py + - tests/.pytest_cache + - tests/__pycache__ + - tests/functional + - tests/helpers + - tests/requirements.txt + - tests/unit + - tests/sanity/ignore-2.9.txt + - tests/sanity/ignore-2.10.txt + - tests/sanity/ignore-2.11.txt + - venv* +parseable: true +quiet: false +use_default_rules: true +verbosity: 1 diff --git a/ac b/ac index 3d0394cac..1d06757f5 100755 --- a/ac +++ b/ac @@ -205,8 +205,8 @@ option_sanitize(){ # ------------------------------------------------------------------------------ #->ac-bandit: ## Run a bandit security scan on the plugins directory, set the severity level. -## Usage: ac [-s <level>, --bandit <level>] -## Usage: ac [-s <level>, --bandit <level>] +## Usage: ac [--bandit <level>] +## Usage: ac [--bandit <level>] ## <level> - choose from 'l', 'll', 'lll' ## - l all low, medium, high severity ## - ll all medium, high severity @@ -230,7 +230,7 @@ ac_bandit(){ # ------------------------------------------------------------------------------ #->ac-build: ## Build and install collection of the local GH branch. -## Usage: ac [-b, --ac-build] +## Usage: ac [--ac-build] ## Example: ## $ ac --ac-build ac_build(){ @@ -265,13 +265,27 @@ ac_install(){ fi } +# ------------------------------------------------------------------------------ +# Run ansible-lint on the locally checked out GH Branch +# ------------------------------------------------------------------------------ +#->ac-lint: +## Run ansible-lint on the local GH branch with the production profile. +## Usage: ac [--ac-lint] +## Example: +## $ ac --ac-lint +ac_ansible_lint(){ + gh_branch=`git branch |grep "*" | cut -d" " -f2` + message "Linting with ansible-lint on GH branch: '$gh_branch'." + . $VENV_BIN/activate && $VENV_BIN/ansible-lint --profile production +} + # ------------------------------------------------------------------------------ # Run the sanity test using docker given python version else default to venv # ------------------------------------------------------------------------------ #->ac-sanity: ## Run ansible-test in docker if the docker engine is running, else run them in ## a managed virtual environment using the installed python version. -## Usage: ac [--ac-lint] [--version <version>] +## Usage: ac [--ac-sanity] [--version <version>] ## Options: ## <version> - Only applies to when docker is running. ## - No version selection will run all available python versions in docker. @@ -644,6 +658,10 @@ while true; do ensure_managed_venv_exists $1 # Command option_submitted="--ac-install" ;; + --ac-lint) + ensure_managed_venv_exists $1 # Command + option_submitted="--ac-lint" + ;; --ac-sanity |--ac-sanity=?*) # Command ensure_managed_venv_exists $1 option_submitted="--ac-sanity" @@ -785,6 +803,8 @@ elif [ "$option_submitted" ] && [ "$option_submitted" = "--ac-build" ] ; then ac_build elif [ "$option_submitted" ] && [ "$option_submitted" = "--ac-install" ] ; then ac_install $version +elif [ "$option_submitted" ] && [ "$option_submitted" = "--ac-lint" ] ; then + ac_ansible_lint elif [ "$option_submitted" ] && [ "$option_submitted" = "--ac-sanity" ] ; then ac_sanity $version elif [ "$option_submitted" ] && [ "$option_submitted" = "--ac-test" ] ; then diff --git a/changelogs/fragments/812-ansible-lint.yml b/changelogs/fragments/812-ansible-lint.yml new file mode 100644 index 000000000..0cb520884 --- /dev/null +++ b/changelogs/fragments/812-ansible-lint.yml @@ -0,0 +1,4 @@ +trivial: +- ansible-lint - enabling ansible-lint for 2.15 and Ansible Automation Platform + certification. + (https://github.com/ansible-collections/ibm_zos_core/pull/812) diff --git a/galaxy.yml b/galaxy.yml index e4b998278..fad6b8f5c 100644 --- a/galaxy.yml +++ b/galaxy.yml @@ -90,4 +90,7 @@ build_ignore: - tests/helpers - tests/requirements.txt - tests/unit + - tests/sanity/ignore-2.9.txt + - tests/sanity/ignore-2.10.txt + - tests/sanity/ignore-2.11.txt - venv* diff --git a/meta/runtime.yml b/meta/runtime.yml index 37ae81b1f..9f9843979 100644 --- a/meta/runtime.yml +++ b/meta/runtime.yml @@ -1,2 +1,2 @@ --- -requires_ansible: '>=2.9,<2.16' +requires_ansible: '>=2.12.00' From ee0dfd4c820e70ad02319aeb7f11d2985b169ce8 Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Fri, 23 Jun 2023 13:19:53 -0400 Subject: [PATCH 133/495] 439 addf (#821) * initial changes to support F format * adding F option, added basic test looking for failure during ensure-present * added print_results to a failing uncatalog test. * adding more preint_result statements to track down cataloging issue * removed other print statements, added one back (cataloging is just plain finicky) * corrected volume name on new test * removed extra print statement from test code. Added Changelog fragment. * Expanded test case to try 1 of each record format creation. Added mention of 'F' into the documentation of record_format in dataset.py --- changelogs/fragments/439-add-f-recordtype.yml | 4 ++++ plugins/module_utils/data_set.py | 8 +++---- plugins/modules/zos_data_set.py | 3 +++ .../modules/test_zos_data_set_func.py | 21 +++++++++++++++++++ 4 files changed, 32 insertions(+), 4 deletions(-) create mode 100644 changelogs/fragments/439-add-f-recordtype.yml diff --git a/changelogs/fragments/439-add-f-recordtype.yml b/changelogs/fragments/439-add-f-recordtype.yml new file mode 100644 index 000000000..6c5e72f49 --- /dev/null +++ b/changelogs/fragments/439-add-f-recordtype.yml @@ -0,0 +1,4 @@ +minor_changes: +- zos_data_set - record format = 'F' has been added to support 'fixed' block records. + This allows records that can use the entire block. + (https://github.com/ansible-collections/ibm_zos_core/pull/821) \ No newline at end of file diff --git a/plugins/module_utils/data_set.py b/plugins/module_utils/data_set.py index 647f26871..69e1190f1 100644 --- a/plugins/module_utils/data_set.py +++ b/plugins/module_utils/data_set.py @@ -124,7 +124,7 @@ def ensure_present( space_type (str, optional): The unit of measurement to use when defining primary and secondary space. Defaults to None. record_format (str, optional): The record format to use for the dataset. - Valid options are: FB, VB, FBA, VBA, U. + Valid options are: F, FB, VB, FBA, VBA, U. Defaults to None. record_length (int, optional) The length, in bytes, of each record in the data set. Defaults to None. @@ -832,7 +832,7 @@ def replace( space_type (str, optional): The unit of measurement to use when defining primary and secondary space. Defaults to None. record_format (str, optional): The record format to use for the dataset. - Valid options are: FB, VB, FBA, VBA, U. + Valid options are: F, FB, VB, FBA, VBA, U. Defaults to None. record_length (int, optional) The length, in bytes, of each record in the data set. Defaults to None. @@ -949,7 +949,7 @@ def create( space_type (str, optional): The unit of measurement to use when defining primary and secondary space. Defaults to None. record_format (str, optional): The record format to use for the dataset. - Valid options are: FB, VB, FBA, VBA, U. + Valid options are: F, FB, VB, FBA, VBA, U. Defaults to None. record_length (int, optional) The length, in bytes, of each record in the data set. Defaults to None. @@ -1317,7 +1317,7 @@ def create_temp( Valid options are: SEQ, BASIC, LARGE, PDS, PDSE, LIBRARY, LDS, RRDS, ESDS, KSDS. Defaults to "SEQ". record_format (str, optional): The record format to use for the dataset. - Valid options are: FB, VB, FBA, VBA, U. + Valid options are: F, FB, VB, FBA, VBA, U. Defaults to "FB". space_primary (int, optional): The amount of primary space to allocate for the dataset. Defaults to 5. diff --git a/plugins/modules/zos_data_set.py b/plugins/modules/zos_data_set.py index b279d40d2..ca6012978 100644 --- a/plugins/modules/zos_data_set.py +++ b/plugins/modules/zos_data_set.py @@ -168,6 +168,7 @@ - FBA - VBA - U + - F type: str default: FB sms_storage_class: @@ -428,6 +429,7 @@ - FBA - VBA - U + - F type: str default: FB sms_storage_class: @@ -698,6 +700,7 @@ "FBA", "VBA", "U", + "F", ] DEFAULT_RECORD_LENGTHS = { diff --git a/tests/functional/modules/test_zos_data_set_func.py b/tests/functional/modules/test_zos_data_set_func.py index 63301d2e8..0a3972646 100644 --- a/tests/functional/modules/test_zos_data_set_func.py +++ b/tests/functional/modules/test_zos_data_set_func.py @@ -907,3 +907,24 @@ def test_data_set_creation_with_tmp_hlq(ansible_zos_module): finally: if dsname: hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") + +@pytest.mark.parametrize( + "formats", + ["F","FB", "VB", "FBA", "VBA", "U"], +) +def test_data_set_f_formats(ansible_zos_module, formats): + try: + hosts = ansible_zos_module + hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") + results = hosts.all.zos_data_set( + name=DEFAULT_DATA_SET_NAME, + state="present", + format=formats, + size="5m", + volume=VOLUME_000000, + ) + for result in results.contacted.values(): + assert result.get("changed") is True + assert result.get("module_stderr") is None + finally: + hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") From b37ab341176b9c8447cc72c558988d37fb2d5842 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Fri, 23 Jun 2023 14:54:54 -0600 Subject: [PATCH 134/495] Bugfix/769/mode option does not behave the same way that it does in the community module (#795) * First suggestion * Add files to be overwriten to the files to be changed * Add functionality to test case to ensure behaivour * Add test case for keep behaivour * Delete test repetition * Delete test case from other branch * Change test cases to ensure works as ansible module * Add fragment and change variable names for clarity * Get better test case and comments * Restore test --------- Co-authored-by: Demetri <dimatos@gmail.com> --- .../795_overwrite_permissions_on_copy.yml | 4 ++ plugins/modules/zos_copy.py | 19 +++--- .../functional/modules/test_zos_copy_func.py | 59 +++++++------------ 3 files changed, 35 insertions(+), 47 deletions(-) create mode 100644 changelogs/fragments/795_overwrite_permissions_on_copy.yml diff --git a/changelogs/fragments/795_overwrite_permissions_on_copy.yml b/changelogs/fragments/795_overwrite_permissions_on_copy.yml new file mode 100644 index 000000000..2a8d826d7 --- /dev/null +++ b/changelogs/fragments/795_overwrite_permissions_on_copy.yml @@ -0,0 +1,4 @@ +bugfixes: +- zos_copy - kept permissions on target directory when copy overwrote + files. The fix now set permissions when mode is given. + (https://github.com/ansible-collections/ibm_zos_core/pull/795) \ No newline at end of file diff --git a/plugins/modules/zos_copy.py b/plugins/modules/zos_copy.py index ffb9ccbe4..831b2db87 100644 --- a/plugins/modules/zos_copy.py +++ b/plugins/modules/zos_copy.py @@ -1241,27 +1241,30 @@ def _get_changed_files(self, src, dest, copy_directory): for the files and directories already present on the destination. """ - copied_files = self._walk_uss_tree(src) + files_to_copy = self._walk_uss_tree(src) # It's not needed to normalize the path because it was already normalized # on _copy_to_dir. parent_dir = os.path.basename(src) if copy_directory else '' - changed_files = [] - original_files = [] - for relative_path in copied_files: + files_to_change = [] + existing_files = [] + for relative_path in files_to_copy: if os.path.exists(os.path.join(dest, parent_dir, relative_path)): - original_files.append(relative_path) + existing_files.append(relative_path) else: - changed_files.append(relative_path) + files_to_change.append(relative_path) + # This change adds to the files_to_change variable any file that accord with + # a name found in the source copy. + files_to_change.extend(existing_files) # Creating tuples with (filename, permissions). original_permissions = [ (filepath, os.stat(os.path.join(dest, parent_dir, filepath)).st_mode) - for filepath in original_files + for filepath in existing_files ] - return changed_files, original_permissions + return files_to_change, original_permissions def _walk_uss_tree(self, dir): """Walks the tree directory for dir and returns all relative paths diff --git a/tests/functional/modules/test_zos_copy_func.py b/tests/functional/modules/test_zos_copy_func.py index 86d5e7564..41efc4ee4 100644 --- a/tests/functional/modules/test_zos_copy_func.py +++ b/tests/functional/modules/test_zos_copy_func.py @@ -17,6 +17,7 @@ import os import shutil import re +import time import tempfile from tempfile import mkstemp @@ -933,7 +934,7 @@ def test_copy_local_dir_and_change_mode(ansible_zos_module, copy_directory): for result in stat_overwritten_file_res.contacted.values(): assert result.get("stat").get("exists") is True assert result.get("stat").get("isdir") is False - assert result.get("stat").get("mode") == dest_mode + assert result.get("stat").get("mode") == mode for result in stat_new_file_res.contacted.values(): assert result.get("stat").get("exists") is True @@ -1027,7 +1028,7 @@ def test_copy_uss_dir_and_change_mode(ansible_zos_module, copy_directory): for result in stat_overwritten_file_res.contacted.values(): assert result.get("stat").get("exists") is True assert result.get("stat").get("isdir") is False - assert result.get("stat").get("mode") == dest_mode + assert result.get("stat").get("mode") == mode for result in stat_new_file_res.contacted.values(): assert result.get("stat").get("exists") is True @@ -1110,52 +1111,32 @@ def test_copy_non_existent_file_fails(ansible_zos_module, is_remote): def test_ensure_copy_file_does_not_change_permission_on_dest(ansible_zos_module, src): hosts = ansible_zos_module dest_path = "/tmp/test/" + mode = "750" + other_mode = "744" + mode_overwrite = "0777" + full_path = "{0}/profile".format(dest_path) try: - hosts.all.file(path=dest_path, state="directory", mode="750") - permissions_before = hosts.all.shell(cmd="ls -la {0}".format(dest_path)) - hosts.all.zos_copy(content=src["src"], dest=dest_path) - permissions = hosts.all.shell(cmd="ls -la {0}".format(dest_path)) + hosts.all.file(path=dest_path, state="directory", mode=mode) + permissions_before = hosts.all.stat(path=dest_path) + hosts.all.zos_copy(src=src["src"], dest=dest_path, mode=other_mode) + permissions = hosts.all.stat(path=dest_path) for before in permissions_before.contacted.values(): - permissions_be_copy = before.get("stdout") - - for after in permissions.contacted.values(): - permissions_af_copy = after.get("stdout") - - permissions_be_copy = permissions_be_copy.splitlines()[1].split()[0] - permissions_af_copy = permissions_af_copy.splitlines()[1].split()[0] - - assert permissions_be_copy == permissions_af_copy - finally: - hosts.all.file(path=dest_path, state="absent") - - -@pytest.mark.uss -@pytest.mark.parametrize("src", [ - dict(src="/etc/", is_remote=False), - dict(src="/etc/", is_remote=True),]) -def test_ensure_copy_directory_does_not_change_permission_on_dest(ansible_zos_module, src): - hosts = ansible_zos_module - dest_path = "/tmp/test/" - try: - hosts.all.file(path=dest_path, state="directory", mode="750") - permissions_before = hosts.all.shell(cmd="ls -la {0}".format(dest_path)) - hosts.all.zos_copy(content=src["src"], dest=dest_path) - permissions = hosts.all.shell(cmd="ls -la {0}".format(dest_path)) - - for before in permissions_before.contacted.values(): - permissions_be_copy = before.get("stdout") + permissions_be_copy = before.get("stat").get("mode") for after in permissions.contacted.values(): - permissions_af_copy = after.get("stdout") + permissions_af_copy = after.get("stat").get("mode") - permissions_be_copy = permissions_be_copy.splitlines()[1].split()[0] - permissions_af_copy = permissions_af_copy.splitlines()[1].split()[0] - assert permissions_be_copy == permissions_af_copy + + # Extra asserts to ensure change mode rewrite a copy + hosts.all.zos_copy(src=src["src"], dest=dest_path, mode=mode_overwrite) + permissions_overwriten = hosts.all.stat(path = full_path) + for over in permissions_overwriten.contacted.values(): + assert over.get("stat").get("mode") == mode_overwrite finally: hosts.all.file(path=dest_path, state="absent") - + @pytest.mark.uss @pytest.mark.seq From 04486401c2e89cf9f2e86d3c2c9afda4211b048e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Tue, 27 Jun 2023 11:13:16 -0600 Subject: [PATCH 135/495] bugfix/823/Return destination attributes had hardcoded type and record format (#824) * Add solution * Add fragment --- ...nation_attributes_had_hardcoded_type_and_recordformat.yml | 5 +++++ plugins/modules/zos_copy.py | 4 ++++ 2 files changed, 9 insertions(+) create mode 100644 changelogs/fragments/824_Return_destination_attributes_had_hardcoded_type_and_recordformat.yml diff --git a/changelogs/fragments/824_Return_destination_attributes_had_hardcoded_type_and_recordformat.yml b/changelogs/fragments/824_Return_destination_attributes_had_hardcoded_type_and_recordformat.yml new file mode 100644 index 000000000..8f4246f85 --- /dev/null +++ b/changelogs/fragments/824_Return_destination_attributes_had_hardcoded_type_and_recordformat.yml @@ -0,0 +1,5 @@ +bugfix: +- zos_copy - Module returned the dynamic values created with the same dataset type + and record format. Fix validate the correct dataset type and record format of + target created. + (https://github.com/ansible-collections/ibm_zos_core/pull/824) \ No newline at end of file diff --git a/plugins/modules/zos_copy.py b/plugins/modules/zos_copy.py index 831b2db87..fd7c8feac 100644 --- a/plugins/modules/zos_copy.py +++ b/plugins/modules/zos_copy.py @@ -2191,6 +2191,10 @@ def allocate_destination_data_set( data_set.DataSet.allocate_model_data_set(ds_name=dest, model=src_name, vol=volume) if dest_ds_type not in data_set.DataSet.MVS_VSAM: dest_params = get_attributes_of_any_dataset_created(dest, src_ds_type, src, src_name, is_binary, volume) + dest_attributes = datasets.listing(dest)[0] + record_format = dest_attributes.recfm + dest_params["type"] = dest_ds_type + dest_params["record_format"] = record_format return True, dest_params From 5703246635e0deb84d4eba31359b6d5004216da7 Mon Sep 17 00:00:00 2001 From: Demetri <dimatos@gmail.com> Date: Tue, 27 Jun 2023 10:31:17 -0700 Subject: [PATCH 136/495] Bufix: Fixes zos_copy and zos_fetch deprecation msg for using _play_context.verbosity (#806) * Add new test case for verbosity check Signed-off-by: ddimatos <dimatos@gmail.com> * Update zos_copy tests to support display.verbosity and nested encoding Signed-off-by: ddimatos <dimatos@gmail.com> * Update test framewor to provide support for adhoc module calls Signed-off-by: ddimatos <dimatos@gmail.com> * Update zos_fetch plugin to use the display.verbosity Signed-off-by: ddimatos <dimatos@gmail.com> * Lint correction Signed-off-by: ddimatos <dimatos@gmail.com> * Changlog fragments Signed-off-by: ddimatos <dimatos@gmail.com> * Update test with verbosity Signed-off-by: ddimatos <dimatos@gmail.com> * Change from shell to raw module usage Signed-off-by: ddimatos <dimatos@gmail.com> * remove verbosity from test Signed-off-by: ddimatos <dimatos@gmail.com> * correct indentation Signed-off-by: ddimatos <dimatos@gmail.com> * update changelog fragement Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> --- .../791-zos_data_set-update-vsam-copy.yml | 11 ++ ...=> 806-zos_copy_fetch-display-verbose.yml} | 0 plugins/action/zos_copy.py | 9 +- plugins/action/zos_fetch.py | 9 +- tests/conftest.py | 13 +- .../functional/modules/test_zos_copy_func.py | 155 ++++++++++++++---- tests/helpers/ztest.py | 4 + 7 files changed, 160 insertions(+), 41 deletions(-) create mode 100644 changelogs/fragments/791-zos_data_set-update-vsam-copy.yml rename changelogs/fragments/{791-zos_data_set-update-vsam.yml => 806-zos_copy_fetch-display-verbose.yml} (100%) diff --git a/changelogs/fragments/791-zos_data_set-update-vsam-copy.yml b/changelogs/fragments/791-zos_data_set-update-vsam-copy.yml new file mode 100644 index 000000000..3d29e906e --- /dev/null +++ b/changelogs/fragments/791-zos_data_set-update-vsam-copy.yml @@ -0,0 +1,11 @@ +bugfixes: +- zos_copy - Test case for recursive encoding directories reported a + UTF-8 failure. This change ensures proper test coverage for nested + directories and file permissions. + (https://github.com/ansible-collections/ibm_zos_core/pull/806). +- zos_copy - Reported a warning about the use of _play_context.verbosity.This + change corrects the module action to prevent the warning message. + (https://github.com/ansible-collections/ibm_zos_core/pull/806). +- zos_fetch - Reported a warning about the use of _play_context.verbosity.This + change corrects the module action to prevent the warning message. + (https://github.com/ansible-collections/ibm_zos_core/pull/806). \ No newline at end of file diff --git a/changelogs/fragments/791-zos_data_set-update-vsam.yml b/changelogs/fragments/806-zos_copy_fetch-display-verbose.yml similarity index 100% rename from changelogs/fragments/791-zos_data_set-update-vsam.yml rename to changelogs/fragments/806-zos_copy_fetch-display-verbose.yml diff --git a/plugins/action/zos_copy.py b/plugins/action/zos_copy.py index 5fa861b61..b98432aa6 100644 --- a/plugins/action/zos_copy.py +++ b/plugins/action/zos_copy.py @@ -258,9 +258,10 @@ def _copy_to_remote(self, src, is_dir=False, ignore_stderr=False): display.vvv(u"ibm_zos_copy return code: {0}".format(returncode), host=self._play_context.remote_addr) display.vvv(u"ibm_zos_copy stdout: {0}".format(stdout), host=self._play_context.remote_addr) display.vvv(u"ibm_zos_copy stderr: {0}".format(stderr), host=self._play_context.remote_addr) - display.vvv(u"play context verbosity: {0}".format(self._play_context.verbosity), host=self._play_context.remote_addr) - err = _detect_sftp_errors(stderr) + ansible_verbosity = None + ansible_verbosity = display.verbosity + display.vvv(u"play context verbosity: {0}".format(ansible_verbosity), host=self._play_context.remote_addr) # ************************************************************************* # # When plugin shh connection member _build_command(..) detects verbosity # @@ -275,7 +276,9 @@ def _copy_to_remote(self, src, is_dir=False, ignore_stderr=False): # the verbosity is returned as 'stderr'. # # ************************************************************************* # - if self._play_context.verbosity > 3: + err = _detect_sftp_errors(stderr) + + if ansible_verbosity > 3: ignore_stderr = True if returncode != 0 or (err and not ignore_stderr): diff --git a/plugins/action/zos_fetch.py b/plugins/action/zos_fetch.py index 67bd83981..e10dbd75f 100644 --- a/plugins/action/zos_fetch.py +++ b/plugins/action/zos_fetch.py @@ -343,9 +343,10 @@ def _transfer_remote_content( display.vvv(u"ibm_zos_fetch return code: {0}".format(returncode), host=self._play_context.remote_addr) display.vvv(u"ibm_zos_fetch stdout: {0}".format(stdout), host=self._play_context.remote_addr) display.vvv(u"ibm_zos_fetch stderr: {0}".format(stderr), host=self._play_context.remote_addr) - display.vvv(u"play context verbosity: {0}".format(self._play_context.verbosity), host=self._play_context.remote_addr) - err = _detect_sftp_errors(stderr) + ansible_verbosity = None + ansible_verbosity = display.verbosity + display.vvv(u"play context verbosity: {0}".format(ansible_verbosity), host=self._play_context.remote_addr) # ************************************************************************* # # When plugin shh connection member _build_command(..) detects verbosity # @@ -360,7 +361,9 @@ def _transfer_remote_content( # the verbosity is returned as 'stderr'. # # ************************************************************************* # - if self._play_context.verbosity > 3: + err = _detect_sftp_errors(stderr) + + if ansible_verbosity > 3: ignore_stderr = True if re.findall(r"Permission denied", err): diff --git a/tests/conftest.py b/tests/conftest.py index 980edb332..506214f29 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -38,7 +38,8 @@ def z_python_interpreter(request): helper = ZTestHelper.from_yaml_file(path) interpreter_str = helper.build_interpreter_string() inventory = helper.get_inventory_info() - yield (interpreter_str, inventory) + python_path = helper.get_python_path() + yield (interpreter_str, inventory, python_path) def clean_logs(adhoc): @@ -62,12 +63,18 @@ def clean_logs(adhoc): def ansible_zos_module(request, z_python_interpreter): """ Initialize pytest-ansible plugin with values from our YAML config and inject interpreter path into inventory. """ - interpreter, inventory = z_python_interpreter + interpreter, inventory, python_path = z_python_interpreter + # next two lines perform similar action to ansible_adhoc fixture plugin = request.config.pluginmanager.getplugin("ansible") adhoc = plugin.initialize(request.config, request, **inventory) - # * Inject our environment + + # Inject our environment hosts = adhoc["options"]["inventory_manager"]._inventory.hosts + + # Courtesy, pass along the python_path for some test cases need this information + adhoc["options"]["ansible_python_path"] = python_path + for host in hosts.values(): host.vars["ansible_python_interpreter"] = interpreter # host.vars["ansible_connection"] = "zos_ssh" diff --git a/tests/functional/modules/test_zos_copy_func.py b/tests/functional/modules/test_zos_copy_func.py index 41efc4ee4..7b69a75b4 100644 --- a/tests/functional/modules/test_zos_copy_func.py +++ b/tests/functional/modules/test_zos_copy_func.py @@ -20,6 +20,7 @@ import time import tempfile from tempfile import mkstemp +import subprocess __metaclass__ = type @@ -471,45 +472,99 @@ def test_copy_dir_to_existing_uss_dir_not_forced(ansible_zos_module): @pytest.mark.uss def test_copy_subdirs_folders_and_validate_recursive_encoding(ansible_zos_module): hosts = ansible_zos_module - dest_path = "/tmp/test/" - text_outer_file = "Hi I am point A" - text_inner_file = "Hi I am point B" - src_path = "/tmp/level_1/" - outer_file = "/tmp/level_1/text_A.txt" - inner_src_path = "/tmp/level_1/level_2/" - inner_file = "/tmp/level_1/level_2/text_B.txt" - - try: - hosts.all.file(path=inner_src_path, state="directory") - hosts.all.file(path=inner_file, state = "touch") - hosts.all.file(path=outer_file, state = "touch") - hosts.all.shell(cmd="echo '{0}' > '{1}'".format(text_outer_file, outer_file)) - hosts.all.shell(cmd="echo '{0}' > '{1}'".format(text_inner_file, inner_file)) - - copy_res = hosts.all.zos_copy(src=src_path, dest=dest_path, encoding={"from": "ISO8859-1", "to": "IBM-1047"}, remote_src=True) + + # Remote path + path = "/tmp/ansible" + + # Remote src path with original files + src_path = path + "/src" + + # Nested src dirs + src_dir_one = src_path + "/dir_one" + src_dir_two = src_dir_one + "/dir_two" + src_dir_three = src_dir_two + "/dir_three" + + # Nested src IBM-1047 files + src_file_one = src_path + "/dir_one/one.txt" + src_file_two = src_dir_one + "/dir_two/two.txt" + src_file_three = src_dir_two + "/dir_three/three.txt" + + # Remote dest path to encoded files placed + dest_path = path + "/dest" + + # Nested dest UTF-8 files + dst_file_one = dest_path + "/dir_one/one.txt" + dst_file_two = dest_path + "/dir_one/dir_two/two.txt" + dst_file_three = dest_path + "/dir_one/dir_two//dir_three/three.txt" + + # Strings echo'd to files on USS + str_one = "This is file one." + str_two = "This is file two." + str_three = "This is file three." + + # Hex values for expected results, expected used beause pytest-ansible does not allow for delegate_to + # and depending on where the `od` runs, you may face big/little endian issues, so using expected utf-8 + str_one_big_endian_hex="""0000000000 5468 6973 2069 7320 6669 6C65 206F 6E65 +0000000020 2E0A +0000000022""" + + str_two_big_endian_hex="""0000000000 5468 6973 2069 7320 6669 6C65 2074 776F +0000000020 2E0A +0000000022""" + + str_three_big_endian_hex="""0000000000 5468 6973 2069 7320 6669 6C65 2074 6872 +0000000020 6565 2E0A +0000000024""" + + try: + # Ensure clean slate + results = hosts.all.file(path=path, state="absent") + + # Create nested directories + hosts.all.file(path=src_dir_three, state="directory", mode="0755") + + # Touch empty files + hosts.all.file(path=src_file_one, state = "touch") + hosts.all.file(path=src_file_two, state = "touch") + hosts.all.file(path=src_file_three, state = "touch") + + # Echo contents into files (could use zos_lineinfile or zos_copy), echo'ing will + # result in managed node's locale which currently is IBM-1047 + hosts.all.raw("echo '{0}' > '{1}'".format(str_one, src_file_one)) + hosts.all.raw("echo '{0}' > '{1}'".format(str_two, src_file_two)) + hosts.all.raw("echo '{0}' > '{1}'".format(str_three, src_file_three)) + + # Lets stat the deepest nested directory, not necessary to stat all of them + results = hosts.all.stat(path=src_file_three) + for result in results.contacted.values(): + assert result.get("stat").get("exists") is True + + # Nested zos_copy from IBM-1047 to UTF-8 + # Testing src ending in / such that the contents of the src directory will be copied + copy_res = hosts.all.zos_copy(src=src_path+"/", dest=dest_path, encoding={"from": "IBM-1047", "to": "UTF-8"}, remote_src=True) for result in copy_res.contacted.values(): assert result.get("msg") is None assert result.get("changed") is True - stat_res = hosts.all.stat(path="/tmp/test/level_2/") - for st in stat_res.contacted.values(): - assert st.get("stat").get("exists") is True + # File z/OS dest is now UTF-8, dump the hex value and compare it to an + # expected big-endian version, can't run delegate_to local host so expected + # value is the work around for now. + str_one_od_dst = hosts.all.shell(cmd="od -x {0}".format(dst_file_one)) + str_two_od_dst = hosts.all.shell(cmd="od -x {0}".format(dst_file_two)) + str_three_od_dst = hosts.all.shell(cmd="od -x {0}".format(dst_file_three)) + + for result in str_one_od_dst.contacted.values(): + assert result.get("stdout") == str_one_big_endian_hex + + for result in str_two_od_dst.contacted.values(): + assert result.get("stdout") == str_two_big_endian_hex - full_inner_path = dest_path + "/level_2/text_B.txt" - full_outer_path = dest_path + "/text_A.txt" - inner_file_text_aft_encoding = hosts.all.shell(cmd="cat {0}".format(full_inner_path)) - outer_file_text_aft_encoding = hosts.all.shell(cmd="cat {0}".format(full_outer_path)) - for text in outer_file_text_aft_encoding.contacted.values(): - text_outer = text.get("stdout") - for text in inner_file_text_aft_encoding.contacted.values(): - text_inner = text.get("stdout") + for result in str_three_od_dst.contacted.values(): + assert result.get("stdout") == str_three_big_endian_hex - assert text_inner == text_inner_file - assert text_outer == text_outer_file finally: - hosts.all.file(path=src_path, state="absent") - hosts.all.file(path=dest_path, state="absent") + hosts.all.file(path=path, state="absent") @pytest.mark.uss @@ -2849,4 +2904,40 @@ def test_copy_uss_file_to_existing_sequential_data_set_twice_with_tmphlq_option( assert v_cp.get("rc") == 0 finally: hosts.all.zos_data_set(name=dest, state="absent") - \ No newline at end of file + + +@pytest.mark.parametrize("options", [ + dict(src="/etc/profile", dest="/tmp/zos_copy_test_profile", + force=True, is_remote=False, verbosity="-vvvvv", verbosity_level=5), + dict(src="/etc/profile", dest="/mp/zos_copy_test_profile", force=True, + is_remote=False, verbosity="-vvvv", verbosity_level=4), + dict(src="/etc/profile", dest="/tmp/zos_copy_test_profile", + force=True, is_remote=False, verbosity="", verbosity_level=0), +]) +def test_display_verbosity_in_zos_copy_plugin(ansible_zos_module, options): + """Test the display verbosity, ensure it matches the verbosity_level. + This test requires access to verbosity and pytest-ansbile provides no + reasonable handle for this so for now subprocess is used. This test + results in no actual copy happening, the interest is in the verbosity""" + + try: + hosts = ansible_zos_module + user = hosts["options"]["user"] + # Optionally hosts["options"]["inventory_manager"].list_hosts()[0] + node = hosts["options"]["inventory"].rstrip(',') + python_path = hosts["options"]["ansible_python_path"] + + # This is an adhoc command, because there was no + cmd = "ansible all -i " + str(node) + ", -u " + user + " -m ibm.ibm_zos_core.zos_copy -a \"src=" + options["src"] + " dest=" + options["dest"] + " is_remote=" + str( + options["is_remote"]) + " encoding={{enc}} \" -e '{\"enc\":{\"from\": \"ISO8859-1\", \"to\": \"IBM-1047\"}}' -e \"ansible_python_interpreter=" + python_path + "\" " + options["verbosity"] + "" + + result = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE).stdout + output = result.read().decode() + + if options["verbosity_level"] != 0: + assert ("play context verbosity: "+ str(options["verbosity_level"])+"" in output) + else: + assert ("play context verbosity:" not in output) + + finally: + hosts.all.file(path=options["dest"], state="absent") diff --git a/tests/helpers/ztest.py b/tests/helpers/ztest.py index 4115da106..af198d6f0 100644 --- a/tests/helpers/ztest.py +++ b/tests/helpers/ztest.py @@ -64,3 +64,7 @@ def build_interpreter_string(self): interpreter_string += "export {0}={1} ; ".format(key, value) interpreter_string += self._python_path return interpreter_string + + def get_python_path(self): + """ Returns python path """ + return self._python_path From f3eae2c45ad465d472f1301b258de6339c18765d Mon Sep 17 00:00:00 2001 From: Demetri <dimatos@gmail.com> Date: Thu, 6 Jul 2023 08:47:49 -0700 Subject: [PATCH 137/495] Stagging v1.6.0 merge into dev (#832) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Update branch production branch Main with release v1.5.0 content (#756) * Bug fix to correct job log returining and other edge cases (#688) * Update zos_job_submit to handle some edge cases and return the job log nearly always Signed-off-by: ddimatos <dimatos@gmail.com> * Linting correction Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragement for pr 683 Signed-off-by: ddimatos <dimatos@gmail.com> * Clean up comments in code Signed-off-by: ddimatos <dimatos@gmail.com> * Update restructured text for modules and changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Add support to check for security exception to job submit module Signed-off-by: ddimatos <dimatos@gmail.com> * Add additonal logic for jobs that use typerun=scan Signed-off-by: ddimatos <dimatos@gmail.com> * Update chnagelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Correct run on sentence Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Added additional `D SYMBOLS` example to zos_operator documentation (#729) * Update zos_operator with an additional example Signed-off-by: ddimatos <dimatos@gmail.com> * Added change fragment Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Ported bugfixes for #664 and #678 * Updated file normalization for sequential datasets * Fixed encoding when a remote source is used * Added encoding normalization for record length (#664) This is a port and tweak from the normalization found in the L2 support branch. Now the normalization applies for both local and remote source files. There is still work left to validate how this behaves when copying to partitioned datasets, and see if the normalization needs to handle directories as well. * Refactored normalization for USS files * Updated line endings normalization in PDSEs * Updated create_temp_with_lf_endings * Added tests for line endings normalization * Added changelog fragment * Add copy lib member test case * Corrected changelog fragment * Add conflict resolution * Update 641-copy-loadlib-member.yml * zos_copy forward port a test case for symbols on a volume (#739) * Add a test case for symbols using an existing volume on our test system Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update 739-zos_copy-volume-symbol-test.yml Update description to use symbols overs special chars. * Update test case with added comments Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * bring in updates to zos_gather_facts RETURN docstring (#744) Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * Solve problem 619 about set mode on dest file (#746) * Solve problem 619 about set mode on dest file * Add fragment of 746 PR --------- Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> * Merge release tasks for v1.5.0 into release branch (#752) * Updated metadata * Updated copyright information * Updated copyright in README * Generated changelog * Added previous versions to changelog * Changed a minor_change fragment to bugfix * Updated release notes * Update dependency finder to allow for all tests minus skip tests Signed-off-by: ddimatos <dimatos@gmail.com> * Update release notes and changelog with full date formats Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: ddimatos <dimatos@gmail.com> * remove changelog fragments no longer needed (#754) Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: Ivan Moreno <iamorenosoto@gmail.com> Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> Co-authored-by: Rich Parker <richp405@gmail.com> Co-authored-by: ketankelkar <ktnklkr@gmail.com> Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> * v1.6.0-beta.1 release (#759) * Added tagging of converted files * Updated tests for zos_encode * Added restoration of PDS/PDSE members when module fails zos_copy can now track which members got overwritten or newly added to a partitioned data set, and restore them accordingly. This commit includes a refactorization of the copy_to_pdse method to simplify the restoration work. * Fixed cleanup of dest when module fails * Removed exception used for debugging * Added pytest markers * solved pep8 issue * Added more information to error when restoration fails * Update make file with new features to manage the nodes (#563) * Update make file with new features to manage the nodes Signed-off-by: ddimatos <dimatos@gmail.com> * Correct typo Signed-off-by: ddimatos <dimatos@gmail.com> * updated makefile to allow for no password in the clear and added a new host Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Fix a bug that failed to copy the cert to the managed node (#577) Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Added apropiate error message in zos_lineinfile when src is not found * Better error message * Added fragment for changelog * Update git issue slection field (#593) * Update git issue slection field * Added some additional future versions * Added rule to ignore python 2.7 compile not supporting f strings * Corrected rule in ignore file * Update 584-zos_lineinfile-error-message.yml * Added missing fragments for issues 309 and 408 * update make file and related artifacts with recent zVM changes (#598) Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Expand what is ingored for venv to all venv's (#613) Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Ansible-core versions with updated ignore file pass (#615) * Validated that ansible-core versions with updated ignore file pass locally Signed-off-by: ddimatos <dimatos@gmail.com> * remove redundant entry on last line Signed-off-by: ddimatos <dimatos@gmail.com> * remove redundant entry on last line Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Update make file to read exported var VENV else default to 'venv' (#614) * Update make file to read exported var VENV else default to 'venv' Signed-off-by: ddimatos <dimatos@gmail.com> * Correct the default from defaultValue to 'venv' Signed-off-by: ddimatos <dimatos@gmail.com> * Bugfix/558/zos copy backup dev (#609) * Create emergency backup only if force is set to False and dest exists * Added Changelog fragment * Updated imports ansible.module_utils._text to ansible.module_utils.common.text.converters (#602) * Updated import in zos_fetch * Updated import in zos_job_submit * Updated import in module_utils/data_set * Updated import in module_utils/System * Updated import in zos_copy * Updated import in zos_fetch * Added changelog fragment * Update enhancement-518-text-converter-import.yml * Updates the makefile and related scripts for the latest mounts (#628) * Update makefile coments and use --ignore for pytest Signed-off-by: ddimatos <dimatos@gmail.com> * lexicographical order targets Signed-off-by: ddimatos <dimatos@gmail.com> * Update makefile and artifacts to support new mounts Signed-off-by: ddimatos <dimatos@gmail.com> * Removing encrypted versions of the scripts, no longer senstive content Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * update profile created for mount points Signed-off-by: ddimatos <dimatos@gmail.com> * Correct f-string usage to remain 2.7 compatible (#659) * Correct f-string usage to remain 2.7 compatible Signed-off-by: ddimatos <dimatos@gmail.com> * Update changelog fragments Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Updated shell scripts for development tooling Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Update collab template Signed-off-by: ddimatos <dimatos@gmail.com> * Update collab template Signed-off-by: ddimatos <dimatos@gmail.com> * Update collab template Signed-off-by: ddimatos <dimatos@gmail.com> * Template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Template updates Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * changed zos_job_query to allow multi/embedded wildcard job_values. job.py/_get_job_status now uses fnmatch to test for wildcarded job_names. added test looking for a wildcard job name. added basic changelog text as 'enhancement'. * expanded wildcard support to job_id as well as job_name corrected error-handling issue in query routine playbook tests complete * cleaned up pep8 issues * Bug fix to correct job log returining and other edge cases (#683) * Update zos_job_submit to handle some edge cases and return the job log nearly always Signed-off-by: ddimatos <dimatos@gmail.com> * Linting correction Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragement for pr 683 Signed-off-by: ddimatos <dimatos@gmail.com> * Clean up comments in code Signed-off-by: ddimatos <dimatos@gmail.com> * Update restructured text for modules and changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Add support to check for security exception to job submit module Signed-off-by: ddimatos <dimatos@gmail.com> * Add additonal logic for jobs that use typerun=scan Signed-off-by: ddimatos <dimatos@gmail.com> * Update chnagelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update zos_job_submit.py Grammar change and correction. --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Added uss_tag_encoding function * Fixing linter issues * removed extraneous comment on query, eliminated unused variable * responding to reviewer comments * Expanded query tests to run a hello jcl file to make sure we are searching for a job id that exists. * Expanded documentation in code to highlight asterisk availability. Added examples showing multi-asterisk search. * Corrected 2 documentation errors * Change to documentation text (indent on multi line string?) * Still trying to get documentation to pass * Looks like '---' was killing documentation block. * Update zos_blockinfile with complex examples (#727) * Update zos_blockinfile with complex examples Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragement Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Added additional D SYMBOLS example to zos_operator documentation (#730) * Update zos_operator with an additional example Signed-off-by: ddimatos <dimatos@gmail.com> * Added changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * zos_gather_facts - add sample output to RETURN docstring (#722) * zos_gather_facts - add sample output to RETURN docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add changelog fragment Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * zos_gather_facts RETURN docstring - add sample sysplex_name and lpar_name values Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * Update generated module doc Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: ddimatos <dimatos@gmail.com> * 574find (#668) * change to allow find loop to continue when one particular data set element is not found This has been validated with a zos-check-find.yaml playbook * added functional test that includes non-existant pds entry, which should be skipped over * added changelog fragment. * zos_blockinfile , can quotes in content can be supported (#680) * Verify coomand of ZOAU support the doble quotes and get better output of message * Verify coomand of ZOAU support the doble quotes and get better output of message * Restore to the one tyme function solving TypeError * Test about cases with quotes supported * Solve comments * Comments in the changelog * Adjust test for working accord the text * A needed space * All in structured * Comments solved * Better order --------- Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> Co-authored-by: Demetri <dimatos@gmail.com> * zos_copy mode is applied to the destination directory, a deviation from the communtiy module behavior. (#723) * Verify coomand of ZOAU support the doble quotes and get better output of message * Verify coomand of ZOAU support the doble quotes and get better output of message * Restore to the one tyme function solving TypeError * Test about cases with quotes supported * Solve comments * Comments in the changelog * Adjust test for working accord the text * Solve the dest functional mode set for the applied to destination directory * Identation and spaces * To work well * To work well * To work well * To work well * Changelogs added * Solved the fragment test and separte the cases --------- Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> * corrected job test case that wanted to extract job id. * changed call to zos_job_query in the functional test. * zos_data_set - add force parameter to enable member delete via disp shr (#718) * add force parameter to enable member delete via disp shr Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update link to PR instead of issue Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * fix minor mistake in func test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update DOCstring for missed items highlighted in PR review Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update copyright years in data set module util Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * Update changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update to doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> * Update to doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> * Update to doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> * update doc, rst Signed-off-by: ddimatos <dimatos@gmail.com> * Update doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> * Added handler for job not found edge cases (None not iterable errors) * corrected pep8 issue (bad indent) * removed tracking text from error/not found messages. * Update zos_job_query.py * Add and automate a load lib test case for module zos_copy (#640) * Modified if statement * Added changelog fragment * Corrected statements Corrected PR in changelog fix lint issue * Adding test case on the work * Added assertion to loadlib test Changed cobol src Added newlines * Correcting cobol src * Removed unnecessary comments * Removed deprint statements for debug * Update 601-copy-loadlib-member.yml * Reduce space primary * Make sure cobol is executable * corrected name * Corrected names * Adding debug statements * modified name in link step * Changing copy name * Removed print statements for debug * Removed previous changelog * Removed unused fragment * Removed test case * Add max_rc support for module zos_tso_command (#666) * rebuilding 565 * fixing pep8 issue * corrected not in code, added functional test to use maxrc * changing maxrc return codes as indicated * updated copy_rexx to incorporate max_rc * pep8 issue on an if statement * changed test to dump intermediate values temporarily * added max_rc to arg params to get it to pass through * removed extra output on test * Added changelog fragment. * Update 565-zos_tsocommand_maxrc.yml * PARTIAL correction based on reviewer's feedback. Will complete later today. * Partial push: change origrc to orig_rc and added documentation. Added experimental loop for multiline results. Changed error case to continue through the commands even after a failure (?!) * Changes based on review feedback: - rc is actual returned value - origrc/orig_rc removed - in a list of commands, first error (considering max_rc) stops the chain - while processing commands, a list of success/failures are kept for error state info - changed listds to a longer, more complete string in example * updated test case for max_rc to use longer, more complete listds command * Minor pep8 and yamllint changes * Updated string builder to use append and join, for efficiency. * Update changelog fragment to correspond to pull request number Signed-off-by: ddimatos <dimatos@gmail.com> * removed old fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Changed output of command-by-command to use max_rc instead of Max RC, for consistency. * clarifying failure message when user hasn't set a max_rc. * corrected date on copyright notice to include current year. * corrected copyright date on test case --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: Demetri <dimatos@gmail.com> * added changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * zos_copy forward port a test case for symbols on a volume #739 (#740) * added test case for symbols on a volume Signed-off-by: ddimatos <dimatos@gmail.com> * Changelog fragment added Signed-off-by: ddimatos <dimatos@gmail.com> * Update test case with added comments Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Volume Initialization Module (#654) * Adding ZOS_INIT plugins The module and action for ZOS_Init command, under zos_ickdsf_command. Designed for version 1.3.1, and example given references removed zos_ssh.py. * Create zos_ickdsf_command.rst Tentative documentation. Does not quite list every return, and program may change to work with overall zos core collection. * Update zos_ickdsf_command.py fixed to pass pep8 sanity test. * Update zos_ickdsf_command.py Edited zos_ickdsf_command.py in action folder to pass pep8 style check. * bugfix - undo hard-coded storagegroup param from command passed to mvs_raw Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update encoding and licensing info Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * edit descriptions, leaving in old to verify messaging with dfsms team Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove references to buildix Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * cleanup examples docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add return docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add some unit tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update dict names, some clean up Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update interface and docs with cleaner descriptions and define default values and required true/false where applicable Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove init param and bring out sub-options one level Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename module from zos_ickdsf_command to zos_ickdsf_init Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove files referring to zos_ickdsf_command; i forgot to untrack these in the previous commit Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove params from intitial definition of results dict which are not always present, remove unused return params, rename message to msg to match ibm_zos_core and ansible style Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add ignore missing gplv3 license warning from sanity testings to existing ignore files Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * style edits to pass sanity tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add in-depth task name to first example docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add minimal params functional test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * convert action plugin to module_util, remove IckdsfCommand and CommandInit class definitions and custom error classes, move convert method to module_util Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename rc param, update func test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add note about disabled verify_offline check Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * removed volume_address checks which can be handled by ansiblemodule parsing validation or ickdsf itself; changed function signature of convert method to include results dicts so it can pass it to fail_json method in the case of module failure Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add invalid volume_address func tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * change param names, organize mvscmd return output Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * consolidate negative func tests, add negative tests for vtoc_tracks param, start list for postive tests to implement Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add func tests for "verify_existing_volid" and "verify_no_data_sets_exist" Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add volid positive and negative func tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add mutual exclusion to no index/sms_managedw with func test, minor edits to comments * add func tests for index param Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add func test for vtoc_tracks param Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove 'addr_range', 'volid_prefix' and 'output_html' params; add example with ansible loop to init multiple serial volumes Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * convert 'content' return param to list for prettier playbook output Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add specific examples to docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add guard rail test with volser check set to ensure target volume is as expected Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * switch target test vol to user02, remove existing data sets in initial test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * start adding author names to docstring, edits of yaml for sanity test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename module to zos_volume_init Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove unit test suite which was not kept up to date with code restructure Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove old/outdated .rst file Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * bring up to speed with v1.5.0-beta.1 which is currently more recent than dev Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add sanity test 2.15 file Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * clean up trailing empty lines/space chars Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * more pep8 style Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * pull in more changes from latest dev branch Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * tweak tests/sanity/ignore-2.10.txt file Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename volume_address to address, rename verify_no_data_sets_exist to verify_volume_empty, rename verify_existing_volid to verify_volid, rename vtoc_tracks to vtoc_size, rename return param command to cmd, make suggested doc changes, switch off check_mode Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * resolve pep8 line too long issue Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add year to copyright, add gh user name to author, restyle true/false with code rather than italics, remove backslashes in docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add changelog fragment Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update zos_fetch test case to create data set instead of using one that gets wiped away by zos_volume_init test cases Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * address PR comments including pep8 styling, docstring changes including add seealso section, add details around authorized usage of ickdsf, some light wordsmithing, and remove cmd from return structure Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add backticks to other special chars in docs for consistency Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * on more formatting tweak to national chars in docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add new param to support tmp_hlq Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * extend tmphlq binding to sysprint(*), disable verbose mode for mvscmd call Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove references to betterargparser - it cannot be used in its current state as it is too restrictive on volume serial and qualifier args Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove soft language ie beware, please; reduce line lengths, fix a spelling error Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * reorganize test case code for more logical grouping and un-nesting from a for loop, also update copyright year that was missed Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update display command check to only check particular lines instead of entire output in test_volid_address_assigned_correctly which would always be positive when tested against the entire output Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: trevor-glassey <Trevor.Glassey@ibm.com> * Ported bugfixes for #664 and #678 from 1.5.0 (#743) * Ported bugfixes for #664 and #678 * Updated file normalization for sequential datasets * Fixed encoding when a remote source is used * Added encoding normalization for record length (#664) This is a port and tweak from the normalization found in the L2 support branch. Now the normalization applies for both local and remote source files. There is still work left to validate how this behaves when copying to partitioned datasets, and see if the normalization needs to handle directories as well. * Refactored normalization for USS files * Updated line endings normalization in PDSEs * Updated create_temp_with_lf_endings * Added tests for line endings normalization * Added changelog fragment * Updated changelog fragment * Updated copyright information * Added comments to record length tests * Added more context to the record length normalization * Add force (disp=shr) option to zos_lineinfile and update zos_blockinfile with the same locking test case. (#731) * Add DISP=SHR by function force to the user * Add helper function for force cases only for EOF inserts * Test cases added * Just to check in the change case * All test cases for force in zos_lineinfile working properly * Test cases for force blockinfile * Fixes for style of code * One identation error * One identation error * One identation error * Documentation error * All test cases for line infile and block infile * Comments * Change the stdout to assert with expected of ds * Error of test case * Fragment added * Solve typo errors * Update to remove uncessary comment Signed-off-by: ddimatos <dimatos@gmail.com> * Update changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update module with force comments Signed-off-by: ddimatos <dimatos@gmail.com> * Solve some test problems * Copyright year * Copyright year * Add cases for other ecodes * Add test for all encodings * Cover all encodes test cases with force * Blockinfile force added for other encodes and DS types --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@mbp-de-andre-2.gdl.mex.ibm.com> Co-authored-by: ddimatos <dimatos@gmail.com> * remove duplicate function (#753) * Update branch production branch Main with release v1.5.0 content (#756) (#758) * Bug fix to correct job log returining and other edge cases (#688) * Update zos_job_submit to handle some edge cases and return the job log nearly always * Linting correction * Add changelog fragement for pr 683 * Clean up comments in code * Update restructured text for modules and changelog fragment * Add support to check for security exception to job submit module * Add additonal logic for jobs that use typerun=scan * Update chnagelog fragment * Correct run on sentence --------- * Added additional `D SYMBOLS` example to zos_operator documentation (#729) * Update zos_operator with an additional example * Added change fragment --------- * Ported bugfixes for #664 and #678 * Updated file normalization for sequential datasets * Fixed encoding when a remote source is used * Added encoding normalization for record length (#664) This is a port and tweak from the normalization found in the L2 support branch. Now the normalization applies for both local and remote source files. There is still work left to validate how this behaves when copying to partitioned datasets, and see if the normalization needs to handle directories as well. * Refactored normalization for USS files * Updated line endings normalization in PDSEs * Updated create_temp_with_lf_endings * Added tests for line endings normalization * Added changelog fragment * Add copy lib member test case * Corrected changelog fragment * Add conflict resolution * Update 641-copy-loadlib-member.yml * zos_copy forward port a test case for symbols on a volume (#739) * Add a test case for symbols using an existing volume on our test system * Add changelog fragment * Update 739-zos_copy-volume-symbol-test.yml Update description to use symbols overs special chars. * Update test case with added comments --------- * bring in updates to zos_gather_facts RETURN docstring (#744) * Solve problem 619 about set mode on dest file (#746) * Solve problem 619 about set mode on dest file * Add fragment of 746 PR --------- * Merge release tasks for v1.5.0 into release branch (#752) * Updated metadata * Updated copyright information * Updated copyright in README * Generated changelog * Added previous versions to changelog * Changed a minor_change fragment to bugfix * Updated release notes * Update dependency finder to allow for all tests minus skip tests * Update release notes and changelog with full date formats --------- * remove changelog fragments no longer needed (#754) --------- Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: Ivan Moreno <iamorenosoto@gmail.com> Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> Co-authored-by: Rich Parker <richp405@gmail.com> Co-authored-by: ketankelkar <ktnklkr@gmail.com> Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> * update galaxy and meta/ files Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update copyright years Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update rst doc files Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add 1.6.0-beta.1 release summary fragment Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * generate changelog Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add release notes Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add additional stylizing Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * Delete fragements once changelog is generated Signed-off-by: ddimatos <dimatos@gmail.com> * update stylizing around *none* in zos_volume_init docs Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: Ivan Moreno <iamorenosoto@gmail.com> Co-authored-by: Oscar Fernando Flores Garcia <fernandofloresdev@gmail.com> Co-authored-by: Demetri <dimatos@gmail.com> Co-authored-by: Rich Parker <richp405@gmail.com> Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> Co-authored-by: trevor-glassey <Trevor.Glassey@ibm.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@mbp-de-andre-2.gdl.mex.ibm.com> * update zoau version checker and add unit testing Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * Bugfix/769/1.6/zos copy does not overwrite permission on overwriten copy as comunity module (#790) * Change function behaivour, variables names and add test case to ensure result * Change test assertion to ensure wokrs as ansible module * Change test for the HSF * Add fragment * More readable test and better coments * add changelog fragment for zoau version checker bugfix (#800) Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * Add ansible-lint tooling added (#813) * Add ansible-lint tooling Signed-off-by: ddimatos <dimatos@gmail.com> Conflicts: ac galaxy.yml meta/runtime.yml Changes to be committed: new file: .ansible-lint new file: ac modified: galaxy.yml modified: meta/runtime.yml * Changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Removing file brought it in by cherry pick Signed-off-by: ddimatos <dimatos@gmail.com> * Added missing keyword 'build_ignore' Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Bugfix for #807 - zos_copy ignores encoding for binary files (#810) * Updated normalization condition * Added test cases for bugfix * Added changelog fragment * Updated changelog fragment * Update zos_data_set member description created (#816) * Update zos_data_set module member description Signed-off-by: ddimatos <dimatos@gmail.com> * Adding fix for uncataloged vsam and non-vsam data sets Signed-off-by: ddimatos <dimatos@gmail.com> * Fixes the issue of parts of a vsam cluster remaining behind and allows user to correctly delete DS not in cat Signed-off-by: ddimatos <dimatos@gmail.com> * Update module doc to explain data set deltion for given volume Signed-off-by: ddimatos <dimatos@gmail.com> * Unbound local var fix Signed-off-by: ddimatos <dimatos@gmail.com> * Lint corrections Signed-off-by: ddimatos <dimatos@gmail.com> * remove unused imports Signed-off-by: ddimatos <dimatos@gmail.com> * Added 2.16 ignore since our pipeline supports devel which is at this time 2.16 Signed-off-by: ddimatos <dimatos@gmail.com> * Update module doc to explain data set deltion for given volumegit Signed-off-by: ddimatos <dimatos@gmail.com> * Added changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update grammar issue Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Bugfix: Fixes zos_copy and zos_fetch deprecation msg for using _play_context.verbosity (#814) * Add new test case for verbosity check Signed-off-by: ddimatos <dimatos@gmail.com> * Update zos_copy tests to support display.verbosity and nested encoding Signed-off-by: ddimatos <dimatos@gmail.com> Conflicts: tests/functional/modules/test_zos_copy_func.py Changes to be committed: deleted: tests/functional/modules/test_module_display.py modified: tests/functional/modules/test_zos_copy_func.py * Update test framewor to provide support for adhoc module calls Signed-off-by: ddimatos <dimatos@gmail.com> * Update zos_fetch plugin to use the display.verbosity Signed-off-by: ddimatos <dimatos@gmail.com> * Lint correction Signed-off-by: ddimatos <dimatos@gmail.com> * Update test with verbosity Signed-off-by: ddimatos <dimatos@gmail.com> Conflicts: tests/functional/modules/test_zos_copy_func.py Changes to be committed: modified: tests/functional/modules/test_zos_copy_func.py * Change from shell to raw module usage Signed-off-by: ddimatos <dimatos@gmail.com> Conflicts: tests/functional/modules/test_zos_copy_func.py Changes to be committed: modified: tests/functional/modules/test_zos_copy_func.py * changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Changelog fragment change Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Release tasks v1.6.0 (#829) * Galaxy for 1.6 Signed-off-by: ddimatos <dimatos@gmail.com> * Update ansible-core meta runtime Signed-off-by: ddimatos <dimatos@gmail.com> * Update collections private meta Signed-off-by: ddimatos <dimatos@gmail.com> * Missing trailing empty line Signed-off-by: ddimatos <dimatos@gmail.com> * Update readme with volume init info Signed-off-by: ddimatos <dimatos@gmail.com> * Update lint to match galaxy build_ignore Signed-off-by: ddimatos <dimatos@gmail.com> * Changelog summary Signed-off-by: ddimatos <dimatos@gmail.com> * Added changelog Signed-off-by: ddimatos <dimatos@gmail.com> * Minor manual CHANGELOG updates Signed-off-by: ddimatos <dimatos@gmail.com> * Relase notes updated Signed-off-by: ddimatos <dimatos@gmail.com> * Update build_ignore to skip importer_result.json Signed-off-by: ddimatos <dimatos@gmail.com> * update galaxy build_ignore Signed-off-by: ddimatos <dimatos@gmail.com> * update galaxy build_ignore Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Clean up changelog fragements no longer need post releasing 1.6 Signed-off-by: ddimatos <dimatos@gmail.com> * delete older profile, not needed Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: Ivan Moreno <iamorenosoto@gmail.com> Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> Co-authored-by: Rich Parker <richp405@gmail.com> Co-authored-by: ketankelkar <ktnklkr@gmail.com> Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> Co-authored-by: trevor-glassey <Trevor.Glassey@ibm.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@mbp-de-andre-2.gdl.mex.ibm.com> --- .ansible-lint | 2 - CHANGELOG.rst | 21 ++-- README.md | 5 +- changelogs/.plugin-cache.yaml | 2 +- changelogs/changelog.yaml | 38 ++++++ changelogs/fragments/.gitkeep | 0 docs/source/modules/zos_data_set.rst | 2 +- galaxy.yml | 5 +- meta/ibm_zos_core_meta.yml | 4 +- meta/runtime.yml | 2 +- plugins/module_utils/job.py | 1 - plugins/module_utils/zoau_version_checker.py | 6 +- plugins/modules/zos_copy.py | 51 +++++++- plugins/modules/zos_data_set.py | 2 +- scripts/mounts.sh | 0 .../functional/modules/test_zos_copy_func.py | 78 +++++++++++++ tests/unit/test_zoau_version_checker_unit.py | 109 ++++++++++++++++++ 17 files changed, 303 insertions(+), 25 deletions(-) create mode 100644 changelogs/fragments/.gitkeep mode change 100755 => 100644 scripts/mounts.sh create mode 100644 tests/unit/test_zoau_version_checker_unit.py diff --git a/.ansible-lint b/.ansible-lint index a83129368..ac0ca0b26 100644 --- a/.ansible-lint +++ b/.ansible-lint @@ -25,9 +25,7 @@ exclude_paths: - tests/helpers - tests/requirements.txt - tests/unit - - tests/sanity/ignore-2.9.txt - tests/sanity/ignore-2.10.txt - - tests/sanity/ignore-2.11.txt - venv* parseable: true quiet: false diff --git a/CHANGELOG.rst b/CHANGELOG.rst index c19a39bbc..7cf358b23 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -5,13 +5,13 @@ ibm.ibm_zos_core Release Notes .. contents:: Topics -v1.6.0-beta.1 -============= +v1.6.0 +====== Release Summary --------------- -Release Date: '2023-04-26' +Release Date: '2023-06-23' This changelog describes all changes made to the modules and plugins included in this collection. The release date is the date the changelog is created. For additional details such as required dependencies and availability review @@ -26,11 +26,11 @@ Minor Changes ------------- - Updated the text converter import from "from ansible.module_utils._text" to "from ansible.module_utils.common.text.converters" to remove warning".. warn Use ansible.module_utils.common.text.converters instead.". (https://github.com/ansible-collections/ibm_zos_core/pull/602) -- module_utils - job.py utility did not support positional wiled card placement, this enhancement uses `fnmatch` logic to support wild cards. +- module_utils - job.py utility did not support positional wild card placement, this enhancement uses `fnmatch` logic to support wild cards. - zos_copy - Fixed a bug where the module would change the mode for a directory when copying into it the contents of another. (https://github.com/ansible-collections/ibm_zos_core/pull/723) - zos_copy - was enhanced to keep track of modified members in a destination dataset, restoring them to their previous state in case of a failure. (https://github.com/ansible-collections/ibm_zos_core/pull/551) -- zos_data_set - add force parameter to enable member delete while pdse is in use (https://github.com/ansible-collections/ibm_zos_core/pull/718). -- zos_job_query - ansible module does not support positional wild card placement for `job_name1 or `job_id`. This enhancement allows embedded wildcards throughout the `job_name` and `job_id`. (https://github.com/ansible-collections/ibm_zos_core/pull/721) +- zos_data_set - add force parameter to enable member delete while PDS/e is in use (https://github.com/ansible-collections/ibm_zos_core/pull/718). +- zos_job_query - ansible module does not support positional wild card placement for `job_name` or `job_id`. This enhancement allows embedded wildcards throughout the `job_name` and `job_id`. (https://github.com/ansible-collections/ibm_zos_core/pull/721) - zos_lineinfile - would access data sets with exclusive access so no other task can read the data, this enhancement allows for a data set to be opened with a disposition set to share so that other tasks can access the data when option `force` is set to `true`. (https://github.com/ansible-collections/ibm_zos_core/pull/731) - zos_tso_command - was enhanced to accept `max_rc` as an option. This option allows a non-zero return code to succeed as a valid return code. (https://github.com/ansible-collections/ibm_zos_core/pull/666) @@ -38,11 +38,18 @@ Bugfixes -------- - Fixed wrong error message when a USS source is not found, aligning with a similar error message from zos_blockinfile "{src} does not exist". -- zos_blockinfile - was unable to use double quotes which prevented some use cases and did not display an approriate message. The fix now allows for double quotes to be used with the module. (https://github.com/ansible-collections/ibm_zos_core/pull/680) +- module_utils - data_set.py - Reported a failure caused when cataloging a VSAM data set. Fix now corrects how VSAM data sets are cataloged. (https://github.com/ansible-collections/ibm_zos_core/pull/816). +- zos_blockinfile - was unable to use double quotes which prevented some use cases and did not display an appropriate message. The fix now allows for double quotes to be used with the module. (https://github.com/ansible-collections/ibm_zos_core/pull/680) +- zos_copy - Encoding normalization used to handle newlines in text files was applied to binary files too. Fix makes sure that binary files bypass this normalization. (https://github.com/ansible-collections/ibm_zos_core/pull/810) - zos_copy - Fixes a bug where files not encoded in IBM-1047 would trigger an error while computing the record length for a new destination dataset. Issue 664. (https://github.com/ansible-collections/ibm_zos_core/pull/743) - zos_copy - Fixes a bug where the code for fixing an issue with newlines in files (issue 599) would use the wrong encoding for normalization. Issue 678. (https://github.com/ansible-collections/ibm_zos_core/pull/743) +- zos_copy - Reported a warning about the use of _play_context.verbosity.This change corrects the module action to prevent the warning message. (https://github.com/ansible-collections/ibm_zos_core/pull/814). +- zos_copy - kept permissions on target directory when copy overwrote files. The fix now set permissions when mode is given. (https://github.com/ansible-collections/ibm_zos_core/pull/790) +- zos_data_set - Reported a failure caused when `present=absent` for a VSAM data set leaving behind cluster components. Fix introduces a new logical flow that will evaluate the volumes, compare it to the provided value and if necessary catalog and delete. (https://github.com/ansible-collections/ibm_zos_core/pull/816). - zos_encode - fixes a bug where converted files were not tagged afterwards with the new code set. (https://github.com/ansible-collections/ibm_zos_core/pull/534) +- zos_fetch - Reported a warning about the use of _play_context.verbosity.This change corrects the module action to prevent the warning message. (https://github.com/ansible-collections/ibm_zos_core/pull/814). - zos_find - fixes a bug where find result values stopped being returned after first value in a list was 'not found'. (https://github.com/ansible-collections/ibm_zos_core/pull/668) +- zos_gather_facts - Fixes an issue in the zoau version checker which prevented the zos_gather_facts module from running with newer versions of ZOAU. (https://github.com/ansible-collections/ibm_zos_core/pull/797) - zos_lineinfile - Fixed a bug where a Python f-string was used and thus removed to ensure support for Python 2.7 on the controller. (https://github.com/ansible-collections/ibm_zos_core/pull/659) New Modules diff --git a/README.md b/README.md index 756f06d92..5d48210a9 100644 --- a/README.md +++ b/README.md @@ -12,7 +12,8 @@ executing operator commands, executing TSO commands, ping, querying operator actions, APF authorizing libraries, editing textual data in data sets or Unix System Services files, finding data sets, backing up and restoring data sets and -volumes, mounting file systems and running z/OS programs without JCL. +volumes, mounting file systems, running z/OS programs without JCL and +initializing volumes. Red Hat Ansible Certified Content for IBM Z @@ -49,7 +50,7 @@ and ansible-doc to automate tasks on z/OS. Ansible version compatibility ============================= -This collection has been tested against **Ansible** and **Ansible Core** versions >=2.9,<2.15. +This collection has been tested against **Ansible** and **Ansible Core** versions >=2.9,<2.16. The Ansible and Ansible Core versions supported for this collection align to the [ansible-core support matrix](https://docs.ansible.com/ansible/latest/reference_appendices/release_and_maintenance.html#ansible-core-support-matrix). Review the [Ansible community changelogs](https://docs.ansible.com/ansible/latest/reference_appendices/release_and_maintenance.html#ansible-community-changelogs) for corresponding **Ansible community packages** diff --git a/changelogs/.plugin-cache.yaml b/changelogs/.plugin-cache.yaml index 2c3c67c65..fbc11cf4b 100644 --- a/changelogs/.plugin-cache.yaml +++ b/changelogs/.plugin-cache.yaml @@ -116,4 +116,4 @@ plugins: strategy: {} test: {} vars: {} -version: 1.6.0-beta.1 +version: 1.6.0 diff --git a/changelogs/changelog.yaml b/changelogs/changelog.yaml index 51bba3c4f..37049f8df 100644 --- a/changelogs/changelog.yaml +++ b/changelogs/changelog.yaml @@ -760,6 +760,44 @@ releases: name: zos_gather_facts namespace: '' release_date: '2022-11-02' + 1.6.0: + changes: + bugfixes: + - module_utils - data_set.py - Reported a failure caused when cataloging a VSAM + data set. Fix now corrects how VSAM data sets are cataloged. (https://github.com/ansible-collections/ibm_zos_core/pull/816). + - zos_copy - Encoding normalization used to handle newlines in text files was + applied to binary files too. Fix makes sure that binary files bypass this + normalization. (https://github.com/ansible-collections/ibm_zos_core/pull/810) + - zos_copy - Reported a warning about the use of _play_context.verbosity.This + change corrects the module action to prevent the warning message. (https://github.com/ansible-collections/ibm_zos_core/pull/814). + - zos_copy - kept permissions on target directory when copy overwrote files. + The fix now set permissions when mode is given. (https://github.com/ansible-collections/ibm_zos_core/pull/790) + - zos_data_set - Reported a failure caused when `present=absent` for a VSAM + data set leaving behind cluster components. Fix introduces a new logical flow + that will evaluate the volumes, compare it to the provided value and if necessary + catalog and delete. (https://github.com/ansible-collections/ibm_zos_core/pull/816). + - zos_fetch - Reported a warning about the use of _play_context.verbosity.This + change corrects the module action to prevent the warning message. (https://github.com/ansible-collections/ibm_zos_core/pull/814). + - zos_gather_facts - Fixes an issue in the zoau version checker which prevented + the zos_gather_facts module from running with newer versions of ZOAU. (https://github.com/ansible-collections/ibm_zos_core/pull/797) + release_summary: 'Release Date: ''2023-06-23'' + + This changelog describes all changes made to the modules and plugins included + + in this collection. The release date is the date the changelog is created. + + For additional details such as required dependencies and availability review + + the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__' + fragments: + - 783_fix_zoau_version_checker.yml + - 790_overwrite_permissions_on_copy.yml + - 810_fix_binary_file_bypass.yml + - 813-ansible-lint.yml + - 814-zos_data_set-update-vsam-copy.yml + - 816-zos_data_set-update-vsam.yml + - v1.6.0_summary.yml + release_date: '2023-06-23' 1.6.0-beta.1: changes: bugfixes: diff --git a/changelogs/fragments/.gitkeep b/changelogs/fragments/.gitkeep new file mode 100644 index 000000000..e69de29bb diff --git a/docs/source/modules/zos_data_set.rst b/docs/source/modules/zos_data_set.rst index 8415694b0..9e051bece 100644 --- a/docs/source/modules/zos_data_set.rst +++ b/docs/source/modules/zos_data_set.rst @@ -56,7 +56,7 @@ state If *state=absent* and *volumes* is provided, and the data set is not found in the catalog, the module attempts to perform catalog using supplied *name* and *volumes*. If the attempt to catalog the data set catalog fails, then no action is taken. Module completes successfully with *changed=False*. - If *state=absent* and *volumes* is provided, and the data set is found in the catalog, the module compares the catalog volume attributes to the provided *volumes*. If they volume attributes are different, the cataloged data set will be uncataloged temporarily while the requested data set be deleted is cataloged. The module will catalog the original data set on completion, if the attempts to catalog fail, no action is taken. Module completes successfully with *changed=False*. + If *state=absent* and *volumes* is provided, and the data set is found in the catalog, the module compares the catalog volume attributes to the provided *volumes*. If the volume attributes are different, the cataloged data set will be uncataloged temporarily while the requested data set be deleted is cataloged. The module will catalog the original data set on completion, if the attempts to catalog fail, no action is taken. Module completes successfully with *changed=False*. If *state=present* and the data set does not exist on the managed node, create and catalog the data set, module completes successfully with *changed=True*. diff --git a/galaxy.yml b/galaxy.yml index fad6b8f5c..87f10f272 100644 --- a/galaxy.yml +++ b/galaxy.yml @@ -6,7 +6,7 @@ namespace: ibm name: ibm_zos_core # The collection version -version: 1.6.0-beta.1 +version: 1.7.0-beta.1 # Collection README file readme: README.md @@ -80,6 +80,7 @@ build_ignore: - changelogs - collections - docs + - importer_result.json - scripts - test_config.yml - tests/*.ini @@ -90,7 +91,5 @@ build_ignore: - tests/helpers - tests/requirements.txt - tests/unit - - tests/sanity/ignore-2.9.txt - tests/sanity/ignore-2.10.txt - - tests/sanity/ignore-2.11.txt - venv* diff --git a/meta/ibm_zos_core_meta.yml b/meta/ibm_zos_core_meta.yml index 484ad69fd..5e265309f 100644 --- a/meta/ibm_zos_core_meta.yml +++ b/meta/ibm_zos_core_meta.yml @@ -1,5 +1,5 @@ name: ibm_zos_core -version: "1.6.0-beta.1" +version: "1.7.0-beta.1" managed_requirements: - name: "IBM Open Enterprise SDK for Python" @@ -7,4 +7,4 @@ managed_requirements: - name: "Z Open Automation Utilities" version: - - "1.2.2" + - "1.2.3" diff --git a/meta/runtime.yml b/meta/runtime.yml index 9f9843979..576832bc7 100644 --- a/meta/runtime.yml +++ b/meta/runtime.yml @@ -1,2 +1,2 @@ --- -requires_ansible: '>=2.12.00' +requires_ansible: '>=2.9.0' diff --git a/plugins/module_utils/job.py b/plugins/module_utils/job.py index d987d5a52..d07ef5ac5 100644 --- a/plugins/module_utils/job.py +++ b/plugins/module_utils/job.py @@ -240,7 +240,6 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, duration= job["ret_code"] = {} job["ret_code"]["msg"] = entry.status + " " + entry.rc job["ret_code"]["msg_code"] = entry.rc - job["ret_code"]["code"] = None if len(entry.rc) > 0: if entry.rc.isdigit(): diff --git a/plugins/module_utils/zoau_version_checker.py b/plugins/module_utils/zoau_version_checker.py index 74494e04a..a5fff7196 100644 --- a/plugins/module_utils/zoau_version_checker.py +++ b/plugins/module_utils/zoau_version_checker.py @@ -27,8 +27,10 @@ def is_zoau_version_higher_than(min_version_str): bool -- Whether ZOAU version found was high enough. """ if is_valid_version_string(min_version_str): - # check zoau version on system + # check zoau version on system (already a list) system_version_list = get_zoau_version_str() + + # convert input to list format min_version_list = min_version_str.split('.') # convert list of strs to list of ints @@ -103,7 +105,7 @@ def get_zoau_version_str(): .stdout .decode('UTF-8') .strip() - .split(' ')[-1][1:] + .split(' ')[3][1:] .split('.') ) return version_list diff --git a/plugins/modules/zos_copy.py b/plugins/modules/zos_copy.py index fd7c8feac..ed8a691cc 100644 --- a/plugins/modules/zos_copy.py +++ b/plugins/modules/zos_copy.py @@ -1256,7 +1256,7 @@ def _get_changed_files(self, src, dest, copy_directory): files_to_change.append(relative_path) # This change adds to the files_to_change variable any file that accord with - # a name found in the source copy. + # a name found in the source copy files_to_change.extend(existing_files) # Creating tuples with (filename, permissions). original_permissions = [ @@ -2245,6 +2245,53 @@ def normalize_line_endings(src, encoding=None): return src +def normalize_line_endings(src, encoding=None): + """ + Normalizes src's encoding to IBM-037 (a dataset's default) and then normalizes + its line endings to LF. + + Arguments: + src (str) -- Path of a USS file. + encoding (dict, optional) -- Encoding options for the module. + + Returns: + str -- Path to the normalized file. + """ + # Before copying into a destination dataset, we'll make sure that + # the source file doesn't contain any carriage returns that would + # result in empty records in the destination. + # Due to the differences between encodings, we'll normalize to IBM-037 + # before checking the EOL sequence. + enc_utils = encode.EncodeUtils() + src_tag = enc_utils.uss_file_tag(src) + copy_handler = CopyHandler(AnsibleModuleHelper(dict())) + + if src_tag == "untagged": + # This should only be true when src is a remote file and no encoding + # was specified by the user. + if not encoding: + encoding = {"from": encode.Defaults.get_default_system_charset()} + src_tag = encoding["from"] + + if src_tag != "IBM-037": + fd, converted_src = tempfile.mkstemp() + os.close(fd) + + enc_utils.uss_convert_encoding( + src, + converted_src, + src_tag, + "IBM-037" + ) + copy_handler._tag_file_encoding(converted_src, "IBM-037") + src = converted_src + + if copy_handler.file_has_crlf_endings(src): + src = copy_handler.create_temp_with_lf_endings(src) + + return src + + def run_module(module, arg_def): # ******************************************************************** # Verify the validity of module args. BetterArgParser raises ValueError @@ -2339,7 +2386,7 @@ def run_module(module, arg_def): # When the destination is a dataset, we'll normalize the source # file to UTF-8 for the record length computation as Python # generally uses UTF-8 as the default encoding. - if not is_uss: + if not is_binary and not is_uss: new_src = temp_path or src new_src = os.path.normpath(new_src) # Normalizing encoding when src is a USS file (only). diff --git a/plugins/modules/zos_data_set.py b/plugins/modules/zos_data_set.py index ca6012978..73af4acf1 100644 --- a/plugins/modules/zos_data_set.py +++ b/plugins/modules/zos_data_set.py @@ -60,7 +60,7 @@ - > If I(state=absent) and I(volumes) is provided, and the data set is found in the catalog, the module compares the catalog volume attributes to the provided - I(volumes). If they volume attributes are different, the cataloged data set + I(volumes). If the volume attributes are different, the cataloged data set will be uncataloged temporarily while the requested data set be deleted is cataloged. The module will catalog the original data set on completion, if the attempts to catalog fail, no action is taken. Module completes successfully with I(changed=False). diff --git a/scripts/mounts.sh b/scripts/mounts.sh old mode 100755 new mode 100644 diff --git a/tests/functional/modules/test_zos_copy_func.py b/tests/functional/modules/test_zos_copy_func.py index 7b69a75b4..345e6067d 100644 --- a/tests/functional/modules/test_zos_copy_func.py +++ b/tests/functional/modules/test_zos_copy_func.py @@ -44,6 +44,11 @@ DUMMY_DATA_CRLF = b"00000001 DUMMY DATA\r\n00000002 DUMMY DATA\r\n" +# FD is outside of the range of UTF-8, so it should be useful when testing +# that binary data is not getting converted. +DUMMY_DATA_BINARY = b"\xFD\xFD\xFD\xFD" +DUMMY_DATA_BINARY_ESCAPED = "\\xFD\\xFD\\xFD\\xFD" + VSAM_RECORDS = """00000001A record 00000002A record 00000003A record @@ -1300,6 +1305,79 @@ def test_copy_file_crlf_endings_to_sequential_data_set(ansible_zos_module): os.remove(src) +# The following two tests are to address the bugfix for issue #807. +@pytest.mark.uss +@pytest.mark.seq +def test_copy_local_binary_file_without_encoding_conversion(ansible_zos_module): + hosts = ansible_zos_module + dest = "USER.TEST.SEQ.FUNCTEST" + + fd, src = tempfile.mkstemp() + os.close(fd) + with open(src, "wb") as infile: + infile.write(DUMMY_DATA_BINARY) + + try: + hosts.all.zos_data_set(name=dest, state="absent") + + copy_result = hosts.all.zos_copy( + src=src, + dest=dest, + remote_src=False, + is_binary=True + ) + + for cp_res in copy_result.contacted.values(): + assert cp_res.get("msg") is None + assert cp_res.get("changed") is True + assert cp_res.get("dest") == dest + finally: + hosts.all.zos_data_set(name=dest, state="absent") + os.remove(src) + + +@pytest.mark.uss +@pytest.mark.seq +def test_copy_remote_binary_file_without_encoding_conversion(ansible_zos_module): + hosts = ansible_zos_module + src = "/tmp/zos_copy_binary_file" + dest = "USER.TEST.SEQ.FUNCTEST" + + try: + hosts.all.zos_data_set(name=dest, state="absent") + + # Creating a binary file on the remote system through Python + # to avoid encoding issues if we were to copy a local file + # or use the shell directly. + python_cmd = """python3 -c 'with open("{0}", "wb") as f: f.write(b"{1}")'""".format( + src, + DUMMY_DATA_BINARY_ESCAPED + ) + python_result = hosts.all.shell(python_cmd) + for result in python_result.contacted.values(): + assert result.get("msg") is None or result.get("msg") == "" + assert result.get("stderr") is None or result.get("stderr") == "" + + # Because the original bug report used a file tagged as 'binary' + # on z/OS, we'll recreate that use case here. + hosts.all.shell("chtag -b {0}".format(src)) + + copy_result = hosts.all.zos_copy( + src=src, + dest=dest, + remote_src=True, + is_binary=True + ) + + for cp_res in copy_result.contacted.values(): + assert cp_res.get("msg") is None + assert cp_res.get("changed") is True + assert cp_res.get("dest") == dest + finally: + hosts.all.zos_data_set(name=dest, state="absent") + hosts.all.file(path=src, state="absent") + + @pytest.mark.uss @pytest.mark.seq @pytest.mark.parametrize("src", [ diff --git a/tests/unit/test_zoau_version_checker_unit.py b/tests/unit/test_zoau_version_checker_unit.py new file mode 100644 index 000000000..96031f4a1 --- /dev/null +++ b/tests/unit/test_zoau_version_checker_unit.py @@ -0,0 +1,109 @@ +# -*- coding: utf-8 -*- + +# Copyright (c) IBM Corporation 2023 +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +from ibm_zos_core.plugins.module_utils.zoau_version_checker import ( + get_zoau_version_str, + is_valid_version_string, + is_zoau_version_higher_than +) + +import pytest, mock +import types + +# used in patch decorators below +IMPORT_NAME = "ibm_zos_core.plugins.module_utils.zoau_version_checker" + +# Tests for zoau_version_checker + +zoaversion_output = [ + + (['1','0','2'], "2020/03/03 19:24:41 CUT V1.0.2"), + (['1','0','3'], "2020/05/06 18:17:13 CUT V1.0.3"), + (['1','0','3'], "2020/07/07 14:54:31 CUT V1.0.3"), + (['1','1','0'], "2020/08/05 13:08:52 CUT V1.1.0"), + (['1','1','0'], "2020/08/20 12:50:07 CUT V1.1.0"), + (['1','1','0'], "2020/09/16 13:41:25 CUT V1.1.0"), + (['1','1','0'], "2020/09/25 14:07:34 CUT V1.1.0"), + (['1','1','1'], "2021/03/26 15:44:32 CUT V1.1.1"), + (['1','2','0'], "2021/07/07 22:36:30 CUT V1.2.0"), + (['1','2','0'], "2021/08/05 22:12:58 CUT V1.2.0"), + (['1','2','1'], "2022/07/12 18:35:28 CUT V1.2.1"), + (['1','2','1'], "2022/08/17 21:25:13 CUT V1.2.1"), + (['1','2','1'], "2022/08/25 21:44:21 CUT V1.2.1 31163ab 1856"), + (['1','2','1'], "2022/09/07 15:26:50 CUT V1.2.1 d2f6557 1880"), + (['1','2','3'], "2022/12/03 13:33:22 CUT V1.2.3 6113dc9 2512"), + (['1','2','2'], "2022/12/06 20:44:00 CUT V1.2.2 ee30137 2525"), + (['1','2','3'], "2023/03/16 18:17:00 CUT V1.2.3 1aa591fb 2148 PH50145"), + (['1', '2', '4', '0'], "2023/06/02 13:28:30 CUT V1.2.4.0 3b866824 2873 PH52034 826 267d9646"), + +] + + +@pytest.mark.parametrize("version_string, zoaversion", zoaversion_output) +@mock.patch('subprocess.run') +def test_get_zoau_version_str(mocker, version_string, zoaversion): + # 'get_zoau_version_str' makes a call to 'zoaversion' on the target host by + # calling 'subprocess.run', which returns an object with an attr 'stdout' + # that contains the byte string of the console output. The following mocks + # this behavior so the code can be tested without making a call to a host. + # Instead, zoaversion output for various versions of ZOAU are stored in the + # list of tuples 'zoaversion_output' above and returned by the mocked call + # to 'subprocess.run' after being converted to bytes. SimpleNamespace is an + # object subclass which allows for attributes to be set/removed. In our + # case, 'get_zoau_version_str' expects a 'stdout' attribute in the return + # struct of 'subprocess.run', which we mock via SimpleNamespace. + + mocker.return_value = types.SimpleNamespace( + stdout = bytes(zoaversion, 'utf-8'), + ) + + assert version_string == get_zoau_version_str() + + +@pytest.mark.parametrize("version_string, zoaversion", zoaversion_output) +def test_is_valid_version_string(version_string,zoaversion): + # The first parameter in our zoaversion_output list of tuples above is the + # return value of the function 'get_zoau_version_str' in the form of + # ['#','#','#'] or ['#','#','#','#']. A 'join' str operation with a dot(.) + # yields "#.#.#" or "#.#.#.#". And since these values are taken from this + # list, they can all be expected to be valid ZOAU verison strings. + + assert True == is_valid_version_string('.'.join(version_string)) + + +test_data = [ +# result, "sytem-level" ZOAU >= min-ZOAU + (True, ['1', '2', '1'], "1.2.1"), + (True, ['1', '2', '3'], "1.2.1"), + (True, ['1', '2', '4', '0'], "1.2.2"), + + (False, ['1', '1', '1'], "1.2.3"), + (False, ['1', '1', '1'], "1.2.4.0"), + (False, ['1', '2', '0', '1'], "1.2.1"), +] + + +@pytest.mark.parametrize("expected, sys_zoau, min_version_str", test_data) +@mock.patch(IMPORT_NAME+'.get_zoau_version_str') +def test_is_zoau_version_higher_than(mocker, expected, sys_zoau, min_version_str): + # The 'is_zoau_version_higher_than' function calls 'get_zoau_version_str' to + # get the ZOAU version string from the system. We mock that call and provide + # our own "system" level ZOAU version str to compare against our provided + # minimum ZOAU version string. + + mocker.return_value = sys_zoau + assert expected == is_zoau_version_higher_than(min_version_str) From 44c578e686ffe9e7ace7fdc8bfa3efbf48f38fd2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Fri, 7 Jul 2023 13:43:17 -0600 Subject: [PATCH 138/495] Bug fix, zos_copy returns an error message when a concurrent copy fails (#794) * Test case to validate bug does not happen * First iteration for solutions * First proposal to validate_disposition * Remove unecesary test * Solvin unecesary code * Cover all cases with bug or false positive * Add test case to ensure behaviour * Get the better version of test case * Add fragment * Solve identation * Solve identation * Solve identation * Solve error in cleanup folders * Change function name * Change variables names * Solve wrote and write * Update changelog entry Signed-off-by: ddimatos <dimatos@gmail.com> * Better verbose and function name * Better message * Solve certification tests * Clearer and eficient version * continuation line over-indented solve * continuation line over-indented solve --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: ddimatos <dimatos@gmail.com> --- ...or_message_when_concurrent_copy_fails.yaml | 6 ++ plugins/modules/zos_copy.py | 43 ++++++++++- .../functional/modules/test_zos_copy_func.py | 75 +++++++++++++++++++ 3 files changed, 123 insertions(+), 1 deletion(-) create mode 100644 changelogs/fragments/794-zos_copy_report_error_message_when_concurrent_copy_fails.yaml diff --git a/changelogs/fragments/794-zos_copy_report_error_message_when_concurrent_copy_fails.yaml b/changelogs/fragments/794-zos_copy_report_error_message_when_concurrent_copy_fails.yaml new file mode 100644 index 000000000..dd5b71220 --- /dev/null +++ b/changelogs/fragments/794-zos_copy_report_error_message_when_concurrent_copy_fails.yaml @@ -0,0 +1,6 @@ +bugfixes: + - zos_copy - Reported a false positive such that the response would have + `changed=true` when copying from a source (src) or destination (dest) + data set that was in use (DISP=SHR). This change now displays an appropriate + error message and returns `changed=false`. + (https://github.com/ansible-collections/ibm_zos_core/pull/794). \ No newline at end of file diff --git a/plugins/modules/zos_copy.py b/plugins/modules/zos_copy.py index ed8a691cc..6a3be27b8 100644 --- a/plugins/modules/zos_copy.py +++ b/plugins/modules/zos_copy.py @@ -737,7 +737,7 @@ from re import match as fullmatch try: - from zoautil_py import datasets + from zoautil_py import datasets, opercmd except Exception: datasets = MissingZOAUImport() @@ -2245,6 +2245,37 @@ def normalize_line_endings(src, encoding=None): return src +def data_set_locked(dataset_name): + """ + Checks if a data set is in use and therefore locked (DISP=SHR), which + is often caused by a long running task. Returns a boolean value to indicate the data set status. + + Arguments: + dataset_name (str) - the data set name used to check if there is a lock. + + Returns: + bool -- rue if the data set is locked, or False if the data set is not locked. + """ + # Using operator command "D GRS,RES=(*,{dataset_name})" to detect if a data set + # is in use, when a data set is in use it will have "EXC/SHR and SHARE" + # in the result with a length greater than 4. + result = dict() + result["stdout"] = [] + command_dgrs = "D GRS,RES=(*,{0})".format(dataset_name) + response = opercmd.execute(command=command_dgrs) + stdout = response.stdout_response + if stdout is not None: + for out in stdout.split("\n"): + if out: + result["stdout"].append(out) + if len(result["stdout"]) > 4 and "EXC/SHR" in stdout and "SHARE" in stdout: + return True + elif len(result["stdout"]) <= 4 and "NO REQUESTORS FOR RESOURCE" in stdout: + return False + else: + return False + + def normalize_line_endings(src, encoding=None): """ Normalizes src's encoding to IBM-037 (a dataset's default) and then normalizes @@ -2501,6 +2532,16 @@ def run_module(module, arg_def): ) # ******************************************************************** + # To validate the source and dest are not lock in a batch process by + # the machine and not generate a false positive check the disposition + # for try to write in dest and if both src and dest are in lock. + # ******************************************************************** + if dest_ds_type != "USS": + is_dest_lock = data_set_locked(dest_name) + if is_dest_lock: + module.fail_json( + msg="Unable to write to dest '{0}' because a task is accessing the data set.".format(dest_name)) + # ******************************************************************** # Backup should only be performed if dest is an existing file or # data set. Otherwise ignored. # ******************************************************************** diff --git a/tests/functional/modules/test_zos_copy_func.py b/tests/functional/modules/test_zos_copy_func.py index 345e6067d..db6262bc9 100644 --- a/tests/functional/modules/test_zos_copy_func.py +++ b/tests/functional/modules/test_zos_copy_func.py @@ -22,6 +22,7 @@ from tempfile import mkstemp import subprocess + __metaclass__ = type @@ -117,6 +118,30 @@ """ +c_pgm="""#include <stdio.h> +#include <stdlib.h> +#include <string.h> +int main(int argc, char** argv) +{ + char dsname[ strlen(argv[1]) + 4]; + sprintf(dsname, "//'%s'", argv[1]); + FILE* member; + member = fopen(dsname, "rb,type=record"); + sleep(300); + fclose(member); + return 0; +} +""" + +call_c_jcl="""//PDSELOCK JOB MSGCLASS=A,MSGLEVEL=(1,1),NOTIFY=&SYSUID,REGION=0M +//LOCKMEM EXEC PGM=BPXBATCH +//STDPARM DD * +SH /tmp/disp_shr/pdse-lock '{0}({1})' +//STDIN DD DUMMY +//STDOUT DD SYSOUT=* +//STDERR DD SYSOUT=* +//""" + def populate_dir(dir_path): for i in range(5): with open(dir_path + "/" + "file" + str(i + 1), "w") as infile: @@ -1198,6 +1223,54 @@ def test_ensure_copy_file_does_not_change_permission_on_dest(ansible_zos_module, hosts.all.file(path=dest_path, state="absent") +@pytest.mark.seq +def test_copy_dest_lock(ansible_zos_module): + DATASET_1 = "USER.PRIVATE.TESTDS" + DATASET_2 = "ADMI.PRIVATE.TESTDS" + MEMBER_1 = "MEM1" + try: + hosts = ansible_zos_module + hosts.all.zos_data_set(name=DATASET_1, state="present", type="pdse", replace=True) + hosts.all.zos_data_set(name=DATASET_2, state="present", type="pdse", replace=True) + hosts.all.zos_data_set(name=DATASET_1 + "({0})".format(MEMBER_1), state="present", type="member", replace=True) + hosts.all.zos_data_set(name=DATASET_2 + "({0})".format(MEMBER_1), state="present", type="member", replace=True) + # copy text_in source + hosts.all.shell(cmd="echo \"{0}\" > {1}".format(DUMMY_DATA, DATASET_2+"({0})".format(MEMBER_1))) + # copy/compile c program and copy jcl to hold data set lock for n seconds in background(&) + hosts.all.zos_copy(content=c_pgm, dest='/tmp/disp_shr/pdse-lock.c', force=True) + hosts.all.zos_copy( + content=call_c_jcl.format(DATASET_1, MEMBER_1), + dest='/tmp/disp_shr/call_c_pgm.jcl', + force=True + ) + hosts.all.shell(cmd="xlc -o pdse-lock pdse-lock.c", chdir="/tmp/disp_shr/") + # submit jcl + hosts.all.shell(cmd="submit call_c_pgm.jcl", chdir="/tmp/disp_shr/") + # pause to ensure c code acquires lock + time.sleep(5) + results = hosts.all.zos_copy( + src = DATASET_2 + "({0})".format(MEMBER_1), + dest = DATASET_1 + "({0})".format(MEMBER_1), + remote_src = True, + force = True + ) + for result in results.contacted.values(): + print(result) + assert result.get("changed") == False + assert result.get("msg") is not None + finally: + # extract pid + ps_list_res = hosts.all.shell(cmd="ps -e | grep -i 'pdse-lock'") + # kill process - release lock - this also seems to end the job + pid = list(ps_list_res.contacted.values())[0].get('stdout').strip().split(' ')[0] + hosts.all.shell(cmd="kill 9 {0}".format(pid.strip())) + # clean up c code/object/executable files, jcl + hosts.all.shell(cmd='rm -r /tmp/disp_shr') + # remove pdse + hosts.all.zos_data_set(name=DATASET_1, state="absent") + hosts.all.zos_data_set(name=DATASET_2, state="absent") + + @pytest.mark.uss @pytest.mark.seq def test_copy_file_record_length_to_sequential_data_set(ansible_zos_module): @@ -2984,6 +3057,7 @@ def test_copy_uss_file_to_existing_sequential_data_set_twice_with_tmphlq_option( hosts.all.zos_data_set(name=dest, state="absent") + @pytest.mark.parametrize("options", [ dict(src="/etc/profile", dest="/tmp/zos_copy_test_profile", force=True, is_remote=False, verbosity="-vvvvv", verbosity_level=5), @@ -3019,3 +3093,4 @@ def test_display_verbosity_in_zos_copy_plugin(ansible_zos_module, options): finally: hosts.all.file(path=options["dest"], state="absent") + From bfd30fc05c922c8fac6ec28eaef58b6d0d595670 Mon Sep 17 00:00:00 2001 From: Ivan Moreno <iamorenosoto@gmail.com> Date: Fri, 7 Jul 2023 13:03:48 -0700 Subject: [PATCH 139/495] Enhancement/428/jinja2 template support (#667) * Added Jinja2 support for single files in zos_copy * Fixed cleanup of rendered templates * Added templating for directories * Fixed encoding conversion for directories (#616) While working on adding Jinja2 support for directories, I noticed that files that were in subdirectories would show garbage in a shell, which reminded me of issue #616. After implementing their suggestion, files now show the rendered templates. * Disabled rendering for remote sources * Enhanced exception handling for templates * Added encoding normalization for templates * Added templating tests to zos_copy * Added templating support to zos_job_submit * Fixed missing newline * Added failure when using templates with remote sources * Fixed encoding normalization * Fixed sanity tests issues * Added Jinja2 tests to zos_job_submit * Updated template test for zos_job_submit * Fixed template environment creation * Refactored template environment creation * Fixed initial size computation for local files * Fixed merge mistakes * Updated description for trim_blocks * Updated docs * Added test for Jinja2 loops * Added changelog fragment * Removed duplicated function * Changed try-except blocks used in action plugins * Updated template docs with variable precedence * Changed dictionary update in action plugin * Added another encoding for template tests * Fixed import errors in template.py * Fixed import error in Python 2.7 * Fixed bandit issue * Fixed template cleanup * Updated documentation --------- Co-authored-by: Demetri <dimatos@gmail.com> --- changelogs/fragments/667-template-support.yml | 4 + docs/source/modules/zos_copy.rst | 200 +++++++++++ docs/source/modules/zos_job_submit.rst | 127 +++++++ plugins/action/zos_copy.py | 67 +++- plugins/action/zos_job_submit.py | 49 +++ plugins/doc_fragments/template.py | 120 +++++++ plugins/module_utils/template.py | 336 ++++++++++++++++++ plugins/modules/zos_copy.py | 52 ++- plugins/modules/zos_job_submit.py | 28 ++ .../functional/modules/test_zos_copy_func.py | 300 ++++++++++++++++ .../modules/test_zos_job_submit_func.py | 112 +++++- tests/pytest.ini | 3 +- 12 files changed, 1388 insertions(+), 10 deletions(-) create mode 100644 changelogs/fragments/667-template-support.yml create mode 100644 plugins/doc_fragments/template.py create mode 100644 plugins/module_utils/template.py diff --git a/changelogs/fragments/667-template-support.yml b/changelogs/fragments/667-template-support.yml new file mode 100644 index 000000000..2ac499a3d --- /dev/null +++ b/changelogs/fragments/667-template-support.yml @@ -0,0 +1,4 @@ +minor_changes: + - Add support for Jinja2 templates in zos_copy and zos_job_submit + when using local source files. + (https://github.com/ansible-collections/ibm_zos_core/pull/667) \ No newline at end of file diff --git a/docs/source/modules/zos_copy.rst b/docs/source/modules/zos_copy.rst index 89be20ebb..45dee10a7 100644 --- a/docs/source/modules/zos_copy.rst +++ b/docs/source/modules/zos_copy.rst @@ -375,6 +375,133 @@ dest_data_set +use_template + Whether the module should treat ``src`` as a Jinja2 template and render it before continuing with the rest of the module. + + Only valid when ``src`` is a local file or directory. + + All variables defined in inventory files, vars files and the playbook will be passed to the template engine, as well as `Ansible special variables <https://docs.ansible.com/ansible/latest/reference_appendices/special_variables.html#special-variables>`_, such as ``playbook_dir``, ``ansible_version``, etc. + + If variables defined in different scopes share the same name, Ansible will apply variable precedence to them. You can see the complete precedence order `in Ansible's documentation <https://docs.ansible.com/ansible/latest/playbook_guide/playbooks_variables.html#understanding-variable-precedence>`_ + + | **required**: False + | **type**: bool + + +template_parameters + Options to set the way Jinja2 will process templates. + + Jinja2 already sets defaults for the markers it uses, you can find more information at its `official documentation <https://jinja.palletsprojects.com/en/latest/templates/>`_. + + These options are ignored unless ``use_template`` is true. + + | **required**: False + | **type**: dict + + + variable_start_string + Marker for the beginning of a statement to print a variable in Jinja2. + + | **required**: False + | **type**: str + | **default**: {{ + + + variable_end_string + Marker for the end of a statement to print a variable in Jinja2. + + | **required**: False + | **type**: str + | **default**: }} + + + block_start_string + Marker for the beginning of a block in Jinja2. + + | **required**: False + | **type**: str + | **default**: {% + + + block_end_string + Marker for the end of a block in Jinja2. + + | **required**: False + | **type**: str + | **default**: %} + + + comment_start_string + Marker for the beginning of a comment in Jinja2. + + | **required**: False + | **type**: str + | **default**: {# + + + comment_end_string + Marker for the end of a comment in Jinja2. + + | **required**: False + | **type**: str + | **default**: #} + + + line_statement_prefix + Prefix used by Jinja2 to identify line-based statements. + + | **required**: False + | **type**: str + + + line_comment_prefix + Prefix used by Jinja2 to identify comment lines. + + | **required**: False + | **type**: str + + + lstrip_blocks + Whether Jinja2 should strip leading spaces from the start of a line to a block. + + | **required**: False + | **type**: bool + + + trim_blocks + Whether Jinja2 should remove the first newline after a block is removed. + + Setting this option to ``False`` will result in newlines being added to the rendered template. This could create invalid code when working with JCL templates or empty records in destination data sets. + + | **required**: False + | **type**: bool + | **default**: True + + + keep_trailing_newline + Whether Jinja2 should keep the first trailing newline at the end of a template after rendering. + + | **required**: False + | **type**: bool + + + newline_sequence + Sequence that starts a newline in a template. + + | **required**: False + | **type**: str + | **default**: \\n + | **choices**: \\n, \\r, \\r\\n + + + auto_reload + Whether to reload a template file when it has changed after the task has started. + + | **required**: False + | **type**: bool + + + Examples @@ -595,6 +722,79 @@ dest | **type**: str | **sample**: SAMPLE.SEQ.DATA.SET +dest_created + Indicates whether the module created the destination. + + | **returned**: success and if dest was created by the module. + | **type**: bool + | **sample**: + + .. code-block:: json + + true + +destination_attributes + Attributes of a dest created by the module. + + | **returned**: success and destination was created by the module. + | **type**: dict + | **sample**: + + .. code-block:: json + + { + "block_size": 32760, + "record_format": "FB", + "record_length": 45, + "space_primary": 2, + "space_secondary": 1, + "space_type": "K", + "type": "PDSE" + } + + block_size + Block size of the dataset. + + | **type**: int + | **sample**: 32760 + + record_format + Record format of the dataset. + + | **type**: str + | **sample**: FB + + record_length + Record length of the dataset. + + | **type**: int + | **sample**: 45 + + space_primary + Allocated primary space for the dataset. + + | **type**: int + | **sample**: 2 + + space_secondary + Allocated secondary space for the dataset. + + | **type**: int + | **sample**: 1 + + space_type + Unit of measurement for space. + + | **type**: str + | **sample**: K + + type + Type of dataset allocated. + + | **type**: str + | **sample**: PDSE + + checksum SHA256 checksum of the file after running zos_copy. diff --git a/docs/source/modules/zos_job_submit.rst b/docs/source/modules/zos_job_submit.rst index 4375564bb..9714f2766 100644 --- a/docs/source/modules/zos_job_submit.rst +++ b/docs/source/modules/zos_job_submit.rst @@ -140,6 +140,133 @@ encoding +use_template + Whether the module should treat ``src`` as a Jinja2 template and render it before continuing with the rest of the module. + + Only valid when ``src`` is a local file or directory. + + All variables defined in inventory files, vars files and the playbook will be passed to the template engine, as well as `Ansible special variables <https://docs.ansible.com/ansible/latest/reference_appendices/special_variables.html#special-variables>`_, such as ``playbook_dir``, ``ansible_version``, etc. + + If variables defined in different scopes share the same name, Ansible will apply variable precedence to them. You can see the complete precedence order `in Ansible's documentation <https://docs.ansible.com/ansible/latest/playbook_guide/playbooks_variables.html#understanding-variable-precedence>`_ + + | **required**: False + | **type**: bool + + +template_parameters + Options to set the way Jinja2 will process templates. + + Jinja2 already sets defaults for the markers it uses, you can find more information at its `official documentation <https://jinja.palletsprojects.com/en/latest/templates/>`_. + + These options are ignored unless ``use_template`` is true. + + | **required**: False + | **type**: dict + + + variable_start_string + Marker for the beginning of a statement to print a variable in Jinja2. + + | **required**: False + | **type**: str + | **default**: {{ + + + variable_end_string + Marker for the end of a statement to print a variable in Jinja2. + + | **required**: False + | **type**: str + | **default**: }} + + + block_start_string + Marker for the beginning of a block in Jinja2. + + | **required**: False + | **type**: str + | **default**: {% + + + block_end_string + Marker for the end of a block in Jinja2. + + | **required**: False + | **type**: str + | **default**: %} + + + comment_start_string + Marker for the beginning of a comment in Jinja2. + + | **required**: False + | **type**: str + | **default**: {# + + + comment_end_string + Marker for the end of a comment in Jinja2. + + | **required**: False + | **type**: str + | **default**: #} + + + line_statement_prefix + Prefix used by Jinja2 to identify line-based statements. + + | **required**: False + | **type**: str + + + line_comment_prefix + Prefix used by Jinja2 to identify comment lines. + + | **required**: False + | **type**: str + + + lstrip_blocks + Whether Jinja2 should strip leading spaces from the start of a line to a block. + + | **required**: False + | **type**: bool + + + trim_blocks + Whether Jinja2 should remove the first newline after a block is removed. + + Setting this option to ``False`` will result in newlines being added to the rendered template. This could create invalid code when working with JCL templates or empty records in destination data sets. + + | **required**: False + | **type**: bool + | **default**: True + + + keep_trailing_newline + Whether Jinja2 should keep the first trailing newline at the end of a template after rendering. + + | **required**: False + | **type**: bool + + + newline_sequence + Sequence that starts a newline in a template. + + | **required**: False + | **type**: str + | **default**: \\n + | **choices**: \\n, \\r, \\r\\n + + + auto_reload + Whether to reload a template file when it has changed after the task has started. + + | **required**: False + | **type**: bool + + + Examples diff --git a/plugins/action/zos_copy.py b/plugins/action/zos_copy.py index b98432aa6..b557e8605 100644 --- a/plugins/action/zos_copy.py +++ b/plugins/action/zos_copy.py @@ -16,6 +16,7 @@ import os import stat import time +import shutil from tempfile import mkstemp, gettempprefix @@ -34,6 +35,8 @@ from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import encode +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import template + display = Display() @@ -112,11 +115,18 @@ def run(self, tmp=None, task_vars=None): msg = "Backup file provided but 'backup' parameter is False" return self._exit_action(result, msg, failed=True) + use_template = _process_boolean(task_args.get("use_template"), default=False) + if remote_src and use_template: + msg = "Use of Jinja2 templates is only valid for local files, remote_src cannot be set to true." + return self._exit_action(result, msg, failed=True) + if not is_uss: if mode or owner or group: msg = "Cannot specify 'mode', 'owner' or 'group' for MVS destination" return self._exit_action(result, msg, failed=True) + template_dir = None + if not remote_src: if local_follow and not src: msg = "No path given for local symlink" @@ -150,14 +160,65 @@ def run(self, tmp=None, task_vars=None): dict(src=src, dest=dest, changed=False, failed=True) ) return result + + if use_template: + template_parameters = task_args.get("template_parameters", dict()) + if encoding: + template_encoding = encoding.get("from", None) + else: + template_encoding = None + + try: + renderer = template.create_template_environment( + template_parameters, + src, + template_encoding + ) + template_dir, rendered_dir = renderer.render_dir_template( + task_vars.get("vars", dict()) + ) + except Exception as err: + if template_dir: + shutil.rmtree(template_dir, ignore_errors=True) + return self._exit_action(result, str(err), failed=True) + + src = rendered_dir + task_args["size"] = sum( - os.stat(path + "/" + f).st_size for f in files + os.stat(os.path.join(path, f)).st_size + for path, dirs, files in os.walk(src) + for f in files ) else: if mode == "preserve": task_args["mode"] = "0{0:o}".format( stat.S_IMODE(os.stat(src).st_mode) ) + + if use_template: + template_parameters = task_args.get("template_parameters", dict()) + if encoding: + template_encoding = encoding.get("from", None) + else: + template_encoding = None + + try: + renderer = template.create_template_environment( + template_parameters, + src, + template_encoding + ) + template_dir, rendered_file = renderer.render_file_template( + os.path.basename(src), + task_vars.get("vars", dict()) + ) + except Exception as err: + if template_dir: + shutil.rmtree(template_dir, ignore_errors=True) + return self._exit_action(result, str(err), failed=True) + + src = rendered_file + task_args["size"] = os.stat(src).st_size display.vvv(u"ibm_zos_copy calculated size: {0}".format(os.stat(src).st_size), host=self._play_context.remote_addr) transfer_res = self._copy_to_remote( @@ -187,6 +248,10 @@ def run(self, tmp=None, task_vars=None): task_vars=task_vars, ) + # Erasing all rendered Jinja2 templates from the controller. + if template_dir: + shutil.rmtree(template_dir, ignore_errors=True) + if copy_res.get("note") and not force: result["note"] = copy_res.get("note") return result diff --git a/plugins/action/zos_job_submit.py b/plugins/action/zos_job_submit.py index 7247f6b7b..715ce57ed 100644 --- a/plugins/action/zos_job_submit.py +++ b/plugins/action/zos_job_submit.py @@ -17,8 +17,11 @@ from ansible.errors import AnsibleError, AnsibleFileNotFound # from ansible.module_utils._text import to_bytes, to_text from ansible.module_utils.common.text.converters import to_bytes, to_text +from ansible.module_utils.parsing.convert_bool import boolean import os +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import template + class ActionModule(ActionBase): def run(self, tmp=None, task_vars=None): @@ -32,6 +35,17 @@ def run(self, tmp=None, task_vars=None): return result module_args = self._task.args.copy() + + use_template = _process_boolean(module_args.get("use_template")) + location = module_args.get("location") + if use_template and location != "LOCAL": + result.update(dict( + failed=True, + changed=False, + msg="Use of Jinja2 templates is only valid for local files. Location is set to '{0}' but should be 'LOCAL'".format(location) + )) + return result + if module_args["location"] == "LOCAL": source = self._task.args.get("src", None) @@ -94,6 +108,30 @@ def run(self, tmp=None, task_vars=None): tmp_src = self._connection._shell.join_path(tmp, "source") + rendered_file = None + if use_template: + template_parameters = module_args.get("template_parameters", dict()) + encoding = module_args.get("encoding", dict()) + + try: + renderer = template.create_template_environment( + template_parameters, + source_full, + encoding.get("from", None) + ) + template_dir, rendered_file = renderer.render_file_template( + os.path.basename(source_full), + task_vars + ) + except Exception as err: + result["msg"] = to_text(err) + result["failed"] = True + result["changed"] = False + result["invocation"] = dict(module_args=module_args) + return result + + source_full = rendered_file + remote_path = None remote_path = self._transfer_file(source_full, tmp_src) @@ -127,6 +165,10 @@ def run(self, tmp=None, task_vars=None): task_vars=task_vars, ) ) + + if rendered_file: + os.remove(rendered_file) + else: result.update( self._execute_module( @@ -153,3 +195,10 @@ def delete_dict_entries(entries, dictionary): # entries = ('checksum', 'dest', 'gid', 'group', 'md5sum', 'mode', 'owner', 'size', 'src', 'state', 'uid') # delete_dict_entries(entries, result) return result + + +def _process_boolean(arg, default=False): + try: + return boolean(arg) + except TypeError: + return default diff --git a/plugins/doc_fragments/template.py b/plugins/doc_fragments/template.py new file mode 100644 index 000000000..1eea4ad3d --- /dev/null +++ b/plugins/doc_fragments/template.py @@ -0,0 +1,120 @@ +# -*- coding: utf-8 -*- + +# Copyright (c) IBM Corporation 2022, 2023 +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + + +class ModuleDocFragment(object): + + DOCUMENTATION = r''' +options: + use_template: + description: + - Whether the module should treat C(src) as a Jinja2 template and + render it before continuing with the rest of the module. + - Only valid when C(src) is a local file or directory. + - All variables defined in inventory files, vars files and the playbook + will be passed to the template engine, + as well as L(Ansible special variables,https://docs.ansible.com/ansible/latest/reference_appendices/special_variables.html#special-variables), + such as C(playbook_dir), C(ansible_version), etc. + - If variables defined in different scopes share the same name, Ansible will + apply variable precedence to them. You can see the complete precedence order + L(in Ansible's documentation,https://docs.ansible.com/ansible/latest/playbook_guide/playbooks_variables.html#understanding-variable-precedence) + type: bool + default: false + template_parameters: + description: + - Options to set the way Jinja2 will process templates. + - Jinja2 already sets defaults for the markers it uses, you can find more + information at its L(official documentation,https://jinja.palletsprojects.com/en/latest/templates/). + - These options are ignored unless C(use_template) is true. + required: false + type: dict + suboptions: + variable_start_string: + description: + - Marker for the beginning of a statement to print a variable in Jinja2. + type: str + default: '{{' + variable_end_string: + description: + - Marker for the end of a statement to print a variable in Jinja2. + type: str + default: '}}' + block_start_string: + description: + - Marker for the beginning of a block in Jinja2. + type: str + default: '{%' + block_end_string: + description: + - Marker for the end of a block in Jinja2. + type: str + default: '%}' + comment_start_string: + description: + - Marker for the beginning of a comment in Jinja2. + type: str + default: '{#' + comment_end_string: + description: + - Marker for the end of a comment in Jinja2. + type: str + default: '#}' + line_statement_prefix: + description: + - Prefix used by Jinja2 to identify line-based statements. + type: str + required: false + line_comment_prefix: + description: + - Prefix used by Jinja2 to identify comment lines. + type: str + required: false + lstrip_blocks: + description: + - Whether Jinja2 should strip leading spaces from the start of a line + to a block. + type: bool + default: false + trim_blocks: + description: + - Whether Jinja2 should remove the first newline after a block is removed. + - Setting this option to C(False) will result in newlines being added to + the rendered template. This could create invalid code when working with + JCL templates or empty records in destination data sets. + type: bool + default: true + keep_trailing_newline: + description: + - Whether Jinja2 should keep the first trailing newline at the end of a + template after rendering. + type: bool + default: false + newline_sequence: + description: + - Sequence that starts a newline in a template. + type: str + default: "\n" + choices: + - "\n" + - "\r" + - "\r\n" + auto_reload: + description: + - Whether to reload a template file when it has changed after the task + has started. + type: bool + default: false +''' diff --git a/plugins/module_utils/template.py b/plugins/module_utils/template.py new file mode 100644 index 000000000..3f0c95021 --- /dev/null +++ b/plugins/module_utils/template.py @@ -0,0 +1,336 @@ +# Copyright (c) IBM Corporation 2022, 2023 +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +import os +import tempfile +from os import path + +from ansible.module_utils._text import to_native +from ansible.module_utils.parsing.convert_bool import boolean + +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import ( + MissingImport, +) + +# This module is to be used locally, so jinja2 only needs to be installed in the +# controller, but Ansible sanity testing simulates what would happen if a managed +# node tried to use this module_util, hence the use of MissingImport. +try: + import jinja2 +except Exception: + jinja2 = MissingImport("jinja2") + +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import encode + + +def _process_boolean(arg, default=False): + try: + return boolean(arg) + except TypeError: + return default + + +def create_template_environment(template_parameters, src, template_encoding=None): + """Parses boolean parameters for Jinja2 and returns a TemplateRenderer + instance. + + Arguments: + template_parametrs (dict): Parameters for creating the template environment. + src (str): Local path where the templates are located. + template_encoding (dict, optional): encoding used by the templates. If not + given, the default locale set in the system will be used. + + Returns: + TemplateRenderer -- Object with a new template environment ready to + render the templates found in src. + """ + if template_parameters.get("lstrip_blocks"): + template_parameters["lstrip_blocks"] = _process_boolean(template_parameters.get("lstrip_blocks"), default=False) + if template_parameters.get("trim_blocks"): + template_parameters["trim_blocks"] = _process_boolean(template_parameters.get("trim_blocks"), default=True) + if template_parameters.get("keep_trailing_newline"): + template_parameters["keep_trailing_newline"] = _process_boolean(template_parameters.get("keep_trailing_newline"), default=False) + if template_parameters.get("auto_reload"): + template_parameters["auto_reload"] = _process_boolean(template_parameters.get("auto_reload"), default=False) + + if not template_encoding: + template_encoding = encode.Defaults.get_default_system_charset() + + return TemplateRenderer(src, template_encoding, **template_parameters) + + +class TemplateRenderer: + """This class implements functionality to load and render Jinja2 + templates. To add support for Jinja2 in a module, you need to include + the template.py doc fragment, add the options for configuring the Jinja2 + environment to the module's options, and instantiate this class to + render templates inside an action plugin. + """ + + _ALLOWED_NEWLINE_DELIMITERS = ["\n", "\r", "\r\n"] + _FIXABLE_NEWLINE_DELIMITERS = ["\\n", "\\r", "\\r\\n"] + _NEWLINE_DELIMITER_SWAP = { + "\\n": "\n", + "\\r": "\r", + "\\r\\n": "\r\n" + } + + def __init__( + self, + template_path, + encoding, + variable_start_string="{{", + variable_end_string="}}", + block_start_string="{%", + block_end_string="%}", + comment_start_string="{#", + comment_end_string="#}", + line_statement_prefix=None, + line_comment_prefix=None, + lstrip_blocks=False, + trim_blocks=True, + keep_trailing_newline=False, + newline_sequence="\n", + auto_reload=False, + ): + """Initializes a new TemplateRenderer object with a Jinja2 + environment that can use templates from a given directory. + More information about Jinja2 templates and environments can + be found at https://jinja.palletsprojects.com/en/3.0.x/api/. + + Arguments: + template_path (str): Path to a Jinja2 template file or directory. + encoding (str): Encoding for rendered templates. + variable_start_string (str, optional): Marker for the beginning of + a statement to print a variable in Jinja2. + variable_end_string (str, optional): Marker for the end of + a statement to print a variable in Jinja2. + block_start_string (str, optional): Marker for the beginning of + a block in Jinja2. + block_end_string (str, optional): Marker for the end of a block + in Jinja2. + comment_start_string (str, optional): Marker for the beginning of + a comment in Jinja2. + comment_end_string (str, optional): Marker for the end of a comment + in Jinja2. + line_statement_prefix (str, optional): Prefix used by Jinja2 to identify + line-based statements. + line_comment_prefix (str, optional): Prefix used by Jinja2 to identify + comment lines. + lstrip_blocks (bool, optional): Whether Jinja2 should strip leading spaces + from the start of a line to a block. + trim_blocks (bool, optional): Whether Jinja2 should remove the first + newline after a block is removed. + keep_trailing_newline (bool, optional): Whether Jinja2 should keep the + first trailing newline at the end of a template after rendering. + newline_sequence (str, optional): Sequence that starts a newline in a + template. Valid values are '\n', '\r', '\r\n'. + auto_reload (bool, optional): Whether to reload a template file when it + has changed after creating the Jinja2 environment. + + Raises: + FileNotFoundError: When template_path points to a non-existent + file or directory. + ValueError: When the newline sequence is not valid. + """ + if not path.exists(template_path): + raise FileNotFoundError("The template path {0} does not exist".format( + template_path + )) + + template_canonical_path = path.realpath(template_path) + if path.isdir(template_canonical_path): + template_dir = template_canonical_path + else: + template_dir = path.dirname(template_canonical_path) + + if newline_sequence in self._FIXABLE_NEWLINE_DELIMITERS: + newline_sequence = self._NEWLINE_DELIMITER_SWAP[newline_sequence] + + if newline_sequence not in self._ALLOWED_NEWLINE_DELIMITERS: + raise ValueError("Newline delimiter '{0}' is not valid".format( + to_native(newline_sequence) + )) + + self.encoding = encoding + self.template_dir = template_dir + self.templating_env = jinja2.Environment( + block_start_string=block_start_string, + block_end_string=block_end_string, + variable_start_string=variable_start_string, + variable_end_string=variable_end_string, + comment_start_string=comment_start_string, + comment_end_string=comment_end_string, + line_statement_prefix=line_statement_prefix, + line_comment_prefix=line_comment_prefix, + trim_blocks=trim_blocks, + lstrip_blocks=lstrip_blocks, + newline_sequence=newline_sequence, + keep_trailing_newline=keep_trailing_newline, + loader=jinja2.FileSystemLoader( + searchpath=template_dir, + encoding=encoding, + ), + auto_reload=auto_reload, + autoescape=True, + ) + + def render_file_template(self, file_path, variables): + """Loads a template from the templates directory and renders + it using the Jinja2 environment configured in the object. + + Arguments: + file_path (str): Relative path (from the template directory) + to a template. + variables (dict): Dictionary containing the variables and + their values that will be substituted in the template. + + Returns: + tuple -- Filepath to a temporary directory that contains the + rendered template, and the complete filepath to the + rendered template. + + Raises: + TemplateNotFound: When the template file doesn't exist in the + template directory. + TemplateError: When rendering of the template fails. + FileExistsError: When there is an error while trying to create the + temp directory for rendered templates. + PermissionError: When there is an error accessing the temp directory. + IOError: When there is an error writing the rendered template. + ValueError: When there is an error writing the rendered template. + """ + try: + template = self.templating_env.get_template(file_path) + rendered_contents = template.render(variables) + except jinja2.TemplateNotFound as err: + raise jinja2.TemplateNotFound("Template {0} was not found: {1}".format( + file_path, + to_native(err) + )) + except jinja2.TemplateError as err: + raise jinja2.TemplateError("Error while rendering {0}: {1}".format( + file_path, + to_native(err) + )) + + try: + temp_template_dir = tempfile.mkdtemp() + except FileExistsError as err: + raise FileExistsError("Unable to create directory for rendered templates: {0}".format( + to_native(err) + )) + except PermissionError as err: + raise PermissionError("Error while trying to access temporary directory for templates: {0}".format( + to_native(err) + )) + + try: + template_file_path = path.join(temp_template_dir, file_path) + with open(template_file_path, mode="w", encoding=self.encoding) as template: + template.write(rendered_contents) + # There could be encoding errors. + except IOError as err: + raise IOError("An error ocurred while writing the rendered template for {0}: {1}".format( + file_path, + to_native(err) + )) + except ValueError as err: + raise ValueError("An error ocurred while writing the rendered template for {0}: {1}".format( + file_path, + to_native(err) + )) + + return temp_template_dir, template_file_path + + def render_dir_template(self, variables): + """Loads all templates from a directory and renders + them using the Jinja2 environment configured in the object. + + Arguments: + variables (dict): Dictionary containing the variables and + their values that will be substituted in the template. + + Returns: + tuple -- Filepath to a temporary directory that contains the + rendered templates, and the complete filepath to the + rendered templates' directory. + + Raises: + TemplateNotFound: When the template file doesn't exist in the + template directory. + TemplateError: When rendering of the template fails. + FileExistsError: When there is an error while trying to create the + temp directory for rendered templates. + PermissionError: When there is an error accessing the temp directory. + OSError: When there is an error while trying to create the + temp directory for rendered templates. + IOError: When there is an error writing the rendered template. + ValueError: When there is an error writing the rendered template. + """ + try: + temp_parent_dir = tempfile.mkdtemp() + last_dir = os.path.basename(self.template_dir) + temp_template_dir = os.path.join(temp_parent_dir, last_dir) + os.makedirs(temp_template_dir, exist_ok=True) + except FileExistsError as err: + raise FileExistsError("Unable to create directory for rendered templates: {0}".format( + to_native(err) + )) + except PermissionError as err: + raise PermissionError("Error while trying to access temporary directory: {0}".format( + to_native(err) + )) + except OSError as err: + raise OSError("Error while trying to access temporary directory: {0}".format( + to_native(err) + )) + + for path, subdirs, files in os.walk(self.template_dir): + for template_file in files: + relative_dir = os.path.relpath(path, self.template_dir) + file_path = os.path.normpath(os.path.join(relative_dir, template_file)) + + try: + template = self.templating_env.get_template(file_path) + rendered_contents = template.render(variables) + except jinja2.TemplateNotFound as err: + raise jinja2.TemplateNotFound("Template {0} was not found: {1}".format( + file_path, + to_native(err) + )) + except jinja2.TemplateError as err: + raise jinja2.TemplateError("Error while rendering {0}: {1}".format( + file_path, + to_native(err) + )) + + try: + template_file_path = os.path.join(temp_template_dir, file_path) + os.makedirs(os.path.dirname(template_file_path), exist_ok=True) + with open(template_file_path, mode="w", encoding=self.encoding) as temp: + temp.write(rendered_contents) + except IOError as err: + raise IOError("An error ocurred while writing the rendered template for {0}: {1}".format( + file_path, + to_native(err) + )) + except ValueError as err: + raise ValueError("An error ocurred while writing the rendered template for {0}: {1}".format( + file_path, + to_native(err) + )) + + return temp_parent_dir, temp_template_dir diff --git a/plugins/modules/zos_copy.py b/plugins/modules/zos_copy.py index 6a3be27b8..0998f2a0e 100644 --- a/plugins/modules/zos_copy.py +++ b/plugins/modules/zos_copy.py @@ -364,6 +364,9 @@ type: str required: false +extends_documentation_fragment: + - ibm.ibm_zos_core.template + notes: - Destination data sets are assumed to be in catalog. When trying to copy to an uncataloged data set, the module assumes that the data set does @@ -2688,8 +2691,8 @@ def run_module(module, arg_def): try: if encoding: # 'conv_path' points to the converted src file or directory - if is_mvs_dest: - encoding["to"] = encode.Defaults.DEFAULT_EBCDIC_MVS_CHARSET + # if is_mvs_dest: + # encoding["to"] = encode.Defaults.DEFAULT_EBCDIC_MVS_CHARSET conv_path = copy_handler.convert_encoding(src, temp_path, encoding) @@ -2869,6 +2872,30 @@ def main(): sms_management_class=dict(type="str", required=False), ) ), + use_template=dict(type='bool', default=False), + template_parameters=dict( + type='dict', + required=False, + options=dict( + variable_start_string=dict(type='str', default='{{'), + variable_end_string=dict(type='str', default='}}'), + block_start_string=dict(type='str', default='{%'), + block_end_string=dict(type='str', default='%}'), + comment_start_string=dict(type='str', default='{#'), + comment_end_string=dict(type='str', default='#}'), + line_statement_prefix=dict(type='str', required=False), + line_comment_prefix=dict(type='str', required=False), + lstrip_blocks=dict(type='bool', default=False), + trim_blocks=dict(type='bool', default=True), + keep_trailing_newline=dict(type='bool', default=False), + newline_sequence=dict( + type='str', + default='\n', + choices=['\n', '\r', '\r\n'] + ), + auto_reload=dict(type='bool', default=False), + ) + ), is_uss=dict(type='bool'), is_pds=dict(type='bool'), is_src_dir=dict(type='bool'), @@ -2918,6 +2945,27 @@ def main(): sms_management_class=dict(arg_type="str", required=False), ) ), + + use_template=dict(arg_type='bool', required=False), + template_parameters=dict( + arg_type='dict', + required=False, + options=dict( + variable_start_string=dict(arg_type='str', required=False), + variable_end_string=dict(arg_type='str', required=False), + block_start_string=dict(arg_type='str', required=False), + block_end_string=dict(arg_type='str', required=False), + comment_start_string=dict(arg_type='str', required=False), + comment_end_string=dict(arg_type='str', required=False), + line_statement_prefix=dict(arg_type='str', required=False), + line_comment_prefix=dict(arg_type='str', required=False), + lstrip_blocks=dict(arg_type='bool', required=False), + trim_blocks=dict(arg_type='bool', required=False), + keep_trailing_newline=dict(arg_type='bool', required=False), + newline_sequence=dict(arg_type='str', required=False), + auto_reload=dict(arg_type='bool', required=False), + ) + ), ) if ( diff --git a/plugins/modules/zos_job_submit.py b/plugins/modules/zos_job_submit.py index 97cbbc4a7..bc9f8ff19 100644 --- a/plugins/modules/zos_job_submit.py +++ b/plugins/modules/zos_job_submit.py @@ -130,6 +130,10 @@ required: false type: str default: IBM-1047 + +extends_documentation_fragment: + - ibm.ibm_zos_core.template + notes: - For supported character sets used to encode data, refer to the L(documentation,https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/resources/character_set.html). @@ -796,6 +800,30 @@ def run_module(): wait_time_s=dict(type="int", default=10), max_rc=dict(type="int", required=False), temp_file=dict(type="path", required=False), + use_template=dict(type='bool', default=False), + template_parameters=dict( + type='dict', + required=False, + options=dict( + variable_start_string=dict(type='str', default='{{'), + variable_end_string=dict(type='str', default='}}'), + block_start_string=dict(type='str', default='{%'), + block_end_string=dict(type='str', default='%}'), + comment_start_string=dict(type='str', default='{#'), + comment_end_string=dict(type='str', default='#}'), + line_statement_prefix=dict(type='str', required=False), + line_comment_prefix=dict(type='str', required=False), + lstrip_blocks=dict(type='bool', default=False), + trim_blocks=dict(type='bool', default=True), + keep_trailing_newline=dict(type='bool', default=False), + newline_sequence=dict( + type='str', + default='\n', + choices=['\n', '\r', '\r\n'] + ), + auto_reload=dict(type='bool', default=False), + ) + ), ) module = AnsibleModule(argument_spec=module_args, supports_check_mode=True) diff --git a/tests/functional/modules/test_zos_copy_func.py b/tests/functional/modules/test_zos_copy_func.py index db6262bc9..5604527a3 100644 --- a/tests/functional/modules/test_zos_copy_func.py +++ b/tests/functional/modules/test_zos_copy_func.py @@ -55,6 +55,38 @@ 00000003A record """ +TEMPLATE_CONTENT = """ +This is a Jinja2 test: {{ var }} + +{# This is a comment. #} + +If: +{% if if_var is divisibleby 5 %} +Condition is true. +{% endif %} + +Inside a loop: +{% for i in array %} +Current element: {{ i }} +{% endfor %} +""" + +TEMPLATE_CONTENT_NON_DEFAULT_MARKERS = """ +This is a Jinja2 test: (( var )) + +#% This is a comment. %# + +If: +{% if if_var is divisibleby 5 %} +Condition is true. +{% endif %} + +Inside a loop: +{% for i in array %} +Current element: (( i )) +{% endfor %} +""" + # SHELL_EXECUTABLE = "/usr/lpp/rsusr/ported/bin/bash" SHELL_EXECUTABLE = "/bin/sh" TEST_PS = "IMSTESTL.IMS01.DDCHKPT" @@ -148,6 +180,16 @@ def populate_dir(dir_path): infile.write(DUMMY_DATA) +def create_template_file(dir_path, use_default_markers=True, encoding="utf-8"): + content = TEMPLATE_CONTENT if use_default_markers else TEMPLATE_CONTENT_NON_DEFAULT_MARKERS + template_path = os.path.join(dir_path, "template") + + with open(template_path, "w", encoding=encoding) as infile: + infile.write(content) + + return template_path + + def populate_dir_crlf_endings(dir_path): for i in range(5): with open(os.path.join(dir_path, "file{0}".format(i)), "wb") as infile: @@ -1190,6 +1232,264 @@ def test_copy_non_existent_file_fails(ansible_zos_module, is_remote): @pytest.mark.uss +@pytest.mark.template +@pytest.mark.parametrize("encoding", ["utf-8", "iso8859-1"]) +def test_copy_template_file(ansible_zos_module, encoding): + hosts = ansible_zos_module + dest_path = "/tmp/new_dir" + temp_dir = tempfile.mkdtemp() + + try: + temp_template = create_template_file( + temp_dir, + use_default_markers=True, + encoding=encoding + ) + dest_template = os.path.join(dest_path, os.path.basename(temp_template)) + + hosts.all.file(path=dest_path, state="directory") + + # Adding the template vars to each host. + template_vars = dict( + var="This should be rendered", + if_var=5, + array=[1, 2, 3] + ) + for host in hosts["options"]["inventory_manager"]._inventory.hosts.values(): + host.vars.update(template_vars) + + copy_result = hosts.all.zos_copy( + src=temp_template, + dest=dest_path, + use_template=True, + encoding={ + "from": encoding, + "to": "IBM-1047" + } + ) + + verify_copy = hosts.all.shell( + cmd="cat {0}".format(dest_template), + executable=SHELL_EXECUTABLE, + ) + + for cp_res in copy_result.contacted.values(): + assert cp_res.get("msg") is None + assert cp_res.get("changed") is True + assert cp_res.get("dest") == dest_template + for v_cp in verify_copy.contacted.values(): + assert v_cp.get("rc") == 0 + # Checking that all markers got replaced. + assert "{{" not in v_cp.get("stdout") + assert "{%" not in v_cp.get("stdout") + # Checking comments didn't get rendered. + assert "{#" not in v_cp.get("stdout") + # Checking that the vars where substituted. + assert template_vars["var"] in v_cp.get("stdout") + assert "Condition is true." in v_cp.get("stdout") + assert "Current element: 2" in v_cp.get("stdout") + finally: + hosts.all.file(path=dest_path, state="absent") + shutil.rmtree(temp_dir) + + +@pytest.mark.uss +@pytest.mark.template +def test_copy_template_dir(ansible_zos_module): + hosts = ansible_zos_module + dest_path = "/tmp/new_dir" + + # Ensuring there's a traling slash to copy the contents of the directory. + temp_dir = os.path.normpath(tempfile.mkdtemp()) + temp_dir = "{0}/".format(temp_dir) + + temp_subdir_a = os.path.join(temp_dir, "subdir_a") + temp_subdir_b = os.path.join(temp_dir, "subdir_b") + os.makedirs(temp_subdir_a) + os.makedirs(temp_subdir_b) + + try: + temp_template_a = create_template_file(temp_subdir_a, use_default_markers=True) + temp_template_b = create_template_file(temp_subdir_b, use_default_markers=True) + dest_template_a = os.path.join( + dest_path, + "subdir_a", + os.path.basename(temp_template_a) + ) + dest_template_b = os.path.join( + dest_path, + "subdir_b", + os.path.basename(temp_template_b) + ) + + hosts.all.file(path=dest_path, state="directory") + + # Adding the template vars to each host. + template_vars = dict( + var="This should be rendered", + if_var=5, + array=[1, 2, 3] + ) + for host in hosts["options"]["inventory_manager"]._inventory.hosts.values(): + host.vars.update(template_vars) + + copy_result = hosts.all.zos_copy( + src=temp_dir, + dest=dest_path, + use_template=True, + force=True + ) + + verify_copy_a = hosts.all.shell( + cmd="cat {0}".format(dest_template_a), + executable=SHELL_EXECUTABLE, + ) + verify_copy_b = hosts.all.shell( + cmd="cat {0}".format(dest_template_b), + executable=SHELL_EXECUTABLE, + ) + + for cp_res in copy_result.contacted.values(): + assert cp_res.get("msg") is None + assert cp_res.get("changed") is True + assert cp_res.get("dest") == dest_path + for v_cp in verify_copy_a.contacted.values(): + assert v_cp.get("rc") == 0 + # Checking that all markers got replaced. + assert "{{" not in v_cp.get("stdout") + assert "{%" not in v_cp.get("stdout") + # Checking comments didn't get rendered. + assert "{#" not in v_cp.get("stdout") + # Checking that the vars where substituted. + assert template_vars["var"] in v_cp.get("stdout") + assert "Condition is true." in v_cp.get("stdout") + assert "Current element: 2" in v_cp.get("stdout") + for v_cp in verify_copy_b.contacted.values(): + assert v_cp.get("rc") == 0 + # Checking that all markers got replaced. + assert "{{" not in v_cp.get("stdout") + assert "{%" not in v_cp.get("stdout") + # Checking comments didn't get rendered. + assert "{#" not in v_cp.get("stdout") + # Checking that the vars where substituted. + assert template_vars["var"] in v_cp.get("stdout") + assert "Condition is true." in v_cp.get("stdout") + assert "Current element: 2" in v_cp.get("stdout") + finally: + hosts.all.file(path=dest_path, state="absent") + shutil.rmtree(temp_dir) + + +@pytest.mark.uss +@pytest.mark.template +def test_copy_template_file_with_non_default_markers(ansible_zos_module): + hosts = ansible_zos_module + dest_path = "/tmp/new_dir" + temp_dir = tempfile.mkdtemp() + + try: + temp_template = create_template_file(temp_dir, use_default_markers=False) + dest_template = os.path.join(dest_path, os.path.basename(temp_template)) + + hosts.all.file(path=dest_path, state="directory") + + # Adding the template vars to each host. + template_vars = dict( + var="This should be rendered", + if_var=5, + array=[1, 2, 3] + ) + for host in hosts["options"]["inventory_manager"]._inventory.hosts.values(): + host.vars.update(template_vars) + + copy_result = hosts.all.zos_copy( + src=temp_template, + dest=dest_path, + use_template=True, + template_parameters=dict( + variable_start_string="((", + variable_end_string="))", + comment_start_string="#%", + comment_end_string="%#" + ) + ) + + verify_copy = hosts.all.shell( + cmd="cat {0}".format(dest_template), + executable=SHELL_EXECUTABLE, + ) + + for cp_res in copy_result.contacted.values(): + assert cp_res.get("msg") is None + assert cp_res.get("changed") is True + assert cp_res.get("dest") == dest_template + for v_cp in verify_copy.contacted.values(): + assert v_cp.get("rc") == 0 + # Checking that all markers got replaced. + assert "((" not in v_cp.get("stdout") + assert "{%" not in v_cp.get("stdout") + # Checking comments didn't get rendered. + assert "#%" not in v_cp.get("stdout") + # Checking that the vars where substituted. + assert template_vars["var"] in v_cp.get("stdout") + assert "Condition is true." in v_cp.get("stdout") + assert "Current element: 2" in v_cp.get("stdout") + finally: + hosts.all.file(path=dest_path, state="absent") + shutil.rmtree(temp_dir) + + +@pytest.mark.seq +@pytest.mark.pdse +@pytest.mark.template +def test_copy_template_file_to_dataset(ansible_zos_module): + hosts = ansible_zos_module + dest_dataset = "USER.TEST.TEMPLATE" + temp_dir = tempfile.mkdtemp() + + try: + temp_template = create_template_file(temp_dir, use_default_markers=True) + + # Adding the template vars to each host. + template_vars = dict( + var="This should be rendered", + if_var=5, + array=[1, 2, 3] + ) + for host in hosts["options"]["inventory_manager"]._inventory.hosts.values(): + host.vars.update(template_vars) + + copy_result = hosts.all.zos_copy( + src=temp_template, + dest=dest_dataset, + use_template=True + ) + + verify_copy = hosts.all.shell( + cmd="cat \"//'{0}'\"".format(dest_dataset), + executable=SHELL_EXECUTABLE, + ) + + for cp_res in copy_result.contacted.values(): + assert cp_res.get("msg") is None + assert cp_res.get("changed") is True + assert cp_res.get("dest") == dest_dataset + for v_cp in verify_copy.contacted.values(): + assert v_cp.get("rc") == 0 + # Checking that all markers got replaced. + assert "{{" not in v_cp.get("stdout") + assert "{%" not in v_cp.get("stdout") + # Checking comments didn't get rendered. + assert "{#" not in v_cp.get("stdout") + # Checking that the vars where substituted. + assert template_vars["var"] in v_cp.get("stdout") + assert "Condition is true." in v_cp.get("stdout") + assert "Current element: 2" in v_cp.get("stdout") + finally: + hosts.all.zos_data_set(name=dest_dataset, state="absent") + shutil.rmtree(temp_dir) + + @pytest.mark.parametrize("src", [ dict(src="/etc/profile", is_remote=False), dict(src="/etc/profile", is_remote=True),]) diff --git a/tests/functional/modules/test_zos_job_submit_func.py b/tests/functional/modules/test_zos_job_submit_func.py index 3364d12da..b7b1ec5f0 100644 --- a/tests/functional/modules/test_zos_job_submit_func.py +++ b/tests/functional/modules/test_zos_job_submit_func.py @@ -19,10 +19,10 @@ import tempfile import pytest import re +import os from pprint import pprint - # ############################################################################## # Configure the job card as needed, most common keyword parameters: # CLASS: Used to achieve a balance between different types of jobs and avoid @@ -118,9 +118,49 @@ /* """ +JCL_TEMPLATES = { + "Default": """//{{ pgm_name }} JOB (T043JM,JM00,1,0,0,0),'HELLO WORLD - JRM',CLASS=R, +// MSGCLASS=X,MSGLEVEL=1,NOTIFY=S0JM +{# This comment should not be part of the JCL #} +//STEP0001 EXEC PGM=IEBGENER +//SYSIN DD {{ input_dataset }} +//SYSPRINT DD SYSOUT=* +//SYSUT1 DD * +{{ message }} +/* +//SYSUT2 DD SYSOUT=* +// +""", + + "Custom": """//(( pgm_name )) JOB (T043JM,JM00,1,0,0,0),'HELLO WORLD - JRM',CLASS=R, +// MSGCLASS=X,MSGLEVEL=1,NOTIFY=S0JM +//STEP0001 EXEC PGM=IEBGENER +(# This comment should not be part of the JCL #) +//SYSIN DD (( input_dataset )) +//SYSPRINT DD SYSOUT=* +//SYSUT1 DD * +(( message )) +/* +//SYSUT2 DD SYSOUT=* +// +""", + + "Loop": """//JINJA JOB (T043JM,JM00,1,0,0,0),'HELLO WORLD - JRM',CLASS=R, +// MSGCLASS=X,MSGLEVEL=1,NOTIFY=S0JM +//STEP0001 EXEC PGM=IEFBR14 +{% for item in steps %} +//SYS{{ item.step_name }} DD {{ item.dd }} +{% endfor %} +Hello, world! +/* +//SYSUT2 DD SYSOUT=* +// +""" +} + JCL_FILE_CONTENTS_NO_DSN = """//* //****************************************************************************** -//* Job containing a non existent DSN that will force an error. +//* Job containing a non existent DSN that will force an error. //* Returns: //* ret_code->(code=null, msg=JCLERR ?, msg_text=JCLERR, msg_code=?) //* msg --> The JCL submitted with job id JOB00532 but there was an error, @@ -152,11 +192,10 @@ # // # """ - JCL_FILE_CONTENTS_JCL_ERROR_INT = """//* //****************************************************************************** //* Another job containing no job card resulting in a JCLERROR with an value. It -//* won't always be 952, it will increment. +//* won't always be 952, it will increment. //* Returns: //* ret_code->(code=null, msg=JCL ERROR 952, msg_text=JCLERR, msg_code=null) //* msg --> The JCL submitted with job id JOB00728 but there was an error, @@ -192,7 +231,6 @@ // """ - JCL_FILE_CONTENTS_TYPRUN_SCAN = """//* //****************************************************************************** //* Job containing a TYPRUN=SCAN that will cause JES to run a syntax check and @@ -216,7 +254,6 @@ // """ - TEMP_PATH = "/tmp/jcl" DATA_SET_NAME = "imstestl.ims1.test05" DATA_SET_NAME_SPECIAL_CHARS = "imstestl.im@1.xxx05" @@ -513,6 +550,68 @@ def test_job_submit_max_rc(ansible_zos_module, args): hosts.all.file(path=tmp_file.name, state="absent") +@pytest.mark.template +@pytest.mark.parametrize("args", [ + dict( + template="Default", + options=dict( + keep_trailing_newline=False + ) + ), + dict( + template="Custom", + options=dict( + keep_trailing_newline=False, + variable_start_string="((", + variable_end_string="))", + comment_start_string="(#", + comment_end_string="#)" + ) + ), + dict( + template="Loop", + options=dict( + keep_trailing_newline=False + ) + ) +]) +def test_job_submit_jinja_template(ansible_zos_module, args): + try: + hosts = ansible_zos_module + + tmp_file = tempfile.NamedTemporaryFile(delete=False) + with open(tmp_file.name, "w") as f: + f.write(JCL_TEMPLATES[args["template"]]) + + template_vars = dict( + pgm_name="HELLO", + input_dataset="DUMMY", + message="Hello, world", + steps=[ + dict(step_name="IN", dd="DUMMY"), + dict(step_name="PRINT", dd="SYSOUT=*"), + dict(step_name="UT1", dd="*") + ] + ) + for host in hosts["options"]["inventory_manager"]._inventory.hosts.values(): + host.vars.update(template_vars) + + results = hosts.all.zos_job_submit( + src=tmp_file.name, + location="LOCAL", + use_template=True, + template_parameters=args["options"] + ) + + for result in results.contacted.values(): + assert result.get('changed') is True + assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" + assert result.get("jobs")[0].get("ret_code").get("code") == 0 + + finally: + os.remove(tmp_file.name) + + def test_negative_job_submit_local_jcl_no_dsn(ansible_zos_module): tmp_file = tempfile.NamedTemporaryFile(delete=True) with open(tmp_file.name, "w") as f: @@ -541,6 +640,7 @@ def test_negative_job_submit_local_jcl_invalid_user(ansible_zos_module): assert result.get("jobs")[0].get("job_id") is not None assert re.search(r'SEC', repr(result.get("jobs")[0].get("ret_code").get("msg_text"))) + def test_negative_job_submit_local_jcl_typrun_scan(ansible_zos_module): tmp_file = tempfile.NamedTemporaryFile(delete=True) with open(tmp_file.name, "w") as f: diff --git a/tests/pytest.ini b/tests/pytest.ini index c395f66f7..a9324aaae 100644 --- a/tests/pytest.ini +++ b/tests/pytest.ini @@ -7,4 +7,5 @@ markers = uss: uss test cases. seq: sequential data sets test cases. pdse: partitioned data sets test cases. - vsam: VSAM data sets test cases. \ No newline at end of file + vsam: VSAM data sets test cases. + template: Jinja2 templating test cases. \ No newline at end of file From fc84aa486a59468c6ec185f9ae5ae3ec0f8b285d Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Mon, 17 Jul 2023 20:22:44 -0600 Subject: [PATCH 140/495] Module zos_unarchive & zos_archive (#755) * Initial commit * Created template for zos_unarchive * Initial boiler plate for unarchive * Added unarchive handler selection * Added file/data set search * Adding AMATERSE unpacking * Added support for terse unarchive * Remove zos_archive module from the branch * Adding RECEIVE/XMIT support * Added temporary dataset removal * Adding RECEIVE as an mvscmd * Add RECEIVE using mvs_cmd * Add unpacked datasets display * Added display of unpacked targets support for multiple data sets * Added alias to options * Added include/ exclude options * Added include for xmit and terse * Modified include for all * Adding volume selection for SMS managed * Added list support for MVS archives * Removed unused var * Add force support for mvs data sets * Add archive listing for tar, bz2, and gz * Add unarchive all for tar, gz and bz2 * Added include/exclude support for tar, gz, bz2 * Add mvs_cmd amaterse * Modify mvs_cmd call * Add archive.py * Add latest zos_archive * Refactor mvs_cmd * Remove comments * Adding tests for zos_archive * Added unzip * Added arcroot logic and tarfile * Added changed logic * lastest changes * Multiple changes to zos_archive and zos_unarchive and its tests * Added support for pax * added list tests * Created action plugin for zos_unarchive * Add support for remove * Adding tests for exclusion list USS * Added tests * Add dest_data_set info * Adding multiple test cases and updated docs * Added test for mode * Removed unused stuff * Modified XMIT command * Added expanded paths support for mvs data sets * Added ugly multiple data set tests * Added various new tests for mvs data sets * Added new tests and default dest data set * Added default dest data set * Fixed pep8 issues * Added docs * Added docs * Fixed various sanity issues * removed abc.ABC * Added filters * modified fdict * polish test cases * Added tests * Added record length change * Fixed record length error * fix pylint issue * Add env cleaning in when terse or xmit fails * Moved locking tests below * Added tests for multiple data set in unarchive and modified test archive and unarchive names * Added tests for zos_unarchive * Added replace into zos_data_set invocation * Added replace * Added docs * Added * Added docs * Cleaned docs * Added permission change in zos_unarchive * Added mode support for USS files * Added is_binary option * Added zos_archive and zos_unarchive rst * Updated docs * Updated docs * Updated docs * Removed debug statements * fix pylint issues * Added get restored data sets * Removed replace flag * Added lock process removal * Removed print statements and blank lines * Removed print statements * Removed unused imports * added missing test * Completed uncompleted doc string * Fixed a bug that removed the source when not needed * Fixed pep8 issue * Added removal in correct place * fixed a bug that left behind tmp data sets * Added changes for deleting tmp data sets + pr code review comments * Added a notes and seealso section in the docs * Changed name to xmit_log_data_set * Added comments * Added comments about format * Added more description in terse_pack * Added mode description * Added description for tmp_hlq * Added description for remove * Added 'a' into some statements * Modified dest_state description * Updated option descriptions * Changed badzip error msg * changed adrdssu multiple data set error message * Added tso xmit error message * changed adrdssu multiple data set error message * modified description msg * Updated path to src and changed multiple docs * Fixed module doc issues * Removed unused error and modified error messages * Changed parameter result get * Updated docs in zos_unarchive * Updates docs * Updated docs * Modified mode * Changed src in action * Updated include docs * Updated include docs * Updated docs * Enhanced is_binary support * Fixed dest_data_set issue * Changed docs * Added changes to compute space primary * Added changes to compute space primary * Changed binary test case * Updated docs and test cases * Added xmit_log_data_set docs * Modified dest description * Updated docs * Updated docs * Updated docs * Updated docs * Updated docs * Updated docs * Updated docs * Updated docs * Updaded expanded sources * Updated docs * Updated docs * Updated docs * Updated docs and added dest_data_set * Added dest_data_set * Added primary_space * Fixed pep8 * updated dest_volumes comments * added xmit_log_data_set docs * Updated docs about mode * Added is binary comment * Removed is_binary * Removed is_binary from source * Updated targets list * Updated force docs * Updated the force docs * Updated exclude docs * Updated message for dest in zos_archive * Changed dest to required: true * Removed exclusion_patterns * Updated group in zos_archive * Corrected mode * Updated wording in owner * Updated src * Updated docs * Updated template * Fixed sanity issues * Updated PDS/PDSE to mayus * fixed mem creation * Fixed space type issue * Modifed test for des_data_set * fixed test * Improved dest_data_set * updated docs * updated dest_data_set type * Added latest docs --- .../enhancement_feature.issue.yml | 2 + docs/source/modules/zos_archive.rst | 463 +++++++ docs/source/modules/zos_unarchive.rst | 444 ++++++ plugins/action/zos_unarchive.py | 121 ++ plugins/module_utils/mvs_cmd.py | 24 + plugins/modules/zos_archive.py | 1215 +++++++++++++++++ plugins/modules/zos_unarchive.py | 1156 ++++++++++++++++ .../modules/test_zos_archive_func.py | 900 ++++++++++++ .../modules/test_zos_unarchive_func.py | 988 ++++++++++++++ tests/sanity/ignore-2.13.txt | 2 + tests/sanity/ignore-2.14.txt | 2 + tests/sanity/ignore-2.15.txt | 2 + 12 files changed, 5319 insertions(+) create mode 100644 docs/source/modules/zos_archive.rst create mode 100644 docs/source/modules/zos_unarchive.rst create mode 100644 plugins/action/zos_unarchive.py create mode 100644 plugins/modules/zos_archive.py create mode 100644 plugins/modules/zos_unarchive.py create mode 100644 tests/functional/modules/test_zos_archive_func.py create mode 100644 tests/functional/modules/test_zos_unarchive_func.py diff --git a/.github/ISSUE_TEMPLATE/enhancement_feature.issue.yml b/.github/ISSUE_TEMPLATE/enhancement_feature.issue.yml index f5bc9325f..3e1763091 100644 --- a/.github/ISSUE_TEMPLATE/enhancement_feature.issue.yml +++ b/.github/ISSUE_TEMPLATE/enhancement_feature.issue.yml @@ -20,6 +20,7 @@ body: description: Select which modules are being reported in this enhancement or feature. You can select more than one. multiple: true options: + - zos_archive - zos_apf - zos_backup_restore - zos_blockinfile @@ -39,6 +40,7 @@ body: - zos_operator_action_query - zos_ping - zos_tso_command + - zos_unarchive validations: required: true - type: textarea diff --git a/docs/source/modules/zos_archive.rst b/docs/source/modules/zos_archive.rst new file mode 100644 index 000000000..bb4383f74 --- /dev/null +++ b/docs/source/modules/zos_archive.rst @@ -0,0 +1,463 @@ + +:github_url: https://github.com/ansible-collections/ibm_zos_core/blob/dev/plugins/modules/zos_archive.py + +.. _zos_archive_module: + + +zos_archive -- Archive files and data sets on z/OS. +=================================================== + + + +.. contents:: + :local: + :depth: 1 + + +Synopsis +-------- +- Create or extend an archive on a remote z/OS system. +- Sources for archiving must be on the remote z/OS system. +- Supported sources are USS (UNIX System Services) or z/OS data sets. +- The archive remains on the remote z/OS system. +- For supported archive formats, see option ``format``. + + + + + + +Parameters +---------- + + +src + List of names or globs of UNIX System Services (USS) files, PS (sequential data sets), PDS, PDSE to compress or archive. + + USS file paths should be absolute paths. + + MVS data sets supported types are: ``SEQ``, ``PDS``, ``PDSE``. + + VSAMs are not supported. + + | **required**: True + | **type**: list + | **elements**: str + + +format + The compression type and corresponding options to use when archiving data. + + | **required**: False + | **type**: dict + + + name + The compression format to use. + + | **required**: False + | **type**: str + | **default**: gz + | **choices**: bz2, gz, tar, zip, terse, xmit, pax + + + format_options + Options specific to a compression format. + + | **required**: False + | **type**: dict + + + terse_pack + Compression option for use with the terse format, *name=terse*. + + Pack will compress records in a data set so that the output results in lossless data compression. + + Spack will compress records in a data set so the output results in complex data compression. + + Spack will produce smaller output and take approximately 3 times longer than pack compression. + + | **required**: False + | **type**: str + | **choices**: PACK, SPACK + + + xmit_log_data_set + Provide the name of a data set to store xmit log output. + + If the data set provided does not exist, the program will create it. + + If the data set provided exists, the data set must have the following attributes: LRECL=255, BLKSIZE=3120, and RECFM=VB + + When providing the *xmit_log_data_set* name, ensure there is adequate space. + + | **required**: False + | **type**: str + + + use_adrdssu + If set to true, the ``zos_archive`` module will use Data Facility Storage Management Subsystem data set services (DFSMSdss) program ADRDSSU to compress data sets into a portable format before using ``xmit`` or ``terse``. + + | **required**: False + | **type**: bool + + + + +dest + The remote absolute path or data set where the archive should be created. + + *dest* can be a USS file or MVS data set name. + + If *dest* has missing parent directories, they will be created. + + If *dest* is a nonexistent USS file, it will be created. + + Destination data set attributes can be set using *dest_data_set*. + + | **required**: True + | **type**: str + + +exclude + Remote absolute path, glob, or list of paths, globs or data set name patterns for the file, files or data sets to exclude from path list and glob expansion. + + Patterns (wildcards) can contain one of the following: ?, *. + + * matches everything. + + ? matches any single character. + + | **required**: False + | **type**: list + | **elements**: str + + +group + Name of the group that will own the archive file. + + When left unspecified, it uses the current group of the current use unless you are root, in which case it can preserve the previous ownership. + + This option is only applicable if ``dest`` is USS, otherwise ignored. + + | **required**: False + | **type**: str + + +mode + The permission of the destination archive file. + + If ``dest`` is USS, this will act as Unix file mode, otherwise ignored. + + It should be noted that modes are octal numbers. The user must either add a leading zero so that Ansible's YAML parser knows it is an octal number (like ``0644`` or ``01777``)or quote it (like ``'644'`` or ``'1777'``) so Ansible receives a string and can do its own conversion from string into number. Giving Ansible a number without following one of these rules will end up with a decimal number which will have unexpected results. + + The mode may also be specified as a symbolic mode (for example, 'u+rwx' or 'u=rw,g=r,o=r') or a special string 'preserve'. + + *mode=preserve* means that the file will be given the same permissions as the source file. + + | **required**: False + | **type**: str + + +owner + Name of the user that should own the archive file, as would be passed to the chown command. + + When left unspecified, it uses the current user unless you are root, in which case it can preserve the previous ownership. + + This option is only applicable if ``dest`` is USS, otherwise ignored. + + | **required**: False + | **type**: str + + +remove + Remove any added source files , trees or data sets after module `zos_archive <./zos_archive.html>`_ adds them to the archive. Source files, trees and data sets are identified with option *path*. + + | **required**: False + | **type**: bool + + +dest_data_set + Data set attributes to customize a ``dest`` data set to be archived into. + + | **required**: False + | **type**: dict + + + name + Desired name for destination dataset. + + | **required**: False + | **type**: str + + + type + Organization of the destination + + | **required**: False + | **type**: str + | **default**: SEQ + | **choices**: SEQ + + + space_primary + If the destination *dest* data set does not exist , this sets the primary space allocated for the data set. + + The unit of space used is set using *space_type*. + + | **required**: False + | **type**: int + + + space_secondary + If the destination *dest* data set does not exist , this sets the secondary space allocated for the data set. + + The unit of space used is set using *space_type*. + + | **required**: False + | **type**: int + + + space_type + If the destination data set does not exist, this sets the unit of measurement to use when defining primary and secondary space. + + Valid units of size are ``K``, ``M``, ``G``, ``CYL``, and ``TRK``. + + | **required**: False + | **type**: str + | **choices**: K, M, G, CYL, TRK + + + record_format + If the destination data set does not exist, this sets the format of the data set. (e.g ``FB``) + + Choices are case-insensitive. + + | **required**: False + | **type**: str + | **choices**: FB, VB, FBA, VBA, U + + + record_length + The length of each record in the data set, in bytes. + + For variable data sets, the length must include the 4-byte prefix area. + + Defaults vary depending on format: If FB/FBA 80, if VB/VBA 137, if U 0. + + | **required**: False + | **type**: int + + + block_size + The block size to use for the data set. + + | **required**: False + | **type**: int + + + directory_blocks + The number of directory blocks to allocate to the data set. + + | **required**: False + | **type**: int + + + sms_storage_class + The storage class for an SMS-managed dataset. + + Required for SMS-managed datasets that do not match an SMS-rule. + + Not valid for datasets that are not SMS-managed. + + Note that all non-linear VSAM datasets are SMS-managed. + + | **required**: False + | **type**: str + + + sms_data_class + The data class for an SMS-managed dataset. + + Optional for SMS-managed datasets that do not match an SMS-rule. + + Not valid for datasets that are not SMS-managed. + + Note that all non-linear VSAM datasets are SMS-managed. + + | **required**: False + | **type**: str + + + sms_management_class + The management class for an SMS-managed dataset. + + Optional for SMS-managed datasets that do not match an SMS-rule. + + Not valid for datasets that are not SMS-managed. + + Note that all non-linear VSAM datasets are SMS-managed. + + | **required**: False + | **type**: str + + + +tmp_hlq + Override the default high level qualifier (HLQ) for temporary data sets. + + The default HLQ is the Ansible user used to execute the module and if that is not available, then the environment variable value ``TMPHLQ`` is used. + + | **required**: False + | **type**: str + + +force + If set to ``true`` and the remote file or data set ``dest`` will be deleted. Otherwise it will be created with the ``dest_data_set`` attributes or default values if ``dest_data_set`` is not specified. + + If set to ``false``, the file or data set will only be copied if the destination does not exist. + + If set to ``false`` and destination exists, the module exits with a note to the user. + + | **required**: False + | **type**: bool + + + + +Examples +-------- + +.. code-block:: yaml+jinja + + + # Simple archive + - name: Archive file into a tar + zos_archive: + path: /tmp/archive/foo.txt + dest: /tmp/archive/foo_archive_test.tar + format: + name: tar + + # Archive multiple files + - name: Compress list of files into a zip + zos_archive: + path: + - /tmp/archive/foo.txt + - /tmp/archive/bar.txt + dest: /tmp/archive/foo_bar_archive_test.zip + format: + name: zip + + # Archive one data set into terse + - name: Compress data set into a terse + zos_archive: + path: "USER.ARCHIVE.TEST" + dest: "USER.ARCHIVE.RESULT.TRS" + format: + name: terse + + # Use terse with different options + - name: Compress data set into a terse, specify pack algorithm and use adrdssu + zos_archive: + path: "USER.ARCHIVE.TEST" + dest: "USER.ARCHIVE.RESULT.TRS" + format: + name: terse + format_options: + terse_pack: "SPACK" + use_adrdssu: True + + # Use a pattern to store + - name: Compress data set pattern using xmit + zos_archive: + path: "USER.ARCHIVE.*" + exclude_sources: "USER.ARCHIVE.EXCLUDE.*" + dest: "USER.ARCHIVE.RESULT.XMIT" + format: + name: xmit + + + + +Notes +----- + +.. note:: + This module does not perform a send or transmit operation to a remote node. If you want to transport the archive you can use zos_fetch to retrieve to the controller and then zos_copy or zos_unarchive for copying to a remote or send to the remote and then unpack the archive respectively. + + When packing and using ``use_adrdssu`` flag the module will take up to two times the space indicated in ``dest_data_set``. + + + +See Also +-------- + +.. seealso:: + + - :ref:`zos_fetch_module` + - :ref:`zos_unarchive_module` + + + + +Return Values +------------- + + +state + The state of the input ``src``. + + ``absent`` when the source files or data sets were removed. + + ``present`` when the source files or data sets were not removed. + + ``incomplete`` when ``remove`` was true and the source files or data sets were not removed. + + | **returned**: always + | **type**: str + +dest_state + The state of the *dest* file or data set. + + ``absent`` when the file does not exist. + + ``archive`` when the file is an archive. + + ``compress`` when the file is compressed, but not an archive. + + ``incomplete`` when the file is an archive, but some files under *path* were not found. + + | **returned**: success + | **type**: str + +missing + Any files or data sets that were missing from the source. + + | **returned**: success + | **type**: list + +archived + Any files or data sets that were compressed or added to the archive. + + | **returned**: success + | **type**: list + +arcroot + If ``src`` is a list of USS files, this returns the top most parent folder of the list of files, otherwise is empty. + + | **returned**: always + | **type**: str + +expanded_sources + The list of matching paths from the src option. + + | **returned**: always + | **type**: list + +expanded_exclude_sources + The list of matching exclude paths from the exclude option. + + | **returned**: always + | **type**: list + diff --git a/docs/source/modules/zos_unarchive.rst b/docs/source/modules/zos_unarchive.rst new file mode 100644 index 000000000..ecced2362 --- /dev/null +++ b/docs/source/modules/zos_unarchive.rst @@ -0,0 +1,444 @@ + +:github_url: https://github.com/ansible-collections/ibm_zos_core/blob/dev/plugins/modules/zos_unarchive.py + +.. _zos_unarchive_module: + + +zos_unarchive -- Unarchive files and data sets in z/OS. +======================================================= + + + +.. contents:: + :local: + :depth: 1 + + +Synopsis +-------- +- The ``zos_unarchive`` module unpacks an archive after optionally transferring it to the remote system. +- For supported archive formats, see option ``format``. +- Supported sources are USS (UNIX System Services) or z/OS data sets. +- Mixing MVS data sets with USS files for unarchiving is not supported. +- The archive is sent to the remote as binary, so no encoding is performed. + + + + + + +Parameters +---------- + + +src + The remote absolute path or data set of the archive to be uncompressed. + + *src* can be a USS file or MVS data set name. + + USS file paths should be absolute paths. + + MVS data sets supported types are ``SEQ``, ``PDS``, ``PDSE``. + + | **required**: True + | **type**: str + + +format + The compression type and corresponding options to use when archiving data. + + | **required**: True + | **type**: dict + + + name + The compression format to use. + + | **required**: True + | **type**: str + | **choices**: bz2, gz, tar, zip, terse, xmit, pax + + + format_options + Options specific to a compression format. + + | **required**: False + | **type**: dict + + + xmit_log_data_set + Provide the name of a data set to store xmit log output. + + If the data set provided does not exist, the program will create it. + + If the data set provided exists, the data set must have the following attributes: LRECL=255, BLKSIZE=3120, and RECFM=VB + + When providing the *xmit_log_data_set* name, ensure there is adequate space. + + | **required**: False + | **type**: str + + + use_adrdssu + If set to true, the ``zos_archive`` module will use Data Facility Storage Management Subsystem data set services (DFSMSdss) program ADRDSSU to uncompress data sets from a portable format after using ``xmit`` or ``terse``. + + | **required**: False + | **type**: bool + + + dest_volumes + When *use_adrdssu=True*, specify the volume the data sets will be written to. + + If no volume is specified, storage management rules will be used to determine the volume where the file will be unarchived. + + If the storage administrator has specified a system default unit name and you do not set a volume name for non-system-managed data sets, then the system uses the volumes associated with the default unit name. Check with your storage administrator to determine whether a default unit name has been specified. + + | **required**: False + | **type**: list + | **elements**: str + + + + +dest + The remote absolute path or data set where the content should be unarchived to. + + *dest* can be a USS file, directory or MVS data set name. + + If dest has missing parent directories, they will not be created. + + | **required**: False + | **type**: str + + +group + Name of the group that will own the file system objects. + + When left unspecified, it uses the current group of the current user unless you are root, in which case it can preserve the previous ownership. + + This option is only applicable if ``dest`` is USS, otherwise ignored. + + | **required**: False + | **type**: str + + +mode + The permission of the uncompressed files. + + If ``dest`` is USS, this will act as Unix file mode, otherwise ignored. + + It should be noted that modes are octal numbers. The user must either add a leading zero so that Ansible's YAML parser knows it is an octal number (like ``0644`` or ``01777``)or quote it (like ``'644'`` or ``'1777'``) so Ansible receives a string and can do its own conversion from string into number. Giving Ansible a number without following one of these rules will end up with a decimal number which will have unexpected results. + + The mode may also be specified as a symbolic mode (for example, ``u+rwx`` or ``u=rw,g=r,o=r``) or a special string `preserve`. + + *mode=preserve* means that the file will be given the same permissions as the source file. + + | **required**: False + | **type**: str + + +owner + Name of the user that should own the filesystem object, as would be passed to the chown command. + + When left unspecified, it uses the current user unless you are root, in which case it can preserve the previous ownership. + + | **required**: False + | **type**: str + + +include + A list of directories, files or data set names to extract from the archive. + + When ``include`` is set, only those files will we be extracted leaving the remaining files in the archive. + + Mutually exclusive with exclude. + + | **required**: False + | **type**: list + | **elements**: str + + +exclude + List the directory and file or data set names that you would like to exclude from the unarchive action. + + Mutually exclusive with include. + + | **required**: False + | **type**: list + | **elements**: str + + +list + Will list the contents of the archive without unpacking. + + | **required**: False + | **type**: bool + + +dest_data_set + Data set attributes to customize a ``dest`` data set that the archive will be copied into. + + | **required**: False + | **type**: dict + + + name + Desired name for destination dataset. + + | **required**: False + | **type**: str + + + type + Organization of the destination + + | **required**: False + | **type**: str + | **default**: SEQ + | **choices**: SEQ, PDS, PDSE + + + space_primary + If the destination *dest* data set does not exist , this sets the primary space allocated for the data set. + + The unit of space used is set using *space_type*. + + | **required**: False + | **type**: int + + + space_secondary + If the destination *dest* data set does not exist , this sets the secondary space allocated for the data set. + + The unit of space used is set using *space_type*. + + | **required**: False + | **type**: int + + + space_type + If the destination data set does not exist, this sets the unit of measurement to use when defining primary and secondary space. + + Valid units of size are ``K``, ``M``, ``G``, ``CYL``, and ``TRK``. + + | **required**: False + | **type**: str + | **choices**: K, M, G, CYL, TRK + + + record_format + If the destination data set does not exist, this sets the format of the data set. (e.g ``FB``) + + Choices are case-insensitive. + + | **required**: False + | **type**: str + | **choices**: FB, VB, FBA, VBA, U + + + record_length + The length of each record in the data set, in bytes. + + For variable data sets, the length must include the 4-byte prefix area. + + Defaults vary depending on format: If FB/FBA 80, if VB/VBA 137, if U 0. + + | **required**: False + | **type**: int + + + block_size + The block size to use for the data set. + + | **required**: False + | **type**: int + + + directory_blocks + The number of directory blocks to allocate to the data set. + + | **required**: False + | **type**: int + + + key_offset + The key offset to use when creating a KSDS data set. + + *key_offset* is required when *type=KSDS*. + + *key_offset* should only be provided when *type=KSDS* + + | **required**: False + | **type**: int + + + key_length + The key length to use when creating a KSDS data set. + + *key_length* is required when *type=KSDS*. + + *key_length* should only be provided when *type=KSDS* + + | **required**: False + | **type**: int + + + sms_storage_class + The storage class for an SMS-managed dataset. + + Required for SMS-managed datasets that do not match an SMS-rule. + + Not valid for datasets that are not SMS-managed. + + Note that all non-linear VSAM datasets are SMS-managed. + + | **required**: False + | **type**: str + + + sms_data_class + The data class for an SMS-managed dataset. + + Optional for SMS-managed datasets that do not match an SMS-rule. + + Not valid for datasets that are not SMS-managed. + + Note that all non-linear VSAM datasets are SMS-managed. + + | **required**: False + | **type**: str + + + sms_management_class + The management class for an SMS-managed dataset. + + Optional for SMS-managed datasets that do not match an SMS-rule. + + Not valid for datasets that are not SMS-managed. + + Note that all non-linear VSAM datasets are SMS-managed. + + | **required**: False + | **type**: str + + + +tmp_hlq + Override the default high level qualifier (HLQ) for temporary data sets. + + The default HLQ is the Ansible user used to execute the module and if that is not available, then the environment variable value ``TMPHLQ`` is used. + + | **required**: False + | **type**: str + + +force + If set to true and the remote file or data set dest exists, the dest will be deleted. + + | **required**: False + | **type**: bool + + +remote_src + If set to true, ``zos_unarchive`` retrieves the archive from the remote system. + + If set to false, ``zos_unarchive`` searches the local machine (Ansible controller) for the archive. + + | **required**: False + | **type**: bool + + + + +Examples +-------- + +.. code-block:: yaml+jinja + + + # Simple extract + - name: Copy local tar file and unpack it on the managed z/OS node. + zos_unarchive: + path: "./files/archive_folder_test.tar" + format: + name: tar + + # use include + - name: Unarchive a bzip file selecting only a file to unpack. + zos_unarchive: + path: "/tmp/test.bz2" + format: + name: bz2 + include: + - 'foo.txt' + + # Use exclude + - name: Unarchive a terse data set and excluding data sets from unpacking. + zos_unarchive: + path: "USER.ARCHIVE.RESULT.TRS" + format: + name: terse + exclude: + - USER.ARCHIVE.TEST1 + - USER.ARCHIVE.TEST2 + + # List option + - name: List content from XMIT + zos_unarchive: + path: "USER.ARCHIVE.RESULT.XMIT" + format: + name: xmit + format_options: + use_adrdssu: True + list: True + + + + +Notes +----- + +.. note:: + VSAMs are not supported. + + + +See Also +-------- + +.. seealso:: + + - :ref:`zos_unarchive_module` + + + + +Return Values +------------- + + +path + File path or data set name unarchived. + + | **returned**: always + | **type**: str + +dest_path + Destination path where archive was extracted. + + | **returned**: always + | **type**: str + +targets + List of files or data sets in the archive. + + | **returned**: success + | **type**: list + | **elements**: str + +missing + Any files or data sets not found during extraction. + + | **returned**: success + | **type**: str + diff --git a/plugins/action/zos_unarchive.py b/plugins/action/zos_unarchive.py new file mode 100644 index 000000000..7c310a4a3 --- /dev/null +++ b/plugins/action/zos_unarchive.py @@ -0,0 +1,121 @@ +# Copyright (c) IBM Corporation 2023 +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +from ansible.plugins.action import ActionBase +from ansible.utils.display import Display +from ansible.module_utils.parsing.convert_bool import boolean +import os +import copy +from ansible_collections.ibm.ibm_zos_core.plugins.action.zos_copy import ActionModule as ZosCopyActionModule + + +USS_SUPPORTED_FORMATS = ['tar', 'zip', 'bz2', 'pax', 'gz'] +MVS_SUPPORTED_FORMATS = ['terse', 'xmit'] + +display = Display() + + +def _process_boolean(arg, default=False): + try: + return boolean(arg) + except TypeError: + return default + + +class ActionModule(ActionBase): + def run(self, tmp=None, task_vars=None): + if task_vars is None: + task_vars = dict() + + result = super(ActionModule, self).run(tmp, task_vars) + + if result.get("skipped"): + return result + + module_args = self._task.args.copy() + + if module_args.get("remote_src", False): + result.update( + self._execute_module( + module_name="ibm.ibm_zos_core.zos_unarchive", + module_args=module_args, + task_vars=task_vars, + ) + ) + else: + source = module_args.get("src") + force = _process_boolean(module_args.get("force")) + format = self._task.args.get("format") + format_name = format.get("name") + copy_module_args = dict() + dest_data_set = format.get("dest_data_set") + dest = "" + if source.startswith('~'): + source = os.path.expanduser(source) + source = os.path.realpath(source) + + if format_name in USS_SUPPORTED_FORMATS: + dest = self._execute_module( + module_name="tempfile", module_args={}, task_vars=task_vars, + ).get("path") + elif format_name in MVS_SUPPORTED_FORMATS: + tmp_hlq = module_args.get("tmp_hlq") if module_args.get("tmp_hlq") is not None else "" + cmd_res = self._execute_module( + module_name="command", + module_args=dict( + _raw_params="mvstmp {0}".format(tmp_hlq) + ), + task_vars=task_vars, + ) + dest = cmd_res.get("stdout") + if dest_data_set is None: + if format_name == 'terse': + dest_data_set = dict(type='SEQ', record_format='FB', record_length=1024) + if format_name == 'xmit': + dest_data_set = dict(type='SEQ', record_format='FB', record_length=80) + else: + # Raise unsupported format name + None + + copy_module_args.update( + dict( + src=source, + dest=dest, + dest_data_set=dest_data_set, + force=force, + is_binary=True, + ) + ) + copy_task = copy.deepcopy(self._task) + copy_task.args = copy_module_args + zos_copy_action_module = ZosCopyActionModule(task=copy_task, + connection=self._connection, + play_context=self._play_context, + loader=self._loader, + templar=self._templar, + shared_loader_obj=self._shared_loader_obj) + result.update(zos_copy_action_module.run(task_vars=task_vars)) + + module_args["src"] = dest + display.vvv(u"Copy args {0}".format(result), host=self._play_context.remote_addr) + + result.update( + self._execute_module( + module_name="ibm.ibm_zos_core.zos_unarchive", + module_args=module_args, + task_vars=task_vars, + ) + ) + return result diff --git a/plugins/module_utils/mvs_cmd.py b/plugins/module_utils/mvs_cmd.py index bf9b28556..21d2b5a7e 100644 --- a/plugins/module_utils/mvs_cmd.py +++ b/plugins/module_utils/mvs_cmd.py @@ -153,6 +153,30 @@ def iehlist(cmd, dds=None, authorized=False): return _run_mvs_command("IEHLIST", cmd, dds, authorized) +def amaterse(cmd="", dds=None, authorized=False): + """AMATERSE is a service aid program that operates in problem state. + You can use AMATERSE to pack a data set before transmitting a copy + to another site, typically employing FTP as the transmission mechanism. + A complementary unpack service is provided to create a similar data set + at the receiving site. + Arguments: + dds {dict} -- Any DD statements to pass to MVS command + authorized {bool} -- Whether the command should be run in authorized + mode + """ + return _run_mvs_command("AMATERSE", "", dds, authorized) + + +def adrdssu(cmd, dds=None, authorized=False): + """The ADRDSSU program enables you to copy SMS-compressed data without + having to decompress the data and also provides support for copying + wildcard-named files. + Is a DFSMSdss utility that provides backup and recovery functions + at both the data set and volume levels. + """ + return _run_mvs_command("ADRDSSU", cmd, dds, authorized) + + def _run_mvs_command(pgm, cmd, dd=None, authorized=False): """Run a particular MVS command. diff --git a/plugins/modules/zos_archive.py b/plugins/modules/zos_archive.py new file mode 100644 index 000000000..8b887e1bf --- /dev/null +++ b/plugins/modules/zos_archive.py @@ -0,0 +1,1215 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- + +# Copyright (c) IBM Corporation 2023 +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +DOCUMENTATION = r''' +--- +module: zos_archive +version_added: "1.7.0" +author: + - Oscar Fernando Flores Garcia (@fernandofloresg) +short_description: Archive files and data sets on z/OS. + +description: + - Create or extend an archive on a remote z/OS system. + - Sources for archiving must be on the remote z/OS system. + - Supported sources are USS (UNIX System Services) or z/OS data sets. + - The archive remains on the remote z/OS system. + - For supported archive formats, see option C(format). + +options: + src: + description: + - List of names or globs of UNIX System Services (USS) files, + PS (sequential data sets), PDS, PDSE to compress or archive. + - USS file paths should be absolute paths. + - "MVS data sets supported types are: C(SEQ), C(PDS), C(PDSE)." + - VSAMs are not supported. + type: list + required: true + elements: str + format: + description: + - The compression type and corresponding options to use when archiving + data. + type: dict + required: false + suboptions: + name: + description: + - The compression format to use. + type: str + required: false + default: gz + choices: + - bz2 + - gz + - tar + - zip + - terse + - xmit + - pax + format_options: + description: + - Options specific to a compression format. + type: dict + required: false + suboptions: + terse_pack: + description: + - Compression option for use with the terse format, + I(name=terse). + - Pack will compress records in a data set so that the output + results in lossless data compression. + - Spack will compress records in a data set so the output results + in complex data compression. + - Spack will produce smaller output and take approximately 3 + times longer than pack compression. + type: str + required: false + choices: + - PACK + - SPACK + xmit_log_data_set: + description: + - Provide the name of a data set to store xmit log output. + - If the data set provided does not exist, the program + will create it. + - "If the data set provided exists, the data set must have + the following attributes: LRECL=255, BLKSIZE=3120, and + RECFM=VB" + - When providing the I(xmit_log_data_set) name, ensure there + is adequate space. + type: str + use_adrdssu: + description: + - If set to true, the C(zos_archive) module will use Data + Facility Storage Management Subsystem data set services + (DFSMSdss) program ADRDSSU to compress data sets into a + portable format before using C(xmit) or C(terse). + type: bool + default: false + dest: + description: + - The remote absolute path or data set where the archive should be + created. + - I(dest) can be a USS file or MVS data set name. + - If I(dest) has missing parent directories, they will be created. + - If I(dest) is a nonexistent USS file, it will be created. + - Destination data set attributes can be set using I(dest_data_set). + type: str + required: true + exclude: + description: + - Remote absolute path, glob, or list of paths, globs or data set name + patterns for the file, files or data sets to exclude from path list + and glob expansion. + - "Patterns (wildcards) can contain one of the following: ?, *." + - "* matches everything." + - "? matches any single character." + type: list + required: false + elements: str + group: + description: + - Name of the group that will own the archive file. + - When left unspecified, it uses the current group of the current use + unless you are root, in which case it can preserve the previous + ownership. + - This option is only applicable if C(dest) is USS, otherwise ignored. + type: str + required: false + mode: + description: + - The permission of the destination archive file. + - If C(dest) is USS, this will act as Unix file mode, otherwise + ignored. + - It should be noted that modes are octal numbers. + The user must either add a leading zero so that Ansible's YAML + parser knows it is an octal number (like C(0644) or C(01777))or + quote it (like C('644') or C('1777')) so Ansible receives a string + and can do its own conversion from string into number. Giving Ansible + a number without following one of these rules will end up with a + decimal number which will have unexpected results. + - The mode may also be specified as a symbolic mode + (for example, 'u+rwx' or 'u=rw,g=r,o=r') or a special + string 'preserve'. + - I(mode=preserve) means that the file will be given the same permissions + as the source file. + type: str + required: false + owner: + description: + - Name of the user that should own the archive file, as would be + passed to the chown command. + - When left unspecified, it uses the current user unless you are root, + in which case it can preserve the previous ownership. + - This option is only applicable if C(dest) is USS, otherwise ignored. + type: str + required: false + remove: + description: + - Remove any added source files , trees or data sets after module + L(zos_archive,./zos_archive.html) adds them to the archive. + Source files, trees and data sets are identified with option I(path). + type: bool + required: false + default: false + dest_data_set: + description: + - Data set attributes to customize a C(dest) data set to be archived into. + required: false + type: dict + suboptions: + name: + description: + - Desired name for destination dataset. + type: str + required: false + type: + description: + - Organization of the destination + type: str + required: false + default: SEQ + choices: + - SEQ + space_primary: + description: + - If the destination I(dest) data set does not exist , this sets the + primary space allocated for the data set. + - The unit of space used is set using I(space_type). + type: int + required: false + space_secondary: + description: + - If the destination I(dest) data set does not exist , this sets the + secondary space allocated for the data set. + - The unit of space used is set using I(space_type). + type: int + required: false + space_type: + description: + - If the destination data set does not exist, this sets the unit of + measurement to use when defining primary and secondary space. + - Valid units of size are C(K), C(M), C(G), C(CYL), and C(TRK). + type: str + choices: + - K + - M + - G + - CYL + - TRK + required: false + record_format: + description: + - If the destination data set does not exist, this sets the format of + the + data set. (e.g C(FB)) + - Choices are case-insensitive. + required: false + choices: + - FB + - VB + - FBA + - VBA + - U + type: str + record_length: + description: + - The length of each record in the data set, in bytes. + - For variable data sets, the length must include the 4-byte prefix + area. + - "Defaults vary depending on format: If FB/FBA 80, if VB/VBA 137, + if U 0." + type: int + required: false + block_size: + description: + - The block size to use for the data set. + type: int + required: false + directory_blocks: + description: + - The number of directory blocks to allocate to the data set. + type: int + required: false + sms_storage_class: + description: + - The storage class for an SMS-managed dataset. + - Required for SMS-managed datasets that do not match an SMS-rule. + - Not valid for datasets that are not SMS-managed. + - Note that all non-linear VSAM datasets are SMS-managed. + type: str + required: false + sms_data_class: + description: + - The data class for an SMS-managed dataset. + - Optional for SMS-managed datasets that do not match an SMS-rule. + - Not valid for datasets that are not SMS-managed. + - Note that all non-linear VSAM datasets are SMS-managed. + type: str + required: false + sms_management_class: + description: + - The management class for an SMS-managed dataset. + - Optional for SMS-managed datasets that do not match an SMS-rule. + - Not valid for datasets that are not SMS-managed. + - Note that all non-linear VSAM datasets are SMS-managed. + type: str + required: false + tmp_hlq: + description: + - Override the default high level qualifier (HLQ) for temporary data + sets. + - The default HLQ is the Ansible user used to execute the module and + if that is not available, then the environment variable value + C(TMPHLQ) is used. + required: false + type: str + force: + description: + - If set to C(true) and the remote file or data set C(dest) will be + deleted. Otherwise it will be created with the C(dest_data_set) + attributes or default values if C(dest_data_set) is not specified. + - If set to C(false), the file or data set will only be copied if the + destination does not exist. + - If set to C(false) and destination exists, the module exits with a + note to the user. + type: bool + default: false + required: false + +notes: + - This module does not perform a send or transmit operation to a remote + node. If you want to transport the archive you can use zos_fetch to + retrieve to the controller and then zos_copy or zos_unarchive for + copying to a remote or send to the remote and then unpack the archive + respectively. + - When packing and using C(use_adrdssu) flag the module will take up to two + times the space indicated in C(dest_data_set). + + +seealso: + - module: zos_fetch + - module: zos_unarchive +''' + +EXAMPLES = r''' +# Simple archive +- name: Archive file into a tar + zos_archive: + path: /tmp/archive/foo.txt + dest: /tmp/archive/foo_archive_test.tar + format: + name: tar + +# Archive multiple files +- name: Compress list of files into a zip + zos_archive: + path: + - /tmp/archive/foo.txt + - /tmp/archive/bar.txt + dest: /tmp/archive/foo_bar_archive_test.zip + format: + name: zip + +# Archive one data set into terse +- name: Compress data set into a terse + zos_archive: + path: "USER.ARCHIVE.TEST" + dest: "USER.ARCHIVE.RESULT.TRS" + format: + name: terse + +# Use terse with different options +- name: Compress data set into a terse, specify pack algorithm and use adrdssu + zos_archive: + path: "USER.ARCHIVE.TEST" + dest: "USER.ARCHIVE.RESULT.TRS" + format: + name: terse + format_options: + terse_pack: "SPACK" + use_adrdssu: True + +# Use a pattern to store +- name: Compress data set pattern using xmit + zos_archive: + path: "USER.ARCHIVE.*" + exclude_sources: "USER.ARCHIVE.EXCLUDE.*" + dest: "USER.ARCHIVE.RESULT.XMIT" + format: + name: xmit +''' + +RETURN = r''' +state: + description: + - The state of the input C(src). + - C(absent) when the source files or data sets were removed. + - C(present) when the source files or data sets were not removed. + - C(incomplete) when C(remove) was true and the source files or + data sets were not removed. + type: str + returned: always +dest_state: + description: + - The state of the I(dest) file or data set. + - C(absent) when the file does not exist. + - C(archive) when the file is an archive. + - C(compress) when the file is compressed, but not an archive. + - C(incomplete) when the file is an archive, but some files under + I(path) were not found. + type: str + returned: success +missing: + description: Any files or data sets that were missing from the source. + type: list + returned: success +archived: + description: + - Any files or data sets that were compressed or added to the + archive. + type: list + returned: success +arcroot: + description: + - If C(src) is a list of USS files, this returns the top most parent + folder of the list of files, otherwise is empty. + type: str + returned: always +expanded_sources: + description: The list of matching paths from the src option. + type: list + returned: always +expanded_exclude_sources: + description: The list of matching exclude paths from the exclude option. + type: list + returned: always +''' + +from ansible.module_utils.basic import AnsibleModule +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import ( + better_arg_parser, + data_set, + mvs_cmd) +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import ( + MissingZOAUImport, +) +import os +import tarfile +import zipfile +import abc +import glob +import re + + +try: + from zoautil_py import datasets +except Exception: + Datasets = MissingZOAUImport() + +XMIT_RECORD_LENGTH = 80 +AMATERSE_RECORD_LENGTH = 1024 + +STATE_ABSENT = 'absent' +STATE_ARCHIVE = 'archive' +STATE_COMPRESSED = 'compressed' +STATE_INCOMPLETE = 'incomplete' + + +def get_archive_handler(module): + """ + Return the proper archive handler based on archive format. + Arguments: + format: {str} + Returns: + Archive: {Archive} + + """ + format = module.params.get("format").get("name") + if format in ["tar", "gz", "bz2", "pax"]: + return TarArchive(module) + elif format == "terse": + return AMATerseArchive(module) + elif format == "xmit": + return XMITArchive(module) + return ZipArchive(module) + + +def strip_prefix(prefix, string): + return string[len(prefix):] if string.startswith(prefix) else string + + +def expand_paths(paths): + expanded_path = [] + for path in paths: + if '*' in path or '?' in path: + e_paths = glob.glob(path) + else: + e_paths = [path] + expanded_path.extend(e_paths) + return expanded_path + + +def is_archive(path): + return re.search(r'\.(tar|tar\.(gz|bz2|xz)|tgz|tbz2|zip|gz|bz2|xz|pax)$', os.path.basename(path), re.IGNORECASE) + + +class Archive(): + def __init__(self, module): + self.module = module + self.dest = module.params['dest'] + self.format = module.params.get("format").get("name") + self.remove = module.params['remove'] + self.changed = False + self.errors = [] + self.found = [] + self.targets = [] + self.archived = [] + self.not_found = [] + self.force = module.params['force'] + self.sources = module.params['src'] + self.arcroot = "" + self.expanded_sources = "" + self.expanded_exclude_sources = "" + self.dest_state = STATE_ABSENT + + def targets_exist(self): + return bool(self.targets) + + @abc.abstractmethod + def dest_exists(self): + pass + + @abc.abstractmethod + def dest_type(self): + pass + + @abc.abstractmethod + def update_permissions(self): + return + + @abc.abstractmethod + def find_targets(self): + pass + + @abc.abstractmethod + def _get_checksums(self, path): + pass + + @abc.abstractmethod + def dest_checksums(self): + pass + + @abc.abstractmethod + def is_different_from_original(self): + pass + + @abc.abstractmethod + def remove_targets(self): + pass + + @property + def result(self): + return { + 'archived': self.archived, + 'dest': self.dest, + 'arcroot': self.arcroot, + 'dest_state': self.dest_state, + 'changed': self.changed, + 'missing': self.not_found, + 'expanded_sources': list(self.expanded_sources), + 'expanded_exclude_sources': list(self.expanded_exclude_sources), + } + + +class USSArchive(Archive): + def __init__(self, module): + super(USSArchive, self).__init__(module) + self.original_checksums = self.dest_checksums() + if len(self.sources) == 1: + self.arcroot = os.path.dirname(os.path.commonpath(self.sources)) + else: + self.arcroot = os.path.commonpath(self.sources) + self.expanded_sources = expand_paths(self.sources) + self.expanded_exclude_sources = expand_paths(module.params['exclude']) + self.expanded_exclude_sources = "" if len(self.expanded_exclude_sources) == 0 else self.expanded_exclude_sources + + self.sources = sorted(set(self.expanded_sources) - set(self.expanded_exclude_sources)) + + def dest_exists(self): + return os.path.exists(self.dest) + + def dest_type(self): + return "USS" + + def update_permissions(self): + file_args = self.module.load_file_common_arguments(self.module.params, path=self.dest) + self.changed = self.module.set_fs_attributes_if_different(file_args, self.changed) + + def find_targets(self): + for path in self.sources: + if os.path.exists(path): + self.targets.append(path) + else: + self.not_found.append(path) + + def _get_checksums(self, path): + md5_cmd = "md5 -r \"{0}\"".format(path) + rc, out, err = self.module.run_command(md5_cmd) + checksums = out.split(" ")[0] + return checksums + + def dest_checksums(self): + if self.dest_exists(): + return self._get_checksums(self.dest) + return None + + def is_different_from_original(self): + if self.original_checksums is not None: + return self.original_checksums != self.dest_checksums() + return True + + def remove_targets(self): + for target in self.archived: + if os.path.isdir(target): + os.removedirs(target) + else: + os.remove(target) + + def archive_targets(self): + self.file = self.open(self.dest) + + try: + for target in self.targets: + if os.path.isdir(target): + for directory_path, directory_names, file_names in os.walk(target, topdown=True): + for directory_name in directory_names: + full_path = os.path.join(directory_path, directory_name) + self.add(full_path, strip_prefix(self.arcroot, full_path)) + + for file_name in file_names: + full_path = os.path.join(directory_path, file_name) + self.add(full_path, strip_prefix(self.arcroot, full_path)) + else: + self.add(target, strip_prefix(self.arcroot, target)) + except Exception as e: + self.dest_state = STATE_INCOMPLETE + if self.format == 'tar': + archive_format = self.format + else: + archive_format = 'tar.' + self.format + self.module.fail_json( + msg='Error when writing %s archive at %s: %s' % ( + archive_format, self.destination, e + ), + exception=e + ) + self.file.close() + + def add(self, source, arcname): + self._add(source, arcname) + self.archived.append(source) + + def get_state(self): + if not self.dest_exists(): + self.dest_state = STATE_ABSENT + else: + if is_archive(self.dest): + self.dest_state = STATE_ARCHIVE + if bool(self.not_found): + self.dest_state = STATE_INCOMPLETE + + +class TarArchive(USSArchive): + def __init__(self, module): + super(TarArchive, self).__init__(module) + + def open(self, path): + if self.format == 'tar': + file = tarfile.open(path, 'w') + elif self.format == 'pax': + file = tarfile.open(path, 'w', format=tarfile.GNU_FORMAT) + elif self.format in ('gz', 'bz2'): + file = tarfile.open(path, 'w|' + self.format) + return file + + def _add(self, source, arcname): + self.file.add(source, arcname) + + +class ZipArchive(USSArchive): + def __init__(self, module): + super(ZipArchive, self).__init__(module) + + def open(self, path): + try: + file = zipfile.ZipFile(path, 'w', zipfile.ZIP_DEFLATED, True) + except zipfile.BadZipFile: + self.module.fail_json( + msg="Improperly compressed zip file, unable to to open file {0} ".format(path) + ) + return file + + def _add(self, source, arcname): + self.file.write(source, arcname) + + +class MVSArchive(Archive): + def __init__(self, module): + super(MVSArchive, self).__init__(module) + self.original_checksums = self.dest_checksums() + self.use_adrdssu = module.params.get("format").get("format_options").get("use_adrdssu") + self.expanded_sources = self.expand_mvs_paths(self.sources) + self.expanded_exclude_sources = self.expand_mvs_paths(module.params['exclude']) + self.sources = sorted(set(self.expanded_sources) - set(self.expanded_exclude_sources)) + self.tmp_data_sets = list() + self.dest_data_set = module.params.get("dest_data_set") + self.dest_data_set = dict() if self.dest_data_set is None else self.dest_data_set + self.tmphlq = module.params.get("tmp_hlq") + + def open(self): + pass + + def close(self): + pass + + def find_targets(self): + """ + Finds target datasets in host. + """ + for path in self.sources: + if data_set.DataSet.data_set_exists(path): + self.targets.append(path) + else: + self.not_found.append(path) + + def _compute_dest_data_set_size(self): + """ + Computes the attributes that the destination data set or temporary destination + data set should have in terms of size, record_length, etc. + """ + + """ + - Size of temporary DS for archive handling. + + If remote_src then we can get the source_size from archive on the system. + + If not remote_src then we can get the source_size from temporary_ds. + Both are named src so no problemo. + + If format is xmit, dest_data_set size is the same as source_size. + + If format is terse, dest_data_set size is different than the source_size, has to be greater, + but how much? In this case we can add dest_data_set option. + + Apparently the only problem is when format name is terse. + """ + + # Get the size from the system + default_size = 5 + dest_space_type = 'M' + dest_primary_space = int(default_size) + return dest_primary_space, dest_space_type + + def _create_dest_data_set( + self, + name=None, + replace=None, + type=None, + space_primary=None, + space_secondary=None, + space_type=None, + record_format=None, + record_length=None, + block_size=None, + directory_blocks=None, + sms_storage_class=None, + sms_data_class=None, + sms_management_class=None, + volumes=None, + tmp_hlq=None, + force=None, + ): + """Create a temporary data set. + + Arguments: + tmp_hlq(str): A HLQ specified by the user for temporary data sets. + + Returns: + str: Name of the temporary data set created. + """ + arguments = locals() + if name is None: + if tmp_hlq: + hlq = tmp_hlq + else: + rc, hlq, err = self.module.run_command("hlq") + hlq = hlq.replace('\n', '') + cmd = "mvstmphelper {0}.DZIP".format(hlq) + rc, temp_ds, err = self.module.run_command(cmd) + arguments.update(name=temp_ds.replace('\n', '')) + + if record_format is None: + arguments.update(record_format="FB") + if record_length is None: + arguments.update(record_length=80) + if type is None: + arguments.update(type="SEQ") + if space_primary is None: + arguments.update(space_primary=5) + if space_secondary is None: + arguments.update(space_secondary=3) + if space_type is None: + arguments.update(space_type="M") + arguments.pop("self") + changed = data_set.DataSet.ensure_present(**arguments) + return arguments["name"], changed + + def create_dest_ds(self, name): + """ + Create destination data set to use as an archive. + Arguments: + name: {str} + Returns: + name {str} - name of the newly created data set. + """ + record_length = XMIT_RECORD_LENGTH if self.format == "xmit" else AMATERSE_RECORD_LENGTH + changed = data_set.DataSet.ensure_present(name=name, replace=True, type='SEQ', record_format='FB', record_length=record_length) + # cmd = "dtouch -rfb -tseq -l{0} {1}".format(record_length, name) + # rc, out, err = self.module.run_command(cmd) + + # if not changed: + # self.module.fail_json( + # msg="Failed preparing {0} to be used as an archive".format(name), + # stdout=out, + # stderr=err, + # stdout_lines=cmd, + # rc=rc, + # ) + return name + + def dump_into_temp_ds(self, temp_ds): + """ + Dump src datasets identified as self.targets into a temporary dataset using ADRDSSU. + """ + dump_cmd = """ DUMP OUTDDNAME(TARGET) - + OPTIMIZE(4) DS(INCL( - """ + + for target in self.targets: + dump_cmd += "\n {0}, - ".format(target) + dump_cmd += '\n ) ' + + if self.force: + dump_cmd += '- \n ) TOL( ENQF IOER ) ' + + dump_cmd += ' )' + dds = dict(target="{0},old".format(temp_ds)) + rc, out, err = mvs_cmd.adrdssu(cmd=dump_cmd, dds=dds, authorized=True) + + if rc != 0: + self.module.fail_json( + msg="Failed executing ADRDSSU to archive {0}".format(temp_ds), + stdout=out, + stderr=err, + stdout_lines=dump_cmd, + rc=rc, + ) + return rc + + def _get_checksums(self, path): + md5_cmd = "md5 -r \"//'{0}'\"".format(path) + rc, out, err = self.module.run_command(md5_cmd) + checksums = out.split(" ")[0] + return checksums + + def dest_checksums(self): + if self.dest_exists(): + return self._get_checksums(self.dest) + return None + + def is_different_from_original(self): + if self.original_checksums is not None: + return self.original_checksums != self.dest_checksums() + return True + + def dest_type(self): + return "MVS" + + def dest_exists(self): + return data_set.DataSet.data_set_exists(self.dest) + + def remove_targets(self): + for target in self.archived: + data_set.DataSet.ensure_absent(target) + return + + def expand_mvs_paths(self, paths): + expanded_path = [] + for path in paths: + if '*' in path: + e_paths = datasets.listing(path) + e_paths = [path.name for path in e_paths] + else: + e_paths = [path] + expanded_path.extend(e_paths) + return expanded_path + + def get_state(self): + if not self.dest_exists(): + self.dest_state = STATE_ABSENT + else: + if bool(self.not_found): + self.dest_state = STATE_INCOMPLETE + elif bool(self.archived): + self.dest_state = STATE_ARCHIVE + + def clean_environment(self, data_sets=None, uss_files=None, remove_targets=False): + """Removes any allocated data sets that won't be needed after module termination. + Arguments: + data_sets - {list(str)} : list of data sets to remove + uss_files - {list(str)} : list of uss files to remove + remove_targets - bool : Indicates if already unpacked data sets need to be removed too. + """ + if data_set is not None: + for ds in data_sets: + data_set.DataSet.ensure_absent(ds) + if uss_files is not None: + for file in uss_files: + os.remove(file) + if remove_targets: + for target in self.targets: + data_set.DataSet.ensure_absent(target) + + +class AMATerseArchive(MVSArchive): + def __init__(self, module): + super(AMATerseArchive, self).__init__(module) + self.pack_arg = module.params.get("format").get("format_options").get("terse_pack") + if self.pack_arg is None: + self.pack_arg = "SPACK" + + def add(self, src, archive): + """ + Archive src into archive using AMATERSE program. + Arguments: + src: {str} + archive: {str} + """ + dds = {'args': self.pack_arg, 'sysut1': src, 'sysut2': archive} + rc, out, err = mvs_cmd.amaterse(cmd="", dds=dds) + if rc != 0: + self.module.fail_json( + msg="Failed executing AMATERSE to archive {0} into {1}".format(src, archive), + stdout=out, + stderr=err, + rc=rc, + ) + self.archived = self.targets[:] + return rc + + def archive_targets(self): + """ + Add MVS Datasets to the AMATERSE Archive by creating a temporary dataset and dumping the source datasets into it. + """ + if self.use_adrdssu: + source, changed = self._create_dest_data_set( + type="SEQ", + record_format="U", + record_length=0, + tmp_hlq=self.tmphlq, + replace=True, + space_primary=self.dest_data_set.get("space_primary"), + space_type=self.dest_data_set.get("space_type")) + self.dump_into_temp_ds(source) + self.tmp_data_sets.append(source) + else: + # If we don't use a adrdssu container we cannot pack multiple data sets + if len(self.targets) > 1: + self.module.fail_json( + msg="To archive multiple source data sets, you must use option 'use_adrdssu=True'.") + source = self.targets[0] + # dest = self.create_dest_ds(self.dest) + dest, changed = self._create_dest_data_set( + name=self.dest, + replace=True, + type='SEQ', + record_format='FB', + record_length=AMATERSE_RECORD_LENGTH, + space_primary=self.dest_data_set.get("space_primary"), + space_type=self.dest_data_set.get("space_type")) + self.changed = self.changed or changed + self.add(source, dest) + self.clean_environment(data_sets=self.tmp_data_sets) + + +class XMITArchive(MVSArchive): + def __init__(self, module): + super(XMITArchive, self).__init__(module) + self.xmit_log_data_set = module.params.get("format").get("format_options").get("xmit_log_data_set") + + def add(self, src, archive): + """ + Archive src into archive using TSO XMIT. + Arguments: + src: {str} + archive: {str} + """ + log_option = "LOGDSNAME({0})".format(self.xmit_log_data_set) if self.xmit_log_data_set else "NOLOG" + xmit_cmd = """ XMIT A.B - + FILE(SYSUT1) OUTFILE(SYSUT2) - + {0} - + """.format(log_option) + dds = {"SYSUT1": "{0},shr".format(src), "SYSUT2": archive} + rc, out, err = mvs_cmd.ikjeft01(cmd=xmit_cmd, authorized=True, dds=dds) + if rc != 0: + self.module.fail_json( + msg="An error occurred while executing 'TSO XMIT' to archive {0} into {1}".format(src, archive), + stdout=out, + stderr=err, + rc=rc, + ) + self.archived = self.targets[:] + return rc + + def archive_targets(self): + """ + Adds MVS Datasets to the TSO XMIT Archive by creating a temporary dataset and dumping the source datasets into it. + """ + if self.use_adrdssu: + source, changed = self._create_dest_data_set( + type="SEQ", + record_format="U", + record_length=0, + tmp_hlq=self.tmphlq, + replace=True, + space_primary=self.dest_data_set.get("space_primary"), + space_type=self.dest_data_set.get("space_type")) + self.dump_into_temp_ds(source) + self.tmp_data_sets.append(source) + else: + # If we don't use a adrdssu container we cannot pack multiple data sets + if len(self.sources) > 1: + self.module.fail_json( + msg="To archive multiple source data sets, you must use option 'use_adrdssu=True'.") + source = self.sources[0] + # dest = self.create_dest_ds(self.dest) + dest, changed = self._create_dest_data_set( + name=self.dest, + replace=True, + type='SEQ', + record_format='FB', + record_length=XMIT_RECORD_LENGTH, + space_primary=self.dest_data_set.get("space_primary"), + space_type=self.dest_data_set.get("space_type")) + self.changed = self.changed or changed + self.add(source, dest) + self.clean_environment(data_sets=self.tmp_data_sets) + + +def run_module(): + module = AnsibleModule( + argument_spec=dict( + src=dict(type='list', elements='str', required=True), + dest=dict(type='str', required=True), + exclude=dict(type='list', elements='str'), + format=dict( + type='dict', + options=dict( + name=dict( + type='str', + default='gz', + choices=['bz2', 'gz', 'tar', 'zip', 'terse', 'xmit', 'pax'] + ), + format_options=dict( + type='dict', + required=False, + options=dict( + terse_pack=dict( + type='str', + choices=['PACK', 'SPACK'], + ), + xmit_log_data_set=dict( + type='str', + ), + use_adrdssu=dict( + type='bool', + default=False, + ) + ), + ), + ) + ), + group=dict(type='str'), + mode=dict(type='str'), + owner=dict(type='str'), + remove=dict(type='bool', default=False), + dest_data_set=dict( + type='dict', + required=False, + options=dict( + name=dict( + type='str', required=False, + ), + type=dict( + type='str', + choices=['SEQ'], + required=False, + default="SEQ", + ), + space_primary=dict( + type='int', required=False), + space_secondary=dict( + type='int', required=False), + space_type=dict( + type='str', + choices=['K', 'M', 'G', 'CYL', 'TRK'], + required=False, + ), + record_format=dict( + type='str', + choices=["FB", "VB", "FBA", "VBA", "U"], + required=False + ), + record_length=dict(type='int', required=False), + block_size=dict(type='int', required=False), + directory_blocks=dict(type="int", required=False), + sms_storage_class=dict(type="str", required=False), + sms_data_class=dict(type="str", required=False), + sms_management_class=dict(type="str", required=False), + ) + ), + tmp_hlq=dict(type='str'), + force=dict(type='bool', default=False) + ), + supports_check_mode=True, + ) + + arg_defs = dict( + src=dict(type='list', elements='str', required=True), + dest=dict(type='str', required=True), + exclude=dict(type='list', elements='str', default=[]), + format=dict( + type='dict', + options=dict( + name=dict( + type='str', + default='gz', + choices=['bz2', 'gz', 'tar', 'zip', 'terse', 'xmit', 'pax'] + ), + format_options=dict( + type='dict', + required=False, + options=dict( + terse_pack=dict( + type='str', + required=False, + choices=['PACK', 'SPACK'], + ), + xmit_log_data_set=dict( + type='str', + required=False, + ), + use_adrdssu=dict( + type='bool', + default=False, + ) + ), + default=dict( + terse_pack="SPACK", + xmit_log_data_set="", + use_adrdssu=False), + ), + ), + default=dict( + name="", + format_options=dict( + terse_pack="SPACK", + xmit_log_data_set="", + use_adrdssu=False + ) + ), + ), + group=dict(type='str'), + mode=dict(type='str'), + owner=dict(type='str'), + remove=dict(type='bool', default=False), + dest_data_set=dict( + arg_type='dict', + required=False, + options=dict( + name=dict(arg_type='str', required=False), + type=dict(arg_type='str', required=False, default="SEQ"), + space_primary=dict(arg_type='int', required=False), + space_secondary=dict( + arg_type='int', required=False), + space_type=dict(arg_type='str', required=False), + record_format=dict( + arg_type='str', required=False), + record_length=dict(type='int', required=False), + block_size=dict(arg_type='int', required=False), + directory_blocks=dict(arg_type="int", required=False), + sms_storage_class=dict(arg_type="str", required=False), + sms_data_class=dict(arg_type="str", required=False), + sms_management_class=dict(arg_type="str", required=False), + ) + ), + tmp_hlq=dict(type='qualifier_or_empty', default=''), + force=dict(type='bool', default=False) + ) + + result = dict( + changed=False, + original_message='', + message='' + ) + if module.check_mode: + module.exit_json(**result) + + try: + parser = better_arg_parser.BetterArgParser(arg_defs) + parsed_args = parser.parse_args(module.params) + module.params = parsed_args + except ValueError as err: + module.fail_json(msg="Parameter verification failed", stderr=str(err)) + + archive = get_archive_handler(module) + + if archive.dest_exists() and not archive.force: + module.fail_json(msg="%s file exists. Use force flag to replace dest" % archive.dest) + + archive.find_targets() + if archive.targets_exist(): + archive.archive_targets() + if archive.remove: + archive.remove_targets() + if archive.dest_exists(): + if archive.dest_type() == "USS": + archive.update_permissions() + archive.changed = archive.is_different_from_original() + archive.get_state() + + module.exit_json(**archive.result) + + +def main(): + run_module() + + +if __name__ == '__main__': + main() diff --git a/plugins/modules/zos_unarchive.py b/plugins/modules/zos_unarchive.py new file mode 100644 index 000000000..3f79fc789 --- /dev/null +++ b/plugins/modules/zos_unarchive.py @@ -0,0 +1,1156 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- + +# Copyright (c) IBM Corporation 2023 +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +DOCUMENTATION = r''' +--- +module: zos_unarchive +version_added: "1.7.0" +author: + - Oscar Fernando Flores Garcia (@fernandofloresg) +short_description: Unarchive files and data sets in z/OS. +description: + - The C(zos_unarchive) module unpacks an archive after optionally + transferring it to the remote system. + - For supported archive formats, see option C(format). + - Supported sources are USS (UNIX System Services) or z/OS data sets. + - Mixing MVS data sets with USS files for unarchiving is not supported. + - The archive is sent to the remote as binary, so no encoding is performed. + + +options: + src: + description: + - The remote absolute path or data set of the archive to be uncompressed. + - I(src) can be a USS file or MVS data set name. + - USS file paths should be absolute paths. + - MVS data sets supported types are C(SEQ), C(PDS), C(PDSE). + type: str + required: true + format: + description: + - The compression type and corresponding options to use when archiving + data. + type: dict + required: true + suboptions: + name: + description: + - The compression format to use. + type: str + required: true + choices: + - bz2 + - gz + - tar + - zip + - terse + - xmit + - pax + format_options: + description: + - Options specific to a compression format. + type: dict + required: false + suboptions: + xmit_log_data_set: + description: + - Provide the name of a data set to store xmit log output. + - If the data set provided does not exist, the program + will create it. + - 'If the data set provided exists, the data set must have + the following attributes: LRECL=255, BLKSIZE=3120, and + RECFM=VB' + - When providing the I(xmit_log_data_set) name, ensure there + is adequate space. + type: str + use_adrdssu: + description: + - If set to true, the C(zos_archive) module will use Data + Facility Storage Management Subsystem data set services + (DFSMSdss) program ADRDSSU to uncompress data sets from + a portable format after using C(xmit) or C(terse). + type: bool + default: False + dest_volumes: + description: + - When I(use_adrdssu=True), specify the volume the data sets + will be written to. + - If no volume is specified, storage management rules will be + used to determine the volume where the file will be + unarchived. + - If the storage administrator has specified a system default + unit name and you do not set a volume name for + non-system-managed data sets, then the system uses the + volumes associated with the default unit name. Check with + your storage administrator to determine whether a default + unit name has been specified. + type: list + elements: str + dest: + description: + - The remote absolute path or data set where the content should be unarchived to. + - I(dest) can be a USS file, directory or MVS data set name. + - If dest has missing parent directories, they will not be created. + type: str + required: false + group: + description: + - Name of the group that will own the file system objects. + - When left unspecified, it uses the current group of the current user + unless you are root, in which case it can preserve the previous + ownership. + - This option is only applicable if C(dest) is USS, otherwise ignored. + type: str + required: false + mode: + description: + - The permission of the uncompressed files. + - If C(dest) is USS, this will act as Unix file mode, otherwise ignored. + - It should be noted that modes are octal numbers. + The user must either add a leading zero so that Ansible's YAML parser + knows it is an octal number (like C(0644) or C(01777))or quote it + (like C('644') or C('1777')) so Ansible receives a string and can do + its own conversion from string into number. Giving Ansible a number + without following one of these rules will end up with a decimal number + which will have unexpected results. + - The mode may also be specified as a symbolic mode + (for example, ``u+rwx`` or ``u=rw,g=r,o=r``) or a special + string `preserve`. + - I(mode=preserve) means that the file will be given the same permissions + as + the source file. + type: str + required: false + owner: + description: + - Name of the user that should own the filesystem object, as would be + passed to the chown command. + - When left unspecified, it uses the current user unless you are root, + in which case it can preserve the previous ownership. + type: str + required: false + include: + description: + - A list of directories, files or data set names to extract from the + archive. + - When C(include) is set, only those files will we be extracted leaving + the remaining files in the archive. + - Mutually exclusive with exclude. + type: list + elements: str + required: false + exclude: + description: + - List the directory and file or data set names that you would like to + exclude from the unarchive action. + - Mutually exclusive with include. + type: list + elements: str + required: false + list: + description: + - Will list the contents of the archive without unpacking. + type: bool + required: false + default: false + dest_data_set: + description: + - Data set attributes to customize a C(dest) data set that the archive will be copied into. + required: false + type: dict + suboptions: + name: + description: + - Desired name for destination dataset. + type: str + required: false + type: + description: + - Organization of the destination + type: str + required: false + default: SEQ + choices: + - SEQ + - PDS + - PDSE + space_primary: + description: + - If the destination I(dest) data set does not exist , this sets the + primary space allocated for the data set. + - The unit of space used is set using I(space_type). + type: int + required: false + space_secondary: + description: + - If the destination I(dest) data set does not exist , this sets the + secondary space allocated for the data set. + - The unit of space used is set using I(space_type). + type: int + required: false + space_type: + description: + - If the destination data set does not exist, this sets the unit of + measurement to use when defining primary and secondary space. + - Valid units of size are C(K), C(M), C(G), C(CYL), and C(TRK). + type: str + choices: + - K + - M + - G + - CYL + - TRK + required: false + record_format: + description: + - If the destination data set does not exist, this sets the format of + the + data set. (e.g C(FB)) + - Choices are case-insensitive. + required: false + choices: + - FB + - VB + - FBA + - VBA + - U + type: str + record_length: + description: + - The length of each record in the data set, in bytes. + - For variable data sets, the length must include the 4-byte prefix + area. + - "Defaults vary depending on format: If FB/FBA 80, if VB/VBA 137, + if U 0." + type: int + required: false + block_size: + description: + - The block size to use for the data set. + type: int + required: false + directory_blocks: + description: + - The number of directory blocks to allocate to the data set. + type: int + required: false + key_offset: + description: + - The key offset to use when creating a KSDS data set. + - I(key_offset) is required when I(type=KSDS). + - I(key_offset) should only be provided when I(type=KSDS) + type: int + required: false + key_length: + description: + - The key length to use when creating a KSDS data set. + - I(key_length) is required when I(type=KSDS). + - I(key_length) should only be provided when I(type=KSDS) + type: int + required: false + sms_storage_class: + description: + - The storage class for an SMS-managed dataset. + - Required for SMS-managed datasets that do not match an SMS-rule. + - Not valid for datasets that are not SMS-managed. + - Note that all non-linear VSAM datasets are SMS-managed. + type: str + required: false + sms_data_class: + description: + - The data class for an SMS-managed dataset. + - Optional for SMS-managed datasets that do not match an SMS-rule. + - Not valid for datasets that are not SMS-managed. + - Note that all non-linear VSAM datasets are SMS-managed. + type: str + required: false + sms_management_class: + description: + - The management class for an SMS-managed dataset. + - Optional for SMS-managed datasets that do not match an SMS-rule. + - Not valid for datasets that are not SMS-managed. + - Note that all non-linear VSAM datasets are SMS-managed. + type: str + required: false + tmp_hlq: + description: + - Override the default high level qualifier (HLQ) for temporary data + sets. + - The default HLQ is the Ansible user used to execute the module and if + that is not available, then the environment variable value C(TMPHLQ) is + used. + type: str + required: false + force: + description: + - If set to true and the remote file or data set dest exists, the dest + will be deleted. + type: bool + required: false + default: false + remote_src: + description: + - If set to true, C(zos_unarchive) retrieves the archive from the remote + system. + - If set to false, C(zos_unarchive) searches the local machine (Ansible + controller) for the archive. + type: bool + required: false + default: false + +notes: + - VSAMs are not supported. + +seealso: + - module: zos_unarchive +''' + +EXAMPLES = r''' +# Simple extract +- name: Copy local tar file and unpack it on the managed z/OS node. + zos_unarchive: + path: "./files/archive_folder_test.tar" + format: + name: tar + +# use include +- name: Unarchive a bzip file selecting only a file to unpack. + zos_unarchive: + path: "/tmp/test.bz2" + format: + name: bz2 + include: + - 'foo.txt' + +# Use exclude +- name: Unarchive a terse data set and excluding data sets from unpacking. + zos_unarchive: + path: "USER.ARCHIVE.RESULT.TRS" + format: + name: terse + exclude: + - USER.ARCHIVE.TEST1 + - USER.ARCHIVE.TEST2 + +# List option +- name: List content from XMIT + zos_unarchive: + path: "USER.ARCHIVE.RESULT.XMIT" + format: + name: xmit + format_options: + use_adrdssu: True + list: True +''' + +RETURN = r''' +path: + description: + File path or data set name unarchived. + type: str + returned: always +dest_path: + description: + - Destination path where archive was extracted. + type: str + returned: always +targets: + description: + List of files or data sets in the archive. + type: list + elements: str + returned: success +missing: + description: + Any files or data sets not found during extraction. + type: str + returned: success +''' + +import abc +from ansible.module_utils.basic import AnsibleModule +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import ( + better_arg_parser, + data_set, + mvs_cmd) +import re +import os +import zipfile +import tarfile +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import ( + MissingZOAUImport, +) + +try: + from zoautil_py import datasets +except Exception: + Datasets = MissingZOAUImport() + +data_set_regex = r"(?:(?:[A-Z$#@]{1}[A-Z0-9$#@-]{0,7})(?:[.]{1})){1,21}[A-Z$#@]{1}[A-Z0-9$#@-]{0,7}(?:\([A-Z$#@]{1}[A-Z0-9$#@]{0,7}\)){0,1}" + +XMIT_RECORD_LENGTH = 80 +AMATERSE_RECORD_LENGTH = 1024 + + +class Unarchive(): + def __init__(self, module): + self.module = module + self.src = module.params.get("src") + self.dest = module.params.get("dest") + self.format = module.params.get("format").get("name") + self.format_options = module.params.get("format").get("format_options") + self.tmphlq = module.params.get("tmp_hlq") + self.force = module.params.get("force") + self.targets = list() + self.include = module.params.get("include") + self.exclude = module.params.get("exclude") + self.list = module.params.get("list") + self.changed = False + self.missing = list() + self.remote_src = module.params.get("remote_src") + if self.dest == '': + self.dest = os.path.dirname(self.src) + + @abc.abstractmethod + def extract_src(self): + pass + + @abc.abstractmethod + def _list_content(self): + pass + + def src_exists(self): + return self.src and os.path.exists(self.src) + + def dest_type(self): + return "USS" + + def dest_unarchived(self): + return bool(self.targets) + + def update_permissions(self): + """ + Update permissions in unarchived files. + """ + for target in self.targets: + file_name = os.path.join(self.dest, target) + file_args = self.module.load_file_common_arguments(self.module.params, path=file_name) + self.module.set_fs_attributes_if_different(file_args, self.changed) + + @property + def result(self): + return { + 'src': self.src, + 'dest_path': self.dest, + 'changed': self.changed, + 'targets': self.targets, + 'missing': self.missing, + } + + +class TarUnarchive(Unarchive): + def __init__(self, module): + super(TarUnarchive, self).__init__(module) + + def open(self, path): + """Open an archive using tarfile lib for read. + + Arguments: + path(str): Path to a tar, pax, gz or bz2 file to be opened. + + Returns: + Return a TarFile object for the path name. + """ + if self.format == 'tar': + file = tarfile.open(path, 'r') + elif self.format in ('pax'): + file = tarfile.open(path, 'r', format=tarfile.GNU_FORMAT) + elif self.format in ('gz', 'bz2'): + file = tarfile.open(path, 'r:' + self.format) + else: + self.module.fail_json(msg="%s is not a valid archive format for listing contents" % self.format) + return file + + def list_archive_content(self, path): + self.targets = self._list_content(self.src) + + def _list_content(self, path): + """Returns a list of members in an archive. + + Arguments: + path(str): Path to a tar, pax, gz or bz2 file to list its contents. + + Returns: + list(str): List of members inside the archive. + """ + self.file = self.open(path) + members = self.file.getnames() + self.file.close() + return members + + def extract_src(self): + """Unpacks the contents of the archive stored in path into dest folder. + + """ + original_working_dir = os.getcwd() + # The function gets relative paths, so it changes the current working + # directory to the root of src. + os.chdir(self.dest) + self.file = self.open(self.src) + + files_in_archive = self.file.getnames() + if self.include: + for path in self.include: + if path not in files_in_archive: + self.missing.append(path) + else: + self.file.extract(path) + self.targets.append(path) + elif self.exclude: + for path in files_in_archive: + if path not in self.exclude: + self.file.extract(path) + self.targets.append(path) + else: + self.file.extractall(members=sanitize_members(self.file.getmembers(), self.dest, self.format)) + self.targets = files_in_archive + self.file.close() + # Returning the current working directory to what it was before to not + # interfere with the rest of the module. + os.chdir(original_working_dir) + self.changed = bool(self.targets) + + +class ZipUnarchive(Unarchive): + def __init__(self, module): + super(ZipUnarchive, self).__init__(module) + + def open(self, path): + """Unpacks the contents of the archive stored in path into dest folder. + + """ + try: + file = zipfile.ZipFile(path, 'r', zipfile.ZIP_DEFLATED, True) + except zipfile.BadZipFile: + self.module.fail_json( + msg="Improperly compressed zip file, unable to to open file {0} ".format(path) + ) + return file + + def list_archive_content(self): + self.targets = self._list_content(self.src) + + def _list_content(self, path): + """Returns a list of members in an archive. + + Arguments: + path(str): Path to a tar, pax, gz or bz2 file to list its contents. + + Returns: + list(str): List of members inside the archive. + """ + self.file = self.open(path) + members = self.file.namelist() + self.file.close() + return members + + def extract_src(self): + """Returns a list of members in an archive. + + Arguments: + path(str): Path to a tar, pax, gz or bz2 file to list its contents. + + Returns: + list(str): List of members inside the archive. + """ + original_working_dir = os.getcwd() + # The function gets relative paths, so it changes the current working + # directory to the root of src. + os.chdir(self.dest) + self.file = self.open(self.src) + + files_in_archive = self.file.namelist() + if self.include: + for path in self.include: + if path not in files_in_archive: + self.missing.append(path) + else: + self.file.extract(path) + self.targets.append(path) + elif self.exclude: + for path in files_in_archive: + if path not in self.exclude: + self.file.extract(path) + self.targets.append(path) + else: + self.file.extractall(members=sanitize_members(self.file.infolist(), self.dest, self.format)) + self.targets = files_in_archive + self.file.close() + # Returning the current working directory to what it was before to not + # interfere with the rest of the module. + os.chdir(original_working_dir) + self.changed = bool(self.targets) + + +class MVSUnarchive(Unarchive): + def __init__(self, module): + super(MVSUnarchive, self).__init__(module) + self.volumes = self.format_options.get("dest_volumes") + self.use_adrdssu = self.format_options.get("use_adrdssu") + self.dest_data_set = module.params.get("dest_data_set") + self.dest_data_set = dict() if self.dest_data_set is None else self.dest_data_set + self.source_size = 0 + + def dest_type(self): + return "MVS" + + def _compute_dest_data_set_size(self): + """ + Computes the attributes that the destination data set or temporary destination + data set should have in terms of size, record_length, etc. + """ + + """ + - Size of temporary DS for archive handling. + + If remote_src then we can get the source_size from archive on the system. + + If not remote_src then we can get the source_size from temporary_ds. + Both are named src so no problemo. + + If format is xmit, dest_data_set size is the same as source_size. + + If format is terse, dest_data_set size is different than the source_size, has to be greater, + but how much? In this case we can add dest_data_set option. + + Apparently the only problem is when format name is terse. + """ + + # Get the size from the system + src_attributes = datasets.listing(self.src)[0] + # The size returned by listing is in bytes. + source_size = int(src_attributes.total_space) + if self.format == 'terse': + source_size = int(source_size * 1.5) + return source_size + + def _create_dest_data_set( + self, + name=None, + replace=None, + type=None, + space_primary=None, + space_secondary=None, + space_type=None, + record_format=None, + record_length=None, + block_size=None, + directory_blocks=None, + key_length=None, + key_offset=None, + sms_storage_class=None, + sms_data_class=None, + sms_management_class=None, + volumes=None, + tmp_hlq=None, + force=None, + ): + """Create a temporary data set. + + Arguments: + tmp_hlq(str): A HLQ specified by the user for temporary data sets. + + Returns: + str: Name of the temporary data set created. + """ + arguments = locals() + if name is None: + if tmp_hlq: + hlq = tmp_hlq + else: + rc, hlq, err = self.module.run_command("hlq") + hlq = hlq.replace('\n', '') + cmd = "mvstmphelper {0}.RESTORE".format(hlq) + rc, temp_ds, err = self.module.run_command(cmd) + arguments.update(name=temp_ds.replace('\n', '')) + if record_format is None: + arguments.update(record_format="FB") + if record_length is None: + arguments.update(record_length=80) + if type is None: + arguments.update(type="SEQ") + if space_primary is None: + arguments.update(space_primary=self._compute_dest_data_set_size()) + arguments.pop("self") + changed = data_set.DataSet.ensure_present(**arguments) + return arguments["name"], changed + + def _get_include_data_sets_cmd(self): + include_cmd = "INCL( " + for include_ds in self.include: + include_cmd += " '{0}', - \n".format(include_ds) + include_cmd += " ) - \n" + return include_cmd + + def _get_exclude_data_sets_cmd(self): + exclude_cmd = "EXCL( - \n" + for exclude_ds in self.exclude: + exclude_cmd += " '{0}', - \n".format(exclude_ds) + exclude_cmd += " ) - \n" + return exclude_cmd + + def _get_volumes(self): + volumes_cmd = "OUTDYNAM( - \n" + for volume in self.volumes: + volumes_cmd += " ('{0}'), - \n".format(volume) + volumes_cmd += " ) - \n" + return volumes_cmd + + def _restore(self, source): + """ + Calls ADDRSU using RESTORE to unpack the dump datasets. + + Arguments: + source(str): Name of the data set to use as archive in ADRDSSU restore operation. + + Returns: + int: Return code result of restore operation. + """ + filter = "INCL(**) " + volumes = "" + force = "REPLACE -\n TOLERATE(ENQFAILURE) " if self.force else "" + if self.include: + filter = self._get_include_data_sets_cmd() + if self.exclude: + filter = self._get_exclude_data_sets_cmd() + if self.volumes: + volumes = self._get_volumes() + restore_cmd = """ RESTORE INDD(ARCHIVE) - + DS( - + {0} ) - + {1} - + CATALOG - + {2} """.format(filter, volumes, force) + dds = dict(archive="{0},old".format(source)) + rc, out, err = mvs_cmd.adrdssu(cmd=restore_cmd, dds=dds, authorized=True) + self._get_restored_datasets(out) + + if rc != 0: + # AdrddssuRestoreError + unrestored_data_sets = self._get_unrestored_datasets(out) + unrestored_data_sets = ", ".join(unrestored_data_sets) + self.clean_environment(data_sets=[source], uss_files=[], remove_targets=True) + self.module.fail_json( + msg="Failed executing ADRDSSU to unarchive {0}. List of data sets not restored : {1}".format(source, unrestored_data_sets), + stdout=out, + stderr=err, + stdout_lines=restore_cmd, + rc=rc, + ) + return rc + + def src_exists(self): + return data_set.DataSet.data_set_exists(self.src) + + def _get_restored_datasets(self, output): + ds_list = list() + find_ds_list = re.findall(r"SUCCESSFULLY PROCESSED\n(?:.*\n)*", output) + if find_ds_list: + ds_list = re.findall(data_set_regex, find_ds_list[0]) + self.targets = ds_list + return ds_list + + def _get_unrestored_datasets(self, output): + ds_list = list() + output = output.split("SUCCESSFULLY PROCESSED")[0] + find_ds_list = re.findall(r"NOT PROCESSED FROM THE LOGICALLY FORMATTED DUMP TAPE DUE TO \n(?:.*\n)*", output) + if find_ds_list: + ds_list = re.findall(data_set_regex, find_ds_list[0]) + return ds_list + + @abc.abstractmethod + def unpack(self): + pass + + def extract_src(self): + """Extract the MVS path contents. + + """ + temp_ds = "" + if not self.use_adrdssu: + temp_ds, rc = self._create_dest_data_set(**self.dest_data_set) + rc = self.unpack(self.src, temp_ds) + else: + temp_ds, rc = self._create_dest_data_set(type="SEQ", + record_format="U", + record_length=0, + tmp_hlq=self.tmphlq, + replace=True) + self.unpack(self.src, temp_ds) + rc = self._restore(temp_ds) + datasets.delete(temp_ds) + self.changed = not rc + + if not self.remote_src: + datasets.delete(self.src) + return + + def _list_content(self, source): + restore_cmd = " RESTORE INDD(ARCHIVE) DS(INCL(**)) " + cmd = " mvscmdauth --pgm=ADRDSSU --archive={0},old --args='TYPRUN=NORUN' --sysin=stdin --sysprint=*".format(source) + rc, out, err = self.module.run_command(cmd, data=restore_cmd) + self._get_restored_datasets(out) + + def list_archive_content(self): + temp_ds, rc = self._create_dest_data_set(type="SEQ", record_format="U", record_length=0, tmp_hlq=self.tmphlq, replace=True) + self.unpack(self.src, temp_ds) + self._list_content(temp_ds) + datasets.delete(temp_ds) + if not self.remote_src: + datasets.delete(self.src) + + def clean_environment(self, data_sets=None, uss_files=None, remove_targets=False): + """Removes any allocated data sets that won't be needed after module termination. + Arguments: + data_sets - {list(str)} : list of data sets to remove + uss_files - {list(str)} : list of uss files to remove + remove_targets - bool : Indicates if already unpacked data sets need to be removed too. + """ + if data_set is not None: + for ds in data_sets: + data_set.DataSet.ensure_absent(ds) + if uss_files is not None: + for file in uss_files: + os.remove(file) + if remove_targets: + for target in self.targets: + data_set.DataSet.ensure_absent(target) + + +class AMATerseUnarchive(MVSUnarchive): + def __init__(self, module): + super(AMATerseUnarchive, self).__init__(module) + + def unpack(self, src, dest): + """ + Unpacks using AMATerse, assumes the data set has only been packed once. + """ + dds = {'args': 'UNPACK', 'sysut1': src, 'sysut2': dest} + rc, out, err = mvs_cmd.amaterse(cmd="", dds=dds) + if rc != 0: + self.clean_environment(data_sets=[dest], uss_files=[], remove_targets=True) + self.module.fail_json( + msg="Failed executing AMATERSE to restore {0} into {1}".format(src, dest), + stdout=out, + stderr=err, + rc=rc, + ) + return rc + + +class XMITUnarchive(MVSUnarchive): + def __init__(self, module): + super(XMITUnarchive, self).__init__(module) + + def unpack(self, src, dest): + """ + Unpacks using XMIT. + + src is the archive + dest is the destination dataset + """ + unpack_cmd = """ + PROFILE NOPROMPT + RECEIVE INDSN('{0}') + DA('{1}') + """.format(src, dest) + rc, out, err = mvs_cmd.ikjeft01(cmd=unpack_cmd, authorized=True) + if rc != 0: + self.module.fail_json( + msg="Failed executing RECEIVE to restore {0} into {1}".format(src, dest), + stdout=out, + stderr=err, + rc=rc, + ) + return rc + + +def get_unarchive_handler(module): + format = module.params.get("format").get("name") + if format in ["tar", "gz", "bz2", "pax"]: + return TarUnarchive(module) + elif format == "terse": + return AMATerseUnarchive(module) + elif format == "xmit": + return XMITUnarchive(module) + return ZipUnarchive(module) + + +def tar_filter(member, dest_path): + name = member.name + if name.startswith(('/', os.sep)): + name = member.path.lstrip('/' + os.sep) + if os.path.isabs(name): + raise AbsolutePathError + target_path = os.path.realpath(os.path.join(dest_path, name)) + if os.path.commonpath([target_path, dest_path]) != dest_path: + raise OutsideDestinationError(member, target_path) + if member.islnk() or member.issym(): + if os.path.isabs(member.linkname): + raise AbsoluteLinkError(member) + target_path = os.path.realpath(os.path.join(dest_path, member.linkname)) + if os.path.commonpath([target_path, dest_path]) != dest_path: + raise LinkOutsideDestinationError(member, target_path) + + +def zip_filter(member, dest_path): + name = member.filename + if name.startswith(('/', os.sep)): + name = name.lstrip('/' + os.sep) + if os.path.isabs(name): + raise AbsolutePathError + target_path = os.path.realpath(os.path.join(dest_path, name)) + if os.path.commonpath([target_path, dest_path]) != dest_path: + raise OutsideDestinationError(member, target_path) + + +def sanitize_members(members, dest, format): + """ + Filter inspired by (PEP 706) + - Refuse to extract any absolute path + - Refuse to extract any member with leading '/' + """ + dest_path = os.path.realpath(dest) + for member in members: + if format == 'zip': + zip_filter(member, dest_path) + else: + tar_filter(member, dest_path) + return members + + +class AbsolutePathError(Exception): + def __init__(self, tarinfo): + self.msg = "Unable to extract {0} as the files extracted can not contain an absolute path".format(tarinfo.name) + super().__init__(self.msg) + + +class OutsideDestinationError(Exception): + def __init__(self, tarinfo, path): + self.msg = 'Unable to extract {0} to {1}, which is outside the designated destination'.format(tarinfo.name, path) + super().__init__(self.msg) + + +class AbsoluteLinkError(Exception): + def __init__(self, tarinfo): + self.msg = '{0} is a symlink to an absolute path'.format(tarinfo.name) + super().__init__(self.msg) + + +class LinkOutsideDestinationError(Exception): + def __init__(self, tarinfo, path): + self.msg = 'Unable to extract {0} it would link to {1}, which is outside the designated destination'.format(tarinfo.name, path) + super().__init__() + + +def run_module(): + module = AnsibleModule( + argument_spec=dict( + src=dict(type='str', required=True), + dest=dict(type='str'), + include=dict(type='list', elements='str'), + exclude=dict(type='list', elements='str'), + list=dict(type='bool', default=False), + format=dict( + type='dict', + required=True, + options=dict( + name=dict( + type='str', + required=True, + choices=['bz2', 'gz', 'tar', 'zip', 'terse', 'xmit', 'pax'] + ), + format_options=dict( + type='dict', + required=False, + options=dict( + xmit_log_data_set=dict( + type='str', + required=False, + ), + dest_volumes=dict( + type='list', + elements='str', + ), + use_adrdssu=dict( + type='bool', + default=False, + ) + ) + ), + ), + ), + group=dict(type='str'), + mode=dict(type='str'), + owner=dict(type='str'), + dest_data_set=dict( + type='dict', + required=False, + options=dict( + name=dict( + type='str', required=False, + ), + type=dict( + type='str', + choices=['SEQ', 'PDS', 'PDSE'], + required=False, + default='SEQ', + ), + space_primary=dict( + type='int', required=False), + space_secondary=dict( + type='int', required=False), + space_type=dict( + type='str', + choices=['K', 'M', 'G', 'CYL', 'TRK'], + required=False, + ), + record_format=dict( + type='str', + choices=["FB", "VB", "FBA", "VBA", "U"], + required=False + ), + record_length=dict(type='int', required=False), + block_size=dict(type='int', required=False), + directory_blocks=dict(type="int", required=False), + key_offset=dict(type="int", required=False, no_log=False), + key_length=dict(type="int", required=False, no_log=False), + sms_storage_class=dict(type="str", required=False), + sms_data_class=dict(type="str", required=False), + sms_management_class=dict(type="str", required=False), + ) + ), + tmp_hlq=dict(type='str'), + force=dict(type='bool', default=False), + remote_src=dict(type='bool', default=False), + ), + mutually_exclusive=[ + ['include', 'exclude'], + ], + supports_check_mode=True, + ) + + arg_defs = dict( + src=dict(type='str', required=True), + dest=dict(type='str', required=False, default=''), + include=dict(type='list', elements='str'), + exclude=dict(type='list', elements='str'), + list=dict(type='bool', default=False), + format=dict( + type='dict', + required=True, + options=dict( + name=dict( + type='str', + required=True, + default='gz', + choices=['bz2', 'gz', 'tar', 'zip', 'terse', 'xmit', 'pax'] + ), + format_options=dict( + type='dict', + required=False, + options=dict( + xmit_log_data_set=dict( + type='str', + required=False, + ), + dest_volumes=dict( + type='list', + elements='str' + ), + use_adrdssu=dict( + type='bool', + default=False, + ), + ), + default=dict(xmit_log_data_set=""), + ) + ), + default=dict(name="", format_options=dict(xmit_log_data_set="")), + ), + dest_data_set=dict( + arg_type='dict', + required=False, + options=dict( + name=dict(arg_type='str', required=False), + type=dict(arg_type='str', required=False, default="SEQ"), + space_primary=dict(arg_type='int', required=False), + space_secondary=dict( + arg_type='int', required=False), + space_type=dict(arg_type='str', required=False), + record_format=dict( + arg_type='str', required=False), + record_length=dict(type='int', required=False), + block_size=dict(arg_type='int', required=False), + directory_blocks=dict(arg_type="int", required=False), + key_offset=dict(arg_type="int", required=False), + key_length=dict(arg_type="int", required=False), + sms_storage_class=dict(arg_type="str", required=False), + sms_data_class=dict(arg_type="str", required=False), + sms_management_class=dict(arg_type="str", required=False), + ) + ), + group=dict(type='str'), + mode=dict(type='str'), + owner=dict(type='str'), + tmp_hlq=dict(type='qualifier_or_empty', default=''), + force=dict(type='bool', default=False), + remote_src=dict(type='bool', default=False), + mutually_exclusive=[ + ['include', 'exclude'], + ], + ) + + try: + parser = better_arg_parser.BetterArgParser(arg_defs) + parsed_args = parser.parse_args(module.params) + module.params = parsed_args + except ValueError as err: + module.fail_json(msg="Parameter verification failed", stderr=str(err)) + unarchive = get_unarchive_handler(module) + + if unarchive.list: + unarchive.list_archive_content() + module.exit_json(**unarchive.result) + + if not unarchive.src_exists(): + module.fail_json(msg="{0} does not exists, please provide a valid src.".format(module.params.get("src"))) + + unarchive.extract_src() + + if unarchive.dest_unarchived() and unarchive.dest_type() == "USS": + unarchive.update_permissions() + + module.exit_json(**unarchive.result) + + +def main(): + run_module() + + +if __name__ == '__main__': + main() diff --git a/tests/functional/modules/test_zos_archive_func.py b/tests/functional/modules/test_zos_archive_func.py new file mode 100644 index 000000000..e3b4b4ba7 --- /dev/null +++ b/tests/functional/modules/test_zos_archive_func.py @@ -0,0 +1,900 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- + +# Copyright (c) IBM Corporation 2020, 2022 +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from __future__ import absolute_import, division, print_function +import time + +import pytest +__metaclass__ = type + +SHELL_EXECUTABLE = "/bin/sh" +USS_TEMP_DIR = "/tmp/archive" +USS_TEST_FILES = { f"{USS_TEMP_DIR}/foo.txt" : "foo sample content", + f"{USS_TEMP_DIR}/bar.txt": "bar sample content", + f"{USS_TEMP_DIR}/empty.txt":""} +USS_EXCLUSION_FILE = f"{USS_TEMP_DIR}/foo.txt" +TEST_PS = "USER.PRIVATE.TESTDS" +TEST_PDS = "USER.PRIVATE.TESTPDS" +HLQ = "USER" +MVS_DEST_ARCHIVE = "USER.PRIVATE.ARCHIVE" + +USS_DEST_ARCHIVE = "testarchive.dzp" + +STATE_ARCHIVED = 'archive' +STATE_INCOMPLETE = 'incomplete' + +USS_FORMATS = ['tar', 'zip', 'gz', 'bz2', 'pax'] + +c_pgm="""#include <stdio.h> +#include <stdlib.h> +#include <string.h> +int main(int argc, char** argv) +{ + char dsname[ strlen(argv[1]) + 4]; + sprintf(dsname, "//'%s'", argv[1]); + FILE* member; + member = fopen(dsname, "rb,type=record"); + sleep(300); + fclose(member); + return 0; +} +""" + +call_c_jcl="""//PDSELOCK JOB MSGCLASS=A,MSGLEVEL=(1,1),NOTIFY=&SYSUID,REGION=0M +//LOCKMEM EXEC PGM=BPXBATCH +//STDPARM DD * +SH /tmp/disp_shr/pdse-lock '{0}' +//STDIN DD DUMMY +//STDOUT DD SYSOUT=* +//STDERR DD SYSOUT=* +//""" + +def set_uss_test_env(ansible_zos_module, test_files): + for key, value in test_files.items(): + ansible_zos_module.all.shell( + cmd=f"echo \"{value}\" > \"{key}\"", + executable=SHELL_EXECUTABLE, + ) + +def create_multiple_data_sets(ansible_zos_module, base_name, n, type, ): + test_data_sets = [] + for i in range(n): + curr_ds = dict(name=base_name+str(i), + type=type, + state="present", + replace=True, + force=True) + test_data_sets.append(curr_ds) + + # Create data sets in batch + ansible_zos_module.all.zos_data_set( + batch=test_data_sets + ) + return test_data_sets + +def create_multiple_members(ansible_zos_module, pds_name, member_base_name, n): + test_members = [] + for i in range(n): + curr_ds = dict(name="{0}({1})".format(pds_name, member_base_name+str(i)), + type="member", + state="present", + replace=True, + force=True) + test_members.append(curr_ds) + ansible_zos_module.all.zos_data_set( + batch=test_members + ) + return test_members + +###################################################### +# +# USS TEST +# +###################################################### +""" +List of tests: +- test_uss_single_archive +- test_uss_single_archive_with_mode +- test_uss_single_archive_with_force_option +- test_uss_archive_multiple_files +- test_uss_archive_multiple_files_with_exclude +- test_uss_archive_remove_targets +""" + + +# Core functionality tests +# Test archive with no options +@pytest.mark.uss +@pytest.mark.parametrize("format", USS_FORMATS) +def test_uss_single_archive(ansible_zos_module, format): + try: + hosts = ansible_zos_module + hosts.all.file(path=f"{USS_TEMP_DIR}", state="absent") + hosts.all.file(path=USS_TEMP_DIR, state="directory") + set_uss_test_env(hosts, USS_TEST_FILES) + dest = f"{USS_TEMP_DIR}/archive.{format}" + archive_result = hosts.all.zos_archive(src=list(USS_TEST_FILES.keys()), + dest=dest, + format=dict( + name=format + )) + + for result in archive_result.contacted.values(): + assert result.get("failed", False) is False + assert result.get("changed") is True + assert result.get("dest_state") == STATE_ARCHIVED + # Command to assert the file is in place + cmd_result = hosts.all.shell(cmd=f"ls {USS_TEMP_DIR}") + for c_result in cmd_result.contacted.values(): + assert "archive.{0}".format(format) in c_result.get("stdout") + + finally: + hosts.all.file(path=f"{USS_TEMP_DIR}", state="absent") + + +@pytest.mark.uss +@pytest.mark.parametrize("format", USS_FORMATS) +def test_uss_single_archive_with_mode(ansible_zos_module, format): + try: + hosts = ansible_zos_module + hosts.all.file(path=f"{USS_TEMP_DIR}", state="absent") + hosts.all.file(path=USS_TEMP_DIR, state="directory") + set_uss_test_env(hosts, USS_TEST_FILES) + dest = f"{USS_TEMP_DIR}/archive.{format}" + dest_mode = "0755" + archive_result = hosts.all.zos_archive(src=list(USS_TEST_FILES.keys()), + dest=dest, + format=dict( + name=format + ), + mode=dest_mode) + stat_dest_res = hosts.all.stat(path=dest) + for result in archive_result.contacted.values(): + assert result.get("failed", False) is False + assert result.get("changed") is True + assert result.get("dest_state") == STATE_ARCHIVED + for stat_result in stat_dest_res.contacted.values(): + assert stat_result.get("stat").get("exists") is True + assert stat_result.get("stat").get("mode") == dest_mode + finally: + hosts.all.file(path=f"{USS_TEMP_DIR}", state="absent") + + +@pytest.mark.uss +@pytest.mark.parametrize("format", USS_FORMATS) +def test_uss_single_archive_with_force_option(ansible_zos_module, format): + try: + hosts = ansible_zos_module + hosts.all.file(path=f"{USS_TEMP_DIR}", state="absent") + hosts.all.file(path=USS_TEMP_DIR, state="directory") + set_uss_test_env(hosts, USS_TEST_FILES) + dest = f"{USS_TEMP_DIR}/archive.{format}" + archive_result = hosts.all.zos_archive(src=list(USS_TEST_FILES.keys()), + dest=dest, + format=dict( + name=format + )) + + for result in archive_result.contacted.values(): + assert result.get("failed", False) is False + assert result.get("changed") is True + + archive_result = hosts.all.zos_archive(src=list(USS_TEST_FILES.keys()), + dest=dest, + format=dict( + name=format + )) + + for result in archive_result.contacted.values(): + assert result.get("failed", False) is True + assert result.get("changed") is False + + set_uss_test_env(hosts, USS_TEST_FILES) + archive_result = hosts.all.zos_archive(src=list(USS_TEST_FILES.keys()), + dest=dest, + format=dict( + name=format + ), + force=True,) + + for result in archive_result.contacted.values(): + assert result.get("failed", False) is False + assert result.get("changed") is True + assert result.get("dest_state") == STATE_ARCHIVED + + finally: + hosts.all.file(path=f"{USS_TEMP_DIR}", state="absent") + + +@pytest.mark.uss +@pytest.mark.parametrize("format", USS_FORMATS) +@pytest.mark.parametrize("path", [ + dict(files= f"{USS_TEMP_DIR}/*.txt", size=len(USS_TEST_FILES)), + dict(files=list(USS_TEST_FILES.keys()), size=len(USS_TEST_FILES)), + dict(files= f"{USS_TEMP_DIR}/" , size=len(USS_TEST_FILES) + 1), + ]) +def test_uss_archive_multiple_files(ansible_zos_module, format, path): + try: + hosts = ansible_zos_module + hosts.all.file(path=USS_TEMP_DIR, state="absent") + hosts.all.file(path=USS_TEMP_DIR, state="directory") + set_uss_test_env(hosts, USS_TEST_FILES) + dest = f"{USS_TEMP_DIR}/archive.{format}" + archive_result = hosts.all.zos_archive(src=path.get("files"), + dest=dest, + format=dict(name=format),) + + # resulting archived tag varies in size when a folder is archived using zip. + size = path.get("size") + + for result in archive_result.contacted.values(): + assert result.get("changed") is True + assert result.get("dest_state") == STATE_ARCHIVED + assert len(result.get("archived")) == size + # Command to assert the file is in place + cmd_result = hosts.all.shell(cmd=f"ls {USS_TEMP_DIR}") + for c_result in cmd_result.contacted.values(): + assert f"archive.{format}" in c_result.get("stdout") + + finally: + hosts.all.file(path=USS_TEMP_DIR, state="absent") + + +@pytest.mark.uss +@pytest.mark.parametrize("format", USS_FORMATS) +@pytest.mark.parametrize("path", [ + dict(files=list(USS_TEST_FILES.keys()), size=len(USS_TEST_FILES) - 1, exclude=[f'{USS_TEMP_DIR}/foo.txt']), + dict(files= f"{USS_TEMP_DIR}/" , size=len(USS_TEST_FILES) + 1, exclude=[]), + ]) +def test_uss_archive_multiple_files_with_exclude(ansible_zos_module, format, path): + try: + hosts = ansible_zos_module + hosts.all.file(path=USS_TEMP_DIR, state="absent") + hosts.all.file(path=USS_TEMP_DIR, state="directory") + set_uss_test_env(hosts, USS_TEST_FILES) + dest = f"{USS_TEMP_DIR}/archive.{format}" + archive_result = hosts.all.zos_archive(src=path.get("files"), + dest=dest, + format=dict(name=format), + exclude=path.get("exclude")) + + # resulting archived tag varies in size when a folder is archived using zip. + size = path.get("size") + + for result in archive_result.contacted.values(): + assert result.get("failed", False) is False + assert result.get("changed") is True + assert result.get("dest_state") == STATE_ARCHIVED + assert len(result.get("archived")) == size + # Command to assert the file is in place + cmd_result = hosts.all.shell(cmd=f"ls {USS_TEMP_DIR}") + for c_result in cmd_result.contacted.values(): + assert f"archive.{format}" in c_result.get("stdout") + finally: + hosts.all.file(path=USS_TEMP_DIR, state="absent") + + +@pytest.mark.uss +@pytest.mark.parametrize("format", USS_FORMATS) +def test_uss_archive_remove_targets(ansible_zos_module, format): + try: + hosts = ansible_zos_module + hosts.all.file(path=USS_TEMP_DIR, state="absent") + hosts.all.file(path=USS_TEMP_DIR, state="directory") + set_uss_test_env(hosts, USS_TEST_FILES) + dest = f"{USS_TEMP_DIR}/archive.{format}" + paths = list(USS_TEST_FILES.keys()) + archive_result = hosts.all.zos_archive(src=paths, + dest=dest, + format=dict(name=format), + remove=True) + + for result in archive_result.contacted.values(): + assert result.get("changed") is True + assert result.get("dest_state") == STATE_ARCHIVED + cmd_result = hosts.all.shell(cmd=f"ls {USS_TEMP_DIR}") + for c_result in cmd_result.contacted.values(): + assert f"archive.{format}" in c_result.get("stdout") + for path in paths: + assert path not in c_result.get("stdout") + finally: + hosts.all.file(path=USS_TEMP_DIR, state="absent") + + +###################################################################### +# +# MVS data sets tests +# +###################################################################### + +""" +List of tests: +- test_mvs_archive_single_dataset +- test_mvs_archive_single_dataset_use_adrdssu +- test_mvs_archive_single_data_set_remove_target +- test_mvs_archive_multiple_data_sets +- test_mvs_archive_multiple_data_sets_use_adrdssu +- test_mvs_archive_multiple_data_sets_remove_target +- test_mvs_archive_multiple_data_sets_with_exclusion +- test_mvs_archive_multiple_data_sets_with_missing + +""" +@pytest.mark.parametrize( + "format", [ + "terse", + "xmit", + ]) +@pytest.mark.parametrize( + "data_set", [ + dict(name=TEST_PS, dstype="SEQ", members=[""]), + dict(name=TEST_PDS, dstype="PDS", members=["MEM1", "MEM2", "MEM3"]), + dict(name=TEST_PDS, dstype="PDSE", members=["MEM1", "MEM2", "MEM3"]), + ] +) +@pytest.mark.parametrize( + "record_length", [80, 120, 1024] +) +@pytest.mark.parametrize( + # "record_format", ["FB", "VB", "FBA", "VBA", "U"], + "record_format", ["FB", "VB",], +) +def test_mvs_archive_single_dataset(ansible_zos_module, format, data_set, record_length, record_format): + try: + hosts = ansible_zos_module + # Clean env + hosts.all.zos_data_set(name=data_set.get("name"), state="absent") + hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") + # Create source data set + hosts.all.zos_data_set( + name=data_set.get("name"), + type=data_set.get("dstype"), + state="present", + record_length=record_length, + record_format=record_format, + replace=True, + ) + # Create members if needed + if data_set.get("dstype") in ["PDS", "PDSE"]: + for member in data_set.get("members"): + hosts.all.zos_data_set( + name=f"{data_set.get('name')}({member})", + type="member", + state="present" + ) + # Write some content into src + test_line = "this is a test line" + for member in data_set.get("members"): + if member == "": + ds_to_write = f"{data_set.get('name')}" + else: + ds_to_write = f"{data_set.get('name')}({member})" + hosts.all.shell(cmd=f"decho '{test_line}' \"{ds_to_write}\"") + + format_dict = dict(name=format) + if format == "terse": + format_dict["format_options"] = dict(terse_pack="SPACK") + archive_result = hosts.all.zos_archive( + src=data_set.get("name"), + dest=MVS_DEST_ARCHIVE, + format=format_dict, + ) + + # assert response is positive + for result in archive_result.contacted.values(): + assert result.get("changed") is True + assert result.get("dest") == MVS_DEST_ARCHIVE + assert data_set.get("name") in result.get("archived") + cmd_result = hosts.all.shell(cmd = "dls {0}.*".format(HLQ)) + for c_result in cmd_result.contacted.values(): + assert MVS_DEST_ARCHIVE in c_result.get("stdout") + finally: + hosts.all.zos_data_set(name=data_set.get("name"), state="absent") + hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") + + +@pytest.mark.parametrize( + "format", [ + "terse", + "xmit", + ]) +@pytest.mark.parametrize( + "data_set", [ + dict(name=TEST_PS, dstype="SEQ", members=[""]), + dict(name=TEST_PDS, dstype="PDS", members=["MEM1", "MEM2", "MEM3"]), + dict(name=TEST_PDS, dstype="PDSE", members=["MEM1", "MEM2", "MEM3"]), + ] +) +@pytest.mark.parametrize( + "record_length", [80, 120, 1024] +) +@pytest.mark.parametrize( + # "record_format", ["FB", "VB", "FBA", "VBA", "U"], + "record_format", ["FB", "VB",], +) +def test_mvs_archive_single_dataset_use_adrdssu(ansible_zos_module, format, data_set, record_length, record_format): + try: + hosts = ansible_zos_module + # Clean env + hosts.all.zos_data_set(name=data_set.get("name"), state="absent") + hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") + # Create source data set + hosts.all.zos_data_set( + name=data_set.get("name"), + type=data_set.get("dstype"), + state="present", + record_length=record_length, + record_format=record_format, + replace=True, + ) + # Create members if needed + if data_set.get("dstype") in ["PDS", "PDSE"]: + for member in data_set.get("members"): + hosts.all.zos_data_set( + name=f"{data_set.get('name')}({member})", + type="member", + state="present" + ) + # Write some content into src + test_line = "this is a test line" + for member in data_set.get("members"): + if member == "": + ds_to_write = f"{data_set.get('name')}" + else: + ds_to_write = f"{data_set.get('name')}({member})" + hosts.all.shell(cmd=f"decho '{test_line}' \"{ds_to_write}\"") + + format_dict = dict(name=format) + format_dict["format_options"] = dict(use_adrdssu=True) + if format == "terse": + format_dict["format_options"].update(terse_pack="SPACK") + archive_result = hosts.all.zos_archive( + src=data_set.get("name"), + dest=MVS_DEST_ARCHIVE, + format=format_dict, + ) + + # assert response is positive + for result in archive_result.contacted.values(): + assert result.get("changed") is True + assert result.get("dest") == MVS_DEST_ARCHIVE + assert data_set.get("name") in result.get("archived") + cmd_result = hosts.all.shell(cmd = "dls {0}.*".format(HLQ)) + for c_result in cmd_result.contacted.values(): + assert MVS_DEST_ARCHIVE in c_result.get("stdout") + finally: + hosts.all.zos_data_set(name=data_set.get("name"), state="absent") + hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") + +@pytest.mark.parametrize( + "format", [ + "terse", + "xmit", + ]) +@pytest.mark.parametrize( + "data_set", [ + dict(name=TEST_PS, dstype="SEQ", members=[""]), + dict(name=TEST_PDS, dstype="PDS", members=["MEM1", "MEM2", "MEM3"]), + dict(name=TEST_PDS, dstype="PDSE", members=["MEM1", "MEM2", "MEM3"]), + ] +) +@pytest.mark.parametrize( + "record_length", [80], +) +@pytest.mark.parametrize( + "record_format", ["FB", "VB",], +) +def test_mvs_archive_single_data_set_remove_target(ansible_zos_module, format, data_set, record_length, record_format): + try: + hosts = ansible_zos_module + # Clean env + hosts.all.zos_data_set(name=data_set.get("name"), state="absent") + hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") + # Create source data set + hosts.all.zos_data_set( + name=data_set.get("name"), + type=data_set.get("dstype"), + state="present", + record_length=record_length, + record_format=record_format, + replace=True, + ) + # Create members if needed + if data_set.get("dstype") in ["PDS", "PDSE"]: + for member in data_set.get("members"): + hosts.all.zos_data_set( + name=f"{data_set.get('name')}({member})", + type="member", + state="present" + ) + # Write some content into src + test_line = "this is a test line" + for member in data_set.get("members"): + if member == "": + ds_to_write = f"{data_set.get('name')}" + else: + ds_to_write = f"{data_set.get('name')}({member})" + hosts.all.shell(cmd=f"decho '{test_line}' \"{ds_to_write}\"") + + format_dict = dict(name=format) + if format == "terse": + format_dict["format_options"] = dict(terse_pack="SPACK") + archive_result = hosts.all.zos_archive( + src=data_set.get("name"), + dest=MVS_DEST_ARCHIVE, + format=format_dict, + remove=True, + ) + + # assert response is positive + for result in archive_result.contacted.values(): + assert result.get("changed") is True + assert result.get("dest") == MVS_DEST_ARCHIVE + assert data_set.get("name") in result.get("archived") + cmd_result = hosts.all.shell(cmd = "dls {0}.*".format(HLQ)) + for c_result in cmd_result.contacted.values(): + assert MVS_DEST_ARCHIVE in c_result.get("stdout") + assert data_set.get("name") not in c_result.get("stdout") + finally: + hosts.all.zos_data_set(name=data_set.get("name"), state="absent") + hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") + +@pytest.mark.parametrize( + "format", [ + "terse", + "xmit", + ]) +@pytest.mark.parametrize( + "data_set", [ + dict(name=TEST_PS, dstype="SEQ"), + dict(name=TEST_PDS, dstype="PDS"), + dict(name=TEST_PDS, dstype="PDSE"), + ] +) +def test_mvs_archive_multiple_data_sets(ansible_zos_module, format, data_set ): + try: + hosts = ansible_zos_module + + target_ds_list = create_multiple_data_sets(ansible_zos_module=hosts, + base_name=data_set.get("name"), + n=3, + type=data_set.get("dstype")) + ds_to_write = target_ds_list + if data_set.get("dstype") in ["PDS", "PDSE"]: + target_member_list = [] + for ds in target_ds_list: + target_member_list.extend( + create_multiple_members(ansible_zos_module=hosts, + pds_name=ds.get("name"), + member_base_name="MEM", + n=3 + ) + ) + ds_to_write = target_member_list + # Write some content into src + test_line = "this is a test line" + for ds in ds_to_write: + hosts.all.shell(cmd="decho '{0}' \"{1}\"".format(test_line, ds.get("name"))) + + format_dict = dict(name=format, format_options=dict()) + if format == "terse": + format_dict["format_options"].update(terse_pack="SPACK") + format_dict["format_options"].update(use_adrdssu=True) + archive_result = hosts.all.zos_archive( + src="{0}*".format(data_set.get("name")), + dest=MVS_DEST_ARCHIVE, + format=format_dict, + ) + + # assert response is positive + for result in archive_result.contacted.values(): + assert result.get("changed") is True + assert result.get("dest") == MVS_DEST_ARCHIVE + for ds in target_ds_list: + assert ds.get("name") in result.get("archived") + cmd_result = hosts.all.shell(cmd = "dls {0}.*".format(HLQ)) + for c_result in cmd_result.contacted.values(): + assert MVS_DEST_ARCHIVE in c_result.get("stdout") + finally: + hosts.all.shell(cmd="drm {0}*".format(data_set.get("name"))) + hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") + + +@pytest.mark.parametrize( + "format", [ + "terse", + "xmit", + ]) +@pytest.mark.parametrize( + "data_set", [ + dict(name=TEST_PS, dstype="SEQ"), + dict(name=TEST_PDS, dstype="PDS"), + dict(name=TEST_PDS, dstype="PDSE"), + ] +) +def test_mvs_archive_multiple_data_sets_with_exclusion(ansible_zos_module, format, data_set ): + try: + hosts = ansible_zos_module + + target_ds_list = create_multiple_data_sets(ansible_zos_module=hosts, + base_name=data_set.get("name"), + n=3, + type=data_set.get("dstype")) + ds_to_write = target_ds_list + if data_set.get("dstype") in ["PDS", "PDSE"]: + target_member_list = [] + for ds in target_ds_list: + target_member_list.extend( + create_multiple_members(ansible_zos_module=hosts, + pds_name=ds.get("name"), + member_base_name="MEM", + n=3 + ) + ) + ds_to_write = target_member_list + # Write some content into src + test_line = "this is a test line" + for ds in ds_to_write: + hosts.all.shell(cmd="decho '{0}' \"{1}\"".format(test_line, ds.get("name"))) + + format_dict = dict(name=format, format_options=dict()) + if format == "terse": + format_dict["format_options"].update(terse_pack="SPACK") + format_dict["format_options"].update(use_adrdssu=True) + exclude = "{0}1".format(data_set.get("name")) + archive_result = hosts.all.zos_archive( + src="{0}*".format(data_set.get("name")), + dest=MVS_DEST_ARCHIVE, + format=format_dict, + exclude=exclude, + ) + + # assert response is positive + for result in archive_result.contacted.values(): + assert result.get("changed") is True + assert result.get("dest") == MVS_DEST_ARCHIVE + for ds in target_ds_list: + if ds.get("name") == exclude: + assert exclude not in result.get("archived") + else: + assert ds.get("name") in result.get("archived") + cmd_result = hosts.all.shell(cmd = "dls {0}.*".format(HLQ)) + for c_result in cmd_result.contacted.values(): + assert MVS_DEST_ARCHIVE in c_result.get("stdout") + finally: + hosts.all.shell(cmd="drm {0}*".format(data_set.get("name"))) + hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") + + +@pytest.mark.parametrize( + "format", [ + "terse", + "xmit", + ]) +@pytest.mark.parametrize( + "data_set", [ + dict(name=TEST_PS, dstype="SEQ"), + dict(name=TEST_PDS, dstype="PDS"), + dict(name=TEST_PDS, dstype="PDSE"), + ] +) +def test_mvs_archive_multiple_data_sets_and_remove(ansible_zos_module, format, data_set ): + try: + hosts = ansible_zos_module + + target_ds_list = create_multiple_data_sets(ansible_zos_module=hosts, + base_name=data_set.get("name"), + n=3, + type=data_set.get("dstype")) + ds_to_write = target_ds_list + if data_set.get("dstype") in ["PDS", "PDSE"]: + target_member_list = [] + for ds in target_ds_list: + target_member_list.extend( + create_multiple_members(ansible_zos_module=hosts, + pds_name=ds.get("name"), + member_base_name="MEM", + n=3 + ) + ) + ds_to_write = target_member_list + # Write some content into src + test_line = "this is a test line" + for ds in ds_to_write: + hosts.all.shell(cmd="decho '{0}' \"{1}\"".format(test_line, ds.get("name"))) + + format_dict = dict(name=format, format_options=dict()) + if format == "terse": + format_dict["format_options"].update(terse_pack="SPACK") + format_dict["format_options"].update(use_adrdssu=True) + archive_result = hosts.all.zos_archive( + src="{0}*".format(data_set.get("name")), + dest=MVS_DEST_ARCHIVE, + format=format_dict, + remove=True, + ) + + # assert response is positive + for result in archive_result.contacted.values(): + assert result.get("changed") is True + assert result.get("dest") == MVS_DEST_ARCHIVE + cmd_result = hosts.all.shell(cmd = "dls {0}.*".format(HLQ)) + for c_result in cmd_result.contacted.values(): + assert MVS_DEST_ARCHIVE in c_result.get("stdout") + for ds in target_ds_list: + assert ds.get("name") in result.get("archived") + assert ds.get("name") not in c_result.get("stdout") + finally: + hosts.all.shell(cmd="drm {0}*".format(data_set.get("name"))) + hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") + + +@pytest.mark.parametrize( + "format", [ + "terse", + "xmit", + ]) +@pytest.mark.parametrize( + "data_set", [ + dict(name=TEST_PS, dstype="SEQ"), + dict(name=TEST_PDS, dstype="PDS"), + dict(name=TEST_PDS, dstype="PDSE"), + ] +) +def test_mvs_archive_multiple_data_sets_with_missing(ansible_zos_module, format, data_set ): + try: + hosts = ansible_zos_module + + target_ds_list = create_multiple_data_sets(ansible_zos_module=hosts, + base_name=data_set.get("name"), + n=3, + type=data_set.get("dstype")) + ds_to_write = target_ds_list + if data_set.get("dstype") in ["PDS", "PDSE"]: + target_member_list = [] + for ds in target_ds_list: + target_member_list.extend( + create_multiple_members(ansible_zos_module=hosts, + pds_name=ds.get("name"), + member_base_name="MEM", + n=3 + ) + ) + ds_to_write = target_member_list + # Write some content into src + test_line = "this is a test line" + for ds in ds_to_write: + hosts.all.shell(cmd="decho '{0}' \"{1}\"".format(test_line, ds.get("name"))) + + # Remove ds to make sure is missing + missing_ds = data_set.get("name")+"1" + hosts.all.zos_data_set(name=missing_ds, state="absent") + path_list = [ds.get("name") for ds in target_ds_list] + + format_dict = dict(name=format, format_options=dict()) + if format == "terse": + format_dict["format_options"].update(terse_pack="SPACK") + format_dict["format_options"].update(use_adrdssu=True) + archive_result = hosts.all.zos_archive( + src=path_list, + dest=MVS_DEST_ARCHIVE, + format=format_dict, + ) + + # assert response is positive + for result in archive_result.contacted.values(): + assert result.get("changed") is True + assert result.get("dest") == MVS_DEST_ARCHIVE + assert result.get("dest_state") == STATE_INCOMPLETE + assert missing_ds in result.get("missing") + for ds in target_ds_list: + if ds.get("name") == missing_ds: + assert ds.get("name") not in result.get("archived") + else: + assert ds.get("name") in result.get("archived") + cmd_result = hosts.all.shell(cmd = "dls {0}.*".format(HLQ)) + for c_result in cmd_result.contacted.values(): + assert MVS_DEST_ARCHIVE in c_result.get("stdout") + + finally: + hosts.all.shell(cmd="drm {0}*".format(data_set.get("name"))) + hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") + +@pytest.mark.parametrize( + "format", [ + "terse", + "xmit", + ]) +@pytest.mark.parametrize( + "data_set", [ + dict(name=TEST_PS, dstype="SEQ", members=[""]), + dict(name=TEST_PDS, dstype="PDS", members=["MEM1", "MEM2"]), + dict(name=TEST_PDS, dstype="PDSE", members=["MEM1", "MEM2"]), + ] +) +def test_mvs_archive_single_dataset_force_lock(ansible_zos_module, format, data_set,): + try: + hosts = ansible_zos_module + # Clean env + hosts.all.zos_data_set(name=data_set.get("name"), state="absent") + hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") + # Create source data set + hosts.all.zos_data_set( + name=data_set.get("name"), + type=data_set.get("dstype"), + state="present", + replace=True, + ) + # Create members if needed + if data_set.get("dstype") in ["PDS", "PDSE"]: + for member in data_set.get("members"): + hosts.all.zos_data_set( + name=f"{data_set.get('name')}({member})", + type="member", + state="present" + ) + # Write some content into src + test_line = "this is a test line" + for member in data_set.get("members"): + if member == "": + ds_to_write = f"{data_set.get('name')}" + else: + ds_to_write = f"{data_set.get('name')}({member})" + hosts.all.shell(cmd=f"decho '{test_line}' \"{ds_to_write}\"") + + format_dict = dict(name=format) + if format == "terse": + format_dict["format_options"] = dict(terse_pack="SPACK") + + # copy/compile c program and copy jcl to hold data set lock for n seconds in background(&) + hosts.all.zos_copy(content=c_pgm, dest='/tmp/disp_shr/pdse-lock.c', force=True) + hosts.all.zos_copy( + content=call_c_jcl.format(ds_to_write), + dest='/tmp/disp_shr/call_c_pgm.jcl', + force=True + ) + hosts.all.shell(cmd="xlc -o pdse-lock pdse-lock.c", chdir="/tmp/disp_shr/") + + # submit jcl + hosts.all.shell(cmd="submit call_c_pgm.jcl", chdir="/tmp/disp_shr/") + + # pause to ensure c code acquires lock + time.sleep(5) + + archive_result = hosts.all.zos_archive( + src=data_set.get("name"), + dest=MVS_DEST_ARCHIVE, + format=format_dict, + ) + + # assert response is positive + for result in archive_result.contacted.values(): + assert result.get("changed") is True + assert result.get("dest") == MVS_DEST_ARCHIVE + assert data_set.get("name") in result.get("archived") + cmd_result = hosts.all.shell(cmd = "dls {0}.*".format(HLQ)) + for c_result in cmd_result.contacted.values(): + assert MVS_DEST_ARCHIVE in c_result.get("stdout") + + finally: + # extract pid + ps_list_res = hosts.all.shell(cmd="ps -e | grep -i 'pdse-lock'") + + # kill process - release lock - this also seems to end the job + pid = list(ps_list_res.contacted.values())[0].get('stdout').strip().split(' ')[0] + hosts.all.shell(cmd="kill 9 {0}".format(pid.strip())) + # clean up c code/object/executable files, jcl + hosts.all.shell(cmd='rm -r /tmp/disp_shr') + hosts.all.zos_data_set(name=data_set.get("name"), state="absent") + hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") diff --git a/tests/functional/modules/test_zos_unarchive_func.py b/tests/functional/modules/test_zos_unarchive_func.py new file mode 100644 index 000000000..a4bf5e007 --- /dev/null +++ b/tests/functional/modules/test_zos_unarchive_func.py @@ -0,0 +1,988 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- + +# Copyright (c) IBM Corporation 2020, 2022 +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from __future__ import absolute_import, division, print_function + +import pytest +import tempfile +from tempfile import mkstemp + +__metaclass__ = type + +SHELL_EXECUTABLE = "/bin/sh" +USS_TEMP_DIR = "/tmp/archive" +USS_TEST_FILES = { f"{USS_TEMP_DIR}/foo.txt" : "foo sample content", + f"{USS_TEMP_DIR}/bar.txt": "bar sample content", + f"{USS_TEMP_DIR}/empty.txt":""} +USS_EXCLUSION_FILE = f"{USS_TEMP_DIR}/foo.txt" +TEST_PS = "USER.PRIVATE.TESTDS" +TEST_PDS = "USER.PRIVATE.TESTPDS" +HLQ = "USER" +MVS_DEST_ARCHIVE = "USER.PRIVATE.ARCHIVE" + +USS_DEST_ARCHIVE = "testarchive.dzp" + +USS_FORMATS = ['tar', 'gz', 'bz2', 'zip', 'pax'] + +def set_uss_test_env(ansible_zos_module, test_files): + for key, value in test_files.items(): + ansible_zos_module.all.shell( + cmd=f"echo \"{value}\" > \"{key}\"", + executable=SHELL_EXECUTABLE, + ) + + +def create_multiple_data_sets(ansible_zos_module, base_name, n, type, ): + test_data_sets = [] + for i in range(n): + curr_ds = dict(name=base_name+str(i), + type=type, + state="present", + replace=True, + force=True) + test_data_sets.append(curr_ds) + + # Create data sets in batch + ansible_zos_module.all.zos_data_set( + batch=test_data_sets + ) + return test_data_sets + + +def create_multiple_members(ansible_zos_module, pds_name, member_base_name, n): + test_members = [] + for i in range(n): + curr_ds = dict(name="{0}({1})".format(pds_name, member_base_name+str(i)), + type="member", + state="present", + replace=True, + force=True) + test_members.append(curr_ds) + ansible_zos_module.all.zos_data_set( + batch=test_members + ) + return test_members + + +###################################################### +# +# USS TEST +# +###################################################### +""" +List of tests: +- test_uss_unarchive +- test_uss_unarchive_include +- test_uss_unarchive_exclude +- test_uss_unarchive_list +""" + + +# Core functionality tests +# Test unarchive with no options +@pytest.mark.uss +@pytest.mark.parametrize("format", USS_FORMATS) +def test_uss_unarchive(ansible_zos_module, format): + try: + hosts = ansible_zos_module + hosts.all.file(path=f"{USS_TEMP_DIR}", state="absent") + hosts.all.file(path=USS_TEMP_DIR, state="directory") + set_uss_test_env(hosts, USS_TEST_FILES) + dest = f"{USS_TEMP_DIR}/archive.{format}" + archive_result = hosts.all.zos_archive(src=list(USS_TEST_FILES.keys()), + dest=dest, + format=dict( + name=format + )) + # remove files + for file in USS_TEST_FILES.keys(): + hosts.all.file(path=file, state="absent") + unarchive_result = hosts.all.zos_unarchive( + src=dest, + format=dict( + name=format + ), + remote_src=True, + ) + hosts.all.shell(cmd=f"ls {USS_TEMP_DIR}") + + for result in unarchive_result.contacted.values(): + assert result.get("failed", False) is False + assert result.get("changed") is True + # Command to assert the file is in place + cmd_result = hosts.all.shell(cmd=f"ls {USS_TEMP_DIR}") + for c_result in cmd_result.contacted.values(): + for file in USS_TEST_FILES.keys(): + assert file[len(USS_TEMP_DIR)+1:] in c_result.get("stdout") + finally: + hosts.all.file(path=f"{USS_TEMP_DIR}", state="absent") + +@pytest.mark.uss +@pytest.mark.parametrize("format", USS_FORMATS) +def test_uss_unarchive_include(ansible_zos_module, format): + try: + hosts = ansible_zos_module + hosts.all.file(path=f"{USS_TEMP_DIR}", state="absent") + hosts.all.file(path=USS_TEMP_DIR, state="directory") + set_uss_test_env(hosts, USS_TEST_FILES) + dest = f"{USS_TEMP_DIR}/archive.{format}" + archive_result = hosts.all.zos_archive(src=list(USS_TEST_FILES.keys()), + dest=dest, + format=dict( + name=format + )) + uss_files = [file[len(USS_TEMP_DIR)+1:] for file in USS_TEST_FILES] + include_list = uss_files[:2] + # remove files + for file in USS_TEST_FILES.keys(): + hosts.all.file(path=file, state="absent") + unarchive_result = hosts.all.zos_unarchive( + src=dest, + format=dict( + name=format + ), + include=include_list, + remote_src=True, + ) + + for result in unarchive_result.contacted.values(): + assert result.get("failed", False) is False + assert result.get("changed") is True + # Command to assert the file is in place + cmd_result = hosts.all.shell(cmd=f"ls {USS_TEMP_DIR}") + for c_result in cmd_result.contacted.values(): + for file in uss_files: + if file in include_list: + assert file in c_result.get("stdout") + else: + assert file not in c_result.get("stdout") + finally: + hosts.all.file(path=f"{USS_TEMP_DIR}", state="absent") + +@pytest.mark.uss +@pytest.mark.parametrize("format", USS_FORMATS) +def test_uss_unarchive_exclude(ansible_zos_module, format): + try: + hosts = ansible_zos_module + hosts.all.file(path=f"{USS_TEMP_DIR}", state="absent") + hosts.all.file(path=USS_TEMP_DIR, state="directory") + set_uss_test_env(hosts, USS_TEST_FILES) + dest = f"{USS_TEMP_DIR}/archive.{format}" + archive_result = hosts.all.zos_archive(src=list(USS_TEST_FILES.keys()), + dest=dest, + format=dict( + name=format + )) + # remove files + uss_files = [file[len(USS_TEMP_DIR)+1:] for file in USS_TEST_FILES] + exclude_list = uss_files[:2] + for file in USS_TEST_FILES.keys(): + hosts.all.file(path=file, state="absent") + unarchive_result = hosts.all.zos_unarchive( + src=dest, + format=dict( + name=format + ), + exclude=exclude_list, + remote_src=True, + ) + + for result in unarchive_result.contacted.values(): + assert result.get("failed", False) is False + # Command to assert the file is in place + cmd_result = hosts.all.shell(cmd=f"ls {USS_TEMP_DIR}") + for c_result in cmd_result.contacted.values(): + for file in uss_files: + if file in exclude_list: + assert file not in c_result.get("stdout") + else: + assert file in c_result.get("stdout") + finally: + hosts.all.file(path=f"{USS_TEMP_DIR}", state="absent") + +@pytest.mark.uss +@pytest.mark.parametrize("format", USS_FORMATS) +def test_uss_unarchive_list(ansible_zos_module, format): + try: + hosts = ansible_zos_module + hosts.all.file(path=f"{USS_TEMP_DIR}", state="absent") + hosts.all.file(path=USS_TEMP_DIR, state="directory") + set_uss_test_env(hosts, USS_TEST_FILES) + dest = f"{USS_TEMP_DIR}/archive.{format}" + archive_result = hosts.all.zos_archive(src=list(USS_TEST_FILES.keys()), + dest=dest, + format=dict( + name=format + )) + # remove files + for file in USS_TEST_FILES.keys(): + hosts.all.file(path=file, state="absent") + unarchive_result = hosts.all.zos_unarchive( + src=dest, + format=dict( + name=format + ), + remote_src=True, + ) + + for result in unarchive_result.contacted.values(): + assert result.get("failed", False) is False + assert result.get("changed") is True + for file in USS_TEST_FILES.keys(): + assert file[len(USS_TEMP_DIR)+1:] in result.get("targets") + finally: + hosts.all.file(path=f"{USS_TEMP_DIR}", state="absent") + + +@pytest.mark.uss +@pytest.mark.parametrize("format", USS_FORMATS) +def test_uss_single_archive_with_mode(ansible_zos_module, format): + try: + hosts = ansible_zos_module + hosts.all.file(path=f"{USS_TEMP_DIR}", state="absent") + hosts.all.file(path=USS_TEMP_DIR, state="directory") + set_uss_test_env(hosts, USS_TEST_FILES) + dest = f"{USS_TEMP_DIR}/archive.{format}" + dest_mode = "0755" + archive_result = hosts.all.zos_archive(src=list(USS_TEST_FILES.keys()), + dest=dest, + format=dict( + name=format + )) + for file in list(USS_TEST_FILES.keys()): + hosts.all.file(path=file, state="absent") + unarchive_result = hosts.all.zos_unarchive( + src=dest, + format=dict( + name=format + ), + remote_src=True, + mode=dest_mode, + ) + for result in unarchive_result.contacted.values(): + assert result.get("failed", False) is False + assert result.get("changed") is True + dest_files = list(USS_TEST_FILES.keys()) + for file in dest_files: + stat_dest_res = hosts.all.stat(path=file) + for stat_result in stat_dest_res.contacted.values(): + assert stat_result.get("stat").get("exists") is True + assert stat_result.get("stat").get("mode") == dest_mode + finally: + hosts.all.file(path=f"{USS_TEMP_DIR}", state="absent") + + +###################################################################### +# +# MVS data sets tests +# +###################################################################### + +""" +List of tests: +- test_mvs_unarchive_single_data_set +- test_mvs_unarchive_single_data_set_use_adrdssu +- test_mvs_unarchive_multiple_data_sets_use_adrdssu +- test_mvs_unarchive_multiple_data_sets_include +- test_mvs_unarchive_multiple_data_sets_exclude +- test_mvs_unarchive_list +- test_mvs_unarchive_force +- test_mvs_unarchive_remote_src + +""" + + +@pytest.mark.parametrize( + "format", [ + "terse", + "xmit", + ]) +@pytest.mark.parametrize( + "data_set", [ + dict(name=TEST_PS, dstype="SEQ", members=[""]), + dict(name=TEST_PDS, dstype="PDS", members=["MEM1", "MEM2"]), + dict(name=TEST_PDS, dstype="PDSE", members=["MEM1", "MEM2"]), + ] +) +@pytest.mark.parametrize( + "record_length", [80, 120] +) +@pytest.mark.parametrize( + "record_format", ["FB", "VB",], +) +def test_mvs_unarchive_single_data_set(ansible_zos_module, format, data_set, record_length, record_format): + try: + hosts = ansible_zos_module + # Clean env + hosts.all.zos_data_set(name=data_set.get("name"), state="absent") + hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") + # Create source data set + hosts.all.zos_data_set( + name=data_set.get("name"), + type=data_set.get("dstype"), + state="present", + record_length=record_length, + record_format=record_format, + ) + # Create members if needed + if data_set.get("dstype") in ["PDS", "PDSE"]: + for member in data_set.get("members"): + hosts.all.zos_data_set( + name=f"{data_set.get('name')}({member})", + type="member", + state="present" + ) + # Write some content into src + test_line = "this is a test line" + for member in data_set.get("members"): + if member == "": + ds_to_write = f"{data_set.get('name')}" + else: + ds_to_write = f"{data_set.get('name')}({member})" + hosts.all.shell(cmd=f"decho '{test_line}' \"{ds_to_write}\"") + + format_dict = dict(name=format) + if format == "terse": + format_dict["format_options"] = dict(terse_pack="SPACK") + archive_result = hosts.all.zos_archive( + src=data_set.get("name"), + dest=MVS_DEST_ARCHIVE, + format=format_dict, + dest_data_set=dict(name=data_set.get("name"), + type="SEQ", + record_format=record_format, + record_length=record_length), + ) + # assert response is positive + for result in archive_result.contacted.values(): + assert result.get("changed") is True + assert result.get("dest") == MVS_DEST_ARCHIVE + assert data_set.get("name") in result.get("archived") + cmd_result = hosts.all.shell(cmd = "dls {0}.*".format(HLQ)) + for c_result in cmd_result.contacted.values(): + assert MVS_DEST_ARCHIVE in c_result.get("stdout") + + hosts.all.zos_data_set(name=data_set.get("name"), state="absent") + + if format == "terse": + del format_dict["format_options"]["terse_pack"] + # Unarchive action + unarchive_result = hosts.all.zos_unarchive( + src=MVS_DEST_ARCHIVE, + format=format_dict, + remote_src=True, + dest_data_set=dict(name=data_set.get("name"), + type=data_set.get("dstype"), + record_format=record_format, + record_length=record_length), + ) + # assert response is positive + for result in unarchive_result.contacted.values(): + assert result.get("changed") is True + assert result.get("failed", False) is False + # assert result.get("dest") == MVS_DEST_ARCHIVE + # assert data_set.get("name") in result.get("archived") + cmd_result = hosts.all.shell(cmd = "dls {0}.*".format(HLQ)) + for c_result in cmd_result.contacted.values(): + assert data_set.get("name") in c_result.get("stdout") + finally: + hosts.all.zos_data_set(name=data_set.get("name"), state="absent") + hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") + + +@pytest.mark.parametrize( + "format", [ + "terse", + "xmit", + ]) +@pytest.mark.parametrize( + "data_set", [ + dict(name=TEST_PS, dstype="SEQ", members=[""]), + dict(name=TEST_PDS, dstype="PDS", members=["MEM1", "MEM2"]), + dict(name=TEST_PDS, dstype="PDSE", members=["MEM1", "MEM2"]), + ] +) +@pytest.mark.parametrize( + "record_length", [80, 120, 1024] +) +@pytest.mark.parametrize( + "record_format", ["FB", "VB",], +) +def test_mvs_unarchive_single_data_set_use_adrdssu(ansible_zos_module, format, data_set, record_length, record_format): + try: + hosts = ansible_zos_module + # Clean env + hosts.all.zos_data_set(name=data_set.get("name"), state="absent") + hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") + # Create source data set + hosts.all.zos_data_set( + name=data_set.get("name"), + type=data_set.get("dstype"), + state="present", + record_length=record_length, + record_format=record_format, + ) + # Create members if needed + if data_set.get("dstype") in ["PDS", "PDSE"]: + for member in data_set.get("members"): + hosts.all.zos_data_set( + name=f"{data_set.get('name')}({member})", + type="member", + state="present" + ) + # Write some content into src + test_line = "this is a test line" + for member in data_set.get("members"): + if member == "": + ds_to_write = f"{data_set.get('name')}" + else: + ds_to_write = f"{data_set.get('name')}({member})" + hosts.all.shell(cmd=f"decho '{test_line}' \"{ds_to_write}\"") + + format_dict = dict(name=format) + format_dict["format_options"] = dict(use_adrdssu=True) + if format == "terse": + format_dict["format_options"].update(terse_pack="SPACK") + archive_result = hosts.all.zos_archive( + src=data_set.get("name"), + dest=MVS_DEST_ARCHIVE, + format=format_dict, + ) + # assert response is positive + for result in archive_result.contacted.values(): + assert result.get("changed") is True + assert result.get("dest") == MVS_DEST_ARCHIVE + assert data_set.get("name") in result.get("archived") + cmd_result = hosts.all.shell(cmd = "dls {0}.*".format(HLQ)) + for c_result in cmd_result.contacted.values(): + assert MVS_DEST_ARCHIVE in c_result.get("stdout") + + hosts.all.zos_data_set(name=data_set.get("name"), state="absent") + + if format == "terse": + del format_dict["format_options"]["terse_pack"] + # Unarchive action + unarchive_result = hosts.all.zos_unarchive( + src=MVS_DEST_ARCHIVE, + format=format_dict, + remote_src=True + ) + + # assert response is positive + for result in unarchive_result.contacted.values(): + assert result.get("changed") is True + assert result.get("failed", False) is False + # assert result.get("dest") == MVS_DEST_ARCHIVE + # assert data_set.get("name") in result.get("archived") + cmd_result = hosts.all.shell(cmd = "dls {0}.*".format(HLQ)) + for c_result in cmd_result.contacted.values(): + assert data_set.get("name") in c_result.get("stdout") + finally: + hosts.all.zos_data_set(name=data_set.get("name"), state="absent") + hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") + + +@pytest.mark.parametrize( + "format", [ + "terse", + "xmit", + ]) +@pytest.mark.parametrize( + "data_set", [ + dict(name=TEST_PS, dstype="SEQ"), + dict(name=TEST_PDS, dstype="PDS"), + dict(name=TEST_PDS, dstype="PDSE"), + ] +) +def test_mvs_unarchive_multiple_data_set_use_adrdssu(ansible_zos_module, format, data_set): + try: + hosts = ansible_zos_module + target_ds_list = create_multiple_data_sets(ansible_zos_module=hosts, + base_name=data_set.get("name"), + n=1, + type=data_set.get("dstype")) + ds_to_write = target_ds_list + if data_set.get("dstype") in ["PDS", "PDSE"]: + target_member_list = [] + for ds in target_ds_list: + target_member_list.extend( + create_multiple_members(ansible_zos_module=hosts, + pds_name=ds.get("name"), + member_base_name="MEM", + n=3 + ) + ) + ds_to_write = target_member_list + # Write some content into src + test_line = "this is a test line" + for ds in ds_to_write: + hosts.all.shell(cmd="decho '{0}' \"{1}\"".format(test_line, ds.get("name"))) + + format_dict = dict(name=format, format_options=dict()) + if format == "terse": + format_dict["format_options"].update(terse_pack="SPACK") + format_dict["format_options"].update(use_adrdssu=True) + archive_result = hosts.all.zos_archive( + src="{0}*".format(data_set.get("name")), + dest=MVS_DEST_ARCHIVE, + format=format_dict, + ) + + # remote data_sets from host + hosts.all.shell(cmd="drm {0}*".format(data_set.get("name"))) + + if format == "terse": + del format_dict["format_options"]["terse_pack"] + # Unarchive action + unarchive_result = hosts.all.zos_unarchive( + src=MVS_DEST_ARCHIVE, + format=format_dict, + remote_src=True, + force=True + ) + # assert response is positive + for result in unarchive_result.contacted.values(): + assert result.get("changed") is True + assert result.get("failed", False) is False + assert result.get("src") == MVS_DEST_ARCHIVE + + cmd_result = hosts.all.shell(cmd="dls {0}.*".format(HLQ)) + for c_result in cmd_result.contacted.values(): + for target_ds in target_ds_list: + assert target_ds.get("name") in result.get("targets") + assert target_ds.get("name") in c_result.get("stdout") + finally: + hosts.all.shell(cmd="drm {0}*".format(data_set.get("name"))) + hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") + + +@pytest.mark.parametrize( + "format", [ + "terse", + "xmit", + ]) +@pytest.mark.parametrize( + "data_set", [ + dict(name=TEST_PS, dstype="SEQ"), + dict(name=TEST_PDS, dstype="PDS"), + dict(name=TEST_PDS, dstype="PDSE"), + ] +) +def test_mvs_unarchive_multiple_data_set_use_adrdssu_include(ansible_zos_module, format, data_set): + try: + hosts = ansible_zos_module + target_ds_list = create_multiple_data_sets(ansible_zos_module=hosts, + base_name=data_set.get("name"), + n=2, + type=data_set.get("dstype")) + ds_to_write = target_ds_list + if data_set.get("dstype") in ["PDS", "PDSE"]: + target_member_list = [] + for ds in target_ds_list: + target_member_list.extend( + create_multiple_members(ansible_zos_module=hosts, + pds_name=ds.get("name"), + member_base_name="MEM", + n=3 + ) + ) + ds_to_write = target_member_list + # Write some content into src + test_line = "this is a test line" + for ds in ds_to_write: + hosts.all.shell(cmd="decho '{0}' \"{1}\"".format(test_line, ds.get("name"))) + + format_dict = dict(name=format, format_options=dict()) + if format == "terse": + format_dict["format_options"].update(terse_pack="SPACK") + format_dict["format_options"].update(use_adrdssu=True) + archive_result = hosts.all.zos_archive( + src="{0}*".format(data_set.get("name")), + dest=MVS_DEST_ARCHIVE, + format=format_dict, + ) + for result in archive_result.contacted.values(): + assert result.get("changed") is True + assert result.get("failed", False) is False + + # remote data_sets from host + hosts.all.shell(cmd="drm {0}*".format(data_set.get("name"))) + + if format == "terse": + del format_dict["format_options"]["terse_pack"] + # Unarchive action + include_ds = "{0}0".format(data_set.get("name")) + unarchive_result = hosts.all.zos_unarchive( + src=MVS_DEST_ARCHIVE, + format=format_dict, + remote_src=True, + include=[include_ds], + ) + + # assert response is positive + for result in unarchive_result.contacted.values(): + assert result.get("changed") is True + assert result.get("failed", False) is False + assert result.get("src") == MVS_DEST_ARCHIVE + + cmd_result = hosts.all.shell(cmd="dls {0}.*".format(HLQ)) + for c_result in cmd_result.contacted.values(): + for target_ds in target_ds_list: + if target_ds.get("name") == include_ds: + assert target_ds.get("name") in result.get("targets") + assert target_ds.get("name") in c_result.get("stdout") + else: + assert target_ds.get("name") not in result.get("targets") + assert target_ds.get("name") not in c_result.get("stdout") + finally: + hosts.all.shell(cmd="drm {0}*".format(data_set.get("name"))) + hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") + + +@pytest.mark.parametrize( + "format", [ + "terse", + "xmit", + ]) +@pytest.mark.parametrize( + "data_set", [ + dict(name=TEST_PS, dstype="SEQ"), + dict(name=TEST_PDS, dstype="PDS"), + dict(name=TEST_PDS, dstype="PDSE"), + ] +) +def test_mvs_unarchive_multiple_data_set_use_adrdssu_exclude(ansible_zos_module, format, data_set): + try: + hosts = ansible_zos_module + target_ds_list = create_multiple_data_sets(ansible_zos_module=hosts, + base_name=data_set.get("name"), + n=2, + type=data_set.get("dstype")) + ds_to_write = target_ds_list + if data_set.get("dstype") in ["PDS", "PDSE"]: + target_member_list = [] + for ds in target_ds_list: + target_member_list.extend( + create_multiple_members(ansible_zos_module=hosts, + pds_name=ds.get("name"), + member_base_name="MEM", + n=3 + ) + ) + ds_to_write = target_member_list + # Write some content into src + test_line = "this is a test line" + for ds in ds_to_write: + hosts.all.shell(cmd="decho '{0}' \"{1}\"".format(test_line, ds.get("name"))) + + format_dict = dict(name=format, format_options=dict()) + if format == "terse": + format_dict["format_options"].update(terse_pack="SPACK") + format_dict["format_options"].update(use_adrdssu=True) + archive_result = hosts.all.zos_archive( + src="{0}*".format(data_set.get("name")), + dest=MVS_DEST_ARCHIVE, + format=format_dict, + ) + + # remote data_sets from host + hosts.all.shell(cmd="drm {0}*".format(data_set.get("name"))) + + if format == "terse": + del format_dict["format_options"]["terse_pack"] + # Unarchive action + exclude_ds = "{0}0".format(data_set.get("name")) + unarchive_result = hosts.all.zos_unarchive( + src=MVS_DEST_ARCHIVE, + format=format_dict, + remote_src=True, + exclude=[exclude_ds], + ) + # assert response is positive + for result in unarchive_result.contacted.values(): + assert result.get("changed") is True + assert result.get("failed", False) is False + assert result.get("src") == MVS_DEST_ARCHIVE + + cmd_result = hosts.all.shell(cmd="dls {0}.*".format(HLQ)) + for c_result in cmd_result.contacted.values(): + for target_ds in target_ds_list: + if target_ds.get("name") == exclude_ds: + assert target_ds.get("name") not in result.get("targets") + assert target_ds.get("name") not in c_result.get("stdout") + else: + assert target_ds.get("name") in result.get("targets") + assert target_ds.get("name") in c_result.get("stdout") + finally: + hosts.all.shell(cmd="drm {0}*".format(data_set.get("name"))) + hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") + + +@pytest.mark.parametrize( + "format", [ + "terse", + "xmit", + ]) +@pytest.mark.parametrize( + "data_set", [ + dict(name=TEST_PS, dstype="SEQ"), + dict(name=TEST_PDS, dstype="PDS"), + dict(name=TEST_PDS, dstype="PDSE"), + ] +) +def test_mvs_unarchive_multiple_data_set_list(ansible_zos_module, format, data_set): + try: + hosts = ansible_zos_module + target_ds_list = create_multiple_data_sets(ansible_zos_module=hosts, + base_name=data_set.get("name"), + n=2, + type=data_set.get("dstype")) + ds_to_write = target_ds_list + if data_set.get("dstype") in ["PDS", "PDSE"]: + target_member_list = [] + for ds in target_ds_list: + target_member_list.extend( + create_multiple_members(ansible_zos_module=hosts, + pds_name=ds.get("name"), + member_base_name="MEM", + n=3 + ) + ) + ds_to_write = target_member_list + # Write some content into src + test_line = "this is a test line" + for ds in ds_to_write: + hosts.all.shell(cmd="decho '{0}' \"{1}\"".format(test_line, ds.get("name"))) + + format_dict = dict(name=format, format_options=dict()) + if format == "terse": + format_dict["format_options"].update(terse_pack="SPACK") + format_dict["format_options"].update(use_adrdssu=True) + archive_result = hosts.all.zos_archive( + src="{0}*".format(data_set.get("name")), + dest=MVS_DEST_ARCHIVE, + format=format_dict, + ) + + # remote data_sets from host + hosts.all.shell(cmd="drm {0}*".format(data_set.get("name"))) + + if format == "terse": + del format_dict["format_options"]["terse_pack"] + # Unarchive action + unarchive_result = hosts.all.zos_unarchive( + src=MVS_DEST_ARCHIVE, + format=format_dict, + remote_src=True, + list=True + ) + # assert response is positive + for result in unarchive_result.contacted.values(): + assert result.get("changed") is False + assert result.get("failed", False) is False + assert result.get("src") == MVS_DEST_ARCHIVE + + cmd_result = hosts.all.shell(cmd="dls {0}.*".format(HLQ)) + for c_result in cmd_result.contacted.values(): + for target_ds in target_ds_list: + assert target_ds.get("name") in result.get("targets") + assert target_ds.get("name") not in c_result.get("stdout") + finally: + hosts.all.shell(cmd="drm {0}*".format(data_set.get("name"))) + hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") + + +@pytest.mark.parametrize( + "format", [ + "terse", + "xmit", + ]) +@pytest.mark.parametrize( + "data_set", [ + dict(name=TEST_PS, dstype="SEQ"), + dict(name=TEST_PDS, dstype="PDS"), + dict(name=TEST_PDS, dstype="PDSE"), + ] +) +@pytest.mark.parametrize( + "force", [ + True, + False, + ]) +def test_mvs_unarchive_multiple_data_set_use_adrdssu_force(ansible_zos_module, format, data_set, force): + """ + This force test creates some data sets and attempt to extract using force flag as + True and False, when True no issues are expected, as False proper error message should + be displayed. + """ + try: + hosts = ansible_zos_module + target_ds_list = create_multiple_data_sets(ansible_zos_module=hosts, + base_name=data_set.get("name"), + n=1, + type=data_set.get("dstype")) + ds_to_write = target_ds_list + if data_set.get("dstype") in ["PDS", "PDSE"]: + target_member_list = [] + for ds in target_ds_list: + target_member_list.extend( + create_multiple_members(ansible_zos_module=hosts, + pds_name=ds.get("name"), + member_base_name="MEM", + n=3 + ) + ) + ds_to_write = target_member_list + # Write some content into src + test_line = "this is a test line" + for ds in ds_to_write: + hosts.all.shell(cmd="decho '{0}' \"{1}\"".format(test_line, ds.get("name"))) + + format_dict = dict(name=format, format_options=dict()) + if format == "terse": + format_dict["format_options"].update(terse_pack="SPACK") + format_dict["format_options"].update(use_adrdssu=True) + hosts.all.zos_archive( + src="{0}*".format(data_set.get("name")), + dest=MVS_DEST_ARCHIVE, + format=format_dict, + ) + + if format == "terse": + del format_dict["format_options"]["terse_pack"] + # Unarchive action + unarchive_result = hosts.all.zos_unarchive( + src=MVS_DEST_ARCHIVE, + format=format_dict, + remote_src=True, + force=force + ) + # assert response is positive + for result in unarchive_result.contacted.values(): + if force: + assert result.get("changed") is True + assert result.get("failed", False) is False + assert result.get("src") == MVS_DEST_ARCHIVE + + cmd_result = hosts.all.shell(cmd="dls {0}.*".format(HLQ)) + for c_result in cmd_result.contacted.values(): + for target_ds in target_ds_list: + assert target_ds.get("name") in result.get("targets") + assert target_ds.get("name") in c_result.get("stdout") + else: + assert result.get("changed") is False + assert result.get("failed", False) is True + finally: + hosts.all.shell(cmd="drm {0}*".format(data_set.get("name"))) + hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") + + +@pytest.mark.parametrize( + "format", [ + "terse", + "xmit", + ]) +@pytest.mark.parametrize( + "data_set", [ + dict(name=TEST_PS, dstype="SEQ", members=[""]), + dict(name=TEST_PDS, dstype="PDS", members=["MEM1", "MEM2"]), + dict(name=TEST_PDS, dstype="PDSE", members=["MEM1", "MEM2"]), + ] +) +@pytest.mark.parametrize( + "record_length", [80, 120, 1024] +) +@pytest.mark.parametrize( + "record_format", ["FB", "VB",], +) +def test_mvs_unarchive_single_data_set_remote_src(ansible_zos_module, format, data_set, record_length, record_format): + try: + hosts = ansible_zos_module + tmp_folder = tempfile.TemporaryDirectory(prefix="tmpfetch") + # Clean env + hosts.all.zos_data_set(name=data_set.get("name"), state="absent") + hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") + # Create source data set + hosts.all.zos_data_set( + name=data_set.get("name"), + type=data_set.get("dstype"), + state="present", + record_length=record_length, + record_format=record_format, + ) + # Create members if needed + if data_set.get("dstype") in ["PDS", "PDSE"]: + for member in data_set.get("members"): + hosts.all.zos_data_set( + name=f"{data_set.get('name')}({member})", + type="member", + state="present" + ) + # Write some content into src + test_line = "this is a test line" + for member in data_set.get("members"): + if member == "": + ds_to_write = f"{data_set.get('name')}" + else: + ds_to_write = f"{data_set.get('name')}({member})" + hosts.all.shell(cmd=f"decho '{test_line}' \"{ds_to_write}\"") + + format_dict = dict(name=format) + format_dict["format_options"] = dict(use_adrdssu=True) + if format == "terse": + format_dict["format_options"].update(terse_pack="SPACK") + archive_result = hosts.all.zos_archive( + src=data_set.get("name"), + dest=MVS_DEST_ARCHIVE, + format=format_dict, + ) + for result in archive_result.contacted.values(): + assert result.get("changed") is True + assert result.get("dest") == MVS_DEST_ARCHIVE + assert data_set.get("name") in result.get("archived") + cmd_result = hosts.all.shell(cmd = "dls {0}.*".format(HLQ)) + for c_result in cmd_result.contacted.values(): + assert MVS_DEST_ARCHIVE in c_result.get("stdout") + + hosts.all.zos_data_set(name=data_set.get("name"), state="absent") + + # fetch archive data set into tmp folder + fetch_result = hosts.all.zos_fetch(src=MVS_DEST_ARCHIVE, dest=tmp_folder.name, is_binary=True) + + for res in fetch_result.contacted.values(): + source_path = res.get("dest") + + if format == "terse": + del format_dict["format_options"]["terse_pack"] + # Unarchive action + unarchive_result = hosts.all.zos_unarchive( + src=source_path, + format=format_dict, + remote_src=False, + ) + + for result in unarchive_result.contacted.values(): + assert result.get("changed") is True + assert result.get("failed", False) is False + # assert result.get("dest") == MVS_DEST_ARCHIVE + # assert data_set.get("name") in result.get("archived") + cmd_result = hosts.all.shell(cmd = "dls {0}.*".format(HLQ)) + for c_result in cmd_result.contacted.values(): + assert data_set.get("name") in c_result.get("stdout") + finally: + hosts.all.shell(cmd="drm {0}*".format(data_set.get("name"))) + hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") + tmp_folder.cleanup() + diff --git a/tests/sanity/ignore-2.13.txt b/tests/sanity/ignore-2.13.txt index 8b4540038..a496e3ac8 100644 --- a/tests/sanity/ignore-2.13.txt +++ b/tests/sanity/ignore-2.13.txt @@ -33,3 +33,5 @@ plugins/modules/zos_ping.rexx validate-modules:python-syntax-error # Opened issu plugins/modules/zos_tso_command.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_gather_facts.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_volume_init.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_archive.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_unarchive.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 diff --git a/tests/sanity/ignore-2.14.txt b/tests/sanity/ignore-2.14.txt index 8b4540038..a496e3ac8 100644 --- a/tests/sanity/ignore-2.14.txt +++ b/tests/sanity/ignore-2.14.txt @@ -33,3 +33,5 @@ plugins/modules/zos_ping.rexx validate-modules:python-syntax-error # Opened issu plugins/modules/zos_tso_command.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_gather_facts.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_volume_init.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_archive.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_unarchive.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 diff --git a/tests/sanity/ignore-2.15.txt b/tests/sanity/ignore-2.15.txt index 8b4540038..a496e3ac8 100644 --- a/tests/sanity/ignore-2.15.txt +++ b/tests/sanity/ignore-2.15.txt @@ -33,3 +33,5 @@ plugins/modules/zos_ping.rexx validate-modules:python-syntax-error # Opened issu plugins/modules/zos_tso_command.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_gather_facts.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_volume_init.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_archive.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_unarchive.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 From 6d79e8952f70a3991f15a2a56eda1937390beb93 Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Tue, 18 Jul 2023 15:58:50 -0400 Subject: [PATCH 141/495] Removed emergency backup and auto-recovery features. (#896) * Removed emergency backup and auto-recovery features. Initial changelog fragment. * corrected changelog with PR # * Update changelog fragement Signed-off-by: ddimatos <dimatos@gmail.com> * removed 2 unused routines: restore_backup and erase_backup. --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: ddimatos <dimatos@gmail.com> --- .../696-zos-copy-remove-emergency-backup.yml | 6 + plugins/modules/zos_copy.py | 151 ------------------ 2 files changed, 6 insertions(+), 151 deletions(-) create mode 100644 changelogs/fragments/696-zos-copy-remove-emergency-backup.yml diff --git a/changelogs/fragments/696-zos-copy-remove-emergency-backup.yml b/changelogs/fragments/696-zos-copy-remove-emergency-backup.yml new file mode 100644 index 000000000..b86a18d82 --- /dev/null +++ b/changelogs/fragments/696-zos-copy-remove-emergency-backup.yml @@ -0,0 +1,6 @@ +enhancements: +- zos_copy - Previously, backups were taken when force was set to false; + whether or not a user specified this operation which caused allocation issues + with space and permissions. This removes the automatic backup performed and + reverts to the original logic in that backups must be initiated by the user. + (https://github.com/ansible-collections/ibm_zos_core/pull/896) diff --git a/plugins/modules/zos_copy.py b/plugins/modules/zos_copy.py index 0998f2a0e..02f71ab21 100644 --- a/plugins/modules/zos_copy.py +++ b/plugins/modules/zos_copy.py @@ -1704,123 +1704,6 @@ def backup_data(ds_name, ds_type, backup_name, tmphlq=None): ) -def restore_backup( - dest, - backup, - dest_type, - use_backup, - volume=None, - members_to_restore=None, - members_to_delete=None -): - """Restores a destination file/directory/data set by using a given backup. - - Arguments: - dest (str) -- Name of the destination data set or path of the file/directory. - backup (str) -- Name or path of the backup. - dest_type (str) -- Type of the destination. - use_backup (bool) -- Whether the destination actually created a backup, sometimes the user - tries to use an empty data set, and in that case a new data set is allocated instead - of copied. - volume (str, optional) -- Volume where the data set should be. - members_to_restore (list, optional) -- List of members of a PDS/PDSE that were overwritten - and need to be restored. - members_to_delete (list, optional) -- List of members of a PDS/PDSE that need to be erased - because they were newly added. - """ - volumes = [volume] if volume else None - - if use_backup: - if dest_type == "USS": - if os.path.isfile(backup): - os.remove(dest) - shutil.copy(backup, dest) - else: - shutil.rmtree(dest, ignore_errors=True) - shutil.copytree(backup, dest) - else: - if dest_type in data_set.DataSet.MVS_VSAM: - data_set.DataSet.ensure_absent(dest, volumes) - repro_cmd = """ REPRO - - INDATASET('{0}') - - OUTDATASET('{1}')""".format(backup.upper(), dest.upper()) - idcams(repro_cmd, authorized=True) - elif dest_type in data_set.DataSet.MVS_SEQ: - response = datasets._copy(backup, dest) - if response.rc != 0: - raise CopyOperationError( - "An error ocurred while restoring {0} from {1}".format(dest, backup), - response.rc, - response.stdout_response, - response.stderr_response - ) - else: - if not members_to_restore: - members_to_restore = [] - if not members_to_delete: - members_to_delete = [] - - for i, member in enumerate(members_to_restore): - response = datasets._copy( - "{0}({1})".format(backup, member), - "{0}({1})".format(dest, member) - ) - - if response.rc != 0: - # In case of a failure, we'll assume that all past - # members in the list (with index < i) were restored successfully. - raise CopyOperationError( - "Error ocurred while restoring {0}({1}) from backup {2}.".format( - dest, - member, - backup - ) + " Members restored: {0}. Members that didn't get restored: {1}".format( - members_to_restore[:i], - members_to_restore[i:] - ), - response.rc, - response.stdout_response, - response.stderr_response - ) - - for i, member in enumerate(members_to_delete): - response = datasets._delete_members("{0}({1})".format(dest, member)) - - if response.rc != 0: - raise CopyOperationError( - "Error while deleting {0}({1}) after copy failure.".format(dest, member) + - " Members deleted: {0}. Members not able to be deleted: {1}".format( - members_to_delete[:i], - members_to_delete[i:] - ), - response.rc, - response.stdout_response, - response.stderr_response - ) - - else: - data_set.DataSet.ensure_absent(dest, volumes) - data_set.DataSet.allocate_model_data_set(dest, backup, volume) - - -def erase_backup(backup, dest_type, volume=None): - """Erases a temporary backup from the system. - - Arguments: - backup (str) -- Name or path of the backup. - dest_type (str) -- Type of the destination. - volume (str, optional) -- Volume where the data set should be. - """ - if dest_type == "USS": - if os.path.isfile(backup): - os.remove(backup) - else: - shutil.rmtree(backup, ignore_errors=True) - else: - volumes = [volume] if volume else None - data_set.DataSet.ensure_absent(backup, volumes) - - def is_compatible( src_type, dest_type, @@ -2609,32 +2492,6 @@ def run_module(module, arg_def): dest=dest ) - # Creating an emergency backup or an empty data set to use as a model to - # be able to restore the destination in case the copy fails. - emergency_backup = "" - if dest_exists and not force: - if is_uss or not data_set.DataSet.is_empty(dest_name): - use_backup = True - if is_uss: - # When copying a directory without a trailing slash, - # appending the source's base name to the backup path to - # avoid backing up the whole parent directory that won't - # be modified. - src_basename = os.path.basename(src) if src else '' - backup_dest = "{0}/{1}".format(dest, src_basename) if is_src_dir and not src.endswith("/") else dest - backup_dest = os.path.normpath(backup_dest) - emergency_backup = tempfile.mkdtemp() - emergency_backup = backup_data(backup_dest, dest_ds_type, emergency_backup, tmphlq) - else: - if not (dest_ds_type in data_set.DataSet.MVS_PARTITIONED and src_member and not dest_member_exists): - emergency_backup = backup_data(dest, dest_ds_type, None, tmphlq) - # If dest is an empty data set, instead create a data set to - # use as a model when restoring. - else: - use_backup = False - emergency_backup = data_set.DataSet.temp_name() - data_set.DataSet.allocate_model_data_set(emergency_backup, dest_name) - # Here we'll use the normalized source file by shadowing the # original one. This change applies only to the # allocate_destination_data_set call. @@ -2659,9 +2516,6 @@ def run_module(module, arg_def): volume=volume ) except Exception as err: - if dest_exists and not force: - restore_backup(dest_name, emergency_backup, dest_ds_type, use_backup) - erase_backup(emergency_backup, dest_ds_type) if converted_src: if remote_src: src = original_src @@ -2790,12 +2644,7 @@ def run_module(module, arg_def): res_args["changed"] = True except CopyOperationError as err: - if dest_exists and not force: - restore_backup(dest_name, emergency_backup, dest_ds_type, use_backup) raise err - finally: - if dest_exists and not force: - erase_backup(emergency_backup, dest_ds_type) res_args.update( dict( From 7720bd3d72ee9f51bf2878864b5c10688c2c5579 Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Tue, 18 Jul 2023 16:01:34 -0400 Subject: [PATCH 142/495] Enhance/839/add field to job query (#841) * Initial change to manage 1.2.4 column changes for job listing * Improved documentation, code validated with playbooks. * Updated changelog fragment with PR * corrected duplicate entry in output documentation * Changes to comments and field names as per PR 841 Update to in-code documentation Expansion of changelog fragment * correction to datetime processing * fixing sample data --- .../839-Add-Field-to-zos-job-query.yml | 10 ++++ plugins/module_utils/job.py | 21 +++++++-- plugins/modules/zos_job_output.py | 46 +++++++++++++++++++ plugins/modules/zos_job_query.py | 38 ++++++++++----- plugins/modules/zos_job_submit.py | 33 +++++++++---- 5 files changed, 123 insertions(+), 25 deletions(-) create mode 100644 changelogs/fragments/839-Add-Field-to-zos-job-query.yml diff --git a/changelogs/fragments/839-Add-Field-to-zos-job-query.yml b/changelogs/fragments/839-Add-Field-to-zos-job-query.yml new file mode 100644 index 000000000..52370356c --- /dev/null +++ b/changelogs/fragments/839-Add-Field-to-zos-job-query.yml @@ -0,0 +1,10 @@ +minor_changes: +- zos_job_query - zoau added 'program_name' to their field output + starting with v1.2.4. This enhancement checks for that version and passes the extra column through. + (https://github.com/ansible-collections/ibm_zos_core/pull/841) +- zos_job_submit - zoau added 'program_name' to their field output + starting with v1.2.4. This enhancement checks for that version and passes the extra column through. + (https://github.com/ansible-collections/ibm_zos_core/pull/841) +- zos_job_output - zoau added 'program_name' to their field output + starting with v1.2.4. This enhancement checks for that version and passes the extra column through. + (https://github.com/ansible-collections/ibm_zos_core/pull/841) diff --git a/plugins/module_utils/job.py b/plugins/module_utils/job.py index d07ef5ac5..391583b75 100644 --- a/plugins/module_utils/job.py +++ b/plugins/module_utils/job.py @@ -207,7 +207,8 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, duration= # listing(job_id, owner) in 1.2.0 has owner param, 1.1 does not # jls output has expanded in zoau 1.2.3 and later: jls -l -v shows headers # jobclass=job[5] serviceclass=job[6] priority=job[7] asid=job[8] - # creationdate=job[9] creationtime=job[10] queueposition=job[11] + # creationdatetime=job[9] queueposition=job[10] + # starting in zoau 1.2.4, program_name[11] was added. final_entries = [] entries = listing(job_id=job_id_temp) @@ -246,17 +247,27 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, duration= job["ret_code"]["code"] = int(entry.rc) job["ret_code"]["msg_text"] = entry.status - # this section only works on zoau 1.2.3 vvv + # this section only works on zoau 1.2.3/+ vvv - if ZOAU_API_VERSION > "1.2.2": + if ZOAU_API_VERSION > "1.2.2" and ZOAU_API_VERSION < "1.2.4": job["job_class"] = entry.job_class job["svc_class"] = entry.svc_class job["priority"] = entry.priority job["asid"] = entry.asid - job["creation_datetime"] = entry.creation_datetime + job["creation_date"] = str(entry.creation_datetime)[0:10] + job["creation_time"] = str(entry.creation_datetime)[12:] job["queue_position"] = entry.queue_position + elif ZOAU_API_VERSION >= "1.2.4": + job["job_class"] = entry.job_class + job["svc_class"] = entry.svc_class + job["priority"] = entry.priority + job["asid"] = entry.asid + job["creation_date"] = str(entry.creation_datetime)[0:10] + job["creation_time"] = str(entry.creation_datetime)[12:] + job["queue_position"] = entry.queue_position + job["program_name"] = entry.program_name - # this section only works on zoau 1.2.3 ^^^ + # this section only works on zoau 1.2.3/+ ^^^ job["class"] = "" job["content_type"] = "" diff --git a/plugins/modules/zos_job_output.py b/plugins/modules/zos_job_output.py index 3bf9f69d6..636698b3b 100644 --- a/plugins/modules/zos_job_output.py +++ b/plugins/modules/zos_job_output.py @@ -116,6 +116,16 @@ Type of address space. type: str sample: JOB + creation_date: + description: + Date, local to the target system, when the job was created. + type: str + sample: "2023-05-04" + creation_time: + description: + Time, local to the target system, when the job was created. + type: str + sample: "14:15:00" ddnames: description: Data definition names. @@ -175,6 +185,37 @@ " 6 //SYSUT2 DD SYSOUT=* ", " 7 // " ] + job_class: + description: + Job class for this job. + type: str + sample: A + svc_class: + description: + Service class for this job. + type: str + sample: C + priority: + description: + A numeric indicator of the job priority assigned through JES. + type: int + sample: 4 + asid: + description: + The address Space Identifier (ASID) that is a unique descriptor for the job address space. + Zero if not active. + type: int + sample: 0 + queue_position: + description: + The position within the job queue where the jobs resides. + type: int + sample: 3 + program_name: + description: + The name of the program found in the job's last completed step found in the PGM parameter. + type: str + sample: "IEBGENER" ret_code: description: Return code output collected from job log. @@ -341,8 +382,13 @@ "stepname": "STEP0001" } ], + "duration": 0, + "job_class": "R", "job_id": "JOB00134", "job_name": "HELLO", + "priority": "1", + "program_name": "IEBGENER", + "queue_position": "58", "owner": "OMVSADM", "ret_code": { "code": 0, diff --git a/plugins/modules/zos_job_query.py b/plugins/modules/zos_job_query.py index cb9a28a53..431e06f02 100644 --- a/plugins/modules/zos_job_query.py +++ b/plugins/modules/zos_job_query.py @@ -176,12 +176,12 @@ } job_class: description: - Letter indicating job class for this job. + Job class for this job. type: str sample: A svc_class: description: - Character indicating service class for this job. + Service class for this job. type: str sample: C priority: @@ -191,19 +191,31 @@ sample: 4 asid: description: - An identifier created by JES. + The address Space Identifier (ASID) that is a unique descriptor for the job address space. + Zero if not active. type: int sample: 0 - creation_datetime: + creation_date: description: - Date and time, local to the target system, when the job was created. + Date, local to the target system, when the job was created. type: str - sample: 20230504T141500 + sample: "2023-05-04" + creation_time: + description: + Time, local to the target system, when the job was created. + type: str + sample: "14:15:00" queue_position: description: - Integer of the position within the job queue where this jobs resided. + The position within the job queue where the jobs resides. type: int sample: 3 + program_name: + description: + The name of the program found in the job's last completed step found in the PGM parameter. + type: str + sample: "IEBGENER" + sample: [ { @@ -215,7 +227,8 @@ "svc_class": "?", "priority": 1, "asid": 0, - "creation_datetime": "20230503T121300", + "creation_date": "2023-05-03", + "creation_time": "12:13:00", "queue_position": 3, }, { @@ -227,7 +240,8 @@ "svc_class": "E", "priority": 0, "asid": 4, - "creation_datetime": "20230503T121400", + "creation_date": "2023-05-03", + "creation_time": "12:14:00", "queue_position": 0, }, ] @@ -277,7 +291,7 @@ def run_module(): module.exit_json(**result) -# validate_arguments rturns a tuple, so we don't have to rebuild the job_name string +# validate_arguments returns a tuple, so we don't have to rebuild the job_name string def validate_arguments(params): job_name_in = params.get("job_name") @@ -400,8 +414,10 @@ def parsing_jobs(jobs_raw): "svc_class": job.get("svc_class"), "priority": job.get("priority"), "asid": job.get("asid"), - "creation_datetime": job.get("creation_datetime"), + "creation_date": job.get("creation_date"), + "creation_time": job.get("creation_time"), "queue_position": job.get("queue_position"), + "program_name": job.get("program_name"), } jobs.append(job_dict) return jobs diff --git a/plugins/modules/zos_job_submit.py b/plugins/modules/zos_job_submit.py index bc9f8ff19..994f4147d 100644 --- a/plugins/modules/zos_job_submit.py +++ b/plugins/modules/zos_job_submit.py @@ -282,12 +282,12 @@ } job_class: description: - Letter indicating job class for this job. + Job class for this job. type: str sample: A svc_class: description: - Character indicating service class for this job. + Service class for this job. type: str sample: C priority: @@ -297,19 +297,31 @@ sample: 4 asid: description: - An identifier created by JES. + The address Space Identifier (ASID) that is a unique descriptor for the job address space. + Zero if not active. type: int sample: 0 - creation_datetime: + creation_date: description: - Date and time, local to the target system, when the job was created. + Date, local to the target system, when the job was created. type: str - sample: 20230504T141500 + sample: "2023-05-04" + creation_time: + description: + Time, local to the target system, when the job was created. + type: str + sample: "14:15:00" queue_position: description: - Integer of the position within the job queue where this jobs resided. + The position within the job queue where the jobs resides. type: int sample: 3 + program_name: + description: + The name of the program found in the job's last completed step found in the PGM parameter. + type: str + sample: "IEBGENER" + sample: [ { @@ -526,10 +538,13 @@ "job_class": "K", "svc_class": "?", "priority": 1, + "program_name": "IEBGENER", "asid": 0, - "creation_datetime": "20230503T121300", + "creation_date": "2023-05-03", + "creation_time": "12:13:00", "queue_position": 3, - "subsystem": "STL1" + "subsystem": "STL1", + "system": "STL1" } ] message: From cf755e31a46cc231fb818faa5ab1b8612045d173 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Tue, 18 Jul 2023 14:06:10 -0600 Subject: [PATCH 143/495] Enhancement/850/Redesining test cases to be clearer (#840) * Remove duplicates and unnecesary declaration from lineinfile test * Delete blockninfile repeat and unecesary cases and finishing lineinfile * Solve test do not pass * Summary USS test case lineinfile * Clean ds tests general * Finishing clear lineinfile * Lineinfile clean proposal * Clean lineinfile * Finishing the clean of lineinfile and blockinfile * Lineinfile USS fully cleaned * Lineinfile clean * Clean blocinfile test * Structure for all test cases * Add fragment * Finall version without dictionaries * Add expected to variables encoding test case and simplify names * Close lineinfile * Remove the unnecesary marks * Get better encoding tests * Get better encoding tests * Remove encodings of datasets * Add encoding for ds --- .../fragments/840-redesign-test-cases.yml | 7 + .../modules/test_zos_blockinfile_func.py | 2079 ++++++++--------- .../modules/test_zos_lineinfile_func.py | 1568 ++++++------- tests/helpers/zos_blockinfile_helper.py | 345 --- tests/helpers/zos_lineinfile_helper.py | 340 --- 5 files changed, 1743 insertions(+), 2596 deletions(-) create mode 100644 changelogs/fragments/840-redesign-test-cases.yml delete mode 100644 tests/helpers/zos_blockinfile_helper.py delete mode 100644 tests/helpers/zos_lineinfile_helper.py diff --git a/changelogs/fragments/840-redesign-test-cases.yml b/changelogs/fragments/840-redesign-test-cases.yml new file mode 100644 index 000000000..8b9c2aee0 --- /dev/null +++ b/changelogs/fragments/840-redesign-test-cases.yml @@ -0,0 +1,7 @@ +trivial: +- zos_lininfile - Adjust test cases to be in one document and clearer to follow. +- zos_blockinfile - Adjust test cases to be in one document and clearer to follow. +bugfix: +- zos_blockinfile - Test case generate a data set that was not correctly removed. + Changes delete the correct data set not only member. + (https://github.com/ansible-collections/ibm_zos_core/pull/840) \ No newline at end of file diff --git a/tests/functional/modules/test_zos_blockinfile_func.py b/tests/functional/modules/test_zos_blockinfile_func.py index 7cd92c9e5..23982aeec 100644 --- a/tests/functional/modules/test_zos_blockinfile_func.py +++ b/tests/functional/modules/test_zos_blockinfile_func.py @@ -12,20 +12,41 @@ # limitations under the License. from __future__ import absolute_import, division, print_function -from ibm_zos_core.tests.helpers.zos_blockinfile_helper import ( - UssGeneral, - DsGeneral, - DsNotSupportedHelper, - DsGeneralResultKeyMatchesRegex, - DsGeneralForce, - DsGeneralForceFail, -) -import os -import sys +from shellescape import quote +import time +import re import pytest +import inspect __metaclass__ = type +DEFAULT_DATA_SET_NAME = "USER.PRIVATE.TESTDS" +TEST_FOLDER_BLOCKINFILE = "/tmp/ansible-core-tests/zos_blockinfile/" + +c_pgm="""#include <stdio.h> +#include <stdlib.h> +#include <string.h> +int main(int argc, char** argv) +{ + char dsname[ strlen(argv[1]) + 4]; + sprintf(dsname, "//'%s'", argv[1]); + FILE* member; + member = fopen(dsname, "rb,type=record"); + sleep(300); + fclose(member); + return 0; +} +""" + +call_c_jcl="""//PDSELOCK JOB MSGCLASS=A,MSGLEVEL=(1,1),NOTIFY=&SYSUID,REGION=0M +//LOCKMEM EXEC PGM=BPXBATCH +//STDPARM DD * +SH /tmp/disp_shr/pdse-lock '{0}({1})' +//STDIN DD DUMMY +//STDOUT DD SYSOUT=* +//STDERR DD SYSOUT=* +//""" + TEST_CONTENT = """if [ -z STEPLIB ] && tty -s; then export STEPLIB=none @@ -33,33 +54,12 @@ fi TZ=PST8PDT export TZ -LANG=C -export LANG -readonly LOGNAME -PATH=/usr/lpp/zoautil/v100/bin:/usr/lpp/rsusr/ported/bin:/bin:/var/bin -export PATH -LIBPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -export LIBPATH -NLSPATH=/usr/lib/nls/msg/%L/%N -export NLSPATH -MANPATH=/usr/man/%L -export MANPATH -MAIL=/usr/mail/LOGNAME export MAIL umask 022 ZOAU_ROOT=/usr/lpp/zoautil/v100 -ZOAUTIL_DIR=/usr/lpp/zoautil/v100 -PYTHONPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib PKG_CONFIG_PATH=/usr/lpp/izoda/v110/anaconda/lib/pkgconfig PYTHON_HOME=/usr/lpp/izoda/v110/anaconda -_BPXK_AUTOCVT=ON -export ZOAU_ROOT -export ZOAUTIL_DIR -export ZOAUTIL_DIR -export PYTHONPATH -export PKG_CONFIG_PATH -export PYTHON_HOME -export _BPXK_AUTOCVT""" +export ZOAU_ROOT""" TEST_CONTENT_DEFAULTMARKER = """if [ -z STEPLIB ] && tty -s; then @@ -68,35 +68,16 @@ fi TZ=PST8PDT export TZ -LANG=C -export LANG -readonly LOGNAME -PATH=/usr/lpp/zoautil/v100/bin:/usr/lpp/rsusr/ported/bin:/bin:/var/bin -export PATH -LIBPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -export LIBPATH -NLSPATH=/usr/lib/nls/msg/%L/%N -export NLSPATH -MANPATH=/usr/man/%L -export MANPATH -MAIL=/usr/mail/LOGNAME export MAIL umask 022 # BEGIN ANSIBLE MANAGED BLOCK ZOAU_ROOT=/usr/lpp/zoautil/v100 ZOAUTIL_DIR=/usr/lpp/zoautil/v100 # END ANSIBLE MANAGED BLOCK -PYTHONPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib +ZOAU_ROOT=/usr/lpp/zoautil/v100 PKG_CONFIG_PATH=/usr/lpp/izoda/v110/anaconda/lib/pkgconfig PYTHON_HOME=/usr/lpp/izoda/v110/anaconda -_BPXK_AUTOCVT=ON -export ZOAU_ROOT -export ZOAUTIL_DIR -export ZOAUTIL_DIR -export PYTHONPATH -export PKG_CONFIG_PATH -export PYTHON_HOME -export _BPXK_AUTOCVT""" +export ZOAU_ROOT""" TEST_CONTENT_CUSTOMMARKER = """if [ -z STEPLIB ] && tty -s; then @@ -105,35 +86,16 @@ fi TZ=PST8PDT export TZ -LANG=C -export LANG -readonly LOGNAME -PATH=/usr/lpp/zoautil/v100/bin:/usr/lpp/rsusr/ported/bin:/bin:/var/bin -export PATH -LIBPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -export LIBPATH -NLSPATH=/usr/lib/nls/msg/%L/%N -export NLSPATH -MANPATH=/usr/man/%L -export MANPATH -MAIL=/usr/mail/LOGNAME export MAIL umask 022 # OPEN IBM MANAGED BLOCK ZOAU_ROOT=/usr/lpp/zoautil/v100 ZOAUTIL_DIR=/usr/lpp/zoautil/v100 # CLOSE IBM MANAGED BLOCK -PYTHONPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib +ZOAU_ROOT=/usr/lpp/zoautil/v100 PKG_CONFIG_PATH=/usr/lpp/izoda/v110/anaconda/lib/pkgconfig PYTHON_HOME=/usr/lpp/izoda/v110/anaconda -_BPXK_AUTOCVT=ON -export ZOAU_ROOT -export ZOAUTIL_DIR -export ZOAUTIL_DIR -export PYTHONPATH -export PKG_CONFIG_PATH -export PYTHON_HOME -export _BPXK_AUTOCVT""" +export ZOAU_ROOT""" TEST_CONTENT_DOUBLEQUOTES = """//BPXSLEEP JOB MSGCLASS=A,MSGLEVEL=(1,1),NOTIFY=&SYSUID,REGION=0M //USSCMD EXEC PGM=BPXBATCH @@ -145,128 +107,13 @@ /* //""" -# supported data set types -# DS_TYPE = ['SEQ', 'PDS', 'PDSE'] -DS_TYPE = ['SEQ'] -# not supported data set types -NS_DS_TYPE = ['ESDS', 'RRDS', 'LDS'] -""" -Note: zos_encode module uses USS cp command for copying from USS file to MVS data set which only supports IBM-1047 charset. -I had to develop and use a new tool for converting and copying to data set in order to set up environment for tests to publish results on Jira. -Until the issue be addressed I disable related tests. -""" -# ENCODING = ['IBM-1047', 'ISO8859-1', 'UTF-8'] -ENCODING = ['IBM-1047'] -USS_BACKUP_FILE = "/tmp/backup.tmp" -MVS_BACKUP_DS = "BLOCKIF.TEST.BACKUP" -MVS_BACKUP_PDS = "BLOCKIF.TEST.BACKUP(BACKUP)" -BACKUP_OPTIONS = [None, MVS_BACKUP_DS, MVS_BACKUP_PDS] -TEST_ENV = dict( - TEST_CONT=TEST_CONTENT, - TEST_DIR="/tmp/zos_blockinfile/", - TEST_FILE="", - DS_NAME="", - DS_TYPE="", - ENCODING="", -) - -TEST_INFO = dict( - test_uss_block_insertafter_regex=dict( - insertafter="ZOAU_ROOT=", - block="ZOAU_ROOT=/mvsutil-develop_dsed\nZOAU_HOME=\\$ZOAU_ROOT\nZOAU_DIR=\\$ZOAU_ROOT", - state="present"), - test_uss_block_insertbefore_regex=dict( - insertbefore="ZOAU_ROOT=", block="unset ZOAU_ROOT\nunset ZOAU_HOME\nunset ZOAU_DIR", - state="present"), - test_uss_block_insertafter_eof=dict( - insertafter="EOF", block="export ZOAU_ROOT\nexport ZOAU_HOME\nexport ZOAU_DIR", - state="present"), - test_uss_block_insertafter_eof_with_backup=dict( - insertafter="EOF", block="export ZOAU_ROOT\nexport ZOAU_HOME\nexport ZOAU_DIR", - state="present", backup=True), - test_uss_block_insertafter_eof_with_backup_name=dict( - insertafter="EOF", block="export ZOAU_ROOT\nexport ZOAU_HOME\nexport ZOAU_DIR", - state="present", backup=True, - backup_name=USS_BACKUP_FILE), - test_uss_block_insert_with_force_option_as_true=dict( - insertafter="EOF", block="export ZOAU_ROOT\nexport ZOAU_HOME\nexport ZOAU_DIR", - state="present", force=True), - test_uss_block_insert_with_force_option_as_false=dict( - insertafter="EOF", block="export ZOAU_ROOT\nexport ZOAU_HOME\nexport ZOAU_DIR", - state="present", force=False), - test_uss_block_insertbefore_bof=dict( - insertbefore="BOF", block="# this is file is for setting env vars", - state="present"), - test_uss_block_absent=dict(block="", state="absent"), - test_uss_block_absent_with_force_option_as_true=dict(block="", state="absent", force=True), - test_uss_block_absent_with_force_option_as_false=dict(block="", state="absent", force=True), - test_uss_block_replace_insertafter_regex=dict( - insertafter="PYTHON_HOME=", - block="ZOAU_ROOT=/mvsutil-develop_dsed\nZOAU_HOME=\\$ZOAU_ROOT\nZOAU_DIR=\\$ZOAU_ROOT", - state="present"), - test_uss_block_replace_insertbefore_regex=dict( - insertbefore="PYTHON_HOME=", block="unset ZOAU_ROOT\nunset ZOAU_HOME\nunset ZOAU_DIR", - state="present"), - test_uss_block_replace_insertafter_eof=dict( - insertafter="EOF", block="export ZOAU_ROOT\nexport ZOAU_HOME\nexport ZOAU_DIR", - state="present"), - test_uss_block_replace_insertbefore_bof=dict( - insertbefore="BOF", block="# this is file is for setting env vars", - state="present"), - test_uss_block_insert_with_indentation_level_specified=dict( - insertafter="EOF", block="export ZOAU_ROOT\nexport ZOAU_HOME\nexport ZOAU_DIR", - state="present", indentation=16), - test_uss_block_insert_with_doublequotes=dict( - insertafter="sleep 30;", block='cat \"//OMVSADMI.CAT\"\ncat \"//OMVSADM.COPYMEM.TESTS\" > test.txt', - marker="// {mark} ANSIBLE MANAGED BLOCK",state="present"), - test_ds_block_insertafter_regex=dict(test_name="T1"), - test_ds_block_insertbefore_regex=dict(test_name="T2"), - test_ds_block_insertafter_eof=dict(test_name="T3"), - test_ds_block_insertbefore_bof=dict(test_name="T4"), - test_ds_block_absent=dict(test_name="T5"), - test_ds_block_tmp_hlq_option=dict( - insertafter="EOF", block="export ZOAU_ROOT\n", state="present", backup=True, - tmp_hlq="TMPHLQ"), - test_ds_block_insert_with_force_option_as_true=dict( - block="export ZOAU_ROOT\nexport ZOAU_HOME\nexport ZOAU_DIR", state="present", force=True), - test_ds_block_absent_with_force_option_as_true=dict( - block="", state="absent", force=True), - test_ds_block_insert_with_force_option_as_false=dict( - block="export ZOAU_ROOT\nexport ZOAU_HOME\nexport ZOAU_DIR", state="present", force=False), - test_ds_block_absent_with_force_option_as_false=dict(block="", state="absent", force=False), - test_ds_block_insert_with_indentation_level_specified=dict(test_name="T7"), - test_ds_block_insertafter_eof_with_backup=dict( - block="export ZOAU_ROOT\nexport ZOAU_HOME\nexport ZOAU_DIR", state="present", backup=True), - test_ds_block_insertafter_eof_with_backup_name=dict( - block="export ZOAU_ROOT\nexport ZOAU_HOME\nexport ZOAU_DIR", - state="present", backup=True, backup_name=MVS_BACKUP_DS), - test_ds_block_insertafter_regex_force=dict( - path="",insertafter="ZOAU_ROOT=", - block="ZOAU_ROOT=/mvsutil-develop_dsed\nZOAU_HOME=\\$ZOAU_ROOT\nZOAU_DIR=\\$ZOAU_ROOT", - state="present", force=True), - test_ds_block_insertafter_regex_force_fail=dict( - path="",insertafter="ZOAU_ROOT=", - block="ZOAU_ROOT=/mvsutil-develop_dsed\nZOAU_HOME=\\$ZOAU_ROOT\nZOAU_DIR=\\$ZOAU_ROOT", - state="present", force=False), - expected=dict(test_uss_block_insertafter_regex_defaultmarker="""if [ -z STEPLIB ] && tty -s; +EXPECTED_INSERTAFTER_REGEX = """if [ -z STEPLIB ] && tty -s; then export STEPLIB=none exec -a 0 SHELL fi TZ=PST8PDT export TZ -LANG=C -export LANG -readonly LOGNAME -PATH=/usr/lpp/zoautil/v100/bin:/usr/lpp/rsusr/ported/bin:/bin:/var/bin -export PATH -LIBPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -export LIBPATH -NLSPATH=/usr/lib/nls/msg/%L/%N -export NLSPATH -MANPATH=/usr/man/%L -export MANPATH -MAIL=/usr/mail/LOGNAME export MAIL umask 022 ZOAU_ROOT=/usr/lpp/zoautil/v100 @@ -275,50 +122,17 @@ ZOAU_HOME=$ZOAU_ROOT ZOAU_DIR=$ZOAU_ROOT # END ANSIBLE MANAGED BLOCK -ZOAUTIL_DIR=/usr/lpp/zoautil/v100 -PYTHONPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib PKG_CONFIG_PATH=/usr/lpp/izoda/v110/anaconda/lib/pkgconfig PYTHON_HOME=/usr/lpp/izoda/v110/anaconda -_BPXK_AUTOCVT=ON -export ZOAU_ROOT -export ZOAUTIL_DIR -export ZOAUTIL_DIR -export PYTHONPATH -export PKG_CONFIG_PATH -export PYTHON_HOME -export _BPXK_AUTOCVT""", - test_uss_block_insert_with_doublequotes="""//BPXSLEEP JOB MSGCLASS=A,MSGLEVEL=(1,1),NOTIFY=&SYSUID,REGION=0M -//USSCMD EXEC PGM=BPXBATCH -//STDERR DD SYSOUT=* -//STDOUT DD SYSOUT=* -//STDPARM DD * -SH ls -la /; -sleep 30; -// BEGIN ANSIBLE MANAGED BLOCK -cat "//OMVSADMI.CAT" -cat "//OMVSADM.COPYMEM.TESTS" > test.txt -// END ANSIBLE MANAGED BLOCK -/* -//""", - test_uss_block_insertbefore_regex_defaultmarker="""if [ -z STEPLIB ] && tty -s; +export ZOAU_ROOT""" + +EXPECTED_INSERTBEFORE_REGEX = """if [ -z STEPLIB ] && tty -s; then export STEPLIB=none exec -a 0 SHELL fi TZ=PST8PDT export TZ -LANG=C -export LANG -readonly LOGNAME -PATH=/usr/lpp/zoautil/v100/bin:/usr/lpp/rsusr/ported/bin:/bin:/var/bin -export PATH -LIBPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -export LIBPATH -NLSPATH=/usr/lib/nls/msg/%L/%N -export NLSPATH -MANPATH=/usr/man/%L -export MANPATH -MAIL=/usr/mail/LOGNAME export MAIL umask 022 # BEGIN ANSIBLE MANAGED BLOCK @@ -327,58 +141,30 @@ unset ZOAU_DIR # END ANSIBLE MANAGED BLOCK ZOAU_ROOT=/usr/lpp/zoautil/v100 -ZOAUTIL_DIR=/usr/lpp/zoautil/v100 -PYTHONPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib PKG_CONFIG_PATH=/usr/lpp/izoda/v110/anaconda/lib/pkgconfig PYTHON_HOME=/usr/lpp/izoda/v110/anaconda -_BPXK_AUTOCVT=ON -export ZOAU_ROOT -export ZOAUTIL_DIR -export ZOAUTIL_DIR -export PYTHONPATH -export PKG_CONFIG_PATH -export PYTHON_HOME -export _BPXK_AUTOCVT""", - test_uss_block_insertafter_eof_defaultmarker="""if [ -z STEPLIB ] && tty -s; +export ZOAU_ROOT""" + +EXPECTED_INSERTAFTER_EOF = """if [ -z STEPLIB ] && tty -s; then export STEPLIB=none exec -a 0 SHELL fi TZ=PST8PDT export TZ -LANG=C -export LANG -readonly LOGNAME -PATH=/usr/lpp/zoautil/v100/bin:/usr/lpp/rsusr/ported/bin:/bin:/var/bin -export PATH -LIBPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -export LIBPATH -NLSPATH=/usr/lib/nls/msg/%L/%N -export NLSPATH -MANPATH=/usr/man/%L -export MANPATH -MAIL=/usr/mail/LOGNAME export MAIL umask 022 ZOAU_ROOT=/usr/lpp/zoautil/v100 -ZOAUTIL_DIR=/usr/lpp/zoautil/v100 -PYTHONPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib PKG_CONFIG_PATH=/usr/lpp/izoda/v110/anaconda/lib/pkgconfig PYTHON_HOME=/usr/lpp/izoda/v110/anaconda -_BPXK_AUTOCVT=ON export ZOAU_ROOT -export ZOAUTIL_DIR -export ZOAUTIL_DIR -export PYTHONPATH -export PKG_CONFIG_PATH -export PYTHON_HOME -export _BPXK_AUTOCVT # BEGIN ANSIBLE MANAGED BLOCK export ZOAU_ROOT export ZOAU_HOME export ZOAU_DIR -# END ANSIBLE MANAGED BLOCK""", - test_uss_block_insertbefore_bof_defaultmarker="""# BEGIN ANSIBLE MANAGED BLOCK +# END ANSIBLE MANAGED BLOCK""" + +EXPECTED_INSERTBEFORE_BOF = """# BEGIN ANSIBLE MANAGED BLOCK # this is file is for setting env vars # END ANSIBLE MANAGED BLOCK if [ -z STEPLIB ] && tty -s; @@ -388,52 +174,20 @@ fi TZ=PST8PDT export TZ -LANG=C -export LANG -readonly LOGNAME -PATH=/usr/lpp/zoautil/v100/bin:/usr/lpp/rsusr/ported/bin:/bin:/var/bin -export PATH -LIBPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -export LIBPATH -NLSPATH=/usr/lib/nls/msg/%L/%N -export NLSPATH -MANPATH=/usr/man/%L -export MANPATH -MAIL=/usr/mail/LOGNAME export MAIL umask 022 ZOAU_ROOT=/usr/lpp/zoautil/v100 -ZOAUTIL_DIR=/usr/lpp/zoautil/v100 -PYTHONPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib PKG_CONFIG_PATH=/usr/lpp/izoda/v110/anaconda/lib/pkgconfig PYTHON_HOME=/usr/lpp/izoda/v110/anaconda -_BPXK_AUTOCVT=ON -export ZOAU_ROOT -export ZOAUTIL_DIR -export ZOAUTIL_DIR -export PYTHONPATH -export PKG_CONFIG_PATH -export PYTHON_HOME -export _BPXK_AUTOCVT""", - test_uss_block_insertafter_regex_custommarker="""if [ -z STEPLIB ] && tty -s; +export ZOAU_ROOT""" + +EXPECTED_INSERTAFTER_REGEX_CUSTOM = """if [ -z STEPLIB ] && tty -s; then export STEPLIB=none exec -a 0 SHELL fi TZ=PST8PDT export TZ -LANG=C -export LANG -readonly LOGNAME -PATH=/usr/lpp/zoautil/v100/bin:/usr/lpp/rsusr/ported/bin:/bin:/var/bin -export PATH -LIBPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -export LIBPATH -NLSPATH=/usr/lib/nls/msg/%L/%N -export NLSPATH -MANPATH=/usr/man/%L -export MANPATH -MAIL=/usr/mail/LOGNAME export MAIL umask 022 ZOAU_ROOT=/usr/lpp/zoautil/v100 @@ -442,37 +196,17 @@ ZOAU_HOME=$ZOAU_ROOT ZOAU_DIR=$ZOAU_ROOT # CLOSE IBM MANAGED BLOCK -ZOAUTIL_DIR=/usr/lpp/zoautil/v100 -PYTHONPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib PKG_CONFIG_PATH=/usr/lpp/izoda/v110/anaconda/lib/pkgconfig PYTHON_HOME=/usr/lpp/izoda/v110/anaconda -_BPXK_AUTOCVT=ON -export ZOAU_ROOT -export ZOAUTIL_DIR -export ZOAUTIL_DIR -export PYTHONPATH -export PKG_CONFIG_PATH -export PYTHON_HOME -export _BPXK_AUTOCVT""", - test_uss_block_insertbefore_regex_custommarker="""if [ -z STEPLIB ] && tty -s; +export ZOAU_ROOT""" + +EXPECTED_INSERTBEFORE_REGEX_CUSTOM = """if [ -z STEPLIB ] && tty -s; then export STEPLIB=none exec -a 0 SHELL fi TZ=PST8PDT export TZ -LANG=C -export LANG -readonly LOGNAME -PATH=/usr/lpp/zoautil/v100/bin:/usr/lpp/rsusr/ported/bin:/bin:/var/bin -export PATH -LIBPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -export LIBPATH -NLSPATH=/usr/lib/nls/msg/%L/%N -export NLSPATH -MANPATH=/usr/man/%L -export MANPATH -MAIL=/usr/mail/LOGNAME export MAIL umask 022 # OPEN IBM MANAGED BLOCK @@ -481,58 +215,30 @@ unset ZOAU_DIR # CLOSE IBM MANAGED BLOCK ZOAU_ROOT=/usr/lpp/zoautil/v100 -ZOAUTIL_DIR=/usr/lpp/zoautil/v100 -PYTHONPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib PKG_CONFIG_PATH=/usr/lpp/izoda/v110/anaconda/lib/pkgconfig PYTHON_HOME=/usr/lpp/izoda/v110/anaconda -_BPXK_AUTOCVT=ON -export ZOAU_ROOT -export ZOAUTIL_DIR -export ZOAUTIL_DIR -export PYTHONPATH -export PKG_CONFIG_PATH -export PYTHON_HOME -export _BPXK_AUTOCVT""", - test_uss_block_insertafter_eof_custommarker="""if [ -z STEPLIB ] && tty -s; +export ZOAU_ROOT""" + +EXPECTED_INSERTAFTER_EOF_CUSTOM = """if [ -z STEPLIB ] && tty -s; then export STEPLIB=none exec -a 0 SHELL fi TZ=PST8PDT export TZ -LANG=C -export LANG -readonly LOGNAME -PATH=/usr/lpp/zoautil/v100/bin:/usr/lpp/rsusr/ported/bin:/bin:/var/bin -export PATH -LIBPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -export LIBPATH -NLSPATH=/usr/lib/nls/msg/%L/%N -export NLSPATH -MANPATH=/usr/man/%L -export MANPATH -MAIL=/usr/mail/LOGNAME export MAIL umask 022 ZOAU_ROOT=/usr/lpp/zoautil/v100 -ZOAUTIL_DIR=/usr/lpp/zoautil/v100 -PYTHONPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib PKG_CONFIG_PATH=/usr/lpp/izoda/v110/anaconda/lib/pkgconfig PYTHON_HOME=/usr/lpp/izoda/v110/anaconda -_BPXK_AUTOCVT=ON export ZOAU_ROOT -export ZOAUTIL_DIR -export ZOAUTIL_DIR -export PYTHONPATH -export PKG_CONFIG_PATH -export PYTHON_HOME -export _BPXK_AUTOCVT # OPEN IBM MANAGED BLOCK export ZOAU_ROOT export ZOAU_HOME export ZOAU_DIR -# CLOSE IBM MANAGED BLOCK""", - test_uss_block_insertbefore_bof_custommarker="""# OPEN IBM MANAGED BLOCK +# CLOSE IBM MANAGED BLOCK""" + +EXPECTED_INSERTBEFORE_BOF_CUSTOM = """# OPEN IBM MANAGED BLOCK # this is file is for setting env vars # CLOSE IBM MANAGED BLOCK if [ -z STEPLIB ] && tty -s; @@ -542,87 +248,56 @@ fi TZ=PST8PDT export TZ -LANG=C -export LANG -readonly LOGNAME -PATH=/usr/lpp/zoautil/v100/bin:/usr/lpp/rsusr/ported/bin:/bin:/var/bin -export PATH -LIBPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -export LIBPATH -NLSPATH=/usr/lib/nls/msg/%L/%N -export NLSPATH -MANPATH=/usr/man/%L -export MANPATH -MAIL=/usr/mail/LOGNAME export MAIL umask 022 ZOAU_ROOT=/usr/lpp/zoautil/v100 -ZOAUTIL_DIR=/usr/lpp/zoautil/v100 -PYTHONPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib PKG_CONFIG_PATH=/usr/lpp/izoda/v110/anaconda/lib/pkgconfig PYTHON_HOME=/usr/lpp/izoda/v110/anaconda -_BPXK_AUTOCVT=ON -export ZOAU_ROOT -export ZOAUTIL_DIR -export ZOAUTIL_DIR -export PYTHONPATH -export PKG_CONFIG_PATH -export PYTHON_HOME -export _BPXK_AUTOCVT""", - test_uss_block_absent="""if [ -z STEPLIB ] && tty -s; +export ZOAU_ROOT""" + +EXPECTED_ABSENT = """if [ -z STEPLIB ] && tty -s; +then + export STEPLIB=none + exec -a 0 SHELL +fi +TZ=PST8PDT +export TZ +export MAIL +umask 022 +ZOAU_ROOT=/usr/lpp/zoautil/v100 +PKG_CONFIG_PATH=/usr/lpp/izoda/v110/anaconda/lib/pkgconfig +PYTHON_HOME=/usr/lpp/izoda/v110/anaconda +export ZOAU_ROOT""" + +EXPECTED_INSERT_WITH_INDENTATION = """if [ -z STEPLIB ] && tty -s; then export STEPLIB=none exec -a 0 SHELL fi TZ=PST8PDT export TZ -LANG=C -export LANG -readonly LOGNAME -PATH=/usr/lpp/zoautil/v100/bin:/usr/lpp/rsusr/ported/bin:/bin:/var/bin -export PATH -LIBPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -export LIBPATH -NLSPATH=/usr/lib/nls/msg/%L/%N -export NLSPATH -MANPATH=/usr/man/%L -export MANPATH -MAIL=/usr/mail/LOGNAME export MAIL umask 022 -PYTHONPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib +ZOAU_ROOT=/usr/lpp/zoautil/v100 PKG_CONFIG_PATH=/usr/lpp/izoda/v110/anaconda/lib/pkgconfig PYTHON_HOME=/usr/lpp/izoda/v110/anaconda -_BPXK_AUTOCVT=ON export ZOAU_ROOT -export ZOAUTIL_DIR -export ZOAUTIL_DIR -export PYTHONPATH -export PKG_CONFIG_PATH -export PYTHON_HOME -export _BPXK_AUTOCVT""", - test_uss_block_replace_insertafter_regex_defaultmarker="""if [ -z STEPLIB ] && tty -s; +# BEGIN ANSIBLE MANAGED BLOCK + export ZOAU_ROOT + export ZOAU_HOME + export ZOAU_DIR +# END ANSIBLE MANAGED BLOCK""" + +EXPECTED_REPLACE_INSERTAFTER = """if [ -z STEPLIB ] && tty -s; then export STEPLIB=none exec -a 0 SHELL fi TZ=PST8PDT export TZ -LANG=C -export LANG -readonly LOGNAME -PATH=/usr/lpp/zoautil/v100/bin:/usr/lpp/rsusr/ported/bin:/bin:/var/bin -export PATH -LIBPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -export LIBPATH -NLSPATH=/usr/lib/nls/msg/%L/%N -export NLSPATH -MANPATH=/usr/man/%L -export MANPATH -MAIL=/usr/mail/LOGNAME export MAIL umask 022 -PYTHONPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib +ZOAU_ROOT=/usr/lpp/zoautil/v100 PKG_CONFIG_PATH=/usr/lpp/izoda/v110/anaconda/lib/pkgconfig PYTHON_HOME=/usr/lpp/izoda/v110/anaconda # BEGIN ANSIBLE MANAGED BLOCK @@ -630,36 +305,18 @@ ZOAU_HOME=$ZOAU_ROOT ZOAU_DIR=$ZOAU_ROOT # END ANSIBLE MANAGED BLOCK -_BPXK_AUTOCVT=ON -export ZOAU_ROOT -export ZOAUTIL_DIR -export ZOAUTIL_DIR -export PYTHONPATH -export PKG_CONFIG_PATH -export PYTHON_HOME -export _BPXK_AUTOCVT""", - test_uss_block_replace_insertbefore_regex_defaultmarker="""if [ -z STEPLIB ] && tty -s; +export ZOAU_ROOT""" + +EXPECTED_REPLACE_INSERTBEFORE = """if [ -z STEPLIB ] && tty -s; then export STEPLIB=none exec -a 0 SHELL fi TZ=PST8PDT export TZ -LANG=C -export LANG -readonly LOGNAME -PATH=/usr/lpp/zoautil/v100/bin:/usr/lpp/rsusr/ported/bin:/bin:/var/bin -export PATH -LIBPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -export LIBPATH -NLSPATH=/usr/lib/nls/msg/%L/%N -export NLSPATH -MANPATH=/usr/man/%L -export MANPATH -MAIL=/usr/mail/LOGNAME export MAIL umask 022 -PYTHONPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib +ZOAU_ROOT=/usr/lpp/zoautil/v100 PKG_CONFIG_PATH=/usr/lpp/izoda/v110/anaconda/lib/pkgconfig # BEGIN ANSIBLE MANAGED BLOCK unset ZOAU_ROOT @@ -667,52 +324,28 @@ unset ZOAU_DIR # END ANSIBLE MANAGED BLOCK PYTHON_HOME=/usr/lpp/izoda/v110/anaconda -_BPXK_AUTOCVT=ON -export ZOAU_ROOT -export ZOAUTIL_DIR -export ZOAUTIL_DIR -export PYTHONPATH -export PKG_CONFIG_PATH -export PYTHON_HOME -export _BPXK_AUTOCVT""", - test_uss_block_replace_insertafter_eof_defaultmarker="""if [ -z STEPLIB ] && tty -s; +export ZOAU_ROOT""" + +EXPECTED_REPLACE_EOF_CUSTOM = """if [ -z STEPLIB ] && tty -s; then export STEPLIB=none exec -a 0 SHELL fi TZ=PST8PDT export TZ -LANG=C -export LANG -readonly LOGNAME -PATH=/usr/lpp/zoautil/v100/bin:/usr/lpp/rsusr/ported/bin:/bin:/var/bin -export PATH -LIBPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -export LIBPATH -NLSPATH=/usr/lib/nls/msg/%L/%N -export NLSPATH -MANPATH=/usr/man/%L -export MANPATH -MAIL=/usr/mail/LOGNAME export MAIL umask 022 -PYTHONPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib +ZOAU_ROOT=/usr/lpp/zoautil/v100 PKG_CONFIG_PATH=/usr/lpp/izoda/v110/anaconda/lib/pkgconfig PYTHON_HOME=/usr/lpp/izoda/v110/anaconda -_BPXK_AUTOCVT=ON export ZOAU_ROOT -export ZOAUTIL_DIR -export ZOAUTIL_DIR -export PYTHONPATH -export PKG_CONFIG_PATH -export PYTHON_HOME -export _BPXK_AUTOCVT # BEGIN ANSIBLE MANAGED BLOCK export ZOAU_ROOT export ZOAU_HOME export ZOAU_DIR -# END ANSIBLE MANAGED BLOCK""", - test_uss_block_replace_insertbefore_bof_defaultmarker="""# BEGIN ANSIBLE MANAGED BLOCK +# END ANSIBLE MANAGED BLOCK""" + +EXPECTED_REPLACE_BOF_CUSTOM = """# BEGIN ANSIBLE MANAGED BLOCK # this is file is for setting env vars # END ANSIBLE MANAGED BLOCK if [ -z STEPLIB ] && tty -s; @@ -722,53 +355,23 @@ fi TZ=PST8PDT export TZ -LANG=C -export LANG -readonly LOGNAME -PATH=/usr/lpp/zoautil/v100/bin:/usr/lpp/rsusr/ported/bin:/bin:/var/bin -export PATH -LIBPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -export LIBPATH -NLSPATH=/usr/lib/nls/msg/%L/%N -export NLSPATH -MANPATH=/usr/man/%L -export MANPATH -MAIL=/usr/mail/LOGNAME export MAIL umask 022 -PYTHONPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib +ZOAU_ROOT=/usr/lpp/zoautil/v100 PKG_CONFIG_PATH=/usr/lpp/izoda/v110/anaconda/lib/pkgconfig PYTHON_HOME=/usr/lpp/izoda/v110/anaconda -_BPXK_AUTOCVT=ON -export ZOAU_ROOT -export ZOAUTIL_DIR -export ZOAUTIL_DIR -export PYTHONPATH -export PKG_CONFIG_PATH -export PYTHON_HOME -export _BPXK_AUTOCVT""", - test_uss_block_replace_insertafter_regex_custommarker="""if [ -z STEPLIB ] && tty -s; +export ZOAU_ROOT""" + +EXPECTED_REPLACE_EOF_REGEX_CUSTOM = """if [ -z STEPLIB ] && tty -s; then export STEPLIB=none exec -a 0 SHELL fi TZ=PST8PDT export TZ -LANG=C -export LANG -readonly LOGNAME -PATH=/usr/lpp/zoautil/v100/bin:/usr/lpp/rsusr/ported/bin:/bin:/var/bin -export PATH -LIBPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -export LIBPATH -NLSPATH=/usr/lib/nls/msg/%L/%N -export NLSPATH -MANPATH=/usr/man/%L -export MANPATH -MAIL=/usr/mail/LOGNAME export MAIL umask 022 -PYTHONPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib +ZOAU_ROOT=/usr/lpp/zoautil/v100 PKG_CONFIG_PATH=/usr/lpp/izoda/v110/anaconda/lib/pkgconfig PYTHON_HOME=/usr/lpp/izoda/v110/anaconda # OPEN IBM MANAGED BLOCK @@ -776,36 +379,18 @@ ZOAU_HOME=$ZOAU_ROOT ZOAU_DIR=$ZOAU_ROOT # CLOSE IBM MANAGED BLOCK -_BPXK_AUTOCVT=ON -export ZOAU_ROOT -export ZOAUTIL_DIR -export ZOAUTIL_DIR -export PYTHONPATH -export PKG_CONFIG_PATH -export PYTHON_HOME -export _BPXK_AUTOCVT""", - test_uss_block_replace_insertbefore_regex_custommarker="""if [ -z STEPLIB ] && tty -s; +export ZOAU_ROOT""" + +EXPECTED_REPLACE_BOF_REGEX_CUSTOM = """if [ -z STEPLIB ] && tty -s; then export STEPLIB=none exec -a 0 SHELL fi TZ=PST8PDT export TZ -LANG=C -export LANG -readonly LOGNAME -PATH=/usr/lpp/zoautil/v100/bin:/usr/lpp/rsusr/ported/bin:/bin:/var/bin -export PATH -LIBPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -export LIBPATH -NLSPATH=/usr/lib/nls/msg/%L/%N -export NLSPATH -MANPATH=/usr/man/%L -export MANPATH -MAIL=/usr/mail/LOGNAME export MAIL umask 022 -PYTHONPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib +ZOAU_ROOT=/usr/lpp/zoautil/v100 PKG_CONFIG_PATH=/usr/lpp/izoda/v110/anaconda/lib/pkgconfig # OPEN IBM MANAGED BLOCK unset ZOAU_ROOT @@ -813,126 +398,71 @@ unset ZOAU_DIR # CLOSE IBM MANAGED BLOCK PYTHON_HOME=/usr/lpp/izoda/v110/anaconda -_BPXK_AUTOCVT=ON -export ZOAU_ROOT -export ZOAUTIL_DIR -export ZOAUTIL_DIR -export PYTHONPATH -export PKG_CONFIG_PATH -export PYTHON_HOME -export _BPXK_AUTOCVT""", - test_uss_block_replace_insertafter_eof_custommarker="""if [ -z STEPLIB ] && tty -s; -then - export STEPLIB=none - exec -a 0 SHELL -fi -TZ=PST8PDT -export TZ -LANG=C -export LANG -readonly LOGNAME -PATH=/usr/lpp/zoautil/v100/bin:/usr/lpp/rsusr/ported/bin:/bin:/var/bin -export PATH -LIBPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -export LIBPATH -NLSPATH=/usr/lib/nls/msg/%L/%N -export NLSPATH -MANPATH=/usr/man/%L -export MANPATH -MAIL=/usr/mail/LOGNAME -export MAIL -umask 022 -PYTHONPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -PKG_CONFIG_PATH=/usr/lpp/izoda/v110/anaconda/lib/pkgconfig -PYTHON_HOME=/usr/lpp/izoda/v110/anaconda -_BPXK_AUTOCVT=ON -export ZOAU_ROOT -export ZOAUTIL_DIR -export ZOAUTIL_DIR -export PYTHONPATH -export PKG_CONFIG_PATH -export PYTHON_HOME -export _BPXK_AUTOCVT -# OPEN IBM MANAGED BLOCK -export ZOAU_ROOT -export ZOAU_HOME -export ZOAU_DIR -# CLOSE IBM MANAGED BLOCK""", - test_uss_block_replace_insertbefore_bof_custommarker="""# OPEN IBM MANAGED BLOCK -# this is file is for setting env vars -# CLOSE IBM MANAGED BLOCK -if [ -z STEPLIB ] && tty -s; -then - export STEPLIB=none - exec -a 0 SHELL -fi -TZ=PST8PDT -export TZ -LANG=C -export LANG -readonly LOGNAME -PATH=/usr/lpp/zoautil/v100/bin:/usr/lpp/rsusr/ported/bin:/bin:/var/bin -export PATH -LIBPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -export LIBPATH -NLSPATH=/usr/lib/nls/msg/%L/%N -export NLSPATH -MANPATH=/usr/man/%L -export MANPATH -MAIL=/usr/mail/LOGNAME -export MAIL -umask 022 -PYTHONPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -PKG_CONFIG_PATH=/usr/lpp/izoda/v110/anaconda/lib/pkgconfig -PYTHON_HOME=/usr/lpp/izoda/v110/anaconda -_BPXK_AUTOCVT=ON -export ZOAU_ROOT -export ZOAUTIL_DIR -export ZOAUTIL_DIR -export PYTHONPATH -export PKG_CONFIG_PATH -export PYTHON_HOME -export _BPXK_AUTOCVT""", - test_uss_block_insert_with_indentation_level_specified="""if [ -z STEPLIB ] && tty -s; -then - export STEPLIB=none - exec -a 0 SHELL -fi -TZ=PST8PDT -export TZ -LANG=C -export LANG -readonly LOGNAME -PATH=/usr/lpp/zoautil/v100/bin:/usr/lpp/rsusr/ported/bin:/bin:/var/bin -export PATH -LIBPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -export LIBPATH -NLSPATH=/usr/lib/nls/msg/%L/%N -export NLSPATH -MANPATH=/usr/man/%L -export MANPATH -MAIL=/usr/mail/LOGNAME -export MAIL -umask 022 -ZOAU_ROOT=/usr/lpp/zoautil/v100 -ZOAUTIL_DIR=/usr/lpp/zoautil/v100 -PYTHONPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -PKG_CONFIG_PATH=/usr/lpp/izoda/v110/anaconda/lib/pkgconfig -PYTHON_HOME=/usr/lpp/izoda/v110/anaconda -_BPXK_AUTOCVT=ON -export ZOAU_ROOT -export ZOAUTIL_DIR -export ZOAUTIL_DIR -export PYTHONPATH -export PKG_CONFIG_PATH -export PYTHON_HOME -export _BPXK_AUTOCVT +export ZOAU_ROOT""" + +EXPECTED_DOUBLE_QUOTES = """//BPXSLEEP JOB MSGCLASS=A,MSGLEVEL=(1,1),NOTIFY=&SYSUID,REGION=0M +//USSCMD EXEC PGM=BPXBATCH +//STDERR DD SYSOUT=* +//STDOUT DD SYSOUT=* +//STDPARM DD * +SH ls -la /; +sleep 30; +// BEGIN ANSIBLE MANAGED BLOCK +cat "//OMVSADMI.CAT" +cat "//OMVSADM.COPYMEM.TESTS" > test.txt +// END ANSIBLE MANAGED BLOCK +/* +//""" + +EXPECTED_ENCODING = """SIMPLE LINE TO VERIFY # BEGIN ANSIBLE MANAGED BLOCK - export ZOAU_ROOT - export ZOAU_HOME - export ZOAU_DIR -# END ANSIBLE MANAGED BLOCK""",), -) +Insert this string +# END ANSIBLE MANAGED BLOCK""" + +""" +Note: zos_encode module uses USS cp command for copying from USS file to MVS data set which only supports IBM-1047 charset. +I had to develop and use a new tool for converting and copying to data set in order to set up environment for tests to publish results on Jira. +Until the issue be addressed I disable related tests. +""" +ENCODING = ['IBM-1047', 'ISO8859-1', 'UTF-8'] + +# supported data set types +DS_TYPE = ['SEQ', 'PDS', 'PDSE'] + +# not supported data set types +NS_DS_TYPE = ['ESDS', 'RRDS', 'LDS'] + +USS_BACKUP_FILE = "/tmp/backup.tmp" +BACKUP_OPTIONS = [None, "BLOCKIF.TEST.BACKUP", "BLOCKIF.TEST.BACKUP(BACKUP)"] + +def set_uss_environment(ansible_zos_module, CONTENT, FILE): + hosts = ansible_zos_module + hosts.all.shell(cmd="mkdir -p {0}".format(TEST_FOLDER_BLOCKINFILE)) + hosts.all.file(path=FILE, state="touch") + hosts.all.shell(cmd="echo \"{0}\" > {1}".format(CONTENT, FILE)) + +def remove_uss_environment(ansible_zos_module): + hosts = ansible_zos_module + hosts.all.shell(cmd="rm -rf" + TEST_FOLDER_BLOCKINFILE) + +def set_ds_environment(ansible_zos_module, TEMP_FILE, DS_NAME, DS_TYPE, CONTENT): + hosts = ansible_zos_module + hosts.all.shell(cmd="echo \"{0}\" > {1}".format(CONTENT, TEMP_FILE)) + hosts.all.zos_data_set(name=DS_NAME, type=DS_TYPE) + if DS_TYPE in ["PDS", "PDSE"]: + DS_FULL_NAME = DS_NAME + "(MEM)" + hosts.all.zos_data_set(name=DS_FULL_NAME, state="present", type="member") + cmdStr = "cp -CM {0} \"//'{1}'\"".format(quote(TEMP_FILE), DS_FULL_NAME) + else: + DS_FULL_NAME = DS_NAME + cmdStr = "cp {0} \"//'{1}'\" ".format(quote(TEMP_FILE), DS_FULL_NAME) + hosts.all.shell(cmd=cmdStr) + hosts.all.shell(cmd="rm -rf " + TEMP_FILE) + return DS_FULL_NAME + +def remove_ds_environment(ansible_zos_module, DS_NAME): + hosts = ansible_zos_module + hosts.all.zos_data_set(name=DS_NAME, state="absent") ######################### # USS test cases @@ -941,314 +471,461 @@ @pytest.mark.uss def test_uss_block_insertafter_regex_defaultmarker(ansible_zos_module): - UssGeneral( - "test_uss_block_insertafter_regex_defaultmarker", ansible_zos_module, TEST_ENV, - TEST_INFO["test_uss_block_insertafter_regex"], - TEST_INFO["expected"]["test_uss_block_insertafter_regex_defaultmarker"]) + hosts = ansible_zos_module + params = dict(insertafter="ZOAU_ROOT=", block="ZOAU_ROOT=/mvsutil-develop_dsed\nZOAU_HOME=\\$ZOAU_ROOT\nZOAU_DIR=\\$ZOAU_ROOT", state="present") + full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] + content = TEST_CONTENT + try: + set_uss_environment(ansible_zos_module, content, full_path) + params["path"] = full_path + results = hosts.all.zos_blockinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat {0}".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_INSERTAFTER_REGEX + finally: + remove_uss_environment(ansible_zos_module) @pytest.mark.uss def test_uss_block_insertbefore_regex_defaultmarker(ansible_zos_module): - UssGeneral( - "test_uss_block_insertbefore_regex_defaultmarker", ansible_zos_module, TEST_ENV, - TEST_INFO["test_uss_block_insertbefore_regex"], - TEST_INFO["expected"]["test_uss_block_insertbefore_regex_defaultmarker"]) + hosts = ansible_zos_module + params = dict(insertbefore="ZOAU_ROOT=", block="unset ZOAU_ROOT\nunset ZOAU_HOME\nunset ZOAU_DIR", state="present") + full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] + content = TEST_CONTENT + try: + set_uss_environment(ansible_zos_module, content, full_path) + params["path"] = full_path + results = hosts.all.zos_blockinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat {0}".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_INSERTBEFORE_REGEX + finally: + remove_uss_environment(ansible_zos_module) @pytest.mark.uss def test_uss_block_insertafter_eof_defaultmarker(ansible_zos_module): - UssGeneral( - "test_uss_block_insertafter_eof_defaultmarker", ansible_zos_module, - TEST_ENV, TEST_INFO["test_uss_block_insertafter_eof"], - TEST_INFO["expected"]["test_uss_block_insertafter_eof_defaultmarker"]) + hosts = ansible_zos_module + params = dict(insertafter="EOF", block="export ZOAU_ROOT\nexport ZOAU_HOME\nexport ZOAU_DIR", state="present") + full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] + content = TEST_CONTENT + try: + set_uss_environment(ansible_zos_module, content, full_path) + params["path"] = full_path + results = hosts.all.zos_blockinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat {0}".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_INSERTAFTER_EOF + finally: + remove_uss_environment(ansible_zos_module) @pytest.mark.uss def test_uss_block_insertbefore_bof_defaultmarker(ansible_zos_module): - UssGeneral( - "test_uss_block_insertbefore_bof_defaultmarker", ansible_zos_module, - TEST_ENV, TEST_INFO["test_uss_block_insertbefore_bof"], - TEST_INFO["expected"]["test_uss_block_insertbefore_bof_defaultmarker"]) + hosts = ansible_zos_module + params = dict(insertbefore="BOF", block="# this is file is for setting env vars", state="present") + full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] + content = TEST_CONTENT + try: + set_uss_environment(ansible_zos_module, content, full_path) + params["path"] = full_path + results = hosts.all.zos_blockinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat {0}".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_INSERTBEFORE_BOF + finally: + remove_uss_environment(ansible_zos_module) @pytest.mark.uss def test_uss_block_insertafter_regex_custommarker(ansible_zos_module): - _TEST_INFO = TEST_INFO["test_uss_block_insertafter_regex"] - _TEST_INFO["marker"] = '# {mark} IBM MANAGED BLOCK' - _TEST_INFO["marker_begin"] = 'OPEN' - _TEST_INFO["marker_end"] = 'CLOSE' - UssGeneral( - "test_uss_block_insertafter_regex_custommarker", ansible_zos_module, TEST_ENV, - _TEST_INFO, - TEST_INFO["expected"]["test_uss_block_insertafter_regex_custommarker"]) - del _TEST_INFO["marker"] - del _TEST_INFO["marker_begin"] - del _TEST_INFO["marker_end"] + hosts = ansible_zos_module + # Set special parameters for the test as marker + params = dict(insertafter="ZOAU_ROOT=", block="ZOAU_ROOT=/mvsutil-develop_dsed\nZOAU_HOME=\\$ZOAU_ROOT\nZOAU_DIR=\\$ZOAU_ROOT", state="present") + params["marker"] = '# {mark} IBM MANAGED BLOCK' + params["marker_begin"] = 'OPEN' + params["marker_end"] = 'CLOSE' + full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] + content = TEST_CONTENT + try: + set_uss_environment(ansible_zos_module, content, full_path) + params["path"] = full_path + results = hosts.all.zos_blockinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat {0}".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_INSERTAFTER_REGEX_CUSTOM + finally: + remove_uss_environment(ansible_zos_module) + @pytest.mark.uss def test_uss_block_insertbefore_regex_custommarker(ansible_zos_module): - _TEST_INFO = TEST_INFO["test_uss_block_insertbefore_regex"] - _TEST_INFO["marker"] = '# {mark} IBM MANAGED BLOCK' - _TEST_INFO["marker_begin"] = 'OPEN' - _TEST_INFO["marker_end"] = 'CLOSE' - UssGeneral( - "test_uss_block_insertbefore_regex_custommarker", ansible_zos_module, TEST_ENV, - _TEST_INFO, - TEST_INFO["expected"]["test_uss_block_insertbefore_regex_custommarker"]) - del _TEST_INFO["marker"] - del _TEST_INFO["marker_begin"] - del _TEST_INFO["marker_end"] + hosts = ansible_zos_module + # Set special parameters for the test as marker + params = dict(insertbefore="ZOAU_ROOT=", block="unset ZOAU_ROOT\nunset ZOAU_HOME\nunset ZOAU_DIR", state="present") + params["marker"] = '# {mark} IBM MANAGED BLOCK' + params["marker_begin"] = 'OPEN' + params["marker_end"] = 'CLOSE' + full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] + content = TEST_CONTENT + try: + set_uss_environment(ansible_zos_module, content, full_path) + params["path"] = full_path + results = hosts.all.zos_blockinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat {0}".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_INSERTBEFORE_REGEX_CUSTOM + finally: + remove_uss_environment(ansible_zos_module) @pytest.mark.uss def test_uss_block_insertafter_eof_custommarker(ansible_zos_module): - _TEST_INFO = TEST_INFO["test_uss_block_insertafter_eof"] - _TEST_INFO["marker"] = '# {mark} IBM MANAGED BLOCK' - _TEST_INFO["marker_begin"] = 'OPEN' - _TEST_INFO["marker_end"] = 'CLOSE' - UssGeneral( - "test_uss_block_insertafter_eof_custommarker", ansible_zos_module, - TEST_ENV, _TEST_INFO, - TEST_INFO["expected"]["test_uss_block_insertafter_eof_custommarker"]) - del _TEST_INFO["marker"] - del _TEST_INFO["marker_begin"] - del _TEST_INFO["marker_end"] + hosts = ansible_zos_module + # Set special parameters for the test as marker + params = dict(insertafter="EOF", block="export ZOAU_ROOT\nexport ZOAU_HOME\nexport ZOAU_DIR", state="present") + params["marker"] = '# {mark} IBM MANAGED BLOCK' + params["marker_begin"] = 'OPEN' + params["marker_end"] = 'CLOSE' + full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] + content = TEST_CONTENT + try: + set_uss_environment(ansible_zos_module, content, full_path) + params["path"] = full_path + results = hosts.all.zos_blockinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat {0}".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_INSERTAFTER_EOF_CUSTOM + finally: + remove_uss_environment(ansible_zos_module) @pytest.mark.uss def test_uss_block_insertbefore_bof_custommarker(ansible_zos_module): - _TEST_INFO = TEST_INFO["test_uss_block_insertbefore_bof"] - _TEST_INFO["marker"] = '# {mark} IBM MANAGED BLOCK' - _TEST_INFO["marker_begin"] = 'OPEN' - _TEST_INFO["marker_end"] = 'CLOSE' - UssGeneral( - "test_uss_block_insertbefore_bof_custommarker", ansible_zos_module, - TEST_ENV, _TEST_INFO, - TEST_INFO["expected"]["test_uss_block_insertbefore_bof_custommarker"]) - del _TEST_INFO["marker"] - del _TEST_INFO["marker_begin"] - del _TEST_INFO["marker_end"] + hosts = ansible_zos_module + # Set special parameters for the test as marker + params = dict(insertbefore="BOF", block="# this is file is for setting env vars", state="present") + params["marker"] = '# {mark} IBM MANAGED BLOCK' + params["marker_begin"] = 'OPEN' + params["marker_end"] = 'CLOSE' + full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] + content = TEST_CONTENT + try: + set_uss_environment(ansible_zos_module, content, full_path) + params["path"] = full_path + results = hosts.all.zos_blockinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat {0}".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_INSERTBEFORE_BOF_CUSTOM + finally: + remove_uss_environment(ansible_zos_module) @pytest.mark.uss def test_uss_block_absent_defaultmarker(ansible_zos_module): - TEST_ENV["TEST_CONT"] = TEST_CONTENT_DEFAULTMARKER - UssGeneral( - "test_uss_block_absent_defaultmarker", ansible_zos_module, TEST_ENV, - TEST_INFO["test_uss_block_absent"], - TEST_INFO["expected"]["test_uss_block_absent"]) - TEST_ENV["TEST_CONT"] = TEST_CONTENT + hosts = ansible_zos_module + params = dict(block="", state="absent") + full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] + content = TEST_CONTENT_DEFAULTMARKER + try: + set_uss_environment(ansible_zos_module, content, full_path) + params["path"] = full_path + results = hosts.all.zos_blockinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat {0}".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_ABSENT + finally: + remove_uss_environment(ansible_zos_module) @pytest.mark.uss def test_uss_block_absent_custommarker(ansible_zos_module): - _TEST_INFO = TEST_INFO["test_uss_block_absent"] - _TEST_INFO["marker"] = '# {mark} IBM MANAGED BLOCK' - _TEST_INFO["marker_begin"] = 'OPEN' - _TEST_INFO["marker_end"] = 'CLOSE' - TEST_ENV["TEST_CONT"] = TEST_CONTENT_CUSTOMMARKER - UssGeneral( - "test_uss_block_absent_custommarker", ansible_zos_module, TEST_ENV, - _TEST_INFO, - TEST_INFO["expected"]["test_uss_block_absent"]) - del _TEST_INFO["marker"] - del _TEST_INFO["marker_begin"] - del _TEST_INFO["marker_end"] - TEST_ENV["TEST_CONT"] = TEST_CONTENT + hosts = ansible_zos_module + params = dict(block="", state="absent") + params["marker"] = '# {mark} IBM MANAGED BLOCK' + params["marker_begin"] = 'OPEN' + params["marker_end"] = 'CLOSE' + full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] + content = TEST_CONTENT_CUSTOMMARKER + try: + set_uss_environment(ansible_zos_module, content, full_path) + params["path"] = full_path + results = hosts.all.zos_blockinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat {0}".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_ABSENT + finally: + remove_uss_environment(ansible_zos_module) @pytest.mark.uss def test_uss_block_replace_insertafter_regex_defaultmarker(ansible_zos_module): - TEST_ENV["TEST_CONT"] = TEST_CONTENT_DEFAULTMARKER - UssGeneral( - "test_uss_block_replace_insertafter_regex_defaultmarker", ansible_zos_module, TEST_ENV, - TEST_INFO["test_uss_block_replace_insertafter_regex"], - TEST_INFO["expected"]["test_uss_block_replace_insertafter_regex_defaultmarker"]) - TEST_ENV["TEST_CONT"] = TEST_CONTENT + hosts = ansible_zos_module + params = dict(insertafter="PYTHON_HOME=", block="ZOAU_ROOT=/mvsutil-develop_dsed\nZOAU_HOME=\\$ZOAU_ROOT\nZOAU_DIR=\\$ZOAU_ROOT", state="present") + full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] + content = TEST_CONTENT_DEFAULTMARKER + try: + set_uss_environment(ansible_zos_module, content, full_path) + params["path"] = full_path + results = hosts.all.zos_blockinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat {0}".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_REPLACE_INSERTAFTER + finally: + remove_uss_environment(ansible_zos_module) @pytest.mark.uss def test_uss_block_replace_insertbefore_regex_defaultmarker(ansible_zos_module): - TEST_ENV["TEST_CONT"] = TEST_CONTENT_DEFAULTMARKER - UssGeneral( - "test_uss_block_replace_insertbefore_regex_defaultmarker", ansible_zos_module, TEST_ENV, - TEST_INFO["test_uss_block_replace_insertbefore_regex"], - TEST_INFO["expected"]["test_uss_block_replace_insertbefore_regex_defaultmarker"]) - TEST_ENV["TEST_CONT"] = TEST_CONTENT + hosts = ansible_zos_module + params = dict(insertbefore="PYTHON_HOME=", block="unset ZOAU_ROOT\nunset ZOAU_HOME\nunset ZOAU_DIR", state="present") + full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] + content = TEST_CONTENT_DEFAULTMARKER + try: + set_uss_environment(ansible_zos_module, content, full_path) + params["path"] = full_path + results = hosts.all.zos_blockinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat {0}".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_REPLACE_INSERTBEFORE + finally: + remove_uss_environment(ansible_zos_module) @pytest.mark.uss def test_uss_block_replace_insertafter_eof_defaultmarker(ansible_zos_module): - TEST_ENV["TEST_CONT"] = TEST_CONTENT_DEFAULTMARKER - UssGeneral( - "test_uss_block_replace_insertafter_eof_defaultmarker", ansible_zos_module, - TEST_ENV, TEST_INFO["test_uss_block_replace_insertafter_eof"], - TEST_INFO["expected"]["test_uss_block_replace_insertafter_eof_defaultmarker"]) - TEST_ENV["TEST_CONT"] = TEST_CONTENT + hosts = ansible_zos_module + params = dict(insertafter="EOF", block="export ZOAU_ROOT\nexport ZOAU_HOME\nexport ZOAU_DIR", state="present") + full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] + content = TEST_CONTENT_DEFAULTMARKER + try: + set_uss_environment(ansible_zos_module, content, full_path) + params["path"] = full_path + results = hosts.all.zos_blockinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat {0}".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_REPLACE_EOF_CUSTOM + finally: + remove_uss_environment(ansible_zos_module) @pytest.mark.uss def test_uss_block_replace_insertbefore_bof_defaultmarker(ansible_zos_module): - TEST_ENV["TEST_CONT"] = TEST_CONTENT_DEFAULTMARKER - UssGeneral( - "test_uss_block_replace_insertbefore_bof_defaultmarker", ansible_zos_module, - TEST_ENV, TEST_INFO["test_uss_block_replace_insertbefore_bof"], - TEST_INFO["expected"]["test_uss_block_replace_insertbefore_bof_defaultmarker"]) - TEST_ENV["TEST_CONT"] = TEST_CONTENT + hosts = ansible_zos_module + params = dict(insertbefore="BOF", block="# this is file is for setting env vars", state="present") + full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] + content = TEST_CONTENT_DEFAULTMARKER + try: + set_uss_environment(ansible_zos_module, content, full_path) + params["path"] = full_path + results = hosts.all.zos_blockinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat {0}".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_REPLACE_BOF_CUSTOM + finally: + remove_uss_environment(ansible_zos_module) @pytest.mark.uss def test_uss_block_replace_insertafter_regex_custommarker(ansible_zos_module): - _TEST_INFO = TEST_INFO["test_uss_block_replace_insertafter_regex"] - _TEST_INFO["marker"] = '# {mark} IBM MANAGED BLOCK' - _TEST_INFO["marker_begin"] = 'OPEN' - _TEST_INFO["marker_end"] = 'CLOSE' - TEST_ENV["TEST_CONT"] = TEST_CONTENT_CUSTOMMARKER - UssGeneral( - "test_uss_block_replace_insertafter_regex_custommarker", ansible_zos_module, TEST_ENV, - _TEST_INFO, - TEST_INFO["expected"]["test_uss_block_replace_insertafter_regex_custommarker"]) - del _TEST_INFO["marker"] - del _TEST_INFO["marker_begin"] - del _TEST_INFO["marker_end"] - TEST_ENV["TEST_CONT"] = TEST_CONTENT + hosts = ansible_zos_module + params = dict(insertafter="PYTHON_HOME=", block="ZOAU_ROOT=/mvsutil-develop_dsed\nZOAU_HOME=\\$ZOAU_ROOT\nZOAU_DIR=\\$ZOAU_ROOT", state="present") + params["marker"] = '# {mark} IBM MANAGED BLOCK' + params["marker_begin"] = 'OPEN' + params["marker_end"] = 'CLOSE' + full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] + content = TEST_CONTENT + try: + set_uss_environment(ansible_zos_module, content, full_path) + params["path"] = full_path + results = hosts.all.zos_blockinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat {0}".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_REPLACE_EOF_REGEX_CUSTOM + finally: + remove_uss_environment(ansible_zos_module) @pytest.mark.uss def test_uss_block_replace_insertbefore_regex_custommarker(ansible_zos_module): - _TEST_INFO = TEST_INFO["test_uss_block_replace_insertbefore_regex"] - _TEST_INFO["marker"] = '# {mark} IBM MANAGED BLOCK' - _TEST_INFO["marker_begin"] = 'OPEN' - _TEST_INFO["marker_end"] = 'CLOSE' - TEST_ENV["TEST_CONT"] = TEST_CONTENT_CUSTOMMARKER - UssGeneral( - "test_uss_block_replace_insertbefore_regex_custommarker", ansible_zos_module, TEST_ENV, - _TEST_INFO, - TEST_INFO["expected"]["test_uss_block_replace_insertbefore_regex_custommarker"]) - del _TEST_INFO["marker"] - del _TEST_INFO["marker_begin"] - del _TEST_INFO["marker_end"] - TEST_ENV["TEST_CONT"] = TEST_CONTENT + hosts = ansible_zos_module + params = dict(insertbefore="PYTHON_HOME=", block="unset ZOAU_ROOT\nunset ZOAU_HOME\nunset ZOAU_DIR", state="present") + params["marker"] = '# {mark} IBM MANAGED BLOCK' + params["marker_begin"] = 'OPEN' + params["marker_end"] = 'CLOSE' + full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] + content = TEST_CONTENT_CUSTOMMARKER + try: + set_uss_environment(ansible_zos_module, content, full_path) + params["path"] = full_path + results = hosts.all.zos_blockinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat {0}".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_REPLACE_BOF_REGEX_CUSTOM + finally: + remove_uss_environment(ansible_zos_module) @pytest.mark.uss def test_uss_block_replace_insertafter_eof_custommarker(ansible_zos_module): - _TEST_INFO = TEST_INFO["test_uss_block_replace_insertafter_eof"] - _TEST_INFO["marker"] = '# {mark} IBM MANAGED BLOCK' - _TEST_INFO["marker_begin"] = 'OPEN' - _TEST_INFO["marker_end"] = 'CLOSE' - TEST_ENV["TEST_CONT"] = TEST_CONTENT_CUSTOMMARKER - UssGeneral( - "test_uss_block_replace_insertafter_eof_custommarker", ansible_zos_module, - TEST_ENV, _TEST_INFO, - TEST_INFO["expected"]["test_uss_block_replace_insertafter_eof_custommarker"]) - del _TEST_INFO["marker"] - del _TEST_INFO["marker_begin"] - del _TEST_INFO["marker_end"] - TEST_ENV["TEST_CONT"] = TEST_CONTENT + hosts = ansible_zos_module + params = dict(insertafter="EOF", block="export ZOAU_ROOT\nexport ZOAU_HOME\nexport ZOAU_DIR", state="present") + params["marker"] = '# {mark} IBM MANAGED BLOCK' + params["marker_begin"] = 'OPEN' + params["marker_end"] = 'CLOSE' + full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] + content = TEST_CONTENT_CUSTOMMARKER + try: + set_uss_environment(ansible_zos_module, content, full_path) + params["path"] = full_path + results = hosts.all.zos_blockinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat {0}".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_INSERTAFTER_EOF_CUSTOM + finally: + remove_uss_environment(ansible_zos_module) @pytest.mark.uss def test_uss_block_replace_insertbefore_bof_custommarker(ansible_zos_module): - _TEST_INFO = TEST_INFO["test_uss_block_replace_insertbefore_bof"] - _TEST_INFO["marker"] = '# {mark} IBM MANAGED BLOCK' - _TEST_INFO["marker_begin"] = 'OPEN' - _TEST_INFO["marker_end"] = 'CLOSE' - TEST_ENV["TEST_CONT"] = TEST_CONTENT_CUSTOMMARKER - UssGeneral( - "test_uss_block_replace_insertbefore_bof_custommarker", ansible_zos_module, - TEST_ENV, _TEST_INFO, - TEST_INFO["expected"]["test_uss_block_replace_insertbefore_bof_custommarker"]) - del _TEST_INFO["marker"] - del _TEST_INFO["marker_begin"] - del _TEST_INFO["marker_end"] - TEST_ENV["TEST_CONT"] = TEST_CONTENT - - -@pytest.mark.uss -def test_uss_block_insert_with_force_option_as_true(ansible_zos_module): - UssGeneral( - "test_uss_block_insertafter_eof_defaultmarker", ansible_zos_module, - TEST_ENV, TEST_INFO["test_uss_block_insert_with_force_option_as_true"], - TEST_INFO["expected"]["test_uss_block_insertafter_eof_defaultmarker"]) - - -@pytest.mark.uss -def test_uss_block_insert_with_force_option_as_false(ansible_zos_module): - UssGeneral( - "test_uss_block_insertafter_eof_defaultmarker", ansible_zos_module, - TEST_ENV, TEST_INFO["test_uss_block_insert_with_force_option_as_false"], - TEST_INFO["expected"]["test_uss_block_insertafter_eof_defaultmarker"]) - - -@pytest.mark.uss -def test_uss_block_absent_with_force_option_as_true(ansible_zos_module): - TEST_ENV["TEST_CONT"] = TEST_CONTENT_DEFAULTMARKER - UssGeneral( - "test_uss_block_absent_defaultmarker", ansible_zos_module, TEST_ENV, - TEST_INFO["test_uss_block_absent_with_force_option_as_true"], - TEST_INFO["expected"]["test_uss_block_absent"]) - TEST_ENV["TEST_CONT"] = TEST_CONTENT - - -@pytest.mark.uss -def test_uss_block_absent_with_force_option_as_false(ansible_zos_module): - TEST_ENV["TEST_CONT"] = TEST_CONTENT_DEFAULTMARKER - UssGeneral( - "test_uss_block_absent_defaultmarker", ansible_zos_module, TEST_ENV, - TEST_INFO["test_uss_block_absent_with_force_option_as_false"], - TEST_INFO["expected"]["test_uss_block_absent"]) - TEST_ENV["TEST_CONT"] = TEST_CONTENT + hosts = ansible_zos_module + params = dict(insertbefore="BOF", block="# this is file is for setting env vars", state="present") + params["marker"] = '# {mark} IBM MANAGED BLOCK' + params["marker_begin"] = 'OPEN' + params["marker_end"] = 'CLOSE' + full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] + content = TEST_CONTENT_CUSTOMMARKER + try: + set_uss_environment(ansible_zos_module, content, full_path) + params["path"] = full_path + results = hosts.all.zos_blockinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat {0}".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_INSERTBEFORE_BOF_CUSTOM + finally: + remove_uss_environment(ansible_zos_module) @pytest.mark.uss def test_uss_block_insert_with_indentation_level_specified(ansible_zos_module): - UssGeneral( - "test_uss_block_insert_with_indentation_level_specified", ansible_zos_module, - TEST_ENV, TEST_INFO["test_uss_block_insert_with_indentation_level_specified"], - TEST_INFO["expected"]["test_uss_block_insert_with_indentation_level_specified"]) + hosts = ansible_zos_module + params = dict(insertafter="EOF", block="export ZOAU_ROOT\nexport ZOAU_HOME\nexport ZOAU_DIR", state="present", indentation=16) + full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] + content = TEST_CONTENT + try: + set_uss_environment(ansible_zos_module, content, full_path) + params["path"] = full_path + results = hosts.all.zos_blockinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat {0}".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_INSERT_WITH_INDENTATION + finally: + remove_uss_environment(ansible_zos_module) @pytest.mark.uss def test_uss_block_insert_with_doublequotes(ansible_zos_module): - TEST_ENV["TEST_CONT"] = TEST_CONTENT_DOUBLEQUOTES - UssGeneral( - "test_uss_block_insert_with_doublequotes", ansible_zos_module,TEST_ENV, - TEST_INFO["test_uss_block_insert_with_doublequotes"], - TEST_INFO["expected"]["test_uss_block_insert_with_doublequotes"]) - TEST_ENV["TEST_CONT"] = TEST_CONTENT + hosts = ansible_zos_module + params = dict(insertafter="sleep 30;", block='cat \"//OMVSADMI.CAT\"\ncat \"//OMVSADM.COPYMEM.TESTS\" > test.txt', marker="// {mark} ANSIBLE MANAGED BLOCK", state="present") + full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] + content = TEST_CONTENT_DOUBLEQUOTES + try: + set_uss_environment(ansible_zos_module, content, full_path) + params["path"] = full_path + results = hosts.all.zos_blockinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat {0}".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_DOUBLE_QUOTES + finally: + remove_uss_environment(ansible_zos_module) + @pytest.mark.uss def test_uss_block_insertafter_eof_with_backup(ansible_zos_module): + hosts = ansible_zos_module + params = dict(insertafter="EOF", block="export ZOAU_ROOT\nexport ZOAU_HOME\nexport ZOAU_DIR", state="present", backup=True) + full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] + content = TEST_CONTENT try: - backup_name = USS_BACKUP_FILE - uss_result = UssGeneral( - "test_uss_block_insertafter_eof_with_backup", ansible_zos_module, - TEST_ENV, TEST_INFO["test_uss_block_insertafter_eof_with_backup"], - TEST_INFO["expected"]["test_uss_block_insertafter_eof_defaultmarker"]) - for result in uss_result.contacted.values(): + set_uss_environment(ansible_zos_module, content, full_path) + params["path"] = full_path + results = hosts.all.zos_blockinfile(**params) + for result in results.contacted.values(): backup_name = result.get("backup_name") + assert result.get("changed") == 1 assert backup_name is not None + results = hosts.all.shell(cmd="cat {0}".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_INSERTAFTER_EOF finally: ansible_zos_module.all.file(path=backup_name, state="absent") + remove_uss_environment(ansible_zos_module) @pytest.mark.uss def test_uss_block_insertafter_eof_with_backup_name(ansible_zos_module): + hosts = ansible_zos_module + params = dict(insertafter="EOF", block="export ZOAU_ROOT\nexport ZOAU_HOME\nexport ZOAU_DIR", state="present", backup=True, backup_name=USS_BACKUP_FILE) + full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] + content = TEST_CONTENT try: - uss_result = UssGeneral( - "test_uss_block_insertafter_eof_with_backup_name", ansible_zos_module, - TEST_ENV, TEST_INFO["test_uss_block_insertafter_eof_with_backup_name"], - TEST_INFO["expected"]["test_uss_block_insertafter_eof_defaultmarker"]) - for result in uss_result.contacted.values(): + set_uss_environment(ansible_zos_module, content, full_path) + params["path"] = full_path + results = hosts.all.zos_blockinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 assert result.get("backup_name") == USS_BACKUP_FILE - cmdStr = "cat {0}".format(result.get("backup_name")) - results = ansible_zos_module.all.shell(cmd=cmdStr) - for result in results.contacted.values(): - assert result.get("stdout") == TEST_ENV["TEST_CONT"] + cmdStr = "cat {0}".format(USS_BACKUP_FILE) + results = ansible_zos_module.all.shell(cmd=cmdStr) + for result in results.contacted.values(): + assert result.get("stdout") == TEST_CONTENT + results = hosts.all.shell(cmd="cat {0}".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_INSERTAFTER_EOF finally: ansible_zos_module.all.file(path=USS_BACKUP_FILE, state="absent") + remove_uss_environment(ansible_zos_module) ######################### @@ -1258,267 +935,430 @@ def test_uss_block_insertafter_eof_with_backup_name(ansible_zos_module): @pytest.mark.ds @pytest.mark.parametrize("dstype", DS_TYPE) -@pytest.mark.parametrize("encoding", ENCODING) -def test_ds_block_insertafter_regex(ansible_zos_module, dstype, encoding): - TEST_ENV["DS_TYPE"] = dstype - TEST_ENV["ENCODING"] = encoding - DsGeneral( - TEST_INFO["test_ds_block_insertafter_regex"]["test_name"], - ansible_zos_module, TEST_ENV, - TEST_INFO["test_uss_block_insertafter_regex"], - TEST_INFO["expected"]["test_uss_block_insertafter_regex_defaultmarker"] - ) +def test_ds_block_insertafter_regex(ansible_zos_module, dstype): + hosts = ansible_zos_module + ds_type = dstype + params = dict(insertafter="ZOAU_ROOT=", block="ZOAU_ROOT=/mvsutil-develop_dsed\nZOAU_HOME=\\$ZOAU_ROOT\nZOAU_DIR=\\$ZOAU_ROOT", state="present") + test_name = "DST1" + temp_file = "/tmp/{0}".format(test_name) + ds_name = test_name.upper() + "." + ds_type + content = TEST_CONTENT + try: + ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) + params["path"] = ds_full_name + results = hosts.all.zos_blockinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_INSERTAFTER_REGEX + finally: + remove_ds_environment(ansible_zos_module, ds_name) @pytest.mark.ds @pytest.mark.parametrize("dstype", DS_TYPE) -@pytest.mark.parametrize("encoding", ENCODING) -def test_ds_block_insertbefore_regex(ansible_zos_module, dstype, encoding): - TEST_ENV["DS_TYPE"] = dstype - TEST_ENV["ENCODING"] = encoding - DsGeneral( - TEST_INFO["test_ds_block_insertbefore_regex"]["test_name"], - ansible_zos_module, TEST_ENV, - TEST_INFO["test_uss_block_insertbefore_regex"], - TEST_INFO["expected"]["test_uss_block_insertbefore_regex_defaultmarker"] - ) +def test_ds_block_insertbefore_regex(ansible_zos_module, dstype): + hosts = ansible_zos_module + ds_type = dstype + params = dict(insertbefore="ZOAU_ROOT=", block="unset ZOAU_ROOT\nunset ZOAU_HOME\nunset ZOAU_DIR", state="present") + test_name = "DST2" + temp_file = "/tmp/{0}".format(test_name) + ds_name = test_name.upper() + "." + ds_type + content = TEST_CONTENT + try: + ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) + params["path"] = ds_full_name + results = hosts.all.zos_blockinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_INSERTBEFORE_REGEX + finally: + remove_ds_environment(ansible_zos_module, ds_name) @pytest.mark.ds @pytest.mark.parametrize("dstype", DS_TYPE) -@pytest.mark.parametrize("encoding", ENCODING) -def test_ds_block_insertafter_eof(ansible_zos_module, dstype, encoding): - TEST_ENV["DS_TYPE"] = dstype - TEST_ENV["ENCODING"] = encoding - DsGeneral( - TEST_INFO["test_ds_block_insertafter_eof"]["test_name"], - ansible_zos_module, TEST_ENV, - TEST_INFO["test_uss_block_insertafter_eof"], - TEST_INFO["expected"]["test_uss_block_insertafter_eof_defaultmarker"] - ) +def test_ds_block_insertafter_eof(ansible_zos_module, dstype): + hosts = ansible_zos_module + ds_type = dstype + params = dict(insertafter="EOF", block="export ZOAU_ROOT\nexport ZOAU_HOME\nexport ZOAU_DIR", state="present") + test_name = "DST3" + temp_file = "/tmp/{0}".format(test_name) + ds_name = test_name.upper() + "." + ds_type + content = TEST_CONTENT + try: + ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) + params["path"] = ds_full_name + results = hosts.all.zos_blockinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_INSERTAFTER_EOF + finally: + remove_ds_environment(ansible_zos_module, ds_name) @pytest.mark.ds @pytest.mark.parametrize("dstype", DS_TYPE) -@pytest.mark.parametrize("encoding", ENCODING) -def test_ds_block_insertbefore_bof(ansible_zos_module, dstype, encoding): - TEST_ENV["DS_TYPE"] = dstype - TEST_ENV["ENCODING"] = encoding - DsGeneral( - TEST_INFO["test_ds_block_insertbefore_bof"]["test_name"], - ansible_zos_module, TEST_ENV, - TEST_INFO["test_uss_block_insertbefore_bof"], - TEST_INFO["expected"]["test_uss_block_insertbefore_bof_defaultmarker"] - ) +def test_ds_block_insertbefore_bof(ansible_zos_module, dstype): + hosts = ansible_zos_module + ds_type = dstype + params = dict(insertbefore="BOF", block="# this is file is for setting env vars", state="present") + test_name = "DST4" + temp_file = "/tmp/{0}".format(test_name) + ds_name = test_name.upper() + "." + ds_type + content = TEST_CONTENT + try: + ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) + params["path"] = ds_full_name + results = hosts.all.zos_blockinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_INSERTBEFORE_BOF + finally: + remove_ds_environment(ansible_zos_module, ds_name) @pytest.mark.ds @pytest.mark.parametrize("dstype", DS_TYPE) -@pytest.mark.parametrize("encoding", ENCODING) -def test_ds_block_replace_insertafter_regex(ansible_zos_module, dstype, encoding): - TEST_ENV["DS_TYPE"] = dstype - TEST_ENV["ENCODING"] = encoding - TEST_ENV["TEST_CONT"] = TEST_CONTENT_DEFAULTMARKER - DsGeneral( - TEST_INFO["test_ds_block_insertafter_regex"]["test_name"], - ansible_zos_module, TEST_ENV, - TEST_INFO["test_uss_block_replace_insertafter_regex"], - TEST_INFO["expected"]["test_uss_block_replace_insertafter_regex_defaultmarker"] - ) - TEST_ENV["TEST_CONT"] = TEST_CONTENT +def test_ds_block_replace_insertafter_regex(ansible_zos_module, dstype): + hosts = ansible_zos_module + ds_type = dstype + params = dict(insertafter="PYTHON_HOME=", block="ZOAU_ROOT=/mvsutil-develop_dsed\nZOAU_HOME=\\$ZOAU_ROOT\nZOAU_DIR=\\$ZOAU_ROOT", state="present") + test_name = "DST5" + temp_file = "/tmp/{0}".format(test_name) + ds_name = test_name.upper() + "." + ds_type + content = TEST_CONTENT_DEFAULTMARKER + try: + ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) + params["path"] = ds_full_name + results = hosts.all.zos_blockinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_REPLACE_INSERTAFTER + finally: + remove_ds_environment(ansible_zos_module, ds_name) @pytest.mark.ds @pytest.mark.parametrize("dstype", DS_TYPE) -@pytest.mark.parametrize("encoding", ENCODING) -def test_ds_block_replace_insertbefore_regex(ansible_zos_module, dstype, encoding): - TEST_ENV["DS_TYPE"] = dstype - TEST_ENV["ENCODING"] = encoding - TEST_ENV["TEST_CONT"] = TEST_CONTENT_DEFAULTMARKER - DsGeneral( - TEST_INFO["test_ds_block_insertbefore_regex"]["test_name"], - ansible_zos_module, TEST_ENV, - TEST_INFO["test_uss_block_replace_insertbefore_regex"], - TEST_INFO["expected"]["test_uss_block_replace_insertbefore_regex_defaultmarker"] - ) - TEST_ENV["TEST_CONT"] = TEST_CONTENT +def test_ds_block_replace_insertbefore_regex(ansible_zos_module, dstype): + hosts = ansible_zos_module + ds_type = dstype + params = dict(insertbefore="PYTHON_HOME=", block="unset ZOAU_ROOT\nunset ZOAU_HOME\nunset ZOAU_DIR", state="present") + test_name = "DST6" + temp_file = "/tmp/{0}".format(test_name) + ds_name = test_name.upper() + "." + ds_type + content = TEST_CONTENT_DEFAULTMARKER + try: + ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) + params["path"] = ds_full_name + results = hosts.all.zos_blockinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_REPLACE_INSERTBEFORE + finally: + remove_ds_environment(ansible_zos_module, ds_name) @pytest.mark.ds @pytest.mark.parametrize("dstype", DS_TYPE) -@pytest.mark.parametrize("encoding", ENCODING) -def test_ds_block_replace_insertafter_eof(ansible_zos_module, dstype, encoding): - TEST_ENV["DS_TYPE"] = dstype - TEST_ENV["ENCODING"] = encoding - TEST_ENV["TEST_CONT"] = TEST_CONTENT_DEFAULTMARKER - DsGeneral( - TEST_INFO["test_ds_block_insertafter_eof"]["test_name"], - ansible_zos_module, TEST_ENV, - TEST_INFO["test_uss_block_replace_insertafter_eof"], - TEST_INFO["expected"]["test_uss_block_replace_insertafter_eof_defaultmarker"] - ) - TEST_ENV["TEST_CONT"] = TEST_CONTENT +def test_ds_block_replace_insertafter_eof(ansible_zos_module, dstype): + hosts = ansible_zos_module + ds_type = dstype + params = dict(insertafter="EOF", block="export ZOAU_ROOT\nexport ZOAU_HOME\nexport ZOAU_DIR", state="present") + test_name = "DST7" + temp_file = "/tmp/{0}".format(test_name) + ds_name = test_name.upper() + "." + ds_type + content = TEST_CONTENT_DEFAULTMARKER + try: + ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) + params["path"] = ds_full_name + results = hosts.all.zos_blockinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_INSERTAFTER_EOF + finally: + remove_ds_environment(ansible_zos_module, ds_name) @pytest.mark.ds @pytest.mark.parametrize("dstype", DS_TYPE) -@pytest.mark.parametrize("encoding", ENCODING) -def test_ds_block_replace_insertbefore_bof(ansible_zos_module, dstype, encoding): - TEST_ENV["DS_TYPE"] = dstype - TEST_ENV["ENCODING"] = encoding - TEST_ENV["TEST_CONT"] = TEST_CONTENT_DEFAULTMARKER - DsGeneral( - TEST_INFO["test_ds_block_insertbefore_bof"]["test_name"], - ansible_zos_module, TEST_ENV, - TEST_INFO["test_uss_block_replace_insertbefore_bof"], - TEST_INFO["expected"]["test_uss_block_replace_insertbefore_bof_defaultmarker"] - ) - TEST_ENV["TEST_CONT"] = TEST_CONTENT +def test_ds_block_replace_insertbefore_bof(ansible_zos_module, dstype): + hosts = ansible_zos_module + ds_type = dstype + params = dict(insertbefore="BOF", block="# this is file is for setting env vars", state="present") + test_name = "DST8" + temp_file = "/tmp/{0}".format(test_name) + ds_name = test_name.upper() + "." + ds_type + content = TEST_CONTENT_DEFAULTMARKER + try: + ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) + params["path"] = ds_full_name + results = hosts.all.zos_blockinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_INSERTBEFORE_BOF + finally: + remove_ds_environment(ansible_zos_module, ds_name) @pytest.mark.ds @pytest.mark.parametrize("dstype", DS_TYPE) -@pytest.mark.parametrize("encoding", ENCODING) -def test_ds_block_absent(ansible_zos_module, dstype, encoding): - TEST_ENV["DS_TYPE"] = dstype - TEST_ENV["ENCODING"] = encoding - TEST_ENV["TEST_CONT"] = TEST_CONTENT_DEFAULTMARKER - DsGeneral( - TEST_INFO["test_ds_block_absent"]["test_name"], ansible_zos_module, - TEST_ENV, TEST_INFO["test_uss_block_absent"], - TEST_INFO["expected"]["test_uss_block_absent"] - ) - TEST_ENV["TEST_CONT"] = TEST_CONTENT +def test_ds_block_absent(ansible_zos_module, dstype): + hosts = ansible_zos_module + ds_type = dstype + params = dict(block="", state="absent") + test_name = "DST9" + temp_file = "/tmp/{0}".format(test_name) + ds_name = test_name.upper() + "." + ds_type + content = TEST_CONTENT_DEFAULTMARKER + try: + ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) + params["path"] = ds_full_name + results = hosts.all.zos_blockinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_ABSENT + finally: + remove_ds_environment(ansible_zos_module, ds_name) @pytest.mark.ds -@pytest.mark.parametrize("dstype", DS_TYPE) -@pytest.mark.parametrize("encoding", ENCODING) -def test_ds_tmp_hlq_option(ansible_zos_module, dstype, encoding): - TEST_ENV["DS_TYPE"] = dstype - TEST_ENV["ENCODING"] = encoding - TEST_ENV["TEST_CONT"] = TEST_CONTENT_DEFAULTMARKER - test_name = "T6" +def test_ds_tmp_hlq_option(ansible_zos_module): + # This TMPHLQ only works with sequential datasets + hosts = ansible_zos_module + ds_type = "SEQ" + params=dict(insertafter="EOF", block="export ZOAU_ROOT\n", state="present", backup=True, tmp_hlq="TMPHLQ") kwargs = dict(backup_name=r"TMPHLQ\..") - DsGeneralResultKeyMatchesRegex( - test_name, ansible_zos_module, - TEST_ENV, TEST_INFO["test_ds_block_tmp_hlq_option"], - **kwargs - ) - TEST_ENV["TEST_CONT"] = TEST_CONTENT - - -@pytest.mark.ds -@pytest.mark.parametrize("dstype", DS_TYPE) -@pytest.mark.parametrize("encoding", ENCODING) -def test_ds_block_insert_with_force_option_as_true(ansible_zos_module, dstype, encoding): - TEST_ENV["DS_TYPE"] = dstype - TEST_ENV["ENCODING"] = encoding - DsGeneral( - "T6", - ansible_zos_module, TEST_ENV, - TEST_INFO["test_ds_block_insert_with_force_option_as_true"], - TEST_INFO["expected"]["test_uss_block_insertafter_eof_defaultmarker"] - ) - - -@pytest.mark.ds -@pytest.mark.parametrize("dstype", DS_TYPE) -@pytest.mark.parametrize("encoding", ENCODING) -def test_ds_block_absent_with_force_option_as_true(ansible_zos_module, dstype, encoding): - TEST_ENV["DS_TYPE"] = dstype - TEST_ENV["ENCODING"] = encoding - TEST_ENV["TEST_CONT"] = TEST_CONTENT_DEFAULTMARKER - DsGeneral( - "T7", ansible_zos_module, - TEST_ENV, TEST_INFO["test_ds_block_absent_with_force_option_as_true"], - TEST_INFO["expected"]["test_uss_block_absent"] - ) - TEST_ENV["TEST_CONT"] = TEST_CONTENT + test_name = "DST10" + temp_file = "/tmp/zos_lineinfile/" + test_name + content = TEST_CONTENT + try: + hosts.all.shell(cmd="mkdir -p {0}".format("/tmp/zos_lineinfile/")) + results = hosts.all.shell(cmd='hlq') + for result in results.contacted.values(): + hlq = result.get("stdout") + if len(hlq) > 8: + hlq = hlq[:8] + ds_full_name = hlq + "." + test_name.upper() + "." + ds_type + hosts.all.zos_data_set(name=ds_full_name, type=ds_type, replace=True) + hosts.all.shell(cmd="echo \"{0}\" > {1}".format(content, temp_file)) + cmdStr = "cp {0} \"//'{1}'\" ".format(quote(temp_file), ds_full_name) + hosts.all.shell(cmd=cmdStr) + hosts.all.shell(cmd="rm -rf " + "/tmp/zos_lineinfile/") + results = hosts.all.shell(cmd="cat \"//'{0}'\" | wc -l ".format(ds_full_name)) + for result in results.contacted.values(): + print(result) + assert int(result.get("stdout")) != 0 + params["path"] = ds_full_name + results = hosts.all.zos_blockinfile(**params) + for result in results.contacted.values(): + for key in kwargs: + assert re.match(kwargs.get(key), result.get(key)) + finally: + hosts.all.zos_data_set(name=ds_full_name, state="absent") @pytest.mark.ds @pytest.mark.parametrize("dstype", DS_TYPE) -@pytest.mark.parametrize("encoding", ENCODING) -def test_ds_block_insert_with_force_option_as_false(ansible_zos_module, dstype, encoding): - TEST_ENV["DS_TYPE"] = dstype - TEST_ENV["ENCODING"] = encoding - DsGeneral( - "T8", ansible_zos_module, - TEST_ENV, TEST_INFO["test_ds_block_insert_with_force_option_as_false"], - TEST_INFO["expected"]["test_uss_block_insertafter_eof_defaultmarker"] - ) +def test_ds_block_insert_with_indentation_level_specified(ansible_zos_module, dstype): + hosts = ansible_zos_module + ds_type = dstype + params = dict(insertafter="EOF", block="export ZOAU_ROOT\nexport ZOAU_HOME\nexport ZOAU_DIR", state="present", indentation=16) + test_name = "DST11" + temp_file = "/tmp/{0}".format(test_name) + ds_name = test_name.upper() + "." + ds_type + content = TEST_CONTENT + try: + ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) + params["path"] = ds_full_name + results = hosts.all.zos_blockinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_INSERT_WITH_INDENTATION + finally: + remove_ds_environment(ansible_zos_module, ds_name) @pytest.mark.ds @pytest.mark.parametrize("dstype", DS_TYPE) -@pytest.mark.parametrize("encoding", ENCODING) -def test_ds_block_absent_with_force_option_as_false(ansible_zos_module, dstype, encoding): - TEST_ENV["DS_TYPE"] = dstype - TEST_ENV["ENCODING"] = encoding - TEST_ENV["TEST_CONT"] = TEST_CONTENT_DEFAULTMARKER - DsGeneral( - "T9", ansible_zos_module, - TEST_ENV, TEST_INFO["test_ds_block_absent_with_force_option_as_false"], - TEST_INFO["expected"]["test_uss_block_absent"] - ) - TEST_ENV["TEST_CONT"] = TEST_CONTENT +@pytest.mark.parametrize("backup_name", BACKUP_OPTIONS) +def test_ds_block_insertafter_eof_with_backup(ansible_zos_module, dstype, backup_name): + hosts = ansible_zos_module + ds_type = dstype + params = dict(block="export ZOAU_ROOT\nexport ZOAU_HOME\nexport ZOAU_DIR", state="present", backup=True) + if backup_name: + params["backup_name"] = backup_name + test_name = "DST12" + temp_file = "/tmp/{0}".format(test_name) + ds_name = test_name.upper() + "." + ds_type + content = TEST_CONTENT + try: + ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) + params["path"] = ds_full_name + results = hosts.all.zos_blockinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + if backup_name: + backup_ds_name = result.get("backup_name") + assert backup_ds_name is not None + else: + backup_ds_name = result.get("backup_name") + assert backup_ds_name is not None + results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_INSERTAFTER_EOF + finally: + remove_ds_environment(ansible_zos_module, ds_name) + if backup_name: + ansible_zos_module.all.zos_data_set(name="BLOCKIF.TEST.BACKUP", state="absent") + ansible_zos_module.all.zos_data_set(name=backup_ds_name, state="absent") + else: + ansible_zos_module.all.zos_data_set(name=backup_ds_name, state="absent") -@pytest.mark.ds -@pytest.mark.parametrize("dstype", DS_TYPE) -@pytest.mark.parametrize("encoding", ENCODING) -def test_ds_block_insert_with_indentation_level_specified(ansible_zos_module, dstype, encoding): - TEST_ENV["DS_TYPE"] = dstype - TEST_ENV["ENCODING"] = encoding - DsGeneral( - TEST_INFO["test_ds_block_insert_with_indentation_level_specified"]["test_name"], - ansible_zos_module, TEST_ENV, - TEST_INFO["test_uss_block_insert_with_indentation_level_specified"], - TEST_INFO["expected"]["test_uss_block_insert_with_indentation_level_specified"] - ) - @pytest.mark.ds @pytest.mark.parametrize("dstype", DS_TYPE) -@pytest.mark.parametrize("encoding", ENCODING) -@pytest.mark.parametrize("backup_name", BACKUP_OPTIONS) -def test_ds_block_insertafter_eof_with_backup(ansible_zos_module, dstype, encoding, backup_name): +def test_ds_block_insertafter_regex_force(ansible_zos_module, dstype): + hosts = ansible_zos_module + ds_type = dstype + params = dict(path="",insertafter="ZOAU_ROOT=", block="ZOAU_ROOT=/mvsutil-develop_dsed\nZOAU_HOME=\\$ZOAU_ROOT\nZOAU_DIR=\\$ZOAU_ROOT", state="present", force=True) + MEMBER_1, MEMBER_2 = "MEM1", "MEM2" + TEMP_FILE = "/tmp/{0}".format(MEMBER_2) + content = TEST_CONTENT + if ds_type == "SEQ": + params["path"] = DEFAULT_DATA_SET_NAME+".{0}".format(MEMBER_2) + else: + params["path"] = DEFAULT_DATA_SET_NAME+"({0})".format(MEMBER_2) try: - backup_ds_name = MVS_BACKUP_DS - TEST_ENV["DS_TYPE"] = dstype - TEST_ENV["ENCODING"] = encoding - TEST_INFO["test_ds_block_insertafter_eof_with_backup"]["backup_name"] = backup_name - ds_result = DsGeneral( - "T10", - ansible_zos_module, TEST_ENV, - TEST_INFO["test_ds_block_insertafter_eof_with_backup"], - TEST_INFO["expected"]["test_uss_block_insertafter_eof_defaultmarker"] + # set up: + hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="present", type=ds_type, replace=True) + hosts.all.shell(cmd="echo \"{0}\" > {1}".format(content, TEMP_FILE)) + hosts.all.zos_data_set( + batch=[ + { "name": DEFAULT_DATA_SET_NAME + "({0})".format(MEMBER_1), + "type": "member", "state": "present", "replace": True, }, + { "name": params["path"], "type": "member", + "state": "present", "replace": True, }, + ] ) - for result in ds_result.contacted.values(): - backup_ds_name = result.get("backup_name") - assert backup_ds_name is not None - if encoding == 'IBM-1047': - cmdStr = "cat \"//'{0}'\" ".format(backup_ds_name) - results = ansible_zos_module.all.shell(cmd=cmdStr) - print(vars(results)) - for result in results.contacted.values(): - assert result.get("stdout") == TEST_ENV["TEST_CONT"] + # write memeber to verify cases + if ds_type in ["PDS", "PDSE"]: + cmdStr = "cp -CM {0} \"//'{1}'\"".format(quote(TEMP_FILE), params["path"]) + else: + cmdStr = "cp {0} \"//'{1}'\" ".format(quote(TEMP_FILE), params["path"]) + hosts.all.shell(cmd=cmdStr) + results = hosts.all.shell(cmd="cat \"//'{0}'\" | wc -l ".format(params["path"])) + for result in results.contacted.values(): + assert int(result.get("stdout")) != 0 + # copy/compile c program and copy jcl to hold data set lock for n seconds in background(&) + hosts.all.zos_copy(content=c_pgm, dest='/tmp/disp_shr/pdse-lock.c', force=True) + hosts.all.zos_copy( + content=call_c_jcl.format(DEFAULT_DATA_SET_NAME, MEMBER_1), + dest='/tmp/disp_shr/call_c_pgm.jcl', + force=True + ) + hosts.all.shell(cmd="xlc -o pdse-lock pdse-lock.c", chdir="/tmp/disp_shr/") + hosts.all.shell(cmd="submit call_c_pgm.jcl", chdir="/tmp/disp_shr/") + time.sleep(5) + # call lineinfile to see results + results = hosts.all.zos_blockinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == True + results = hosts.all.shell(cmd=r"""cat "//'{0}'" """.format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_INSERTAFTER_REGEX finally: - ansible_zos_module.all.zos_data_set(name=backup_ds_name, state="absent") + hosts.all.shell(cmd="rm -rf " + TEMP_FILE) + ps_list_res = hosts.all.shell(cmd="ps -e | grep -i 'pdse-lock'") + pid = list(ps_list_res.contacted.values())[0].get('stdout').strip().split(' ')[0] + hosts.all.shell(cmd="kill 9 {0}".format(pid.strip())) + hosts.all.shell(cmd='rm -r /tmp/disp_shr') + hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") +######################### +# Encoding tests +######################### +@pytest.mark.uss +@pytest.mark.parametrize("encoding", ENCODING) +def test_uss_encoding(ansible_zos_module, encoding): + hosts = ansible_zos_module + insert_data = "Insert this string" + params = dict(insertafter="SIMPLE", block=insert_data, state="present") + params["encoding"] = encoding + full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] + content = "SIMPLE LINE TO VERIFY" + try: + hosts.all.shell(cmd="mkdir -p {0}".format(TEST_FOLDER_BLOCKINFILE)) + hosts.all.file(path=full_path, state="touch") + hosts.all.shell(cmd="echo \"{0}\" > {1}".format(content, full_path)) + hosts.all.zos_encode(src=full_path, dest=full_path, from_encoding="IBM-1047", to_encoding=params["encoding"]) + params["path"] = full_path + results = hosts.all.zos_blockinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat {0}".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_ENCODING + finally: + remove_uss_environment(ansible_zos_module) @pytest.mark.ds @pytest.mark.parametrize("dstype", DS_TYPE) -def test_ds_block_insertafter_regex_force(ansible_zos_module, dstype): - TEST_ENV["DS_TYPE"] = dstype - DsGeneralForce( - ansible_zos_module, TEST_ENV, - TEST_INFO["test_ds_block_insertafter_regex_force"], - TEST_INFO["expected"]["test_uss_block_insertafter_regex_defaultmarker"] - ) - - +@pytest.mark.parametrize("encoding", ["IBM-1047"]) +def test_ds_encoding(ansible_zos_module, encoding, dstype): + hosts = ansible_zos_module + ds_type = dstype + insert_data = "Insert this string" + params = dict(insertafter="SIMPLE", block=insert_data, state="present") + params["encoding"] = encoding + test_name = "DST13" + temp_file = "/tmp/{0}".format(test_name) + ds_name = test_name.upper() + "." + ds_type + content = "SIMPLE LINE TO VERIFY" + try: + hosts.all.shell(cmd="echo \"{0}\" > {1}".format(content, temp_file)) + hosts.all.zos_encode(src=temp_file, dest=temp_file, from_encoding="IBM-1047", to_encoding=params["encoding"]) + hosts.all.zos_data_set(name=ds_name, type=ds_type) + if ds_type in ["PDS", "PDSE"]: + ds_full_name = ds_name + "(MEM)" + hosts.all.zos_data_set(name=ds_full_name, state="present", type="member") + cmdStr = "cp -CM {0} \"//'{1}'\"".format(quote(temp_file), ds_full_name) + else: + ds_full_name = ds_name + cmdStr = "cp {0} \"//'{1}'\" ".format(quote(temp_file), ds_full_name) + hosts.all.shell(cmd=cmdStr) + hosts.all.shell(cmd="rm -rf " + temp_file) + params["path"] = ds_full_name + results = hosts.all.zos_blockinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + hosts.all.zos_encode(src=ds_full_name, dest=ds_full_name, from_encoding=params["encoding"], to_encoding="IBM-1047") + results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_ENCODING + finally: + remove_ds_environment(ansible_zos_module, ds_name) ######################### # Negative tests ######################### @@ -1527,34 +1367,43 @@ def test_ds_block_insertafter_regex_force(ansible_zos_module, dstype): @pytest.mark.ds def test_not_exist_ds_block_insertafter_regex(ansible_zos_module): hosts = ansible_zos_module - test_info = TEST_INFO["test_uss_block_insertafter_regex"] - test_info["path"] = "BIFTEST.NOTEXIST.SEQ" - results = hosts.all.zos_blockinfile(**test_info) + params = dict(insertafter="ZOAU_ROOT=", block="ZOAU_ROOT=/mvsutil-develop_dsed\nZOAU_HOME=\\$ZOAU_ROOT\nZOAU_DIR=\\$ZOAU_ROOT", state="present") + params["path"] = "BIFTEST.NOTEXIST.SEQ" + results = hosts.all.zos_blockinfile(**params) for result in results.contacted.values(): assert "does NOT exist" in result.get("msg") @pytest.mark.ds def test_ds_block_insertafter_nomatch_eof_insert(ansible_zos_module): - TEST_ENV["DS_TYPE"] = 'SEQ' - TEST_ENV["ENCODING"] = 'IBM-1047' - TEST_INFO["test_uss_block_insertafter_eof"]["insertafter"] = 'SOME_NON_EXISTING_PATTERN' - DsGeneral( - TEST_INFO["test_ds_block_insertafter_eof"]["test_name"], - ansible_zos_module, TEST_ENV, - TEST_INFO["test_uss_block_insertafter_eof"], - TEST_INFO["expected"]["test_uss_block_insertafter_eof_defaultmarker"] - ) + hosts = ansible_zos_module + ds_type = 'SEQ' + params=dict(insertafter="EOF", block="export ZOAU_ROOT\nexport ZOAU_HOME\nexport ZOAU_DIR", state="present") + params["insertafter"] = 'SOME_NON_EXISTING_PATTERN' + test_name = "DST13" + temp_file = "/tmp/{0}".format(test_name) + ds_name = test_name.upper() + "." + ds_type + content = TEST_CONTENT + try: + ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) + params["path"] = ds_full_name + results = hosts.all.zos_blockinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_INSERTAFTER_EOF + finally: + remove_ds_environment(ansible_zos_module, ds_name) @pytest.mark.ds def test_ds_block_insertafter_regex_wrongmarker(ansible_zos_module): hosts = ansible_zos_module - test_info = TEST_INFO["test_uss_block_insertafter_regex"] - test_info["path"] = "BIFTEST.NOTEXIST.SEQ" - test_info["marker"] = '# MANAGED BLOCK' - results = hosts.all.zos_blockinfile(**test_info) - del test_info["marker"] + params = dict(insertafter="ZOAU_ROOT=", block="ZOAU_ROOT=/mvsutil-develop_dsed\nZOAU_HOME=\\$ZOAU_ROOT\nZOAU_DIR=\\$ZOAU_ROOT", state="present") + params["path"] = "BIFTEST.NOTEXIST.SEQ" + params["marker"] = '# MANAGED BLOCK' + results = hosts.all.zos_blockinfile(**params) for result in results.contacted.values(): assert "marker should have {mark}" in result.get("msg") @@ -1562,18 +1411,74 @@ def test_ds_block_insertafter_regex_wrongmarker(ansible_zos_module): @pytest.mark.ds @pytest.mark.parametrize("dstype", NS_DS_TYPE) def test_ds_not_supported(ansible_zos_module, dstype): - TEST_ENV["DS_TYPE"] = dstype - DsNotSupportedHelper( - TEST_INFO["test_ds_block_insertafter_regex"]["test_name"], ansible_zos_module, - TEST_ENV, TEST_INFO["test_uss_block_insertafter_regex"] - ) + hosts = ansible_zos_module + ds_type = dstype + params = dict(insertafter="ZOAU_ROOT=", block="ZOAU_ROOT=/mvsutil-develop_dsed\nZOAU_HOME=\\$ZOAU_ROOT\nZOAU_DIR=\\$ZOAU_ROOT", state="present") + test_name = "DST14" + ds_name = test_name.upper() + "." + ds_type + try: + results = hosts.all.shell(cmd='hlq') + for result in results.contacted.values(): + hlq = result.get("stdout") + assert len(hlq) <= 8 or hlq != '' + ds_name = test_name.upper() + "." + ds_type + results = hosts.all.zos_data_set(name=ds_name, type=ds_type, replace='yes') + for result in results.contacted.values(): + assert result.get("changed") is True + params["path"] = ds_name + results = hosts.all.zos_blockinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") is False + assert result.get("msg") == "VSAM data set type is NOT supported" + finally: + hosts.all.zos_data_set(name=ds_name, state="absent") @pytest.mark.ds -@pytest.mark.parametrize("dstype", DS_TYPE) +@pytest.mark.parametrize("dstype", ["PDS","PDSE"]) def test_ds_block_insertafter_regex_fail(ansible_zos_module, dstype): - TEST_ENV["DS_TYPE"] = dstype - DsGeneralForceFail( - ansible_zos_module, TEST_ENV, - TEST_INFO["test_ds_block_insertafter_regex_force_fail"], - ) + hosts = ansible_zos_module + ds_type = dstype + params = dict(path="", insertafter="ZOAU_ROOT=", block="ZOAU_ROOT=/mvsutil-develop_dsed\nZOAU_HOME=\\$ZOAU_ROOT\nZOAU_DIR=\\$ZOAU_ROOT", state="present", force=False) + MEMBER_1, MEMBER_2 = "MEM1", "MEM2" + TEMP_FILE = "/tmp/{0}".format(MEMBER_2) + params["path"] = DEFAULT_DATA_SET_NAME+"({0})".format(MEMBER_2) + content = TEST_CONTENT + try: + # set up: + hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="present", type=ds_type, replace=True) + hosts.all.shell(cmd="echo \"{0}\" > {1}".format(content, TEMP_FILE)) + hosts.all.zos_data_set( + batch=[ + { "name": DEFAULT_DATA_SET_NAME + "({0})".format(MEMBER_1), + "type": "member", "state": "present", "replace": True, }, + { "name": params["path"], "type": "member", + "state": "present", "replace": True, }, + ] + ) + cmdStr = "cp -CM {0} \"//'{1}'\"".format(quote(TEMP_FILE), params["path"]) + hosts.all.shell(cmd=cmdStr) + results = hosts.all.shell(cmd="cat \"//'{0}'\" | wc -l ".format(params["path"])) + for result in results.contacted.values(): + assert int(result.get("stdout")) != 0 + # copy/compile c program and copy jcl to hold data set lock for n seconds in background(&) + hosts.all.zos_copy(content=c_pgm, dest='/tmp/disp_shr/pdse-lock.c', force=True) + hosts.all.zos_copy( + content=call_c_jcl.format(DEFAULT_DATA_SET_NAME, MEMBER_1), + dest='/tmp/disp_shr/call_c_pgm.jcl', + force=True + ) + hosts.all.shell(cmd="xlc -o pdse-lock pdse-lock.c", chdir="/tmp/disp_shr/") + hosts.all.shell(cmd="submit call_c_pgm.jcl", chdir="/tmp/disp_shr/") + time.sleep(5) + # call lineinfile to see results + results = hosts.all.zos_blockinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == False + assert result.get("failed") == True + finally: + ps_list_res = hosts.all.shell(cmd="ps -e | grep -i 'pdse-lock'") + pid = list(ps_list_res.contacted.values())[0].get('stdout').strip().split(' ')[0] + hosts.all.shell(cmd="kill 9 {0}".format(pid.strip())) + hosts.all.shell(cmd='rm -r /tmp/disp_shr') + hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") \ No newline at end of file diff --git a/tests/functional/modules/test_zos_lineinfile_func.py b/tests/functional/modules/test_zos_lineinfile_func.py index 85f4184af..6a29c79b8 100644 --- a/tests/functional/modules/test_zos_lineinfile_func.py +++ b/tests/functional/modules/test_zos_lineinfile_func.py @@ -12,618 +12,238 @@ # limitations under the License. from __future__ import absolute_import, division, print_function -from ibm_zos_core.tests.helpers.zos_lineinfile_helper import ( - UssGeneral, - DsGeneral, - DsNotSupportedHelper, - DsGeneralResultKeyMatchesRegex, - DsGeneralForceFail, - DsGeneralForce, -) -import os -import sys +from shellescape import quote +import time +import re import pytest +import inspect __metaclass__ = type -TEST_CONTENT = """if [ -z STEPLIB ] && tty -s; +DEFAULT_DATA_SET_NAME = "USER.PRIVATE.TESTDS" +TEST_FOLDER_LINEINFILE = "/tmp/ansible-core-tests/zos_lineinfile/" + +c_pgm="""#include <stdio.h> +#include <stdlib.h> +#include <string.h> +int main(int argc, char** argv) +{ + char dsname[ strlen(argv[1]) + 4]; + sprintf(dsname, "//'%s'", argv[1]); + FILE* member; + member = fopen(dsname, "rb,type=record"); + sleep(300); + fclose(member); + return 0; +} +""" + +call_c_jcl="""//PDSELOCK JOB MSGCLASS=A,MSGLEVEL=(1,1),NOTIFY=&SYSUID,REGION=0M +//LOCKMEM EXEC PGM=BPXBATCH +//STDPARM DD * +SH /tmp/disp_shr/pdse-lock '{0}({1})' +//STDIN DD DUMMY +//STDOUT DD SYSOUT=* +//STDERR DD SYSOUT=* +//""" + +TEST_CONTENT="""if [ -z STEPLIB ] && tty -s; then export STEPLIB=none exec -a 0 SHELL fi -TZ=PST8PDT -export TZ -LANG=C -export LANG -readonly LOGNAME PATH=/usr/lpp/zoautil/v100/bin:/usr/lpp/rsusr/ported/bin:/bin:/var/bin export PATH -LIBPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -export LIBPATH -NLSPATH=/usr/lib/nls/msg/%L/%N -export NLSPATH -MANPATH=/usr/man/%L -export MANPATH -MAIL=/usr/mail/LOGNAME -export MAIL -umask 022 ZOAU_ROOT=/usr/lpp/zoautil/v100 -ZOAUTIL_DIR=/usr/lpp/zoautil/v100 -PYTHONPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -PKG_CONFIG_PATH=/usr/lpp/izoda/v110/anaconda/lib/pkgconfig -PYTHON_HOME=/usr/lpp/izoda/v110/anaconda -_BPXK_AUTOCVT=ON export ZOAU_ROOT -export ZOAUTIL_DIR -export ZOAUTIL_DIR -export PYTHONPATH -export PKG_CONFIG_PATH -export PYTHON_HOME export _BPXK_AUTOCVT""" -# supported data set types -DS_TYPE = ['SEQ', 'PDS', 'PDSE'] -# not supported data set types -NS_DS_TYPE = ['ESDS', 'RRDS', 'LDS'] -ENCODING = ['IBM-1047', 'ISO8859-1', 'UTF-8'] - -TEST_ENV = dict( - TEST_CONT=TEST_CONTENT, - TEST_DIR="/tmp/zos_lineinfile/", - TEST_FILE="", - DS_NAME="", - DS_TYPE="", - ENCODING="", -) - -TEST_INFO = dict( - test_uss_line_replace=dict( - path="", regexp="ZOAU_ROOT=", line="ZOAU_ROOT=/mvsutil-develop_dsed", - state="present"), - test_uss_line_insertafter_regex=dict( - insertafter="ZOAU_ROOT=", line="ZOAU_ROOT=/mvsutil-develop_dsed", - state="present"), - test_uss_line_insertbefore_regex=dict( - insertbefore="ZOAU_ROOT=", line="unset ZOAU_ROOT", state="present"), - test_uss_line_insertafter_eof=dict( - insertafter="EOF", line="export ZOAU_ROOT", state="present"), - test_uss_line_insertbefore_bof=dict( - insertbefore="BOF", line="# this is file is for setting env vars", - state="present"), - test_uss_line_replace_match_insertafter_ignore=dict( - regexp="ZOAU_ROOT=", insertafter="PATH=", - line="ZOAU_ROOT=/mvsutil-develop_dsed", state="present"), - test_uss_line_replace_match_insertbefore_ignore=dict( - regexp="ZOAU_ROOT=", insertbefore="PATH=", line="unset ZOAU_ROOT", - state="present"), - test_uss_line_replace_nomatch_insertafter_match=dict( - regexp="abcxyz", insertafter="ZOAU_ROOT=", - line="ZOAU_ROOT=/mvsutil-develop_dsed", state="present"), - test_uss_line_replace_nomatch_insertbefore_match=dict( - regexp="abcxyz", insertbefore="ZOAU_ROOT=", line="unset ZOAU_ROOT", - state="present"), - test_uss_line_replace_nomatch_insertafter_nomatch=dict( - regexp="abcxyz", insertafter="xyzijk", - line="ZOAU_ROOT=/mvsutil-develop_dsed", state="present"), - test_uss_line_replace_nomatch_insertbefore_nomatch=dict( - regexp="abcxyz", insertbefore="xyzijk", line="unset ZOAU_ROOT", - state="present"), - test_uss_line_absent=dict(regexp="ZOAU_ROOT=", line="", state="absent"), - test_ds_line_replace=dict(test_name="T1"), - test_ds_line_insertafter_regex=dict(test_name="T2"), - test_ds_line_insertbefore_regex=dict(test_name="T3"), - test_ds_line_insertafter_eof=dict(test_name="T4"), - test_ds_line_insertbefore_bof=dict(test_name="T5"), - test_ds_line_replace_match_insertafter_ignore=dict(test_name="T6"), - test_ds_line_replace_match_insertbefore_ignore=dict(test_name="T7"), - test_ds_line_replace_nomatch_insertafter_match=dict(test_name="T8"), - test_ds_line_replace_nomatch_insertbefore_match=dict(test_name="T9"), - test_ds_line_replace_nomatch_insertafter_nomatch=dict(test_name="T10"), - test_ds_line_replace_nomatch_insertbefore_nomatch=dict(test_name="T11"), - test_ds_line_absent=dict(test_name="T12"), - test_ds_line_tmp_hlq_option=dict(insertafter="EOF", line="export ZOAU_ROOT", state="present", backup=True, tmp_hlq="TMPHLQ"), - test_ds_line_force=dict(path="",insertafter="EOF", line="export ZOAU_ROOT", force=True), - test_ds_line_force_fail=dict(path="",insertafter="EOF", line="export ZOAU_ROOT", force=False), - test_ds_line_replace_force=dict(path="",regexp="ZOAU_ROOT=", line="ZOAU_ROOT=/mvsutil-develop_dsed", - state="present",force=True), - test_ds_line_insertafter_regex_force=dict(path="",insertafter="ZOAU_ROOT=", line="ZOAU_ROOT=/mvsutil-develop_dsed", - state="present",force=True), - test_ds_line_insertbefore_regex_force=dict(path="",insertbefore="ZOAU_ROOT=", line="unset ZOAU_ROOT", state="present",force=True), - test_ds_line_insertbefore_bof_force=dict(path="",insertbefore="BOF", line="# this is file is for setting env vars", - state="present",force=True), - test_ds_line_replace_match_insertafter_ignore_force=dict(path="",regexp="ZOAU_ROOT=", insertafter="PATH=", - line="ZOAU_ROOT=/mvsutil-develop_dsed", state="present",force=True), - test_ds_line_replace_match_insertbefore_ignore_force=dict(path="",regexp="ZOAU_ROOT=", insertbefore="PATH=", line="unset ZOAU_ROOT", - state="present",force=True), - test_ds_line_replace_nomatch_insertafter_match_force=dict(path="",regexp="abcxyz", insertafter="ZOAU_ROOT=", - line="ZOAU_ROOT=/mvsutil-develop_dsed", state="present",force=True), - test_ds_line_replace_nomatch_insertbefore_match_force=dict(path="",regexp="abcxyz", insertbefore="ZOAU_ROOT=", line="unset ZOAU_ROOT", - state="present",force=True), - expected=dict(test_uss_line_replace="""if [ -z STEPLIB ] && tty -s; +EXPECTED_REPLACE="""if [ -z STEPLIB ] && tty -s; then export STEPLIB=none exec -a 0 SHELL fi -TZ=PST8PDT -export TZ -LANG=C -export LANG -readonly LOGNAME PATH=/usr/lpp/zoautil/v100/bin:/usr/lpp/rsusr/ported/bin:/bin:/var/bin export PATH -LIBPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -export LIBPATH -NLSPATH=/usr/lib/nls/msg/%L/%N -export NLSPATH -MANPATH=/usr/man/%L -export MANPATH -MAIL=/usr/mail/LOGNAME -export MAIL -umask 022 ZOAU_ROOT=/mvsutil-develop_dsed -ZOAUTIL_DIR=/usr/lpp/zoautil/v100 -PYTHONPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -PKG_CONFIG_PATH=/usr/lpp/izoda/v110/anaconda/lib/pkgconfig -PYTHON_HOME=/usr/lpp/izoda/v110/anaconda -_BPXK_AUTOCVT=ON export ZOAU_ROOT -export ZOAUTIL_DIR -export ZOAUTIL_DIR -export PYTHONPATH -export PKG_CONFIG_PATH -export PYTHON_HOME -export _BPXK_AUTOCVT""", - test_uss_line_insertafter_regex="""if [ -z STEPLIB ] && tty -s; +export _BPXK_AUTOCVT""" + +EXPECTED_INSERTAFTER_REGEX="""if [ -z STEPLIB ] && tty -s; then export STEPLIB=none exec -a 0 SHELL fi -TZ=PST8PDT -export TZ -LANG=C -export LANG -readonly LOGNAME PATH=/usr/lpp/zoautil/v100/bin:/usr/lpp/rsusr/ported/bin:/bin:/var/bin export PATH -LIBPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -export LIBPATH -NLSPATH=/usr/lib/nls/msg/%L/%N -export NLSPATH -MANPATH=/usr/man/%L -export MANPATH -MAIL=/usr/mail/LOGNAME -export MAIL -umask 022 ZOAU_ROOT=/usr/lpp/zoautil/v100 ZOAU_ROOT=/mvsutil-develop_dsed -ZOAUTIL_DIR=/usr/lpp/zoautil/v100 -PYTHONPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -PKG_CONFIG_PATH=/usr/lpp/izoda/v110/anaconda/lib/pkgconfig -PYTHON_HOME=/usr/lpp/izoda/v110/anaconda -_BPXK_AUTOCVT=ON export ZOAU_ROOT -export ZOAUTIL_DIR -export ZOAUTIL_DIR -export PYTHONPATH -export PKG_CONFIG_PATH -export PYTHON_HOME -export _BPXK_AUTOCVT""", - test_uss_line_insertbefore_regex="""if [ -z STEPLIB ] && tty -s; +export _BPXK_AUTOCVT""" + +EXPECTED_INSERTBEFORE_REGEX="""if [ -z STEPLIB ] && tty -s; then export STEPLIB=none exec -a 0 SHELL fi -TZ=PST8PDT -export TZ -LANG=C -export LANG -readonly LOGNAME PATH=/usr/lpp/zoautil/v100/bin:/usr/lpp/rsusr/ported/bin:/bin:/var/bin export PATH -LIBPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -export LIBPATH -NLSPATH=/usr/lib/nls/msg/%L/%N -export NLSPATH -MANPATH=/usr/man/%L -export MANPATH -MAIL=/usr/mail/LOGNAME -export MAIL -umask 022 unset ZOAU_ROOT ZOAU_ROOT=/usr/lpp/zoautil/v100 -ZOAUTIL_DIR=/usr/lpp/zoautil/v100 -PYTHONPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -PKG_CONFIG_PATH=/usr/lpp/izoda/v110/anaconda/lib/pkgconfig -PYTHON_HOME=/usr/lpp/izoda/v110/anaconda -_BPXK_AUTOCVT=ON export ZOAU_ROOT -export ZOAUTIL_DIR -export ZOAUTIL_DIR -export PYTHONPATH -export PKG_CONFIG_PATH -export PYTHON_HOME -export _BPXK_AUTOCVT""", - test_uss_line_insertafter_eof="""if [ -z STEPLIB ] && tty -s; +export _BPXK_AUTOCVT""" + +EXPECTED_INSERTAFTER_EOF="""if [ -z STEPLIB ] && tty -s; then export STEPLIB=none exec -a 0 SHELL fi -TZ=PST8PDT -export TZ -LANG=C -export LANG -readonly LOGNAME PATH=/usr/lpp/zoautil/v100/bin:/usr/lpp/rsusr/ported/bin:/bin:/var/bin export PATH -LIBPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -export LIBPATH -NLSPATH=/usr/lib/nls/msg/%L/%N -export NLSPATH -MANPATH=/usr/man/%L -export MANPATH -MAIL=/usr/mail/LOGNAME -export MAIL -umask 022 ZOAU_ROOT=/usr/lpp/zoautil/v100 -ZOAUTIL_DIR=/usr/lpp/zoautil/v100 -PYTHONPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -PKG_CONFIG_PATH=/usr/lpp/izoda/v110/anaconda/lib/pkgconfig -PYTHON_HOME=/usr/lpp/izoda/v110/anaconda -_BPXK_AUTOCVT=ON export ZOAU_ROOT -export ZOAUTIL_DIR -export ZOAUTIL_DIR -export PYTHONPATH -export PKG_CONFIG_PATH -export PYTHON_HOME export _BPXK_AUTOCVT -export ZOAU_ROOT""", - test_uss_line_insertbefore_bof="""# this is file is for setting env vars +export ZOAU_ROOT""" + +EXPECTED_INSERTBEFORE_BOF="""# this is file is for setting env vars if [ -z STEPLIB ] && tty -s; then export STEPLIB=none exec -a 0 SHELL fi -TZ=PST8PDT -export TZ -LANG=C -export LANG -readonly LOGNAME PATH=/usr/lpp/zoautil/v100/bin:/usr/lpp/rsusr/ported/bin:/bin:/var/bin export PATH -LIBPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -export LIBPATH -NLSPATH=/usr/lib/nls/msg/%L/%N -export NLSPATH -MANPATH=/usr/man/%L -export MANPATH -MAIL=/usr/mail/LOGNAME -export MAIL -umask 022 ZOAU_ROOT=/usr/lpp/zoautil/v100 -ZOAUTIL_DIR=/usr/lpp/zoautil/v100 -PYTHONPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -PKG_CONFIG_PATH=/usr/lpp/izoda/v110/anaconda/lib/pkgconfig -PYTHON_HOME=/usr/lpp/izoda/v110/anaconda -_BPXK_AUTOCVT=ON export ZOAU_ROOT -export ZOAUTIL_DIR -export ZOAUTIL_DIR -export PYTHONPATH -export PKG_CONFIG_PATH -export PYTHON_HOME -export _BPXK_AUTOCVT""", - test_uss_line_replace_match_insertafter_ignore="""if [ -z STEPLIB ] && tty -s; +export _BPXK_AUTOCVT""" + +EXPECTED_REPLACE_INSERTAFTER_IGNORE="""if [ -z STEPLIB ] && tty -s; then export STEPLIB=none exec -a 0 SHELL fi -TZ=PST8PDT -export TZ -LANG=C -export LANG -readonly LOGNAME PATH=/usr/lpp/zoautil/v100/bin:/usr/lpp/rsusr/ported/bin:/bin:/var/bin export PATH -LIBPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -export LIBPATH -NLSPATH=/usr/lib/nls/msg/%L/%N -export NLSPATH -MANPATH=/usr/man/%L -export MANPATH -MAIL=/usr/mail/LOGNAME -export MAIL -umask 022 ZOAU_ROOT=/mvsutil-develop_dsed -ZOAUTIL_DIR=/usr/lpp/zoautil/v100 -PYTHONPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -PKG_CONFIG_PATH=/usr/lpp/izoda/v110/anaconda/lib/pkgconfig -PYTHON_HOME=/usr/lpp/izoda/v110/anaconda -_BPXK_AUTOCVT=ON export ZOAU_ROOT -export ZOAUTIL_DIR -export ZOAUTIL_DIR -export PYTHONPATH -export PKG_CONFIG_PATH -export PYTHON_HOME -export _BPXK_AUTOCVT""", - test_uss_line_replace_match_insertbefore_ignore="""if [ -z STEPLIB ] && tty -s; +export _BPXK_AUTOCVT""" + +EXPECTED_REPLACE_INSERTBEFORE_IGNORE="""if [ -z STEPLIB ] && tty -s; then export STEPLIB=none exec -a 0 SHELL fi -TZ=PST8PDT -export TZ -LANG=C -export LANG -readonly LOGNAME PATH=/usr/lpp/zoautil/v100/bin:/usr/lpp/rsusr/ported/bin:/bin:/var/bin export PATH -LIBPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -export LIBPATH -NLSPATH=/usr/lib/nls/msg/%L/%N -export NLSPATH -MANPATH=/usr/man/%L -export MANPATH -MAIL=/usr/mail/LOGNAME -export MAIL -umask 022 unset ZOAU_ROOT -ZOAUTIL_DIR=/usr/lpp/zoautil/v100 -PYTHONPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -PKG_CONFIG_PATH=/usr/lpp/izoda/v110/anaconda/lib/pkgconfig -PYTHON_HOME=/usr/lpp/izoda/v110/anaconda -_BPXK_AUTOCVT=ON export ZOAU_ROOT -export ZOAUTIL_DIR -export ZOAUTIL_DIR -export PYTHONPATH -export PKG_CONFIG_PATH -export PYTHON_HOME -export _BPXK_AUTOCVT""", - test_uss_line_replace_nomatch_insertafter_match="""if [ -z STEPLIB ] && tty -s; +export _BPXK_AUTOCVT""" + +EXPECTED_REPLACE_NOMATCH_INSERTAFTER="""if [ -z STEPLIB ] && tty -s; then export STEPLIB=none exec -a 0 SHELL fi -TZ=PST8PDT -export TZ -LANG=C -export LANG -readonly LOGNAME PATH=/usr/lpp/zoautil/v100/bin:/usr/lpp/rsusr/ported/bin:/bin:/var/bin export PATH -LIBPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -export LIBPATH -NLSPATH=/usr/lib/nls/msg/%L/%N -export NLSPATH -MANPATH=/usr/man/%L -export MANPATH -MAIL=/usr/mail/LOGNAME -export MAIL -umask 022 ZOAU_ROOT=/usr/lpp/zoautil/v100 ZOAU_ROOT=/mvsutil-develop_dsed -ZOAUTIL_DIR=/usr/lpp/zoautil/v100 -PYTHONPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -PKG_CONFIG_PATH=/usr/lpp/izoda/v110/anaconda/lib/pkgconfig -PYTHON_HOME=/usr/lpp/izoda/v110/anaconda -_BPXK_AUTOCVT=ON export ZOAU_ROOT -export ZOAUTIL_DIR -export ZOAUTIL_DIR -export PYTHONPATH -export PKG_CONFIG_PATH -export PYTHON_HOME -export _BPXK_AUTOCVT""", - test_uss_line_replace_nomatch_insertbefore_match="""if [ -z STEPLIB ] && tty -s; +export _BPXK_AUTOCVT""" + +EXPECTED_REPLACE_NOMATCH_INSERTBEFORE="""if [ -z STEPLIB ] && tty -s; then export STEPLIB=none exec -a 0 SHELL fi -TZ=PST8PDT -export TZ -LANG=C -export LANG -readonly LOGNAME PATH=/usr/lpp/zoautil/v100/bin:/usr/lpp/rsusr/ported/bin:/bin:/var/bin export PATH -LIBPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -export LIBPATH -NLSPATH=/usr/lib/nls/msg/%L/%N -export NLSPATH -MANPATH=/usr/man/%L -export MANPATH -MAIL=/usr/mail/LOGNAME -export MAIL -umask 022 unset ZOAU_ROOT ZOAU_ROOT=/usr/lpp/zoautil/v100 -ZOAUTIL_DIR=/usr/lpp/zoautil/v100 -PYTHONPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -PKG_CONFIG_PATH=/usr/lpp/izoda/v110/anaconda/lib/pkgconfig -PYTHON_HOME=/usr/lpp/izoda/v110/anaconda -_BPXK_AUTOCVT=ON export ZOAU_ROOT -export ZOAUTIL_DIR -export ZOAUTIL_DIR -export PYTHONPATH -export PKG_CONFIG_PATH -export PYTHON_HOME -export _BPXK_AUTOCVT""", - test_uss_line_replace_nomatch_insertafter_nomatch="""if [ -z STEPLIB ] && tty -s; +export _BPXK_AUTOCVT""" + +EXPECTED_REPLACE_NOMATCH_INSERTAFTER_NOMATCH="""if [ -z STEPLIB ] && tty -s; then export STEPLIB=none exec -a 0 SHELL fi -TZ=PST8PDT -export TZ -LANG=C -export LANG -readonly LOGNAME PATH=/usr/lpp/zoautil/v100/bin:/usr/lpp/rsusr/ported/bin:/bin:/var/bin export PATH -LIBPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -export LIBPATH -NLSPATH=/usr/lib/nls/msg/%L/%N -export NLSPATH -MANPATH=/usr/man/%L -export MANPATH -MAIL=/usr/mail/LOGNAME -export MAIL -umask 022 ZOAU_ROOT=/usr/lpp/zoautil/v100 -ZOAUTIL_DIR=/usr/lpp/zoautil/v100 -PYTHONPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -PKG_CONFIG_PATH=/usr/lpp/izoda/v110/anaconda/lib/pkgconfig -PYTHON_HOME=/usr/lpp/izoda/v110/anaconda -_BPXK_AUTOCVT=ON export ZOAU_ROOT -export ZOAUTIL_DIR -export ZOAUTIL_DIR -export PYTHONPATH -export PKG_CONFIG_PATH -export PYTHON_HOME export _BPXK_AUTOCVT -ZOAU_ROOT=/mvsutil-develop_dsed""", - test_uss_line_replace_nomatch_insertbefore_nomatch="""if [ -z STEPLIB ] && tty -s; +ZOAU_ROOT=/mvsutil-develop_dsed""" + +EXPECTED_REPLACE_NOMATCH_INSERTBEFORE_NOMATCH="""if [ -z STEPLIB ] && tty -s; then export STEPLIB=none exec -a 0 SHELL fi -TZ=PST8PDT -export TZ -LANG=C -export LANG -readonly LOGNAME PATH=/usr/lpp/zoautil/v100/bin:/usr/lpp/rsusr/ported/bin:/bin:/var/bin export PATH -LIBPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -export LIBPATH -NLSPATH=/usr/lib/nls/msg/%L/%N -export NLSPATH -MANPATH=/usr/man/%L -export MANPATH -MAIL=/usr/mail/LOGNAME -export MAIL -umask 022 ZOAU_ROOT=/usr/lpp/zoautil/v100 -ZOAUTIL_DIR=/usr/lpp/zoautil/v100 -PYTHONPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -PKG_CONFIG_PATH=/usr/lpp/izoda/v110/anaconda/lib/pkgconfig -PYTHON_HOME=/usr/lpp/izoda/v110/anaconda -_BPXK_AUTOCVT=ON export ZOAU_ROOT -export ZOAUTIL_DIR -export ZOAUTIL_DIR -export PYTHONPATH -export PKG_CONFIG_PATH -export PYTHON_HOME export _BPXK_AUTOCVT -unset ZOAU_ROOT""", - test_uss_line_absent="""if [ -z STEPLIB ] && tty -s; +unset ZOAU_ROOT""" + +EXPECTED_ABSENT="""if [ -z STEPLIB ] && tty -s; then export STEPLIB=none exec -a 0 SHELL fi -TZ=PST8PDT -export TZ -LANG=C -export LANG -readonly LOGNAME PATH=/usr/lpp/zoautil/v100/bin:/usr/lpp/rsusr/ported/bin:/bin:/var/bin export PATH -LIBPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -export LIBPATH -NLSPATH=/usr/lib/nls/msg/%L/%N -export NLSPATH -MANPATH=/usr/man/%L -export MANPATH -MAIL=/usr/mail/LOGNAME -export MAIL -umask 022 -ZOAUTIL_DIR=/usr/lpp/zoautil/v100 -PYTHONPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -PKG_CONFIG_PATH=/usr/lpp/izoda/v110/anaconda/lib/pkgconfig -PYTHON_HOME=/usr/lpp/izoda/v110/anaconda -_BPXK_AUTOCVT=ON export ZOAU_ROOT -export ZOAUTIL_DIR -export ZOAUTIL_DIR -export PYTHONPATH -export PKG_CONFIG_PATH -export PYTHON_HOME -export _BPXK_AUTOCVT""", - test_uss_line_replace_quoted="""if [ -z STEPLIB ] && tty -s; +export _BPXK_AUTOCVT""" + +EXPECTED_QUOTED="""if [ -z STEPLIB ] && tty -s; then export STEPLIB=none exec -a 0 SHELL fi -TZ=PST8PDT -export TZ -LANG=C -export LANG -readonly LOGNAME PATH=/usr/lpp/zoautil/v100/bin:/usr/lpp/rsusr/ported/bin:/bin:/var/bin export PATH -LIBPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -export LIBPATH -NLSPATH=/usr/lib/nls/msg/%L/%N -export NLSPATH -MANPATH=/usr/man/%L -export MANPATH -MAIL=/usr/mail/LOGNAME -export MAIL -umask 022 ZOAU_ROOT="/mvsutil-develop_dsed" -ZOAUTIL_DIR=/usr/lpp/zoautil/v100 -PYTHONPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -PKG_CONFIG_PATH=/usr/lpp/izoda/v110/anaconda/lib/pkgconfig -PYTHON_HOME=/usr/lpp/izoda/v110/anaconda -_BPXK_AUTOCVT=ON export ZOAU_ROOT -export ZOAUTIL_DIR -export ZOAUTIL_DIR -export PYTHONPATH -export PKG_CONFIG_PATH -export PYTHON_HOME -export _BPXK_AUTOCVT""", - test_ds_line_force="""if [ -z STEPLIB ] && tty -s; -then - export STEPLIB=none - exec -a 0 SHELL -fi -TZ=PST8PDT -export TZ -LANG=C -export LANG -readonly LOGNAME -PATH=/usr/lpp/zoautil/v100/bin:/usr/lpp/rsusr/ported/bin:/bin:/var/bin -export PATH -LIBPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -export LIBPATH -NLSPATH=/usr/lib/nls/msg/%L/%N -export NLSPATH -MANPATH=/usr/man/%L -export MANPATH -MAIL=/usr/mail/LOGNAME -export MAIL -umask 022 -ZOAU_ROOT=/usr/lpp/zoautil/v100 -ZOAUTIL_DIR=/usr/lpp/zoautil/v100 -PYTHONPATH=/usr/lpp/izoda/v110/anaconda/lib:/usr/lpp/zoautil/v100/lib:/lib -PKG_CONFIG_PATH=/usr/lpp/izoda/v110/anaconda/lib/pkgconfig -PYTHON_HOME=/usr/lpp/izoda/v110/anaconda -_BPXK_AUTOCVT=ON -export ZOAU_ROOT -export ZOAUTIL_DIR -export ZOAUTIL_DIR -export PYTHONPATH -export PKG_CONFIG_PATH -export PYTHON_HOME -export _BPXK_AUTOCVT -export ZOAU_ROOT"""), -) +export _BPXK_AUTOCVT""" + +EXPECTED_ENCODING="""SIMPLE LINE TO VERIFY +Insert this string""" +def set_uss_environment(ansible_zos_module, CONTENT, FILE): + hosts = ansible_zos_module + hosts.all.shell(cmd="mkdir -p {0}".format(TEST_FOLDER_LINEINFILE)) + hosts.all.file(path=FILE, state="touch") + hosts.all.shell(cmd="echo \"{0}\" > {1}".format(CONTENT, FILE)) + +def remove_uss_environment(ansible_zos_module): + hosts = ansible_zos_module + hosts.all.shell(cmd="rm -rf " + TEST_FOLDER_LINEINFILE) + +def set_ds_environment(ansible_zos_module, TEMP_FILE, DS_NAME, DS_TYPE, CONTENT): + hosts = ansible_zos_module + hosts.all.shell(cmd="echo \"{0}\" > {1}".format(CONTENT, TEMP_FILE)) + hosts.all.zos_data_set(name=DS_NAME, type=DS_TYPE) + if DS_TYPE in ["PDS", "PDSE"]: + DS_FULL_NAME = DS_NAME + "(MEM)" + hosts.all.zos_data_set(name=DS_FULL_NAME, state="present", type="member") + cmdStr = "cp -CM {0} \"//'{1}'\"".format(quote(TEMP_FILE), DS_FULL_NAME) + else: + DS_FULL_NAME = DS_NAME + cmdStr = "cp {0} \"//'{1}'\" ".format(quote(TEMP_FILE), DS_FULL_NAME) + hosts.all.shell(cmd=cmdStr) + hosts.all.shell(cmd="rm -rf " + TEMP_FILE) + return DS_FULL_NAME + +def remove_ds_environment(ansible_zos_module, DS_NAME): + hosts = ansible_zos_module + hosts.all.zos_data_set(name=DS_NAME, state="absent") +# supported data set types +DS_TYPE = ['SEQ', 'PDS', 'PDSE'] +# not supported data set types +NS_DS_TYPE = ['ESDS', 'RRDS', 'LDS'] +# The encoding will be only use on a few test +ENCODING = ['IBM-1047', 'ISO8859-1', 'UTF-8'] ######################### # USS test cases @@ -632,462 +252,762 @@ @pytest.mark.uss def test_uss_line_replace(ansible_zos_module): - UssGeneral( - "test_uss_line_replace", ansible_zos_module, TEST_ENV, - TEST_INFO["test_uss_line_replace"], - TEST_INFO["expected"]["test_uss_line_replace"]) + hosts = ansible_zos_module + params = dict(regexp="ZOAU_ROOT=", line="ZOAU_ROOT=/mvsutil-develop_dsed", state="present") + full_path = TEST_FOLDER_LINEINFILE + inspect.stack()[0][3] + content = TEST_CONTENT + try: + set_uss_environment(ansible_zos_module, content, full_path) + params["path"] = full_path + results = hosts.all.zos_lineinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat {0}".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_REPLACE + finally: + remove_uss_environment(ansible_zos_module) @pytest.mark.uss def test_uss_line_insertafter_regex(ansible_zos_module): - UssGeneral( - "test_uss_line_insertafter_regex", ansible_zos_module, TEST_ENV, - TEST_INFO["test_uss_line_insertafter_regex"], - TEST_INFO["expected"]["test_uss_line_insertafter_regex"]) + hosts = ansible_zos_module + params = dict(insertafter="ZOAU_ROOT=", line="ZOAU_ROOT=/mvsutil-develop_dsed", state="present") + full_path = TEST_FOLDER_LINEINFILE + inspect.stack()[0][3] + content = TEST_CONTENT + try: + set_uss_environment(ansible_zos_module, content, full_path) + params["path"] = full_path + results = hosts.all.zos_lineinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat {0}".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_INSERTAFTER_REGEX + finally: + remove_uss_environment(ansible_zos_module) @pytest.mark.uss def test_uss_line_insertbefore_regex(ansible_zos_module): - UssGeneral( - "test_uss_line_insertbefore_regex", ansible_zos_module, TEST_ENV, - TEST_INFO["test_uss_line_insertbefore_regex"], - TEST_INFO["expected"]["test_uss_line_insertbefore_regex"]) + hosts = ansible_zos_module + params = dict(insertbefore="ZOAU_ROOT=", line="unset ZOAU_ROOT", state="present") + full_path = TEST_FOLDER_LINEINFILE + inspect.stack()[0][3] + content = TEST_CONTENT + try: + set_uss_environment(ansible_zos_module, content, full_path) + params["path"] = full_path + results = hosts.all.zos_lineinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat {0}".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_INSERTBEFORE_REGEX + finally: + remove_uss_environment(ansible_zos_module) @pytest.mark.uss def test_uss_line_insertafter_eof(ansible_zos_module): - UssGeneral( - "test_uss_line_insertafter_eof", ansible_zos_module, - TEST_ENV, TEST_INFO["test_uss_line_insertafter_eof"], - TEST_INFO["expected"]["test_uss_line_insertafter_eof"]) + hosts = ansible_zos_module + params = dict(insertafter="EOF", line="export ZOAU_ROOT", state="present") + full_path = TEST_FOLDER_LINEINFILE + inspect.stack()[0][3] + content = TEST_CONTENT + try: + set_uss_environment(ansible_zos_module, content, full_path) + params["path"] = full_path + results = hosts.all.zos_lineinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat {0}".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_INSERTAFTER_EOF + finally: + remove_uss_environment(ansible_zos_module) @pytest.mark.uss def test_uss_line_insertbefore_bof(ansible_zos_module): - UssGeneral( - "test_uss_line_insertbefore_bof", ansible_zos_module, - TEST_ENV, TEST_INFO["test_uss_line_insertbefore_bof"], - TEST_INFO["expected"]["test_uss_line_insertbefore_bof"]) + hosts = ansible_zos_module + params = dict(insertbefore="BOF", line="# this is file is for setting env vars", state="present") + full_path = TEST_FOLDER_LINEINFILE + inspect.stack()[0][3] + content = TEST_CONTENT + try: + set_uss_environment(ansible_zos_module, content, full_path) + params["path"] = full_path + results = hosts.all.zos_lineinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat {0}".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_INSERTBEFORE_BOF + finally: + remove_uss_environment(ansible_zos_module) @pytest.mark.uss def test_uss_line_replace_match_insertafter_ignore(ansible_zos_module): - UssGeneral( - "test_uss_line_replace_match_insertafter_ignore", ansible_zos_module, - TEST_ENV, TEST_INFO["test_uss_line_replace_match_insertafter_ignore"], - TEST_INFO["expected"]["test_uss_line_replace_match_insertafter_ignore"] - ) + hosts = ansible_zos_module + params = dict(regexp="ZOAU_ROOT=", insertafter="PATH=", line="ZOAU_ROOT=/mvsutil-develop_dsed", state="present") + full_path = TEST_FOLDER_LINEINFILE + inspect.stack()[0][3] + content = TEST_CONTENT + try: + set_uss_environment(ansible_zos_module, content, full_path) + params["path"] = full_path + results = hosts.all.zos_lineinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat {0}".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_REPLACE_INSERTAFTER_IGNORE + finally: + remove_uss_environment(ansible_zos_module) @pytest.mark.uss def test_uss_line_replace_match_insertbefore_ignore(ansible_zos_module): - UssGeneral( - "test_uss_line_replace_match_insertbefore_ignore", ansible_zos_module, - TEST_ENV, TEST_INFO["test_uss_line_replace_match_insertbefore_ignore"], - TEST_INFO["expected"] - ["test_uss_line_replace_match_insertbefore_ignore"] - ) + hosts = ansible_zos_module + params = dict(regexp="ZOAU_ROOT=", insertbefore="PATH=", line="unset ZOAU_ROOT", state="present") + full_path = TEST_FOLDER_LINEINFILE + inspect.stack()[0][3] + content = TEST_CONTENT + try: + set_uss_environment(ansible_zos_module, content, full_path) + params["path"] = full_path + results = hosts.all.zos_lineinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat {0}".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_REPLACE_INSERTBEFORE_IGNORE + finally: + remove_uss_environment(ansible_zos_module) @pytest.mark.uss def test_uss_line_replace_nomatch_insertafter_match(ansible_zos_module): - UssGeneral( - "test_uss_line_replace_nomatch_insertafter_match", ansible_zos_module, - TEST_ENV, TEST_INFO["test_uss_line_replace_nomatch_insertafter_match"], - TEST_INFO["expected"] - ["test_uss_line_replace_nomatch_insertafter_match"] - ) + hosts = ansible_zos_module + params = dict(regexp="abcxyz", insertafter="ZOAU_ROOT=", line="ZOAU_ROOT=/mvsutil-develop_dsed", state="present") + full_path = TEST_FOLDER_LINEINFILE + inspect.stack()[0][3] + content = TEST_CONTENT + try: + set_uss_environment(ansible_zos_module, content, full_path) + params["path"] = full_path + results = hosts.all.zos_lineinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat {0}".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_REPLACE_NOMATCH_INSERTAFTER + finally: + remove_uss_environment(ansible_zos_module) @pytest.mark.uss def test_uss_line_replace_nomatch_insertbefore_match(ansible_zos_module): - UssGeneral( - "test_uss_line_replace_nomatch_insertbefore_match", ansible_zos_module, - TEST_ENV, - TEST_INFO["test_uss_line_replace_nomatch_insertbefore_match"], - TEST_INFO["expected"] - ["test_uss_line_replace_nomatch_insertbefore_match"] - ) + hosts = ansible_zos_module + params = dict(regexp="abcxyz", insertbefore="ZOAU_ROOT=", line="unset ZOAU_ROOT", state="present") + full_path = TEST_FOLDER_LINEINFILE + inspect.stack()[0][3] + content = TEST_CONTENT + try: + set_uss_environment(ansible_zos_module, content, full_path) + params["path"] = full_path + results = hosts.all.zos_lineinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat {0}".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_REPLACE_NOMATCH_INSERTBEFORE + finally: + remove_uss_environment(ansible_zos_module) @pytest.mark.uss def test_uss_line_replace_nomatch_insertafter_nomatch(ansible_zos_module): - UssGeneral( - "test_uss_line_replace_nomatch_insertafter_nomatch", - ansible_zos_module, TEST_ENV, - TEST_INFO["test_uss_line_replace_nomatch_insertafter_nomatch"], - TEST_INFO["expected"] - ["test_uss_line_replace_nomatch_insertafter_nomatch"] - ) + hosts = ansible_zos_module + params = dict(regexp="abcxyz", insertafter="xyzijk", line="ZOAU_ROOT=/mvsutil-develop_dsed", state="present") + full_path = TEST_FOLDER_LINEINFILE + inspect.stack()[0][3] + content = TEST_CONTENT + try: + set_uss_environment(ansible_zos_module, content, full_path) + params["path"] = full_path + results = hosts.all.zos_lineinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat {0}".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_REPLACE_NOMATCH_INSERTAFTER_NOMATCH + finally: + remove_uss_environment(ansible_zos_module) @pytest.mark.uss def test_uss_line_replace_nomatch_insertbefore_nomatch(ansible_zos_module): - UssGeneral( - "test_uss_line_replace_nomatch_insertbefore_nomatch", - ansible_zos_module, TEST_ENV, - TEST_INFO["test_uss_line_replace_nomatch_insertbefore_nomatch"], - TEST_INFO["expected"] - ["test_uss_line_replace_nomatch_insertbefore_nomatch"] - ) + hosts = ansible_zos_module + params = dict(regexp="abcxyz", insertbefore="xyzijk", line="unset ZOAU_ROOT", state="present") + full_path = TEST_FOLDER_LINEINFILE + inspect.stack()[0][3] + content = TEST_CONTENT + try: + set_uss_environment(ansible_zos_module, content, full_path) + params["path"] = full_path + results = hosts.all.zos_lineinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat {0}".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_REPLACE_NOMATCH_INSERTBEFORE_NOMATCH + finally: + remove_uss_environment(ansible_zos_module) @pytest.mark.uss def test_uss_line_absent(ansible_zos_module): - UssGeneral( - "test_uss_line_absent", ansible_zos_module, TEST_ENV, - TEST_INFO["test_uss_line_absent"], - TEST_INFO["expected"]["test_uss_line_absent"]) + hosts = ansible_zos_module + params = dict(regexp="ZOAU_ROOT=", line="", state="absent") + full_path = TEST_FOLDER_LINEINFILE + inspect.stack()[0][3] + content = TEST_CONTENT + try: + set_uss_environment(ansible_zos_module, content, full_path) + params["path"] = full_path + results = hosts.all.zos_lineinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat {0}".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_ABSENT + finally: + remove_uss_environment(ansible_zos_module) @pytest.mark.uss def test_uss_line_replace_quoted_escaped(ansible_zos_module): - TEST_INFO["test_uss_line_replace"]["line"] = 'ZOAU_ROOT=\"/mvsutil-develop_dsed\"' - UssGeneral( - "test_uss_line_replace", ansible_zos_module, TEST_ENV, - TEST_INFO["test_uss_line_replace"], - TEST_INFO["expected"]["test_uss_line_replace_quoted"]) + hosts = ansible_zos_module + params = dict(path="", regexp="ZOAU_ROOT=", line='ZOAU_ROOT=\"/mvsutil-develop_dsed\"', state="present") + full_path = TEST_FOLDER_LINEINFILE + inspect.stack()[0][3] + content = TEST_CONTENT + try: + set_uss_environment(ansible_zos_module, content, full_path) + params["path"] = full_path + results = hosts.all.zos_lineinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat {0}".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_QUOTED + finally: + remove_uss_environment(ansible_zos_module) @pytest.mark.uss def test_uss_line_replace_quoted_not_escaped(ansible_zos_module): - TEST_INFO["test_uss_line_replace"]["line"] = 'ZOAU_ROOT="/mvsutil-develop_dsed"' - UssGeneral( - "test_uss_line_replace", ansible_zos_module, TEST_ENV, - TEST_INFO["test_uss_line_replace"], - TEST_INFO["expected"]["test_uss_line_replace_quoted"]) + hosts = ansible_zos_module + params = dict(path="", regexp="ZOAU_ROOT=", line='ZOAU_ROOT="/mvsutil-develop_dsed"', state="present") + full_path = TEST_FOLDER_LINEINFILE + inspect.stack()[0][3] + content = TEST_CONTENT + try: + set_uss_environment(ansible_zos_module, content, full_path) + params["path"] = full_path + results = hosts.all.zos_lineinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat {0}".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_QUOTED + finally: + remove_uss_environment(ansible_zos_module) ######################### # Dataset test cases ######################### +# Now force is parameter to change witch function to call in the helper and alter the declaration by add the force or a test name required. +# without change the original description or the other option is that at the end of the test get back to original one. @pytest.mark.ds @pytest.mark.parametrize("dstype", DS_TYPE) -@pytest.mark.parametrize("encoding", ENCODING) -def test_ds_line_insertafter_regex(ansible_zos_module, dstype, encoding): - TEST_ENV["DS_TYPE"] = dstype - TEST_ENV["ENCODING"] = encoding - DsGeneral( - TEST_INFO["test_ds_line_insertafter_regex"]["test_name"], - ansible_zos_module, TEST_ENV, - TEST_INFO["test_uss_line_insertafter_regex"], - TEST_INFO["expected"]["test_uss_line_insertafter_regex"] - ) +def test_ds_line_insertafter_regex(ansible_zos_module, dstype): + hosts = ansible_zos_module + ds_type = dstype + params = dict(insertafter="ZOAU_ROOT=", line="ZOAU_ROOT=/mvsutil-develop_dsed", state="present") + test_name = "DST1" + temp_file = "/tmp/{0}".format(test_name) + ds_name = test_name.upper() + "." + ds_type + content = TEST_CONTENT + try: + ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) + params["path"] = ds_full_name + results = hosts.all.zos_lineinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_INSERTAFTER_REGEX + finally: + remove_ds_environment(ansible_zos_module, ds_name) @pytest.mark.ds @pytest.mark.parametrize("dstype", DS_TYPE) -@pytest.mark.parametrize("encoding", ENCODING) -def test_ds_line_insertbefore_regex(ansible_zos_module, dstype, encoding): - TEST_ENV["DS_TYPE"] = dstype - TEST_ENV["ENCODING"] = encoding - DsGeneral( - TEST_INFO["test_ds_line_insertbefore_regex"]["test_name"], - ansible_zos_module, TEST_ENV, - TEST_INFO["test_uss_line_insertbefore_regex"], - TEST_INFO["expected"]["test_uss_line_insertbefore_regex"] - ) +def test_ds_line_insertbefore_regex(ansible_zos_module, dstype): + hosts = ansible_zos_module + ds_type = dstype + params = dict(insertbefore="ZOAU_ROOT=", line="unset ZOAU_ROOT", state="present") + test_name = "DST2" + temp_file = "/tmp/{0}".format(test_name) + ds_name = test_name.upper() + "." + ds_type + content = TEST_CONTENT + try: + ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) + params["path"] = ds_full_name + results = hosts.all.zos_lineinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_INSERTBEFORE_REGEX + finally: + remove_ds_environment(ansible_zos_module, ds_name) @pytest.mark.ds @pytest.mark.parametrize("dstype", DS_TYPE) -@pytest.mark.parametrize("encoding", ENCODING) -def test_ds_line_insertafter_eof(ansible_zos_module, dstype, encoding): - TEST_ENV["DS_TYPE"] = dstype - TEST_ENV["ENCODING"] = encoding - DsGeneral( - TEST_INFO["test_ds_line_insertafter_eof"]["test_name"], - ansible_zos_module, TEST_ENV, - TEST_INFO["test_uss_line_insertafter_eof"], - TEST_INFO["expected"]["test_uss_line_insertafter_eof"] - ) - +def test_ds_line_insertafter_eof(ansible_zos_module, dstype): + hosts = ansible_zos_module + ds_type = dstype + params = dict(insertafter="EOF", line="export ZOAU_ROOT", state="present") + test_name = "DST3" + temp_file = "/tmp/{0}".format(test_name) + ds_name = test_name.upper() + "." + ds_type + content = TEST_CONTENT + try: + ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) + params["path"] = ds_full_name + results = hosts.all.zos_lineinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_INSERTAFTER_EOF + finally: + remove_ds_environment(ansible_zos_module, ds_name) @pytest.mark.ds @pytest.mark.parametrize("dstype", DS_TYPE) -@pytest.mark.parametrize("encoding", ENCODING) -def test_ds_line_insertbefore_bof(ansible_zos_module, dstype, encoding): - TEST_ENV["DS_TYPE"] = dstype - TEST_ENV["ENCODING"] = encoding - DsGeneral( - TEST_INFO["test_ds_line_insertbefore_bof"]["test_name"], - ansible_zos_module, TEST_ENV, - TEST_INFO["test_uss_line_insertbefore_bof"], - TEST_INFO["expected"]["test_uss_line_insertbefore_bof"] - ) +def test_ds_line_insertbefore_bof(ansible_zos_module, dstype): + hosts = ansible_zos_module + ds_type = dstype + params = dict(insertbefore="BOF", line="# this is file is for setting env vars", state="present") + test_name = "DST4" + temp_file = "/tmp/{0}".format(test_name) + ds_name = test_name.upper() + "." + ds_type + content = TEST_CONTENT + try: + ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) + params["path"] = ds_full_name + results = hosts.all.zos_lineinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_INSERTBEFORE_BOF + finally: + remove_ds_environment(ansible_zos_module, ds_name) @pytest.mark.ds @pytest.mark.parametrize("dstype", DS_TYPE) -@pytest.mark.parametrize("encoding", ENCODING) -def test_ds_line_replace_match_insertafter_ignore( - ansible_zos_module, dstype, encoding): - TEST_ENV["DS_TYPE"] = dstype - TEST_ENV["ENCODING"] = encoding - DsGeneral( - TEST_INFO["test_ds_line_replace_match_insertafter_ignore"] - ["test_name"], ansible_zos_module, TEST_ENV, - TEST_INFO["test_uss_line_replace_match_insertafter_ignore"], - TEST_INFO["expected"]["test_uss_line_replace_match_insertafter_ignore"] - ) +def test_ds_line_replace_match_insertafter_ignore(ansible_zos_module, dstype): + hosts = ansible_zos_module + ds_type = dstype + params = dict(regexp="ZOAU_ROOT=", insertafter="PATH=", line="ZOAU_ROOT=/mvsutil-develop_dsed", state="present") + test_name = "DST5" + temp_file = "/tmp/{0}".format(test_name) + ds_name = test_name.upper() + "." + ds_type + content = TEST_CONTENT + try: + ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) + params["path"] = ds_full_name + results = hosts.all.zos_lineinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_REPLACE_INSERTAFTER_IGNORE + finally: + remove_ds_environment(ansible_zos_module, ds_name) @pytest.mark.ds @pytest.mark.parametrize("dstype", DS_TYPE) -@pytest.mark.parametrize("encoding", ENCODING) -def test_ds_line_replace_match_insertbefore_ignore( - ansible_zos_module, dstype, encoding): - TEST_ENV["DS_TYPE"] = dstype - TEST_ENV["ENCODING"] = encoding - DsGeneral( - TEST_INFO["test_ds_line_replace_match_insertbefore_ignore"] - ["test_name"], ansible_zos_module, TEST_ENV, - TEST_INFO["test_uss_line_replace_match_insertbefore_ignore"], - TEST_INFO["expected"] - ["test_uss_line_replace_match_insertbefore_ignore"] - ) +def test_ds_line_replace_match_insertbefore_ignore(ansible_zos_module, dstype): + hosts = ansible_zos_module + ds_type = dstype + params = dict(regexp="ZOAU_ROOT=", insertbefore="PATH=", line="unset ZOAU_ROOT", state="present") + test_name = "DST6" + temp_file = "/tmp/{0}".format(test_name) + ds_name = test_name.upper() + "." + ds_type + content = TEST_CONTENT + try: + ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) + params["path"] = ds_full_name + results = hosts.all.zos_lineinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_REPLACE_INSERTBEFORE_IGNORE + finally: + remove_ds_environment(ansible_zos_module, ds_name) @pytest.mark.ds @pytest.mark.parametrize("dstype", DS_TYPE) -@pytest.mark.parametrize("encoding", ENCODING) -def test_ds_line_replace_nomatch_insertafter_match( - ansible_zos_module, dstype, encoding): - TEST_ENV["DS_TYPE"] = dstype - TEST_ENV["ENCODING"] = encoding - DsGeneral( - TEST_INFO["test_ds_line_replace_nomatch_insertafter_match"] - ["test_name"], ansible_zos_module, TEST_ENV, - TEST_INFO["test_uss_line_replace_nomatch_insertafter_match"], - TEST_INFO["expected"] - ["test_uss_line_replace_nomatch_insertafter_match"] - ) +def test_ds_line_replace_nomatch_insertafter_match(ansible_zos_module, dstype): + hosts = ansible_zos_module + ds_type = dstype + params = dict(regexp="abcxyz", insertafter="ZOAU_ROOT=", line="ZOAU_ROOT=/mvsutil-develop_dsed", state="present") + test_name = "DST7" + temp_file = "/tmp/{0}".format(test_name) + ds_name = test_name.upper() + "." + ds_type + content = TEST_CONTENT + try: + ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) + params["path"] = ds_full_name + results = hosts.all.zos_lineinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_REPLACE_NOMATCH_INSERTAFTER + finally: + remove_ds_environment(ansible_zos_module, ds_name) @pytest.mark.ds @pytest.mark.parametrize("dstype", DS_TYPE) -@pytest.mark.parametrize("encoding", ENCODING) -def test_ds_line_replace_nomatch_insertbefore_match( - ansible_zos_module, dstype, encoding): - TEST_ENV["DS_TYPE"] = dstype - TEST_ENV["ENCODING"] = encoding - DsGeneral( - TEST_INFO["test_ds_line_replace_nomatch_insertbefore_match"] - ["test_name"], ansible_zos_module, TEST_ENV, - TEST_INFO["test_uss_line_replace_nomatch_insertbefore_match"], - TEST_INFO["expected"] - ["test_uss_line_replace_nomatch_insertbefore_match"] - ) +def test_ds_line_replace_nomatch_insertbefore_match(ansible_zos_module, dstype): + hosts = ansible_zos_module + ds_type = dstype + params = dict(regexp="abcxyz", insertbefore="ZOAU_ROOT=", line="unset ZOAU_ROOT", state="present") + test_name = "DST8" + temp_file = "/tmp/{0}".format(test_name) + ds_name = test_name.upper() + "." + ds_type + content = TEST_CONTENT + try: + ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) + params["path"] = ds_full_name + results = hosts.all.zos_lineinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_REPLACE_NOMATCH_INSERTBEFORE + finally: + remove_ds_environment(ansible_zos_module, ds_name) @pytest.mark.ds @pytest.mark.parametrize("dstype", DS_TYPE) -@pytest.mark.parametrize("encoding", ENCODING) -def test_ds_line_replace_nomatch_insertafter_nomatch( - ansible_zos_module, dstype, encoding): - TEST_ENV["DS_TYPE"] = dstype - TEST_ENV["ENCODING"] = encoding - DsGeneral( - TEST_INFO["test_ds_line_replace_nomatch_insertafter_nomatch"] - ["test_name"], ansible_zos_module, TEST_ENV, - TEST_INFO["test_uss_line_replace_nomatch_insertafter_nomatch"], - TEST_INFO["expected"] - ["test_uss_line_replace_nomatch_insertafter_nomatch"] - ) +def test_ds_line_replace_nomatch_insertafter_nomatch(ansible_zos_module, dstype): + hosts = ansible_zos_module + ds_type = dstype + params = dict(regexp="abcxyz", insertafter="xyzijk", line="ZOAU_ROOT=/mvsutil-develop_dsed", state="present") + test_name = "DST9" + temp_file = "/tmp/{0}".format(test_name) + ds_name = test_name.upper() + "." + ds_type + content = TEST_CONTENT + try: + ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) + params["path"] = ds_full_name + results = hosts.all.zos_lineinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_REPLACE_NOMATCH_INSERTAFTER_NOMATCH + finally: + remove_ds_environment(ansible_zos_module, ds_name) @pytest.mark.ds @pytest.mark.parametrize("dstype", DS_TYPE) -@pytest.mark.parametrize("encoding", ENCODING) -def test_ds_line_replace_nomatch_insertbefore_nomatch( - ansible_zos_module, dstype, encoding): - TEST_ENV["DS_TYPE"] = dstype - TEST_ENV["ENCODING"] = encoding - DsGeneral( - TEST_INFO["test_ds_line_replace_nomatch_insertbefore_nomatch"] - ["test_name"], ansible_zos_module, TEST_ENV, - TEST_INFO["test_uss_line_replace_nomatch_insertbefore_nomatch"], - TEST_INFO["expected"] - ["test_uss_line_replace_nomatch_insertbefore_nomatch"] - ) +def test_ds_line_replace_nomatch_insertbefore_nomatch(ansible_zos_module, dstype): + hosts = ansible_zos_module + ds_type = dstype + params = dict(regexp="abcxyz", insertbefore="xyzijk", line="unset ZOAU_ROOT", state="present") + test_name = "DST10" + temp_file = "/tmp/{0}".format(test_name) + ds_name = test_name.upper() + "." + ds_type + content = TEST_CONTENT + try: + ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) + params["path"] = ds_full_name + results = hosts.all.zos_lineinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_REPLACE_NOMATCH_INSERTBEFORE_NOMATCH + finally: + remove_ds_environment(ansible_zos_module, ds_name) @pytest.mark.ds @pytest.mark.parametrize("dstype", DS_TYPE) -@pytest.mark.parametrize("encoding", ENCODING) -def test_ds_line_absent(ansible_zos_module, dstype, encoding): - TEST_ENV["DS_TYPE"] = dstype - TEST_ENV["ENCODING"] = encoding - DsGeneral( - TEST_INFO["test_ds_line_absent"]["test_name"], ansible_zos_module, - TEST_ENV, TEST_INFO["test_uss_line_absent"], - TEST_INFO["expected"]["test_uss_line_absent"] - ) +def test_ds_line_absent(ansible_zos_module, dstype): + hosts = ansible_zos_module + ds_type = dstype + params = dict(regexp="ZOAU_ROOT=", line="", state="absent") + test_name = "DST11" + temp_file = "/tmp/{0}".format(test_name) + ds_name = test_name.upper() + "." + ds_type + content = TEST_CONTENT + try: + ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) + params["path"] = ds_full_name + results = hosts.all.zos_lineinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_ABSENT + finally: + remove_ds_environment(ansible_zos_module, ds_name) @pytest.mark.ds -@pytest.mark.parametrize("encoding", ENCODING) -def test_ds_tmp_hlq_option(ansible_zos_module, encoding): +def test_ds_tmp_hlq_option(ansible_zos_module): # This TMPHLQ only works with sequential datasets - TEST_ENV["DS_TYPE"] = 'SEQ' - TEST_ENV["ENCODING"] = encoding - test_name = "T12" + hosts = ansible_zos_module + ds_type = "SEQ" kwargs = dict(backup_name=r"TMPHLQ\..") - DsGeneralResultKeyMatchesRegex( - test_name, ansible_zos_module, - TEST_ENV, TEST_INFO["test_ds_line_tmp_hlq_option"], - **kwargs - ) - - + params = dict(insertafter="EOF", line="export ZOAU_ROOT", state="present", backup=True, tmp_hlq="TMPHLQ") + test_name = "DST12" + temp_file = "/tmp/zos_lineinfile/" + test_name + content = TEST_CONTENT + try: + hosts.all.shell(cmd="mkdir -p {0}".format("/tmp/zos_lineinfile/")) + results = hosts.all.shell(cmd='hlq') + for result in results.contacted.values(): + hlq = result.get("stdout") + if len(hlq) > 8: + hlq = hlq[:8] + ds_full_name = hlq + "." + test_name.upper() + "." + ds_type + hosts.all.zos_data_set(name=ds_full_name, type=ds_type, replace=True) + hosts.all.shell(cmd="echo \"{0}\" > {1}".format(content, temp_file)) + cmdStr = "cp {0} \"//'{1}'\" ".format(quote(temp_file), ds_full_name) + hosts.all.shell(cmd=cmdStr) + hosts.all.shell(cmd="rm -rf " + "/tmp/zos_lineinfile/") + results = hosts.all.shell(cmd="cat \"//'{0}'\" | wc -l ".format(ds_full_name)) + for result in results.contacted.values(): + assert int(result.get("stdout")) != 0 + params["path"] = ds_full_name + results = hosts.all.zos_lineinfile(**params) + for result in results.contacted.values(): + for key in kwargs: + assert re.match(kwargs.get(key), result.get(key)) + finally: + hosts.all.zos_data_set(name=ds_full_name, state="absent") + + +## Non supported test cases @pytest.mark.ds @pytest.mark.parametrize("dstype", NS_DS_TYPE) def test_ds_not_supported(ansible_zos_module, dstype): - TEST_ENV["DS_TYPE"] = dstype - DsNotSupportedHelper( - TEST_INFO["test_ds_line_replace"]["test_name"], ansible_zos_module, - TEST_ENV, TEST_INFO["test_uss_line_replace"] - ) - + hosts = ansible_zos_module + ds_type = dstype + params = dict(path="", regexp="ZOAU_ROOT=", line="ZOAU_ROOT=/mvsutil-develop_dsed", state="present") + test_name = "DST13" + ds_name = test_name.upper() + "." + ds_type + try: + results = hosts.all.shell(cmd='hlq') + for result in results.contacted.values(): + hlq = result.get("stdout") + assert len(hlq) <= 8 or hlq != '' + ds_name = test_name.upper() + "." + ds_type + results = hosts.all.zos_data_set(name=ds_name, type=ds_type, replace='yes') + for result in results.contacted.values(): + assert result.get("changed") is True + params["path"] = ds_name + results = hosts.all.zos_lineinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") is False + assert result.get("msg") == "VSAM data set type is NOT supported" + finally: + hosts.all.zos_data_set(name=ds_name, state="absent") -######################### -# Dataset test cases with force -######################### @pytest.mark.ds -@pytest.mark.parametrize("encoding", ENCODING) @pytest.mark.parametrize("dstype", DS_TYPE) -def test_ds_line_force(ansible_zos_module, dstype, encoding): - TEST_ENV["DS_TYPE"] = dstype - TEST_ENV["ENCODING"] = encoding - DsGeneralForce( - ansible_zos_module, TEST_ENV, - TEST_CONTENT, - TEST_INFO["test_ds_line_force"], - TEST_INFO["expected"]["test_ds_line_force"] - ) +def test_ds_line_force(ansible_zos_module, dstype): + hosts = ansible_zos_module + ds_type = dstype + params = dict(path="", regexp="ZOAU_ROOT=", line="ZOAU_ROOT=/mvsutil-develop_dsed", state="present", force="True") + MEMBER_1, MEMBER_2 = "MEM1", "MEM2" + TEMP_FILE = "/tmp/{0}".format(MEMBER_2) + content = TEST_CONTENT + if ds_type == "SEQ": + params["path"] = DEFAULT_DATA_SET_NAME+".{0}".format(MEMBER_2) + else: + params["path"] = DEFAULT_DATA_SET_NAME+"({0})".format(MEMBER_2) + try: + # set up: + hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="present", type=ds_type, replace=True) + hosts.all.shell(cmd="echo \"{0}\" > {1}".format(content, TEMP_FILE)) + hosts.all.zos_data_set( + batch=[ + { "name": DEFAULT_DATA_SET_NAME + "({0})".format(MEMBER_1), + "type": "member", "state": "present", "replace": True, }, + { "name": params["path"], "type": "member", + "state": "present", "replace": True, }, + ] + ) + # write memeber to verify cases + if ds_type in ["PDS", "PDSE"]: + cmdStr = "cp -CM {0} \"//'{1}'\"".format(quote(TEMP_FILE), params["path"]) + else: + cmdStr = "cp {0} \"//'{1}'\" ".format(quote(TEMP_FILE), params["path"]) + hosts.all.shell(cmd=cmdStr) + results = hosts.all.shell(cmd="cat \"//'{0}'\" | wc -l ".format(params["path"])) + for result in results.contacted.values(): + assert int(result.get("stdout")) != 0 + # copy/compile c program and copy jcl to hold data set lock for n seconds in background(&) + hosts.all.zos_copy(content=c_pgm, dest='/tmp/disp_shr/pdse-lock.c', force=True) + hosts.all.zos_copy( + content=call_c_jcl.format(DEFAULT_DATA_SET_NAME, MEMBER_1), + dest='/tmp/disp_shr/call_c_pgm.jcl', + force=True + ) + hosts.all.shell(cmd="xlc -o pdse-lock pdse-lock.c", chdir="/tmp/disp_shr/") + hosts.all.shell(cmd="submit call_c_pgm.jcl", chdir="/tmp/disp_shr/") + time.sleep(5) + # call lineinfile to see results + results = hosts.all.zos_lineinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == True + results = hosts.all.shell(cmd=r"""cat "//'{0}'" """.format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_REPLACE + finally: + hosts.all.shell(cmd="rm -rf " + TEMP_FILE) + ps_list_res = hosts.all.shell(cmd="ps -e | grep -i 'pdse-lock'") + pid = list(ps_list_res.contacted.values())[0].get('stdout').strip().split(' ')[0] + hosts.all.shell(cmd="kill 9 {0}".format(pid.strip())) + hosts.all.shell(cmd='rm -r /tmp/disp_shr') + hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") @pytest.mark.ds -@pytest.mark.parametrize("encoding", ENCODING) -@pytest.mark.parametrize("dstype", DS_TYPE) -def test_ds_line_force_fail(ansible_zos_module, dstype, encoding): - TEST_ENV["DS_TYPE"] = dstype - TEST_ENV["ENCODING"] = encoding - DsGeneralForceFail( - ansible_zos_module, TEST_ENV, - TEST_INFO["test_ds_line_force_fail"] - ) - - -@pytest.mark.ds -@pytest.mark.parametrize("encoding", ENCODING) -@pytest.mark.parametrize("dstype", DS_TYPE) -def test_ds_line_replace_force(ansible_zos_module, dstype, encoding): - TEST_ENV["DS_TYPE"] = dstype - TEST_ENV["ENCODING"] = encoding - DsGeneralForce( - ansible_zos_module, TEST_ENV, - TEST_CONTENT, - TEST_INFO["test_ds_line_replace_force"], - TEST_INFO["expected"]["test_uss_line_replace"] - ) +@pytest.mark.parametrize("dstype", ["PDS","PDSE"]) +def test_ds_line_force_fail(ansible_zos_module, dstype): + hosts = ansible_zos_module + ds_type = dstype + params = dict(path="", regexp="ZOAU_ROOT=", line="ZOAU_ROOT=/mvsutil-develop_dsed", state="present", force="False") + MEMBER_1, MEMBER_2 = "MEM1", "MEM2" + TEMP_FILE = "/tmp/{0}".format(MEMBER_2) + params["path"] = DEFAULT_DATA_SET_NAME+"({0})".format(MEMBER_2) + content = TEST_CONTENT + try: + # set up: + hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="present", type=ds_type, replace=True) + hosts.all.shell(cmd="echo \"{0}\" > {1}".format(content, TEMP_FILE)) + hosts.all.zos_data_set( + batch=[ + { "name": DEFAULT_DATA_SET_NAME + "({0})".format(MEMBER_1), + "type": "member", "state": "present", "replace": True, }, + { "name": params["path"], "type": "member", + "state": "present", "replace": True, }, + ] + ) + cmdStr = "cp -CM {0} \"//'{1}'\"".format(quote(TEMP_FILE), params["path"]) + hosts.all.shell(cmd=cmdStr) + results = hosts.all.shell(cmd="cat \"//'{0}'\" | wc -l ".format(params["path"])) + for result in results.contacted.values(): + assert int(result.get("stdout")) != 0 + # copy/compile c program and copy jcl to hold data set lock for n seconds in background(&) + hosts.all.zos_copy(content=c_pgm, dest='/tmp/disp_shr/pdse-lock.c', force=True) + hosts.all.zos_copy( + content=call_c_jcl.format(DEFAULT_DATA_SET_NAME, MEMBER_1), + dest='/tmp/disp_shr/call_c_pgm.jcl', + force=True + ) + hosts.all.shell(cmd="xlc -o pdse-lock pdse-lock.c", chdir="/tmp/disp_shr/") + hosts.all.shell(cmd="submit call_c_pgm.jcl", chdir="/tmp/disp_shr/") + time.sleep(5) + # call lineinfile to see results + results = hosts.all.zos_lineinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == False + assert result.get("failed") == True + finally: + ps_list_res = hosts.all.shell(cmd="ps -e | grep -i 'pdse-lock'") + pid = list(ps_list_res.contacted.values())[0].get('stdout').strip().split(' ')[0] + hosts.all.shell(cmd="kill 9 {0}".format(pid.strip())) + hosts.all.shell(cmd='rm -r /tmp/disp_shr') + hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") -@pytest.mark.ds -@pytest.mark.parametrize("encoding", ENCODING) -@pytest.mark.parametrize("dstype", DS_TYPE) -def test_ds_line_insertafter_regex_force(ansible_zos_module, dstype, encoding): - TEST_ENV["DS_TYPE"] = dstype - TEST_ENV["ENCODING"] = encoding - DsGeneralForce( - ansible_zos_module, TEST_ENV, - TEST_CONTENT, - TEST_INFO["test_ds_line_insertafter_regex_force"], - TEST_INFO["expected"]["test_uss_line_insertafter_regex"] - ) - - -@pytest.mark.ds -@pytest.mark.parametrize("encoding", ENCODING) -@pytest.mark.parametrize("dstype", DS_TYPE) -def test_ds_line_insertbefore_regex_force(ansible_zos_module, dstype, encoding): - TEST_ENV["DS_TYPE"] = dstype - TEST_ENV["ENCODING"] = encoding - DsGeneralForce( - ansible_zos_module, TEST_ENV, - TEST_CONTENT, - TEST_INFO["test_ds_line_insertbefore_regex_force"], - TEST_INFO["expected"]["test_uss_line_insertbefore_regex"] - ) - - -@pytest.mark.ds -@pytest.mark.parametrize("encoding", ENCODING) -@pytest.mark.parametrize("dstype", DS_TYPE) -def test_ds_line_insertbefore_bof_force(ansible_zos_module, dstype, encoding): - TEST_ENV["DS_TYPE"] = dstype - TEST_ENV["ENCODING"] = encoding - DsGeneralForce( - ansible_zos_module, TEST_ENV, - TEST_CONTENT, - TEST_INFO["test_ds_line_insertbefore_bof_force"], - TEST_INFO["expected"]["test_uss_line_insertbefore_bof"] - ) - - -@pytest.mark.ds -@pytest.mark.parametrize("encoding", ENCODING) -@pytest.mark.parametrize("dstype", DS_TYPE) -def test_ds_line_replace_match_insertafter_ignore_force(ansible_zos_module, dstype, encoding): - TEST_ENV["DS_TYPE"] = dstype - TEST_ENV["ENCODING"] = encoding - DsGeneralForce( - ansible_zos_module, TEST_ENV, - TEST_CONTENT, - TEST_INFO["test_ds_line_replace_match_insertafter_ignore_force"], - TEST_INFO["expected"]["test_uss_line_replace_match_insertafter_ignore"] - ) - - -@pytest.mark.ds -@pytest.mark.parametrize("encoding", ENCODING) -@pytest.mark.parametrize("dstype", DS_TYPE) -def test_ds_line_replace_match_insertbefore_ignore_force(ansible_zos_module, dstype, encoding): - TEST_ENV["DS_TYPE"] = dstype - TEST_ENV["ENCODING"] = encoding - DsGeneralForce( - ansible_zos_module, TEST_ENV, - TEST_CONTENT, - TEST_INFO["test_ds_line_replace_match_insertbefore_ignore_force"], - TEST_INFO["expected"]["test_uss_line_replace_match_insertbefore_ignore"] - ) - +######################### +# Encoding tests +######################### -@pytest.mark.ds +@pytest.mark.uss @pytest.mark.parametrize("encoding", ENCODING) -@pytest.mark.parametrize("dstype", DS_TYPE) -def test_ds_line_replace_nomatch_insertafter_match_force(ansible_zos_module, dstype, encoding): - TEST_ENV["DS_TYPE"] = dstype - TEST_ENV["ENCODING"] = encoding - DsGeneralForce( - ansible_zos_module, TEST_ENV, - TEST_CONTENT, - TEST_INFO["test_ds_line_replace_nomatch_insertafter_match_force"], - TEST_INFO["expected"]["test_uss_line_replace_nomatch_insertafter_match"] - ) +def test_uss_encoding(ansible_zos_module, encoding): + hosts = ansible_zos_module + insert_data = "Insert this string" + params = dict(insertafter="SIMPLE", line=insert_data, state="present") + params["encoding"] = encoding + full_path = TEST_FOLDER_LINEINFILE + inspect.stack()[0][3] + content = "SIMPLE LINE TO VERIFY" + try: + hosts.all.shell(cmd="mkdir -p {0}".format(TEST_FOLDER_LINEINFILE)) + hosts.all.file(path=full_path, state="touch") + hosts.all.shell(cmd="echo \"{0}\" > {1}".format(content, full_path)) + hosts.all.zos_encode(src=full_path, dest=full_path, from_encoding="IBM-1047", to_encoding=params["encoding"]) + params["path"] = full_path + results = hosts.all.zos_lineinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat {0}".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_ENCODING + finally: + remove_uss_environment(ansible_zos_module) @pytest.mark.ds -@pytest.mark.parametrize("encoding", ENCODING) @pytest.mark.parametrize("dstype", DS_TYPE) -def test_ds_line_replace_nomatch_insertbefore_match_force(ansible_zos_module, dstype, encoding): - TEST_ENV["DS_TYPE"] = dstype - TEST_ENV["ENCODING"] = encoding - DsGeneralForce( - ansible_zos_module, TEST_ENV, - TEST_CONTENT, - TEST_INFO["test_ds_line_replace_nomatch_insertbefore_match_force"], - TEST_INFO["expected"]["test_uss_line_replace_nomatch_insertbefore_match"] - ) +@pytest.mark.parametrize("encoding", ["IBM-1047"]) +def test_ds_encoding(ansible_zos_module, encoding, dstype): + hosts = ansible_zos_module + ds_type = dstype + insert_data = "Insert this string" + params = dict(insertafter="SIMPLE", line=insert_data, state="present") + params["encoding"] = encoding + test_name = "DST13" + temp_file = "/tmp/{0}".format(test_name) + ds_name = test_name.upper() + "." + ds_type + content = "SIMPLE LINE TO VERIFY" + try: + hosts.all.shell(cmd="echo \"{0}\" > {1}".format(content, temp_file)) + hosts.all.zos_encode(src=temp_file, dest=temp_file, from_encoding="IBM-1047", to_encoding=params["encoding"]) + hosts.all.zos_data_set(name=ds_name, type=ds_type) + if ds_type in ["PDS", "PDSE"]: + ds_full_name = ds_name + "(MEM)" + hosts.all.zos_data_set(name=ds_full_name, state="present", type="member") + cmdStr = "cp -CM {0} \"//'{1}'\"".format(quote(temp_file), ds_full_name) + else: + ds_full_name = ds_name + cmdStr = "cp {0} \"//'{1}'\" ".format(quote(temp_file), ds_full_name) + hosts.all.shell(cmd=cmdStr) + hosts.all.shell(cmd="rm -rf " + temp_file) + params["path"] = ds_full_name + results = hosts.all.zos_lineinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + hosts.all.zos_encode(src=ds_full_name, dest=ds_full_name, from_encoding=params["encoding"], to_encoding="IBM-1047") + results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_ENCODING + finally: + remove_ds_environment(ansible_zos_module, ds_name) \ No newline at end of file diff --git a/tests/helpers/zos_blockinfile_helper.py b/tests/helpers/zos_blockinfile_helper.py deleted file mode 100644 index f5aa178fe..000000000 --- a/tests/helpers/zos_blockinfile_helper.py +++ /dev/null @@ -1,345 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright (c) IBM Corporation 2020, 2022, 2023 -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# http://www.apache.org/licenses/LICENSE-2.0 -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import absolute_import, division, print_function -from shellescape import quote -from pprint import pprint -import time -import re - - -__metaclass__ = type - - -DEFAULT_DATA_SET_NAME = "USER.PRIVATE.TESTDS" - -c_pgm="""#include <stdio.h> -#include <stdlib.h> -#include <string.h> -int main(int argc, char** argv) -{ - char dsname[ strlen(argv[1]) + 4]; - sprintf(dsname, "//'%s'", argv[1]); - FILE* member; - member = fopen(dsname, "rb,type=record"); - sleep(300); - fclose(member); - return 0; -} -""" - -call_c_jcl="""//PDSELOCK JOB MSGCLASS=A,MSGLEVEL=(1,1),NOTIFY=&SYSUID,REGION=0M -//LOCKMEM EXEC PGM=BPXBATCH -//STDPARM DD * -SH /tmp/disp_shr/pdse-lock '{0}({1})' -//STDIN DD DUMMY -//STDOUT DD SYSOUT=* -//STDERR DD SYSOUT=* -//""" - - -def set_uss_test_env(test_name, hosts, test_env): - test_env["TEST_FILE"] = test_env["TEST_DIR"] + test_name - try: - hosts.all.shell(cmd="mkdir -p {0}".format(test_env["TEST_DIR"])) - hosts.all.shell(cmd="echo \"{0}\" > {1}".format(test_env["TEST_CONT"], test_env["TEST_FILE"])) - except Exception: - clean_uss_test_env(test_env["TEST_DIR"], hosts) - assert 1 == 0, "Failed to set the test env" - - -def clean_uss_test_env(test_dir, hosts): - try: - hosts.all.shell(cmd="rm -rf " + test_dir) - except Exception: - assert 1 == 0, "Failed to clean the test env" - - -def UssGeneral(test_name, ansible_zos_module, test_env, test_info, expected): - hosts = ansible_zos_module - set_uss_test_env(test_name, hosts, test_env) - test_info["path"] = test_env["TEST_FILE"] - blockinfile_results = hosts.all.zos_blockinfile(**test_info) - for result in blockinfile_results.contacted.values(): - pprint(result) - assert result.get("changed") == 1 - cmdStr = "cat {0}".format(test_info["path"]) - results = hosts.all.shell(cmd=cmdStr) - for result in results.contacted.values(): - pprint(result) - assert result.get("stdout") == expected - clean_uss_test_env(test_env["TEST_DIR"], hosts) - return blockinfile_results - - -def set_ds_test_env(test_name, hosts, test_env): - TEMP_FILE = test_env["TEST_DIR"] + test_name - hosts.all.shell(cmd="mkdir -p {0}".format(test_env["TEST_DIR"])) - results = hosts.all.shell(cmd='hlq') - for result in results.contacted.values(): - hlq = result.get("stdout") - if len(hlq) > 8: - hlq = hlq[:8] - test_env["DS_NAME"] = hlq + "." + test_name.upper() + "." + test_env["DS_TYPE"] - try: - hosts.all.zos_data_set(name=test_env["DS_NAME"], type=test_env["DS_TYPE"], replace=True) - hosts.all.shell(cmd="echo \"{0}\" > {1}".format(test_env["TEST_CONT"], TEMP_FILE)) - if test_env["DS_TYPE"] in ["PDS", "PDSE"]: - test_env["DS_NAME"] = test_env["DS_NAME"] + "(MEM)" - hosts.all.zos_data_set(name=test_env["DS_NAME"], state="present", type="member") - cmdStr = "cp -CM {0} \"//'{1}'\"".format(quote(TEMP_FILE), test_env["DS_NAME"]) - else: - cmdStr = "cp {0} \"//'{1}'\" ".format(quote(TEMP_FILE), test_env["DS_NAME"]) - - if test_env["ENCODING"] != "IBM-1047": - hosts.all.zos_encode( - src=TEMP_FILE, - dest=test_env["DS_NAME"], - encoding={ - "from": "IBM-1047", - "to": test_env["ENCODING"], - }, - ) - else: - hosts.all.shell(cmd=cmdStr) - hosts.all.shell(cmd="rm -rf " + test_env["TEST_DIR"]) - cmdStr = "cat \"//'{0}'\" | wc -l ".format(test_env["DS_NAME"]) - results = hosts.all.shell(cmd=cmdStr) - for result in results.contacted.values(): - assert int(result.get("stdout")) != 0 - except Exception: - clean_ds_test_env(test_env["DS_NAME"], hosts) - assert 1 == 0, "Failed to set the test env" - - -def clean_ds_test_env(ds_name, hosts): - ds_name = ds_name.replace("(MEM)", "") - try: - hosts.all.zos_data_set(name=ds_name, state="absent") - except Exception: - assert 1 == 0, "Failed to clean the test env" - - -def DsGeneral(test_name, ansible_zos_module, test_env, test_info, expected): - hosts = ansible_zos_module - set_ds_test_env(test_name, hosts, test_env) - test_info["path"] = test_env["DS_NAME"] - if test_env["ENCODING"]: - test_info["encoding"] = test_env["ENCODING"] - blockinfile_results = hosts.all.zos_blockinfile(**test_info) - for result in blockinfile_results.contacted.values(): - pprint(result) - assert result.get("changed") == 1 - if test_env["ENCODING"] == 'IBM-1047': - cmdStr = "cat \"//'{0}'\" ".format(test_env["DS_NAME"]) - results = hosts.all.shell(cmd=cmdStr) - for result in results.contacted.values(): - pprint(result) - #assert result.get("stdout") == expected - assert result.get("stdout").replace('\n', '').replace(' ', '') == expected.replace('\n', '').replace(' ', '') - clean_ds_test_env(test_env["DS_NAME"], hosts) - return blockinfile_results - - -def DsNotSupportedHelper(test_name, ansible_zos_module, test_env, test_info): - hosts = ansible_zos_module - results = hosts.all.shell(cmd='hlq') - for result in results.contacted.values(): - hlq = result.get("stdout") - assert len(hlq) <= 8 or hlq != '' - test_env["DS_NAME"] = hlq + "." + test_name.upper() + "." + test_env["DS_TYPE"] - results = hosts.all.zos_data_set(name=test_env["DS_NAME"], type=test_env["DS_TYPE"], replace='yes') - for result in results.contacted.values(): - pprint(result) - assert result.get("changed") is True - test_info["path"] = test_env["DS_NAME"] - results = hosts.all.zos_blockinfile(**test_info) - for result in results.contacted.values(): - pprint(result) - assert result.get("changed") is False - assert result.get("msg") == "VSAM data set type is NOT supported" - clean_ds_test_env(test_env["DS_NAME"], hosts) - - -def DsGeneralResultKeyMatchesRegex(test_name, ansible_zos_module, test_env, test_info, **kwargs): - hosts = ansible_zos_module - set_ds_test_env(test_name, hosts, test_env) - test_info["path"] = test_env["DS_NAME"] - if test_env["ENCODING"]: - test_info["encoding"] = test_env["ENCODING"] - results = hosts.all.zos_blockinfile(**test_info) - for result in results.contacted.values(): - pprint(result) - for key in kwargs: - assert re.match(kwargs.get(key), result.get(key)) - clean_ds_test_env(test_env["DS_NAME"], hosts) - - -def DsGeneralForce(ansible_zos_module, test_env, test_info, expected): - MEMBER_1, MEMBER_2 = "MEM1", "MEM2" - TEMP_FILE = "/tmp/{0}".format(MEMBER_2) - if test_env["DS_TYPE"] == "SEQ": - test_env["DS_NAME"] = DEFAULT_DATA_SET_NAME+".{0}".format(MEMBER_2) - test_info["path"] = DEFAULT_DATA_SET_NAME+".{0}".format(MEMBER_2) - else: - test_env["DS_NAME"] = DEFAULT_DATA_SET_NAME+"({0})".format(MEMBER_2) - test_info["path"] = DEFAULT_DATA_SET_NAME+"({0})".format(MEMBER_2) - hosts = ansible_zos_module - try: - # set up: - # create pdse - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="present", type=test_env["DS_TYPE"], replace=True) - hosts.all.shell(cmd="echo \"{0}\" > {1}".format(test_env["TEST_CONT"], TEMP_FILE)) - # add members - hosts.all.zos_data_set( - batch=[ - { - "name": DEFAULT_DATA_SET_NAME + "({0})".format(MEMBER_1), - "type": "member", - "state": "present", - "replace": True, - }, - { - "name": test_env["DS_NAME"], - "type": "member", - "state": "present", - "replace": True, - }, - ] - ) - # write memeber to verify cases - # print(test_env["TEST_CONT"]) - if test_env["DS_TYPE"] in ["PDS", "PDSE"]: - cmdStr = "cp -CM {0} \"//'{1}'\"".format(quote(TEMP_FILE), test_env["DS_NAME"]) - else: - cmdStr = "cp {0} \"//'{1}'\" ".format(quote(TEMP_FILE), test_env["DS_NAME"]) - if test_env["ENCODING"]: - test_info["encoding"] = test_env["ENCODING"] - hosts.all.shell(cmd=cmdStr) - cmdStr = "cat \"//'{0}'\" | wc -l ".format(test_env["DS_NAME"]) - results = hosts.all.shell(cmd=cmdStr) - pprint(vars(results)) - for result in results.contacted.values(): - assert int(result.get("stdout")) != 0 - if test_env["ENCODING"] != 'IBM-1047': - hosts.all.zos_encode( - src=TEMP_FILE, - dest=test_env["DS_NAME"], - encoding={ - "from": "IBM-1047", - "to": test_env["ENCODING"], - }, - ) - # copy/compile c program and copy jcl to hold data set lock for n seconds in background(&) - hosts.all.zos_copy(content=c_pgm, dest='/tmp/disp_shr/pdse-lock.c', force=True) - hosts.all.zos_copy( - content=call_c_jcl.format(DEFAULT_DATA_SET_NAME, MEMBER_1), - dest='/tmp/disp_shr/call_c_pgm.jcl', - force=True - ) - hosts.all.shell(cmd="xlc -o pdse-lock pdse-lock.c", chdir="/tmp/disp_shr/") - - # submit jcl - hosts.all.shell(cmd="submit call_c_pgm.jcl", chdir="/tmp/disp_shr/") - - # pause to ensure c code acquires lock - time.sleep(5) - - blockinfile_results = hosts.all.zos_blockinfile(**test_info) - for result in blockinfile_results.contacted.values(): - assert result.get("changed") == True - - - if test_env["ENCODING"] == 'IBM-1047': - cmdStr = "cat \"//'{0}'\" ".format(test_info["path"]) - results = hosts.all.shell(cmd=cmdStr) - for result in results.contacted.values(): - pprint(result) - assert result.get("stdout").replace('\n', '').replace(' ', '') == expected.replace('\n', '').replace(' ', '') - else: - cmdStr =r"""cat "//'{0}'" """.format(test_info["path"]) - results = hosts.all.shell(cmd=cmdStr) - pprint(vars(results)) - for result in results.contacted.values(): - assert result.get("changed") == True - finally: - hosts.all.shell(cmd="rm -rf " + TEMP_FILE) - # extract pid - ps_list_res = hosts.all.shell(cmd="ps -e | grep -i 'pdse-lock'") - # kill process - release lock - this also seems to end the job - pid = list(ps_list_res.contacted.values())[0].get('stdout').strip().split(' ')[0] - hosts.all.shell(cmd="kill 9 {0}".format(pid.strip())) - # clean up c code/object/executable files, jcl - hosts.all.shell(cmd='rm -r /tmp/disp_shr') - # remove pdse - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") - return blockinfile_results - - -def DsGeneralForceFail(ansible_zos_module, test_env, test_info): - MEMBER_1, MEMBER_2 = "MEM1", "MEM2" - hosts = ansible_zos_module - test_info["path"] = DEFAULT_DATA_SET_NAME+"({0})".format(MEMBER_2) - try: - # set up: - # create pdse - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="present", type="pdse", replace=True) - # add members - hosts.all.zos_data_set( - batch=[ - { - "name": DEFAULT_DATA_SET_NAME + "({0})".format(MEMBER_1), - "type": "member", - "state": "present", - "replace": True, - }, - { - "name": DEFAULT_DATA_SET_NAME + "({0})".format(MEMBER_2), - "type": "member", - "state": "present", - "replace": True, - }, - ] - ) - # write memeber to verify cases - hosts.all.shell(cmd="echo \"{0}\" > {1}".format(test_env["TEST_CONT"], test_info["path"])) - # copy/compile c program and copy jcl to hold data set lock for n seconds in background(&) - hosts.all.zos_copy(content=c_pgm, dest='/tmp/disp_shr/pdse-lock.c', force=True) - hosts.all.zos_copy( - content=call_c_jcl.format(DEFAULT_DATA_SET_NAME, MEMBER_1), - dest='/tmp/disp_shr/call_c_pgm.jcl', - force=True - ) - hosts.all.shell(cmd="xlc -o pdse-lock pdse-lock.c", chdir="/tmp/disp_shr/") - - # submit jcl - hosts.all.shell(cmd="submit call_c_pgm.jcl", chdir="/tmp/disp_shr/") - - # pause to ensure c code acquires lock - time.sleep(5) - - blockinfile_results = hosts.all.zos_blockinfile(**test_info) - for result in blockinfile_results.contacted.values(): - pprint(result) - assert result.get("changed") == False - assert result.get("failed") == True - finally: - # extract pid - ps_list_res = hosts.all.shell(cmd="ps -e | grep -i 'pdse-lock'") - # kill process - release lock - this also seems to end the job - pid = list(ps_list_res.contacted.values())[0].get('stdout').strip().split(' ')[0] - hosts.all.shell(cmd="kill 9 {0}".format(pid.strip())) - # clean up c code/object/executable files, jcl - hosts.all.shell(cmd='rm -r /tmp/disp_shr') - # remove pdse - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") \ No newline at end of file diff --git a/tests/helpers/zos_lineinfile_helper.py b/tests/helpers/zos_lineinfile_helper.py deleted file mode 100644 index bac392e80..000000000 --- a/tests/helpers/zos_lineinfile_helper.py +++ /dev/null @@ -1,340 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright (c) IBM Corporation 2020, 2022, 2023 -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# http://www.apache.org/licenses/LICENSE-2.0 -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import absolute_import, division, print_function -from shellescape import quote -import time -from pprint import pprint -import re - -__metaclass__ = type - -DEFAULT_DATA_SET_NAME = "USER.PRIVATE.TESTDS" - -c_pgm="""#include <stdio.h> -#include <stdlib.h> -#include <string.h> -int main(int argc, char** argv) -{ - char dsname[ strlen(argv[1]) + 4]; - sprintf(dsname, "//'%s'", argv[1]); - FILE* member; - member = fopen(dsname, "rb,type=record"); - sleep(300); - fclose(member); - return 0; -} -""" - -call_c_jcl="""//PDSELOCK JOB MSGCLASS=A,MSGLEVEL=(1,1),NOTIFY=&SYSUID,REGION=0M -//LOCKMEM EXEC PGM=BPXBATCH -//STDPARM DD * -SH /tmp/disp_shr/pdse-lock '{0}({1})' -//STDIN DD DUMMY -//STDOUT DD SYSOUT=* -//STDERR DD SYSOUT=* -//""" - - -def set_uss_test_env(test_name, hosts, test_env): - test_env["TEST_FILE"] = test_env["TEST_DIR"] + test_name - try: - hosts.all.shell(cmd="mkdir -p {0}".format(test_env["TEST_DIR"])) - hosts.all.shell(cmd="echo \"{0}\" > {1}".format(test_env["TEST_CONT"], test_env["TEST_FILE"])) - except Exception: - clean_uss_test_env(test_env["TEST_DIR"], hosts) - assert 1 == 0, "Failed to set the test env" - - -def clean_uss_test_env(test_dir, hosts): - try: - hosts.all.shell(cmd="rm -rf " + test_dir) - except Exception: - assert 1 == 0, "Failed to clean the test env" - - -def UssGeneral(test_name, ansible_zos_module, test_env, test_info, expected): - hosts = ansible_zos_module - set_uss_test_env(test_name, hosts, test_env) - test_info["path"] = test_env["TEST_FILE"] - results = hosts.all.zos_lineinfile(**test_info) - pprint(vars(results)) - for result in results.contacted.values(): - assert result.get("changed") == 1 - cmdStr = "cat {0}".format(test_info["path"]) - results = hosts.all.shell(cmd=cmdStr) - pprint(vars(results)) - for result in results.contacted.values(): - assert result.get("stdout") == expected - clean_uss_test_env(test_env["TEST_DIR"], hosts) - - -def set_ds_test_env(test_name, hosts, test_env): - TEMP_FILE = "/tmp/" + test_name - """ - encoding = test_env["ENCODING"].replace("-", "").replace(".", "").upper() - try: - int(encoding[0]) - encoding = "E" + encoding - except: - pass - if len(encoding) > 7: - encoding = encoding[:4] + encoding[-4:] - """ - # simplifying dataset name, zos_encode seems to have issues with some dataset names (can be from ZOAU) - encoding = "ENC" - test_env["DS_NAME"] = test_name.upper() + "." + encoding + "." + test_env["DS_TYPE"] - - try: - hosts.all.zos_data_set(name=test_env["DS_NAME"], type=test_env["DS_TYPE"]) - hosts.all.shell(cmd="echo \"{0}\" > {1}".format(test_env["TEST_CONT"], TEMP_FILE)) - if test_env["DS_TYPE"] in ["PDS", "PDSE"]: - test_env["DS_NAME"] = test_env["DS_NAME"] + "(MEM)" - hosts.all.zos_data_set(name=test_env["DS_NAME"], state="present", type="member") - cmdStr = "cp -CM {0} \"//'{1}'\"".format(quote(TEMP_FILE), test_env["DS_NAME"]) - else: - cmdStr = "cp {0} \"//'{1}'\" ".format(quote(TEMP_FILE), test_env["DS_NAME"]) - - if test_env["ENCODING"] != "IBM-1047": - hosts.all.zos_encode( - src=TEMP_FILE, - dest=test_env["DS_NAME"], - encoding={ - "from": "IBM-1047", - "to": test_env["ENCODING"], - }, - ) - else: - hosts.all.shell(cmd=cmdStr) - hosts.all.shell(cmd="rm -rf " + TEMP_FILE) - cmdStr = "cat \"//'{0}'\" | wc -l ".format(test_env["DS_NAME"]) - results = hosts.all.shell(cmd=cmdStr) - pprint(vars(results)) - for result in results.contacted.values(): - assert int(result.get("stdout")) != 0 - except Exception: - clean_ds_test_env(test_env["DS_NAME"], hosts) - assert 1 == 0, "Failed to set the test env" - - -def clean_ds_test_env(ds_name, hosts): - ds_name = ds_name.replace("(MEM)", "") - try: - hosts.all.zos_data_set(name=ds_name, state="absent") - except Exception: - assert 1 == 0, "Failed to clean the test env" - - -def DsGeneral(test_name, ansible_zos_module, test_env, test_info, expected): - hosts = ansible_zos_module - set_ds_test_env(test_name, hosts, test_env) - test_info["path"] = test_env["DS_NAME"] - if test_env["ENCODING"]: - test_info["encoding"] = test_env["ENCODING"] - results = hosts.all.zos_lineinfile(**test_info) - pprint(vars(results)) - for result in results.contacted.values(): - assert result.get("changed") == 1 - if test_env["ENCODING"] == 'IBM-1047': - cmdStr = "cat \"//'{0}'\" ".format(test_env["DS_NAME"]) - results = hosts.all.shell(cmd=cmdStr) - pprint(vars(results)) - for result in results.contacted.values(): - assert result.get("stdout").replace('\n', '').replace(' ', '') == expected.replace('\n', '').replace(' ', '') - clean_ds_test_env(test_env["DS_NAME"], hosts) - - -def DsNotSupportedHelper(test_name, ansible_zos_module, test_env, test_info): - hosts = ansible_zos_module - results = hosts.all.shell(cmd='hlq') - for result in results.contacted.values(): - hlq = result.get("stdout") - assert len(hlq) <= 8 or hlq != '' - test_env["DS_NAME"] = test_name.upper() + "." + test_name.upper() + "." + test_env["DS_TYPE"] - results = hosts.all.zos_data_set(name=test_env["DS_NAME"], type=test_env["DS_TYPE"], replace='yes') - pprint(vars(results)) - for result in results.contacted.values(): - assert result.get("changed") is True - test_info["path"] = test_env["DS_NAME"] - results = hosts.all.zos_lineinfile(**test_info) - pprint(vars(results)) - for result in results.contacted.values(): - assert result.get("changed") is False - assert result.get("msg") == "VSAM data set type is NOT supported" - clean_ds_test_env(test_env["DS_NAME"], hosts) - - -def DsGeneralResultKeyMatchesRegex(test_name, ansible_zos_module, test_env, test_info, **kwargs): - hosts = ansible_zos_module - set_ds_test_env(test_name, hosts, test_env) - test_info["path"] = test_env["DS_NAME"] - if test_env["ENCODING"]: - test_info["encoding"] = test_env["ENCODING"] - results = hosts.all.zos_lineinfile(**test_info) - pprint(vars(results)) - for result in results.contacted.values(): - for key in kwargs: - assert re.match(kwargs.get(key), result.get(key)) - clean_ds_test_env(test_env["DS_NAME"], hosts) - - -def DsGeneralForce(ansible_zos_module, test_env, test_text, test_info, expected): - MEMBER_1, MEMBER_2 = "MEM1", "MEM2" - TEMP_FILE = "/tmp/{0}".format(MEMBER_2) - if test_env["DS_TYPE"] == "SEQ": - test_env["DS_NAME"] = DEFAULT_DATA_SET_NAME+".{0}".format(MEMBER_2) - test_info["path"] = DEFAULT_DATA_SET_NAME+".{0}".format(MEMBER_2) - else: - test_env["DS_NAME"] = DEFAULT_DATA_SET_NAME+"({0})".format(MEMBER_2) - test_info["path"] = DEFAULT_DATA_SET_NAME+"({0})".format(MEMBER_2) - hosts = ansible_zos_module - try: - # set up: - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="present", type=test_env["DS_TYPE"], replace=True) - hosts.all.shell(cmd="echo \"{0}\" > {1}".format(test_text, TEMP_FILE)) - # add members - hosts.all.zos_data_set( - batch=[ - { - "name": DEFAULT_DATA_SET_NAME + "({0})".format(MEMBER_1), - "type": "member", - "state": "present", - "replace": True, - }, - { - "name": test_env["DS_NAME"], - "type": "member", - "state": "present", - "replace": True, - }, - ] - ) - # write memeber to verify cases - if test_env["DS_TYPE"] in ["PDS", "PDSE"]: - cmdStr = "cp -CM {0} \"//'{1}'\"".format(quote(TEMP_FILE), test_env["DS_NAME"]) - else: - cmdStr = "cp {0} \"//'{1}'\" ".format(quote(TEMP_FILE), test_env["DS_NAME"]) - if test_env["ENCODING"]: - test_info["encoding"] = test_env["ENCODING"] - hosts.all.shell(cmd=cmdStr) - cmdStr = "cat \"//'{0}'\" | wc -l ".format(test_env["DS_NAME"]) - results = hosts.all.shell(cmd=cmdStr) - pprint(vars(results)) - for result in results.contacted.values(): - assert int(result.get("stdout")) != 0 - if test_env["ENCODING"] != 'IBM-1047': - hosts.all.zos_encode( - src=TEMP_FILE, - dest=test_env["DS_NAME"], - encoding={ - "from": "IBM-1047", - "to": test_env["ENCODING"], - }, - ) - # copy/compile c program and copy jcl to hold data set lock for n seconds in background(&) - hosts.all.zos_copy(content=c_pgm, dest='/tmp/disp_shr/pdse-lock.c', force=True) - hosts.all.zos_copy( - content=call_c_jcl.format(DEFAULT_DATA_SET_NAME, MEMBER_1), - dest='/tmp/disp_shr/call_c_pgm.jcl', - force=True - ) - hosts.all.shell(cmd="xlc -o pdse-lock pdse-lock.c", chdir="/tmp/disp_shr/") - # submit jcl - hosts.all.shell(cmd="submit call_c_pgm.jcl", chdir="/tmp/disp_shr/") - - # pause to ensure c code acquires lock - time.sleep(5) - # call line infile to see results - results = hosts.all.zos_lineinfile(**test_info) - pprint(vars(results)) - - if test_env["ENCODING"] == 'IBM-1047': - cmdStr =r"""cat "//'{0}'" """.format(test_info["path"]) - results = hosts.all.shell(cmd=cmdStr) - pprint(vars(results)) - for result in results.contacted.values(): - assert result.get("stdout") == expected - else: - cmdStr =r"""cat "//'{0}'" """.format(test_info["path"]) - results = hosts.all.shell(cmd=cmdStr) - pprint(vars(results)) - for result in results.contacted.values(): - assert result.get("changed") == True - #assert result.get("stdout") == expected - - finally: - hosts.all.shell(cmd="rm -rf " + TEMP_FILE) - # extract pid - ps_list_res = hosts.all.shell(cmd="ps -e | grep -i 'pdse-lock'") - - # kill process - release lock - this also seems to end the job - pid = list(ps_list_res.contacted.values())[0].get('stdout').strip().split(' ')[0] - hosts.all.shell(cmd="kill 9 {0}".format(pid.strip())) - # clean up c code/object/executable files, jcl - hosts.all.shell(cmd='rm -r /tmp/disp_shr') - # remove pdse - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") - -def DsGeneralForceFail(ansible_zos_module, test_env, test_info): - MEMBER_1, MEMBER_2 = "MEM1", "MEM2" - hosts = ansible_zos_module - test_info["path"] = DEFAULT_DATA_SET_NAME+"({0})".format(MEMBER_2) - try: - # set up: - # create pdse - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="present", type="pdse", replace=True) - # add members - hosts.all.zos_data_set( - batch=[ - { - "name": DEFAULT_DATA_SET_NAME + "({0})".format(MEMBER_1), - "type": "member", - "state": "present", - "replace": True, - }, - { - "name": DEFAULT_DATA_SET_NAME + "({0})".format(MEMBER_2), - "type": "member", - "state": "present", - "replace": True, - }, - ] - ) - # copy/compile c program and copy jcl to hold data set lock for n seconds in background(&) - hosts.all.zos_copy(content=c_pgm, dest='/tmp/disp_shr/pdse-lock.c', force=True) - hosts.all.zos_copy( - content=call_c_jcl.format(DEFAULT_DATA_SET_NAME, MEMBER_1), - dest='/tmp/disp_shr/call_c_pgm.jcl', - force=True - ) - hosts.all.shell(cmd="xlc -o pdse-lock pdse-lock.c", chdir="/tmp/disp_shr/") - # submit jcl - hosts.all.shell(cmd="submit call_c_pgm.jcl", chdir="/tmp/disp_shr/") - # pause to ensure c code acquires lock - time.sleep(5) - # call line infile to see results - results = hosts.all.zos_lineinfile(**test_info) - pprint(vars(results)) - for result in results.contacted.values(): - assert result.get("changed") == False - assert result.get("failed") == True - finally: - # extract pid - ps_list_res = hosts.all.shell(cmd="ps -e | grep -i 'pdse-lock'") - # kill process - release lock - this also seems to end the job - pid = list(ps_list_res.contacted.values())[0].get('stdout').strip().split(' ')[0] - hosts.all.shell(cmd="kill 9 {0}".format(pid.strip())) - # clean up c code/object/executable files, jcl - hosts.all.shell(cmd='rm -r /tmp/disp_shr') - # remove pdse - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") From 0dc808736b9f5aba7f5f176896529ae539987ea0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Tue, 18 Jul 2023 14:10:57 -0600 Subject: [PATCH 144/495] Functional tso command test cases currently can not be run concurrently#880 (#895) * Remove all dependent test and summary in one * Add fragment * Solve multiple commands and text about the explanation of testing * Change variables to be accurate * Remove import do not used * Add comments * Solve typos and writting --------- Co-authored-by: ketankelkar <ktnklkr@gmail.com> --- ..._currently_can_not_be_run_concurrently.yml | 6 + .../modules/test_zos_tso_command_func.py | 115 +++++++----------- 2 files changed, 47 insertions(+), 74 deletions(-) create mode 100644 changelogs/fragments/880-Functional_tso_command_test_cases_currently_can_not_be_run_concurrently.yml diff --git a/changelogs/fragments/880-Functional_tso_command_test_cases_currently_can_not_be_run_concurrently.yml b/changelogs/fragments/880-Functional_tso_command_test_cases_currently_can_not_be_run_concurrently.yml new file mode 100644 index 000000000..64ab4871c --- /dev/null +++ b/changelogs/fragments/880-Functional_tso_command_test_cases_currently_can_not_be_run_concurrently.yml @@ -0,0 +1,6 @@ +trivial: +- zos_tso_command - Test suite was set up to run sequentially such that + certain tests relied on prior test cases. The new changes combine those + inter-dependent test cases into a single test case so that each individual + test case can now be run stand-alone. + (https://github.com/ansible-collections/ibm_zos_core/pull/895) \ No newline at end of file diff --git a/tests/functional/modules/test_zos_tso_command_func.py b/tests/functional/modules/test_zos_tso_command_func.py index dbdf888f4..44436d3da 100644 --- a/tests/functional/modules/test_zos_tso_command_func.py +++ b/tests/functional/modules/test_zos_tso_command_func.py @@ -15,21 +15,18 @@ __metaclass__ = type -import os -import sys -import warnings - import ansible.constants import ansible.errors import ansible.utils -import pytest +DEFAULT_TEMP_DATASET="imstestl.ims1.temp.ps" def test_zos_tso_command_run_help(ansible_zos_module): hosts = ansible_zos_module results = hosts.all.zos_tso_command(commands=["help"]) for result in results.contacted.values(): - assert result.get("output")[0].get("rc") == 0 + for item in result.get("output"): + assert item.get("rc") == 0 assert result.get("changed") is True @@ -45,89 +42,54 @@ def test_zos_tso_command_long_command_128_chars(ansible_zos_module): ] results = hosts.all.zos_tso_command(commands=command_string) for result in results.contacted.values(): - assert result.get("output")[0].get("rc") == 0 + for item in result.get("output"): + assert item.get("rc") == 0 assert result.get("changed") is True -# The happy path test -# Run a long tso command to allocate a dataset. -def test_zos_tso_command_long_unauth_command_116_chars(ansible_zos_module): +def test_zos_tso_command_allocate_listing_delete(ansible_zos_module): hosts = ansible_zos_module command_string = [ - "alloc da('imstestl.ims1.temp.ps') catalog lrecl(133) blksize(13300) recfm(f b) dsorg(po) cylinders space(5,5) dir(5)" + "alloc da('{0}') catalog lrecl(133) blksize(13300) recfm(f b) dsorg(po) cylinders space(5,5) dir(5)".format(DEFAULT_TEMP_DATASET) ] - results = hosts.all.zos_tso_command(commands=command_string) - for result in results.contacted.values(): - assert result.get("output")[0].get("rc") == 0 - assert result.get("changed") is True - - -# The positive path test -def test_zos_tso_command_auth_command_listds(ansible_zos_module): - hosts = ansible_zos_module - results = hosts.all.zos_tso_command(commands=["LISTDS 'imstestl.ims1.temp.ps'"]) - for result in results.contacted.values(): - assert result.get("output")[0].get("rc") == 0 - assert result.get("changed") is True - - -# The positive path test -# tests that single command works as well -def test_zos_tso_single_command_auth_command_listds(ansible_zos_module): - hosts = ansible_zos_module - results = hosts.all.zos_tso_command(commands="LISTDS 'imstestl.ims1.temp.ps'") - for result in results.contacted.values(): - assert result.get("output")[0].get("rc") == 0 + results_allocate = hosts.all.zos_tso_command(commands=command_string) + # Validate the correct allocation of dataset + for result in results_allocate.contacted.values(): + for item in result.get("output"): + assert item.get("rc") == 0 assert result.get("changed") is True - - -# The positive path test -# tests that single command works as well with alias -def test_zos_tso_command_auth_command_listds_using_alias(ansible_zos_module): - hosts = ansible_zos_module - results = hosts.all.zos_tso_command(command=["LISTDS 'imstestl.ims1.temp.ps'"]) + # Validate listds of datasets and validate LISTDS using alias param 'command' of auth command + results = hosts.all.zos_tso_command(commands=["LISTDS '{0}'".format(DEFAULT_TEMP_DATASET)]) for result in results.contacted.values(): - assert result.get("output")[0].get("rc") == 0 + for item in result.get("output"): + assert item.get("rc") == 0 assert result.get("changed") is True - - -# The positive path test -# tests that alias "command" works -def test_zos_tso_single_command_auth_command_listds_using_alias(ansible_zos_module): - hosts = ansible_zos_module - results = hosts.all.zos_tso_command(command="LISTDS 'imstestl.ims1.temp.ps'") + # Validate LISTDS using alias param 'command' + results = hosts.all.zos_tso_command(command="LISTDS '{0}'".format(DEFAULT_TEMP_DATASET)) for result in results.contacted.values(): - assert result.get("output")[0].get("rc") == 0 + for item in result.get("output"): + assert item.get("rc") == 0 assert result.get("changed") is True - - -# The positive path test -def test_zos_tso_command_unauth_command_listcat(ansible_zos_module): - hosts = ansible_zos_module + # Validate LISTCAT command and an unauth command results = hosts.all.zos_tso_command( - commands=["LISTCAT ENT('imstestl.ims1.temp.ps')"] + commands=["LISTCAT ENT('{0}')".format(DEFAULT_TEMP_DATASET)] ) for result in results.contacted.values(): - assert result.get("output")[0].get("rc") == 0 + for item in result.get("output"): + assert item.get("rc") == 0 assert result.get("changed") is True - - -# The positive path test -def test_zos_tso_command_both_unauth_and_auth_command(ansible_zos_module): - hosts = ansible_zos_module - results = hosts.all.zos_tso_command(commands=["delete 'imstestl.ims1.temp.ps'"]) + # Validate remove dataset + results = hosts.all.zos_tso_command(commands=["delete '{0}'".format(DEFAULT_TEMP_DATASET)]) for result in results.contacted.values(): - assert result.get("output")[0].get("rc") == 0 + for item in result.get("output"): + assert item.get("rc") == 0 assert result.get("changed") is True - - -# The failure path test -# the dataset has be deleted. -def test_zos_tso_command_valid_command_failed_as_has_been_deleted(ansible_zos_module): - hosts = ansible_zos_module - results = hosts.all.zos_tso_command(commands=["delete 'imstestl.ims1.temp.ps'"]) + # Expect the tso_command to fail here because the previous command will have already deleted the data set + # Validate data set was removed by previous call + results = hosts.all.zos_tso_command(commands=["delete '{0}'".format(DEFAULT_TEMP_DATASET)]) for result in results.contacted.values(): - assert result.get("output")[0].get("rc") == 8 + for item in result.get("output"): + assert item.get("rc") == 8 assert result.get("changed") is False @@ -137,6 +99,8 @@ def test_zos_tso_command_empty_command(ansible_zos_module): hosts = ansible_zos_module results = hosts.all.zos_tso_command(commands=[""]) for result in results.contacted.values(): + for item in result.get("output"): + assert item.get("rc") == 255 assert result.get("changed") is False @@ -146,7 +110,8 @@ def test_zos_tso_command_invalid_command(ansible_zos_module): hosts = ansible_zos_module results = hosts.all.zos_tso_command(commands=["xxxxxx"]) for result in results.contacted.values(): - assert result.get("output")[0].get("rc") == 255 + for item in result.get("output"): + assert item.get("rc") == 255 assert result.get("changed") is False @@ -158,7 +123,10 @@ def test_zos_tso_command_multiple_commands(ansible_zos_module): results = hosts.all.zos_tso_command(commands=commands_list) for result in results.contacted.values(): for item in result.get("output"): - assert item.get("rc") == 0 + if item.get("command") == "LU omvsadm": + assert item.get("rc") == 0 + if item.get("command") == "LISTGRP": + assert item.get("rc") == 0 assert result.get("changed") is True @@ -169,6 +137,5 @@ def test_zos_tso_command_maxrc(ansible_zos_module): results = hosts.all.zos_tso_command(commands=["LISTDSD DATASET('HLQ.DATA.SET') ALL GENERIC"],max_rc=4) for result in results.contacted.values(): for item in result.get("output"): - print( item ) assert item.get("rc") < 5 assert result.get("changed") is True From 04880d25c137c60ed9e0965e5848c07b641d4113 Mon Sep 17 00:00:00 2001 From: Demetri <dimatos@gmail.com> Date: Sat, 22 Jul 2023 09:23:27 -0700 Subject: [PATCH 145/495] Update make file doc generation with pre and post scripts for a subset of modules. (#906) * Update make file and add scripts to correct doc gen Signed-off-by: ddimatos <dimatos@gmail.com> * Update doc with missing definition of timestamp Signed-off-by: ddimatos <dimatos@gmail.com> * Correct doc to remove colon to prevent doc gen warning Signed-off-by: ddimatos <dimatos@gmail.com> * Update doc base on minor module doc changes Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> --- .../fragments/906-update-doc-generation.yml | 4 ++ docs/Makefile | 9 ++++ docs/scripts/post-template.sh | 23 ++++++++ docs/scripts/post-zos_apf.sh | 32 ++++++++++++ docs/scripts/pre-template.sh | 32 ++++++++++++ docs/source/modules/zos_apf.rst | 4 +- docs/source/modules/zos_archive.rst | 3 +- docs/source/modules/zos_data_set.rst | 4 +- docs/source/modules/zos_job_output.rst | 52 +++++++++++++++++++ docs/source/modules/zos_job_query.rst | 32 ++++++++---- docs/source/modules/zos_job_submit.rst | 33 ++++++++---- docs/source/modules/zos_unarchive.rst | 1 - plugins/modules/zos_apf.py | 2 + plugins/modules/zos_archive.py | 2 +- 14 files changed, 208 insertions(+), 25 deletions(-) create mode 100644 changelogs/fragments/906-update-doc-generation.yml create mode 100755 docs/scripts/post-template.sh create mode 100755 docs/scripts/post-zos_apf.sh create mode 100755 docs/scripts/pre-template.sh diff --git a/changelogs/fragments/906-update-doc-generation.yml b/changelogs/fragments/906-update-doc-generation.yml new file mode 100644 index 000000000..f2e5ae316 --- /dev/null +++ b/changelogs/fragments/906-update-doc-generation.yml @@ -0,0 +1,4 @@ +trivial: +- make - Current doc generation requires manual intervention, this change will + allow for doc generation without any manual intervention and removes warnings. + (https://github.com/ansible-collections/ibm_zos_core/pull/906) \ No newline at end of file diff --git a/docs/Makefile b/docs/Makefile index 573448a66..5f412c510 100644 --- a/docs/Makefile +++ b/docs/Makefile @@ -172,9 +172,18 @@ module-doc: mv ../plugins/modules/__init__.py ../plugins/modules/__init__.py.skip; \ fi + @echo "Replacing string based carriage returns with literal escaped to produce sphynx consumable RST." + scripts/pre-template.sh + @echo "Generating ReStructuredText for all ansible modules found at '../plugins/modules/' to 'source/modules'." @ansible-doc-extractor --template templates/module.rst.j2 source/modules ../plugins/modules/*.py + @echo "Updating zos_apf file." + scripts/post-zos_apf.sh + + @echo "Reverting edited source file." + scripts/post-template.sh + @if test -e ../plugins/modules/__init__.py.skip; \ echo "Rename file '../plugins/modules/__init__.py.skip' back to ../plugins/modules/__init__.py.'"; \ then mv -f ../plugins/modules/__init__.py.skip ../plugins/modules/__init__.py; \ diff --git a/docs/scripts/post-template.sh b/docs/scripts/post-template.sh new file mode 100755 index 000000000..73175ec35 --- /dev/null +++ b/docs/scripts/post-template.sh @@ -0,0 +1,23 @@ +#!/bin/sh + +################################################################################ +# © Copyright IBM Corporation 2020 +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +################################################################################ + +################################################################################ +# This scripts actions called before after generating RST such that the +# original template.py is put back in its original state. +################################################################################ + +# Obtain the galaxy collection installion up to the template.py located on the host +template_doc_source=`ansible-config dump|grep DEFAULT_MODULE_PATH| cut -d'=' -f2|sed 's/[][]//g' | tr -d \'\" |sed 's/modules/doc_fragments\/template.py/g'` +mv $template_doc_source.tmp $template_doc_source \ No newline at end of file diff --git a/docs/scripts/post-zos_apf.sh b/docs/scripts/post-zos_apf.sh new file mode 100755 index 000000000..a74207e48 --- /dev/null +++ b/docs/scripts/post-zos_apf.sh @@ -0,0 +1,32 @@ +#!/bin/sh + +################################################################################ +# © Copyright IBM Corporation 2020 +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +################################################################################ + +################################################################################ +# This scripts actions called after generating RST and before generating Html. +# This script corrects the RST so that correct HTMl can be generated removing the +# warning: +# ibm_zos_core/docs/source/modules.rst:23: WARNING: toctree glob pattern 'modules/*' didn't match any documents +# This script will replace: +# " | **default**: /* {mark} ANSIBLE MANAGED BLOCK <timestamp> */" +# To this: +# " | **default**: /* {mark} ANSIBLE MANAGED BLOCK <timestamp> \*/" +################################################################################ +set -x +SCRIPT_DIR=`dirname "$0"` +CURR_PATH=`pwd` +# Delete any temporary index RST +if [[ -f $CURR_PATH/source/modules/zos_apf.rst ]]; then + sed -i '' "s/\> \\*\//\> \\\*\//g" $CURR_PATH/source/modules/zos_apf.rst +fi diff --git a/docs/scripts/pre-template.sh b/docs/scripts/pre-template.sh new file mode 100755 index 000000000..8c627e0a5 --- /dev/null +++ b/docs/scripts/pre-template.sh @@ -0,0 +1,32 @@ +#!/bin/sh + +################################################################################ +# © Copyright IBM Corporation 2020 +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +################################################################################ + +################################################################################ +# This scripts actions called before generating RST, this scripts leaves the +# "\n", "\r", "\r\n" in the template.py doc_fragment so that ansible linting +# test will pass such that the doc and module are match. Later this script will +# update the above strings to liters with an esacpe, for example "\n" --> '\\n'. +# This allows for RST to be generated that is usable by the ansible-doc-extractor +# and Jinja2 template, and later sphinx html. +# This requries that the ansible collection be prebuilt so that it can find +# the template.py within the collection (not within the git project). Thus run +# './ac --ac-build' before the make file that builds doc. +################################################################################ + +template_doc_source=`ansible-config dump|grep DEFAULT_MODULE_PATH| cut -d'=' -f2|sed 's/[][]//g' | tr -d \'\" |sed 's/modules/doc_fragments\/template.py/g'` +cp $template_doc_source $template_doc_source.tmp +sed -i '' "s/\"\\\\n\"/'\\\\\\\\n'/g" $template_doc_source +sed -i '' "s/\"\\\\r\"/'\\\\\\\\r'/g" $template_doc_source +sed -i '' "s/\"\\\\r\\\\n\"/'\\\\\\\\r\\\\\\\\n'/g" $template_doc_source diff --git a/docs/source/modules/zos_apf.rst b/docs/source/modules/zos_apf.rst index c1f3c3fd9..195b34611 100644 --- a/docs/source/modules/zos_apf.rst +++ b/docs/source/modules/zos_apf.rst @@ -128,9 +128,11 @@ persistent ``{mark}`` length may not exceed 72 characters. + The timestamp (<timestamp>) used in the default marker follows the '+%Y%m%d-%H%M%S' date format + | **required**: False | **type**: str - | **default**: /* {mark} ANSIBLE MANAGED BLOCK <timestamp> */ + | **default**: /* {mark} ANSIBLE MANAGED BLOCK <timestamp> \*/ backup diff --git a/docs/source/modules/zos_archive.rst b/docs/source/modules/zos_archive.rst index bb4383f74..221de41ec 100644 --- a/docs/source/modules/zos_archive.rst +++ b/docs/source/modules/zos_archive.rst @@ -26,7 +26,6 @@ Synopsis - Parameters ---------- @@ -122,7 +121,7 @@ dest exclude Remote absolute path, glob, or list of paths, globs or data set name patterns for the file, files or data sets to exclude from path list and glob expansion. - Patterns (wildcards) can contain one of the following: ?, *. + Patterns (wildcards) can contain one of the following, `?`, `*`. * matches everything. diff --git a/docs/source/modules/zos_data_set.rst b/docs/source/modules/zos_data_set.rst index 9e051bece..ddcc97a8b 100644 --- a/docs/source/modules/zos_data_set.rst +++ b/docs/source/modules/zos_data_set.rst @@ -146,7 +146,7 @@ record_format | **required**: False | **type**: str | **default**: FB - | **choices**: FB, VB, FBA, VBA, U + | **choices**: FB, VB, FBA, VBA, U, F sms_storage_class @@ -417,7 +417,7 @@ batch | **required**: False | **type**: str | **default**: FB - | **choices**: FB, VB, FBA, VBA, U + | **choices**: FB, VB, FBA, VBA, U, F sms_storage_class diff --git a/docs/source/modules/zos_job_output.rst b/docs/source/modules/zos_job_output.rst index bf57fb03a..76ae0364c 100644 --- a/docs/source/modules/zos_job_output.rst +++ b/docs/source/modules/zos_job_output.rst @@ -217,9 +217,14 @@ jobs "stepname": "STEP0001" } ], + "duration": 0, + "job_class": "R", "job_id": "JOB00134", "job_name": "HELLO", "owner": "OMVSADM", + "priority": "1", + "program_name": "IEBGENER", + "queue_position": "58", "ret_code": { "code": 0, "msg": "CC 0000", @@ -265,6 +270,18 @@ jobs | **type**: str | **sample**: JOB + creation_date + Date, local to the target system, when the job was created. + + | **type**: str + | **sample**: 2023-05-04 + + creation_time + Time, local to the target system, when the job was created. + + | **type**: str + | **sample**: 14:15:00 + ddnames Data definition names. @@ -334,6 +351,41 @@ jobs ] + job_class + Job class for this job. + + | **type**: str + | **sample**: A + + svc_class + Service class for this job. + + | **type**: str + | **sample**: C + + priority + A numeric indicator of the job priority assigned through JES. + + | **type**: int + | **sample**: 4 + + asid + The address Space Identifier (ASID) that is a unique descriptor for the job address space. Zero if not active. + + | **type**: int + + queue_position + The position within the job queue where the jobs resides. + + | **type**: int + | **sample**: 3 + + program_name + The name of the program found in the job's last completed step found in the PGM parameter. + + | **type**: str + | **sample**: IEBGENER + ret_code Return code output collected from job log. diff --git a/docs/source/modules/zos_job_query.rst b/docs/source/modules/zos_job_query.rst index 40bd7b353..519f5801a 100644 --- a/docs/source/modules/zos_job_query.rst +++ b/docs/source/modules/zos_job_query.rst @@ -134,7 +134,8 @@ jobs [ { "asid": 0, - "creation_datetime": "20230503T121300", + "creation_date": "2023-05-03", + "creation_time": "12:13:00", "job_class": "K", "job_id": "JOB01427", "job_name": "LINKJOB", @@ -146,7 +147,8 @@ jobs }, { "asid": 4, - "creation_datetime": "20230503T121400", + "creation_date": "2023-05-03", + "creation_time": "12:14:00", "job_class": "A", "job_id": "JOB16577", "job_name": "LINKCBL", @@ -245,13 +247,13 @@ jobs job_class - Letter indicating job class for this job. + Job class for this job. | **type**: str | **sample**: A svc_class - Character indicating service class for this job. + Service class for this job. | **type**: str | **sample**: C @@ -263,22 +265,34 @@ jobs | **sample**: 4 asid - An identifier created by JES. + The address Space Identifier (ASID) that is a unique descriptor for the job address space. Zero if not active. | **type**: int - creation_datetime - Date and time, local to the target system, when the job was created. + creation_date + Date, local to the target system, when the job was created. | **type**: str - | **sample**: 20230504T141500 + | **sample**: 2023-05-04 + + creation_time + Time, local to the target system, when the job was created. + + | **type**: str + | **sample**: 14:15:00 queue_position - Integer of the position within the job queue where this jobs resided. + The position within the job queue where the jobs resides. | **type**: int | **sample**: 3 + program_name + The name of the program found in the job's last completed step found in the PGM parameter. + + | **type**: str + | **sample**: IEBGENER + message Message returned on failure. diff --git a/docs/source/modules/zos_job_submit.rst b/docs/source/modules/zos_job_submit.rst index 9714f2766..8d5b8ecef 100644 --- a/docs/source/modules/zos_job_submit.rst +++ b/docs/source/modules/zos_job_submit.rst @@ -353,7 +353,8 @@ jobs "asid": 0, "class": "K", "content_type": "JOB", - "creation_datetime": "20230503T121300", + "creation_date": "2023-05-03", + "creation_time": "12:13:00", "ddnames": [ { "byte_count": "677", @@ -553,6 +554,7 @@ jobs "job_name": "DBDGEN00", "owner": "OMVSADM", "priority": 1, + "program_name": "IEBGENER", "queue_position": 3, "ret_code": { "code": 0, @@ -567,7 +569,8 @@ jobs ] }, "subsystem": "STL1", - "svc_class": "?" + "svc_class": "?", + "system": "STL1" } ] @@ -722,13 +725,13 @@ jobs job_class - Letter indicating job class for this job. + Job class for this job. | **type**: str | **sample**: A svc_class - Character indicating service class for this job. + Service class for this job. | **type**: str | **sample**: C @@ -740,22 +743,34 @@ jobs | **sample**: 4 asid - An identifier created by JES. + The address Space Identifier (ASID) that is a unique descriptor for the job address space. Zero if not active. | **type**: int - creation_datetime - Date and time, local to the target system, when the job was created. + creation_date + Date, local to the target system, when the job was created. | **type**: str - | **sample**: 20230504T141500 + | **sample**: 2023-05-04 + + creation_time + Time, local to the target system, when the job was created. + + | **type**: str + | **sample**: 14:15:00 queue_position - Integer of the position within the job queue where this jobs resided. + The position within the job queue where the jobs resides. | **type**: int | **sample**: 3 + program_name + The name of the program found in the job's last completed step found in the PGM parameter. + + | **type**: str + | **sample**: IEBGENER + message This option is being deprecated diff --git a/docs/source/modules/zos_unarchive.rst b/docs/source/modules/zos_unarchive.rst index ecced2362..ae3b92516 100644 --- a/docs/source/modules/zos_unarchive.rst +++ b/docs/source/modules/zos_unarchive.rst @@ -26,7 +26,6 @@ Synopsis - Parameters ---------- diff --git a/plugins/modules/zos_apf.py b/plugins/modules/zos_apf.py index 072deb29b..d3a945d1b 100644 --- a/plugins/modules/zos_apf.py +++ b/plugins/modules/zos_apf.py @@ -118,6 +118,8 @@ - Using a custom marker without the C({mark}) variable may result in the block being repeatedly inserted on subsequent playbook runs. - C({mark}) length may not exceed 72 characters. + - The timestamp (<timestamp>) used in the default marker + follows the '+%Y%m%d-%H%M%S' date format required: False type: str default: "/* {mark} ANSIBLE MANAGED BLOCK <timestamp> */" diff --git a/plugins/modules/zos_archive.py b/plugins/modules/zos_archive.py index 8b887e1bf..0ace2b608 100644 --- a/plugins/modules/zos_archive.py +++ b/plugins/modules/zos_archive.py @@ -117,7 +117,7 @@ - Remote absolute path, glob, or list of paths, globs or data set name patterns for the file, files or data sets to exclude from path list and glob expansion. - - "Patterns (wildcards) can contain one of the following: ?, *." + - "Patterns (wildcards) can contain one of the following, `?`, `*`." - "* matches everything." - "? matches any single character." type: list From 2ef7147f034404100dfeac1e4844d2573e0ea85f Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Wed, 26 Jul 2023 14:18:40 -0400 Subject: [PATCH 146/495] Enhance/911/Improve-job-query-performance (#911) * Initial change to manage 1.2.4 column changes for job listing * Improved documentation, code validated with playbooks. * Updated changelog fragment with PR * corrected duplicate entry in output documentation * Changes to comments and field names as per PR 841 Update to in-code documentation Expansion of changelog fragment * correction to datetime processing * fixing sample data * changed job to pass column options to disable the program_name column tested against fresh zoau build (881) * removed 'testing' comment * updated re-request call to use the kwargs function. note: this is for record-keeping... about to rebuild this section * Modified call chain in job:status to not pull dd's, making it faster added new 'don't get dd' variable in get_job_output internal * corrected pep8 issues (spaces and parentheses) * Addressing requested changes: eliminate double negative Added new changelog fragment, corrected link to PR Updated return documentation to show dependancy on zoau version * Correct grammar in changelog Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: ddimatos <dimatos@gmail.com> --- .../911-Improve-job-query-performance.yml | 4 + plugins/module_utils/job.py | 197 +++++++++--------- plugins/modules/zos_job_output.py | 1 + plugins/modules/zos_job_query.py | 1 + plugins/modules/zos_job_submit.py | 1 + 5 files changed, 106 insertions(+), 98 deletions(-) create mode 100644 changelogs/fragments/911-Improve-job-query-performance.yml diff --git a/changelogs/fragments/911-Improve-job-query-performance.yml b/changelogs/fragments/911-Improve-job-query-performance.yml new file mode 100644 index 000000000..a6722636e --- /dev/null +++ b/changelogs/fragments/911-Improve-job-query-performance.yml @@ -0,0 +1,4 @@ +minor_changes: +- zos_job_query - unnecessary calls were made to find a jobs DDs that incurred unnecessary overhead. + This change removes those resulting in a performance increase in job related queries. + (https://github.com/ansible-collections/ibm_zos_core/pull/911) diff --git a/plugins/module_utils/job.py b/plugins/module_utils/job.py index 391583b75..3a9c3b35e 100644 --- a/plugins/module_utils/job.py +++ b/plugins/module_utils/job.py @@ -137,6 +137,8 @@ def job_status(job_id=None, owner=None, job_name=None, dd_name=None): owner {str} -- The owner of the job (default: {None}) job_name {str} -- The job name search for (default: {None}) dd_name {str} -- If populated, return ONLY this DD in the job list (default: {None}) + note: no routines call job_status with dd_name, so we are speeding this routine with + 'dd_scan=False' Returns: list[dict] -- The status information for a list of jobs matching search criteria. @@ -148,26 +150,24 @@ def job_status(job_id=None, owner=None, job_name=None, dd_name=None): job_id=dict(arg_type="str"), owner=dict(arg_type="qualifier_pattern"), job_name=dict(arg_type="str"), - dd_name=dict(arg_type="str"), ) parser = BetterArgParser(arg_defs) parsed_args = parser.parse_args( - {"job_id": job_id, "owner": owner, "job_name": job_name, "dd_name": dd_name} + {"job_id": job_id, "owner": owner, "job_name": job_name} ) job_id = parsed_args.get("job_id") or "*" job_name = parsed_args.get("job_name") or "*" owner = parsed_args.get("owner") or "*" - dd_name = parsed_args.get("dd_name") - job_status_result = _get_job_status(job_id, owner, job_name, dd_name) + job_status_result = _get_job_status(job_id=job_id, owner=owner, job_name=job_name, dd_scan=False) if len(job_status_result) == 0: job_id = "" if job_id == "*" else job_id job_name = "" if job_name == "*" else job_name owner = "" if owner == "*" else owner - job_status_result = _get_job_status(job_id, owner, job_name, dd_name) + job_status_result = _get_job_status(job_id=job_id, owner=owner, job_name=job_name, dd_scan=False) return job_status_result @@ -195,7 +195,7 @@ def _parse_steps(job_str): return stp -def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, duration=0, timeout=0, start_time=timer()): +def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=True, duration=0, timeout=0, start_time=timer()): if job_id == "*": job_id_temp = None else: @@ -210,14 +210,20 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, duration= # creationdatetime=job[9] queueposition=job[10] # starting in zoau 1.2.4, program_name[11] was added. + # Testing has shown that the program_name impact is minor, so we're removing that option + # This will also help maintain compatibility with 1.2.3 + final_entries = [] - entries = listing(job_id=job_id_temp) + kwargs = { + "job_id": job_id_temp, + } + entries = listing(**kwargs) while ((entries is None or len(entries) == 0) and duration <= timeout): current_time = timer() duration = round(current_time - start_time) sleep(1) - entries = listing(job_id=job_id_temp) + entries = listing(**kwargs) if entries: for entry in entries: @@ -249,15 +255,7 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, duration= # this section only works on zoau 1.2.3/+ vvv - if ZOAU_API_VERSION > "1.2.2" and ZOAU_API_VERSION < "1.2.4": - job["job_class"] = entry.job_class - job["svc_class"] = entry.svc_class - job["priority"] = entry.priority - job["asid"] = entry.asid - job["creation_date"] = str(entry.creation_datetime)[0:10] - job["creation_time"] = str(entry.creation_datetime)[12:] - job["queue_position"] = entry.queue_position - elif ZOAU_API_VERSION >= "1.2.4": + if ZOAU_API_VERSION > "1.2.2": job["job_class"] = entry.job_class job["svc_class"] = entry.svc_class job["priority"] = entry.priority @@ -265,6 +263,7 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, duration= job["creation_date"] = str(entry.creation_datetime)[0:10] job["creation_time"] = str(entry.creation_datetime)[12:] job["queue_position"] = entry.queue_position + if ZOAU_API_VERSION >= "1.2.4": job["program_name"] = entry.program_name # this section only works on zoau 1.2.3/+ ^^^ @@ -274,93 +273,95 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, duration= job["ret_code"]["steps"] = [] job["ddnames"] = [] - list_of_dds = list_dds(entry.id) - while ((list_of_dds is None or len(list_of_dds) == 0) and duration <= timeout): - current_time = timer() - duration = round(current_time - start_time) - sleep(1) + if dd_scan: list_of_dds = list_dds(entry.id) + while ((list_of_dds is None or len(list_of_dds) == 0) and duration <= timeout): + current_time = timer() + duration = round(current_time - start_time) + sleep(1) + list_of_dds = list_dds(entry.id) + + for single_dd in list_of_dds: + dd = {} + + # If dd_name not None, only that specific dd_name should be returned + if dd_name is not None: + if dd_name not in single_dd["dataset"]: + continue + else: + dd["ddname"] = single_dd["dataset"] + + if "dataset" not in single_dd: + continue - for single_dd in list_of_dds: - dd = {} + if "recnum" in single_dd: + dd["record_count"] = single_dd["recnum"] + else: + dd["record_count"] = None - # If dd_name not None, only that specific dd_name should be returned - if dd_name is not None: - if dd_name not in single_dd["dataset"]: - continue + if "dsid" in single_dd: + dd["id"] = single_dd["dsid"] else: - dd["ddname"] = single_dd["dataset"] + dd["id"] = "?" - if "dataset" not in single_dd: - continue + if "stepname" in single_dd: + dd["stepname"] = single_dd["stepname"] + else: + dd["stepname"] = None + + if "procstep" in single_dd: + dd["procstep"] = single_dd["procstep"] + else: + dd["proctep"] = None - if "recnum" in single_dd: - dd["record_count"] = single_dd["recnum"] - else: - dd["record_count"] = None - - if "dsid" in single_dd: - dd["id"] = single_dd["dsid"] - else: - dd["id"] = "?" - - if "stepname" in single_dd: - dd["stepname"] = single_dd["stepname"] - else: - dd["stepname"] = None - - if "procstep" in single_dd: - dd["procstep"] = single_dd["procstep"] - else: - dd["proctep"] = None - - if "length" in single_dd: - dd["byte_count"] = single_dd["length"] - else: - dd["byte_count"] = 0 - - tmpcont = None - if "stepname" in single_dd: - if "dataset" in single_dd: - tmpcont = read_output( - entry.id, single_dd["stepname"], single_dd["dataset"]) - - dd["content"] = tmpcont.split("\n") - job["ret_code"]["steps"].extend(_parse_steps(tmpcont)) - - job["ddnames"].append(dd) - if len(job["class"]) < 1: - if "- CLASS " in tmpcont: - tmptext = tmpcont.split("- CLASS ")[1] - job["class"] = tmptext.split(" ")[0] - - if len(job["system"]) < 1: - if "-- S Y S T E M " in tmpcont: - tmptext = tmpcont.split("-- S Y S T E M ")[1] - job["system"] = (tmptext.split( - "--", 1)[0]).replace(" ", "") - - if len(job["subsystem"]) < 1: - if "-- N O D E " in tmpcont: - tmptext = tmpcont.split("-- N O D E ")[1] - job["subsystem"] = (tmptext.split("\n")[ - 0]).replace(" ", "") - - # Extract similar: "19.49.44 JOB06848 IEFC452I DOCEASYT - JOB NOT RUN - JCL ERROR 029 " - # then further reduce down to: 'JCL ERROR 029' - if job["ret_code"]["msg_code"] == "?": - if "JOB NOT RUN -" in tmpcont: - tmptext = tmpcont.split( - "JOB NOT RUN -")[1].split("\n")[0] - job["ret_code"]["msg"] = tmptext.strip() - job["ret_code"]["msg_code"] = None - job["ret_code"]["code"] = None - if len(list_of_dds) > 0: - # The duration should really only be returned for job submit but the code - # is used job_output as well, for now we can ignore this point unless - # we want to offer a wait_time_s for job output which might be reasonable. - job["duration"] = duration - final_entries.append(job) + if "length" in single_dd: + dd["byte_count"] = single_dd["length"] + else: + dd["byte_count"] = 0 + + tmpcont = None + if "stepname" in single_dd: + if "dataset" in single_dd: + tmpcont = read_output( + entry.id, single_dd["stepname"], single_dd["dataset"]) + + dd["content"] = tmpcont.split("\n") + job["ret_code"]["steps"].extend(_parse_steps(tmpcont)) + + job["ddnames"].append(dd) + if len(job["class"]) < 1: + if "- CLASS " in tmpcont: + tmptext = tmpcont.split("- CLASS ")[1] + job["class"] = tmptext.split(" ")[0] + + if len(job["system"]) < 1: + if "-- S Y S T E M " in tmpcont: + tmptext = tmpcont.split("-- S Y S T E M ")[1] + job["system"] = (tmptext.split( + "--", 1)[0]).replace(" ", "") + + if len(job["subsystem"]) < 1: + if "-- N O D E " in tmpcont: + tmptext = tmpcont.split("-- N O D E ")[1] + job["subsystem"] = (tmptext.split("\n")[ + 0]).replace(" ", "") + + # Extract similar: "19.49.44 JOB06848 IEFC452I DOCEASYT - JOB NOT RUN - JCL ERROR 029 " + # then further reduce down to: 'JCL ERROR 029' + if job["ret_code"]["msg_code"] == "?": + if "JOB NOT RUN -" in tmpcont: + tmptext = tmpcont.split( + "JOB NOT RUN -")[1].split("\n")[0] + job["ret_code"]["msg"] = tmptext.strip() + job["ret_code"]["msg_code"] = None + job["ret_code"]["code"] = None + if len(list_of_dds) > 0: + # The duration should really only be returned for job submit but the code + # is used job_output as well, for now we can ignore this point unless + # we want to offer a wait_time_s for job output which might be reasonable. + job["duration"] = duration + + final_entries.append(job) if not final_entries: final_entries = _job_not_found(job_id, owner, job_name, "unavailable") return final_entries diff --git a/plugins/modules/zos_job_output.py b/plugins/modules/zos_job_output.py index 636698b3b..3803acc2c 100644 --- a/plugins/modules/zos_job_output.py +++ b/plugins/modules/zos_job_output.py @@ -214,6 +214,7 @@ program_name: description: The name of the program found in the job's last completed step found in the PGM parameter. + Returned when Z Open Automation Utilities (ZOAU) is 1.2.4 or later. type: str sample: "IEBGENER" ret_code: diff --git a/plugins/modules/zos_job_query.py b/plugins/modules/zos_job_query.py index 431e06f02..283467766 100644 --- a/plugins/modules/zos_job_query.py +++ b/plugins/modules/zos_job_query.py @@ -213,6 +213,7 @@ program_name: description: The name of the program found in the job's last completed step found in the PGM parameter. + Returned when Z Open Automation Utilities (ZOAU) is 1.2.4 or later. type: str sample: "IEBGENER" diff --git a/plugins/modules/zos_job_submit.py b/plugins/modules/zos_job_submit.py index 994f4147d..efdbd07d6 100644 --- a/plugins/modules/zos_job_submit.py +++ b/plugins/modules/zos_job_submit.py @@ -319,6 +319,7 @@ program_name: description: The name of the program found in the job's last completed step found in the PGM parameter. + Returned when Z Open Automation Utilities (ZOAU) is 1.2.4 or later. type: str sample: "IEBGENER" From 5005283753836c3c4660c7403b37648d581f450d Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Wed, 26 Jul 2023 12:57:17 -0700 Subject: [PATCH 147/495] Update Readme with new collection content Signed-off-by: ddimatos <dimatos@gmail.com> --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 5d48210a9..5cbd6fd98 100644 --- a/README.md +++ b/README.md @@ -12,8 +12,8 @@ executing operator commands, executing TSO commands, ping, querying operator actions, APF authorizing libraries, editing textual data in data sets or Unix System Services files, finding data sets, backing up and restoring data sets and -volumes, mounting file systems, running z/OS programs without JCL and -initializing volumes. +volumes, mounting file systems, running z/OS programs without JCL, +initializing volumes, archiving, unarchiving and templating with Jinja. Red Hat Ansible Certified Content for IBM Z From 1f0cb177c8ed2a0405ebbc120027b97ee15d2bd8 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Wed, 26 Jul 2023 13:09:07 -0700 Subject: [PATCH 148/495] Update copyright yrs Signed-off-by: ddimatos <dimatos@gmail.com> --- plugins/module_utils/zoau_version_checker.py | 2 +- plugins/modules/zos_job_output.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/plugins/module_utils/zoau_version_checker.py b/plugins/module_utils/zoau_version_checker.py index a5fff7196..c88dac481 100644 --- a/plugins/module_utils/zoau_version_checker.py +++ b/plugins/module_utils/zoau_version_checker.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2020 +# Copyright (c) IBM Corporation 2022, 2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/plugins/modules/zos_job_output.py b/plugins/modules/zos_job_output.py index 3803acc2c..ec4aa0313 100644 --- a/plugins/modules/zos_job_output.py +++ b/plugins/modules/zos_job_output.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019, 2020, 2022 +# Copyright (c) IBM Corporation 2019, 2020, 2022, 2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at From b4ef982b5570841a351dad4d4916ab604ede3227 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Wed, 26 Jul 2023 13:57:34 -0700 Subject: [PATCH 149/495] Generated doc updates Signed-off-by: ddimatos <dimatos@gmail.com> --- docs/source/modules/zos_blockinfile.rst | 2 +- docs/source/modules/zos_job_output.rst | 2 +- docs/source/modules/zos_job_query.rst | 2 +- docs/source/modules/zos_job_submit.rst | 2 +- docs/source/modules/zos_lineinfile.rst | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/docs/source/modules/zos_blockinfile.rst b/docs/source/modules/zos_blockinfile.rst index 3633620ad..e1e11486c 100644 --- a/docs/source/modules/zos_blockinfile.rst +++ b/docs/source/modules/zos_blockinfile.rst @@ -312,7 +312,7 @@ Return Values changed - Indicates if the source was modified + Indicates if the source was modified. Value of 1 represents `true`, otherwise `false`. | **returned**: success | **type**: bool diff --git a/docs/source/modules/zos_job_output.rst b/docs/source/modules/zos_job_output.rst index 76ae0364c..efea6ea2a 100644 --- a/docs/source/modules/zos_job_output.rst +++ b/docs/source/modules/zos_job_output.rst @@ -381,7 +381,7 @@ jobs | **sample**: 3 program_name - The name of the program found in the job's last completed step found in the PGM parameter. + The name of the program found in the job's last completed step found in the PGM parameter. Returned when Z Open Automation Utilities (ZOAU) is 1.2.4 or later. | **type**: str | **sample**: IEBGENER diff --git a/docs/source/modules/zos_job_query.rst b/docs/source/modules/zos_job_query.rst index 519f5801a..ea320dfc3 100644 --- a/docs/source/modules/zos_job_query.rst +++ b/docs/source/modules/zos_job_query.rst @@ -288,7 +288,7 @@ jobs | **sample**: 3 program_name - The name of the program found in the job's last completed step found in the PGM parameter. + The name of the program found in the job's last completed step found in the PGM parameter. Returned when Z Open Automation Utilities (ZOAU) is 1.2.4 or later. | **type**: str | **sample**: IEBGENER diff --git a/docs/source/modules/zos_job_submit.rst b/docs/source/modules/zos_job_submit.rst index 8d5b8ecef..e0fd8e2d1 100644 --- a/docs/source/modules/zos_job_submit.rst +++ b/docs/source/modules/zos_job_submit.rst @@ -766,7 +766,7 @@ jobs | **sample**: 3 program_name - The name of the program found in the job's last completed step found in the PGM parameter. + The name of the program found in the job's last completed step found in the PGM parameter. Returned when Z Open Automation Utilities (ZOAU) is 1.2.4 or later. | **type**: str | **sample**: IEBGENER diff --git a/docs/source/modules/zos_lineinfile.rst b/docs/source/modules/zos_lineinfile.rst index 89ebcc805..e352007df 100644 --- a/docs/source/modules/zos_lineinfile.rst +++ b/docs/source/modules/zos_lineinfile.rst @@ -269,7 +269,7 @@ Return Values changed - Indicates if the source was modified + Indicates if the source was modified. Value of 1 represents `true`, otherwise `false`. | **returned**: success | **type**: bool From 55740ea292776eef8375161fa32fa138fec1e0c3 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Wed, 26 Jul 2023 14:00:17 -0700 Subject: [PATCH 150/495] Update module doc to clear the boolean value comes back as 1 or 0 Signed-off-by: ddimatos <dimatos@gmail.com> --- plugins/modules/zos_blockinfile.py | 4 +++- plugins/modules/zos_lineinfile.py | 4 +++- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/plugins/modules/zos_blockinfile.py b/plugins/modules/zos_blockinfile.py index 014382f1e..7484d93ec 100644 --- a/plugins/modules/zos_blockinfile.py +++ b/plugins/modules/zos_blockinfile.py @@ -283,7 +283,9 @@ RETURN = r""" changed: - description: Indicates if the source was modified + description: + Indicates if the source was modified. + Value of 1 represents `true`, otherwise `false`. returned: success type: bool sample: 1 diff --git a/plugins/modules/zos_lineinfile.py b/plugins/modules/zos_lineinfile.py index c2a7a719c..6536509fd 100644 --- a/plugins/modules/zos_lineinfile.py +++ b/plugins/modules/zos_lineinfile.py @@ -242,7 +242,9 @@ RETURN = r""" changed: - description: Indicates if the source was modified + description: + Indicates if the source was modified. + Value of 1 represents `true`, otherwise `false`. returned: success type: bool sample: 1 From 438397e32b845e08579a21fcd352c017a36e0de5 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Wed, 26 Jul 2023 14:49:10 -0700 Subject: [PATCH 151/495] Move a fragment that was outside the changlog folder Signed-off-by: ddimatos <dimatos@gmail.com> --- changelogs/771-update-ansible-version.yaml | 7 ------- 1 file changed, 7 deletions(-) delete mode 100644 changelogs/771-update-ansible-version.yaml diff --git a/changelogs/771-update-ansible-version.yaml b/changelogs/771-update-ansible-version.yaml deleted file mode 100644 index 92354841b..000000000 --- a/changelogs/771-update-ansible-version.yaml +++ /dev/null @@ -1,7 +0,0 @@ -trivial: -- doc - Updated the documentation in the README and release_notes.rst to reflect - ansible, ansible-core, Automation Hub and z/OS version. - (https://github.com/ansible-collections/ibm_zos_core/pull/771) -- templates - Update the git issue templates with current and - future product versions. - (https://github.com/ansible-collections/ibm_zos_core/pull/771) \ No newline at end of file From 04bc4ab54dd435a5b45221af0e21a108da2c12a6 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Wed, 26 Jul 2023 14:56:11 -0700 Subject: [PATCH 152/495] Changelog fragment lint error corrections and summary added Signed-off-by: ddimatos <dimatos@gmail.com> --- .../fragments/696-zos-copy-remove-emergency-backup.yml | 2 +- changelogs/fragments/771-update-ansible-version.yaml | 7 +++++++ ...tion_attributes_had_hardcoded_type_and_recordformat.yml | 2 +- changelogs/fragments/840-redesign-test-cases.yml | 2 +- changelogs/fragments/v1.7.0-beta.1_summary.yml | 6 ++++++ 5 files changed, 16 insertions(+), 3 deletions(-) create mode 100644 changelogs/fragments/771-update-ansible-version.yaml create mode 100644 changelogs/fragments/v1.7.0-beta.1_summary.yml diff --git a/changelogs/fragments/696-zos-copy-remove-emergency-backup.yml b/changelogs/fragments/696-zos-copy-remove-emergency-backup.yml index b86a18d82..d9924cb2d 100644 --- a/changelogs/fragments/696-zos-copy-remove-emergency-backup.yml +++ b/changelogs/fragments/696-zos-copy-remove-emergency-backup.yml @@ -1,4 +1,4 @@ -enhancements: +major_changes: - zos_copy - Previously, backups were taken when force was set to false; whether or not a user specified this operation which caused allocation issues with space and permissions. This removes the automatic backup performed and diff --git a/changelogs/fragments/771-update-ansible-version.yaml b/changelogs/fragments/771-update-ansible-version.yaml new file mode 100644 index 000000000..92354841b --- /dev/null +++ b/changelogs/fragments/771-update-ansible-version.yaml @@ -0,0 +1,7 @@ +trivial: +- doc - Updated the documentation in the README and release_notes.rst to reflect + ansible, ansible-core, Automation Hub and z/OS version. + (https://github.com/ansible-collections/ibm_zos_core/pull/771) +- templates - Update the git issue templates with current and + future product versions. + (https://github.com/ansible-collections/ibm_zos_core/pull/771) \ No newline at end of file diff --git a/changelogs/fragments/824_Return_destination_attributes_had_hardcoded_type_and_recordformat.yml b/changelogs/fragments/824_Return_destination_attributes_had_hardcoded_type_and_recordformat.yml index 8f4246f85..5b4e14aa8 100644 --- a/changelogs/fragments/824_Return_destination_attributes_had_hardcoded_type_and_recordformat.yml +++ b/changelogs/fragments/824_Return_destination_attributes_had_hardcoded_type_and_recordformat.yml @@ -1,4 +1,4 @@ -bugfix: +bugfixes: - zos_copy - Module returned the dynamic values created with the same dataset type and record format. Fix validate the correct dataset type and record format of target created. diff --git a/changelogs/fragments/840-redesign-test-cases.yml b/changelogs/fragments/840-redesign-test-cases.yml index 8b9c2aee0..c998eeee4 100644 --- a/changelogs/fragments/840-redesign-test-cases.yml +++ b/changelogs/fragments/840-redesign-test-cases.yml @@ -1,7 +1,7 @@ trivial: - zos_lininfile - Adjust test cases to be in one document and clearer to follow. - zos_blockinfile - Adjust test cases to be in one document and clearer to follow. -bugfix: +bugfixes: - zos_blockinfile - Test case generate a data set that was not correctly removed. Changes delete the correct data set not only member. (https://github.com/ansible-collections/ibm_zos_core/pull/840) \ No newline at end of file diff --git a/changelogs/fragments/v1.7.0-beta.1_summary.yml b/changelogs/fragments/v1.7.0-beta.1_summary.yml new file mode 100644 index 000000000..727e3da75 --- /dev/null +++ b/changelogs/fragments/v1.7.0-beta.1_summary.yml @@ -0,0 +1,6 @@ +release_summary: | + Release Date: '2023-07-26' + This changelog describes all changes made to the modules and plugins included + in this collection. The release date is the date the changelog is created. + For additional details such as required dependencies and availability review + the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__ \ No newline at end of file From 68855465a17928ac56d594a954575f446b3be624 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Wed, 26 Jul 2023 16:04:26 -0700 Subject: [PATCH 153/495] Changelog generated Signed-off-by: ddimatos <dimatos@gmail.com> --- CHANGELOG.rst | 59 +++++++++++++++++-- changelogs/.plugin-cache.yaml | 12 +++- changelogs/changelog.yaml | 106 ++++++++++++++++++++++++++++++++++ 3 files changed, 172 insertions(+), 5 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 7cf358b23..826161e56 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -5,6 +5,57 @@ ibm.ibm_zos_core Release Notes .. contents:: Topics +v1.7.0-beta.1 +============= + +Release Summary +--------------- + +Release Date: '2023-07-26' +This changelog describes all changes made to the modules and plugins included +in this collection. The release date is the date the changelog is created. +For additional details such as required dependencies and availability review +the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__ + +Major Changes +------------- + +- zos_copy - Previously, backups were taken when force was set to false; whether or not a user specified this operation which caused allocation issues with space and permissions. This removes the automatic backup performed and reverts to the original logic in that backups must be initiated by the user. (https://github.com/ansible-collections/ibm_zos_core/pull/896) + +Minor Changes +------------- + +- Add support for Jinja2 templates in zos_copy and zos_job_submit when using local source files. (https://github.com/ansible-collections/ibm_zos_core/pull/667) +- zos_copy - Adds block_size, record_format, record_length, space_primary, space_secondary, space_type and type in the return output when the destination data set does not exist and has to be created by the module. (https://github.com/ansible-collections/ibm_zos_core/pull/773) +- zos_data_set - record format = 'F' has been added to support 'fixed' block records. This allows records that can use the entire block. (https://github.com/ansible-collections/ibm_zos_core/pull/821) +- zos_job_output - zoau added 'program_name' to their field output starting with v1.2.4. This enhancement checks for that version and passes the extra column through. (https://github.com/ansible-collections/ibm_zos_core/pull/841) +- zos_job_query - Adds new fields job_class, svc_class, priority, asid, creation_datetime, and queue_position to the return output when querying or submitting a job. Available when using ZOAU v1.2.3 or greater. (https://github.com/ansible-collections/ibm_zos_core/pull/778) +- zos_job_query - unnecessary calls were made to find a jobs DDs that incurred unnecessary overhead. This change removes those resulting in a performance increase in job related queries. (https://github.com/ansible-collections/ibm_zos_core/pull/911) +- zos_job_query - zoau added 'program_name' to their field output starting with v1.2.4. This enhancement checks for that version and passes the extra column through. (https://github.com/ansible-collections/ibm_zos_core/pull/841) +- zos_job_submit - zoau added 'program_name' to their field output starting with v1.2.4. This enhancement checks for that version and passes the extra column through. (https://github.com/ansible-collections/ibm_zos_core/pull/841) + +Bugfixes +-------- + +- module_utils - data_set.py - Reported a failure caused when cataloging a VSAM data set. Fix now corrects how VSAM data sets are cataloged. (https://github.com/ansible-collections/ibm_zos_core/pull/791). +- zos_blockinfile - Test case generate a data set that was not correctly removed. Changes delete the correct data set not only member. (https://github.com/ansible-collections/ibm_zos_core/pull/840) +- zos_copy - Module returned the dynamic values created with the same dataset type and record format. Fix validate the correct dataset type and record format of target created. (https://github.com/ansible-collections/ibm_zos_core/pull/824) +- zos_copy - Reported a false positive such that the response would have `changed=true` when copying from a source (src) or destination (dest) data set that was in use (DISP=SHR). This change now displays an appropriate error message and returns `changed=false`. (https://github.com/ansible-collections/ibm_zos_core/pull/794). +- zos_copy - Reported a warning about the use of _play_context.verbosity.This change corrects the module action to prevent the warning message. (https://github.com/ansible-collections/ibm_zos_core/pull/806). +- zos_copy - Test case for recursive encoding directories reported a UTF-8 failure. This change ensures proper test coverage for nested directories and file permissions. (https://github.com/ansible-collections/ibm_zos_core/pull/806). +- zos_copy - Zos_copy did not encode inner content inside subdirectories once the source was copied to the destination. Fix now encodes all content in a source directory, including subdirectories. (https://github.com/ansible-collections/ibm_zos_core/pull/772). +- zos_copy - kept permissions on target directory when copy overwrote files. The fix now set permissions when mode is given. (https://github.com/ansible-collections/ibm_zos_core/pull/795) +- zos_data_set - Reported a failure caused when `present=absent` for a VSAM data set leaving behind cluster components. Fix introduces a new logical flow that will evaluate the volumes, compare it to the provided value and if necessary catalog and delete. (https://github.com/ansible-collections/ibm_zos_core/pull/791). +- zos_fetch - Reported a warning about the use of _play_context.verbosity.This change corrects the module action to prevent the warning message. (https://github.com/ansible-collections/ibm_zos_core/pull/806). +- zos_job_output - Error message did not specify the job not found. Fix now specifies the job_id or job_name being searched to ensure more information is given back to the user. (https://github.com/ansible-collections/ibm_zos_core/pull/747) +- zos_operator - Reported a failure caused by unrelated error response. Fix now gives a transparent response of the operator to avoid false negatives. (https://github.com/ansible-collections/ibm_zos_core/pull/762). + +New Modules +----------- + +- ibm.ibm_zos_core.zos_archive - Archive files and data sets on z/OS. +- ibm.ibm_zos_core.zos_unarchive - Unarchive files and data sets in z/OS. + v1.6.0 ====== @@ -26,11 +77,11 @@ Minor Changes ------------- - Updated the text converter import from "from ansible.module_utils._text" to "from ansible.module_utils.common.text.converters" to remove warning".. warn Use ansible.module_utils.common.text.converters instead.". (https://github.com/ansible-collections/ibm_zos_core/pull/602) -- module_utils - job.py utility did not support positional wild card placement, this enhancement uses `fnmatch` logic to support wild cards. +- module_utils - job.py utility did not support positional wiled card placement, this enhancement uses `fnmatch` logic to support wild cards. - zos_copy - Fixed a bug where the module would change the mode for a directory when copying into it the contents of another. (https://github.com/ansible-collections/ibm_zos_core/pull/723) - zos_copy - was enhanced to keep track of modified members in a destination dataset, restoring them to their previous state in case of a failure. (https://github.com/ansible-collections/ibm_zos_core/pull/551) -- zos_data_set - add force parameter to enable member delete while PDS/e is in use (https://github.com/ansible-collections/ibm_zos_core/pull/718). -- zos_job_query - ansible module does not support positional wild card placement for `job_name` or `job_id`. This enhancement allows embedded wildcards throughout the `job_name` and `job_id`. (https://github.com/ansible-collections/ibm_zos_core/pull/721) +- zos_data_set - add force parameter to enable member delete while pdse is in use (https://github.com/ansible-collections/ibm_zos_core/pull/718). +- zos_job_query - ansible module does not support positional wild card placement for `job_name1 or `job_id`. This enhancement allows embedded wildcards throughout the `job_name` and `job_id`. (https://github.com/ansible-collections/ibm_zos_core/pull/721) - zos_lineinfile - would access data sets with exclusive access so no other task can read the data, this enhancement allows for a data set to be opened with a disposition set to share so that other tasks can access the data when option `force` is set to `true`. (https://github.com/ansible-collections/ibm_zos_core/pull/731) - zos_tso_command - was enhanced to accept `max_rc` as an option. This option allows a non-zero return code to succeed as a valid return code. (https://github.com/ansible-collections/ibm_zos_core/pull/666) @@ -39,7 +90,7 @@ Bugfixes - Fixed wrong error message when a USS source is not found, aligning with a similar error message from zos_blockinfile "{src} does not exist". - module_utils - data_set.py - Reported a failure caused when cataloging a VSAM data set. Fix now corrects how VSAM data sets are cataloged. (https://github.com/ansible-collections/ibm_zos_core/pull/816). -- zos_blockinfile - was unable to use double quotes which prevented some use cases and did not display an appropriate message. The fix now allows for double quotes to be used with the module. (https://github.com/ansible-collections/ibm_zos_core/pull/680) +- zos_blockinfile - was unable to use double quotes which prevented some use cases and did not display an approriate message. The fix now allows for double quotes to be used with the module. (https://github.com/ansible-collections/ibm_zos_core/pull/680) - zos_copy - Encoding normalization used to handle newlines in text files was applied to binary files too. Fix makes sure that binary files bypass this normalization. (https://github.com/ansible-collections/ibm_zos_core/pull/810) - zos_copy - Fixes a bug where files not encoded in IBM-1047 would trigger an error while computing the record length for a new destination dataset. Issue 664. (https://github.com/ansible-collections/ibm_zos_core/pull/743) - zos_copy - Fixes a bug where the code for fixing an issue with newlines in files (issue 599) would use the wrong encoding for normalization. Issue 678. (https://github.com/ansible-collections/ibm_zos_core/pull/743) diff --git a/changelogs/.plugin-cache.yaml b/changelogs/.plugin-cache.yaml index fbc11cf4b..3520dc55a 100644 --- a/changelogs/.plugin-cache.yaml +++ b/changelogs/.plugin-cache.yaml @@ -16,6 +16,11 @@ plugins: name: zos_apf namespace: '' version_added: 1.3.0 + zos_archive: + description: Archive files and data sets on z/OS. + name: zos_archive + namespace: '' + version_added: 1.7.0 zos_backup_restore: description: Backup and restore data sets and volumes name: zos_backup_restore @@ -106,6 +111,11 @@ plugins: name: zos_tso_command namespace: '' version_added: 1.1.0 + zos_unarchive: + description: Unarchive files and data sets in z/OS. + name: zos_unarchive + namespace: '' + version_added: 1.7.0 zos_volume_init: description: Initialize volumes or minidisks. name: zos_volume_init @@ -116,4 +126,4 @@ plugins: strategy: {} test: {} vars: {} -version: 1.6.0 +version: 1.7.0-beta.1 diff --git a/changelogs/changelog.yaml b/changelogs/changelog.yaml index 37049f8df..5f4da9de0 100644 --- a/changelogs/changelog.yaml +++ b/changelogs/changelog.yaml @@ -875,3 +875,109 @@ releases: name: zos_volume_init namespace: '' release_date: '2023-04-26' + 1.7.0-beta.1: + changes: + bugfixes: + - module_utils - data_set.py - Reported a failure caused when cataloging a VSAM + data set. Fix now corrects how VSAM data sets are cataloged. (https://github.com/ansible-collections/ibm_zos_core/pull/791). + - zos_blockinfile - Test case generate a data set that was not correctly removed. + Changes delete the correct data set not only member. (https://github.com/ansible-collections/ibm_zos_core/pull/840) + - zos_copy - Module returned the dynamic values created with the same dataset + type and record format. Fix validate the correct dataset type and record format + of target created. (https://github.com/ansible-collections/ibm_zos_core/pull/824) + - zos_copy - Reported a false positive such that the response would have `changed=true` + when copying from a source (src) or destination (dest) data set that was in + use (DISP=SHR). This change now displays an appropriate error message and + returns `changed=false`. (https://github.com/ansible-collections/ibm_zos_core/pull/794). + - zos_copy - Reported a warning about the use of _play_context.verbosity.This + change corrects the module action to prevent the warning message. (https://github.com/ansible-collections/ibm_zos_core/pull/806). + - zos_copy - Test case for recursive encoding directories reported a UTF-8 failure. + This change ensures proper test coverage for nested directories and file permissions. + (https://github.com/ansible-collections/ibm_zos_core/pull/806). + - zos_copy - Zos_copy did not encode inner content inside subdirectories once + the source was copied to the destination. Fix now encodes all content in a + source directory, including subdirectories. (https://github.com/ansible-collections/ibm_zos_core/pull/772). + - zos_copy - kept permissions on target directory when copy overwrote files. + The fix now set permissions when mode is given. (https://github.com/ansible-collections/ibm_zos_core/pull/795) + - zos_data_set - Reported a failure caused when `present=absent` for a VSAM + data set leaving behind cluster components. Fix introduces a new logical flow + that will evaluate the volumes, compare it to the provided value and if necessary + catalog and delete. (https://github.com/ansible-collections/ibm_zos_core/pull/791). + - zos_fetch - Reported a warning about the use of _play_context.verbosity.This + change corrects the module action to prevent the warning message. (https://github.com/ansible-collections/ibm_zos_core/pull/806). + - zos_job_output - Error message did not specify the job not found. Fix now + specifies the job_id or job_name being searched to ensure more information + is given back to the user. (https://github.com/ansible-collections/ibm_zos_core/pull/747) + - zos_operator - Reported a failure caused by unrelated error response. Fix + now gives a transparent response of the operator to avoid false negatives. + (https://github.com/ansible-collections/ibm_zos_core/pull/762). + major_changes: + - zos_copy - Previously, backups were taken when force was set to false; whether + or not a user specified this operation which caused allocation issues with + space and permissions. This removes the automatic backup performed and reverts + to the original logic in that backups must be initiated by the user. (https://github.com/ansible-collections/ibm_zos_core/pull/896) + minor_changes: + - Add support for Jinja2 templates in zos_copy and zos_job_submit when using + local source files. (https://github.com/ansible-collections/ibm_zos_core/pull/667) + - zos_copy - Adds block_size, record_format, record_length, space_primary, space_secondary, + space_type and type in the return output when the destination data set does + not exist and has to be created by the module. (https://github.com/ansible-collections/ibm_zos_core/pull/773) + - zos_data_set - record format = 'F' has been added to support 'fixed' block + records. This allows records that can use the entire block. (https://github.com/ansible-collections/ibm_zos_core/pull/821) + - zos_job_output - zoau added 'program_name' to their field output starting + with v1.2.4. This enhancement checks for that version and passes the extra + column through. (https://github.com/ansible-collections/ibm_zos_core/pull/841) + - zos_job_query - Adds new fields job_class, svc_class, priority, asid, creation_datetime, + and queue_position to the return output when querying or submitting a job. + Available when using ZOAU v1.2.3 or greater. (https://github.com/ansible-collections/ibm_zos_core/pull/778) + - zos_job_query - unnecessary calls were made to find a jobs DDs that incurred + unnecessary overhead. This change removes those resulting in a performance + increase in job related queries. (https://github.com/ansible-collections/ibm_zos_core/pull/911) + - zos_job_query - zoau added 'program_name' to their field output starting with + v1.2.4. This enhancement checks for that version and passes the extra column + through. (https://github.com/ansible-collections/ibm_zos_core/pull/841) + - zos_job_submit - zoau added 'program_name' to their field output starting + with v1.2.4. This enhancement checks for that version and passes the extra + column through. (https://github.com/ansible-collections/ibm_zos_core/pull/841) + release_summary: 'Release Date: ''2023-07-26'' + + This changelog describes all changes made to the modules and plugins included + + in this collection. The release date is the date the changelog is created. + + For additional details such as required dependencies and availability review + + the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__' + fragments: + - 439-add-f-recordtype.yml + - 667-template-support.yml + - 696-zos-copy-remove-emergency-backup.yml + - 747-failed_when_the_job_name_was_null_or_not_found.yaml + - 762-zos-operator-reported-failure-caused-by-unrelated-error-response.yaml + - 766-ac-command-replace-makefile.yml + - 771-update-ansible-version.yaml + - 772-Encode-files-recursively-and-test-case-for-keep-behavior.yml + - 773-return-dynamically-created-dest-attrs.yaml + - 778-query-new-fields.yml + - 789-ac-command-add-test.yml + - 789-ac-command-updates.yml + - 791-zos_data_set-update-vsam-copy.yml + - 794-zos_copy_report_error_message_when_concurrent_copy_fails.yaml + - 795_overwrite_permissions_on_copy.yml + - 806-zos_copy_fetch-display-verbose.yml + - 812-ansible-lint.yml + - 824_Return_destination_attributes_had_hardcoded_type_and_recordformat.yml + - 839-Add-Field-to-zos-job-query.yml + - 840-redesign-test-cases.yml + - 880-Functional_tso_command_test_cases_currently_can_not_be_run_concurrently.yml + - 906-update-doc-generation.yml + - 911-Improve-job-query-performance.yml + - v1.7.0-beta.1_summary.yml + modules: + - description: Archive files and data sets on z/OS. + name: zos_archive + namespace: '' + - description: Unarchive files and data sets in z/OS. + name: zos_unarchive + namespace: '' + release_date: '2023-07-26' From c8a6facdfeb6088e96c26e43c85a1c08c497fabc Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Wed, 26 Jul 2023 22:40:04 -0700 Subject: [PATCH 154/495] release notes Signed-off-by: ddimatos <dimatos@gmail.com> --- docs/source/release_notes.rst | 80 +++++++++++++++++++++++++++++++++-- 1 file changed, 76 insertions(+), 4 deletions(-) diff --git a/docs/source/release_notes.rst b/docs/source/release_notes.rst index 1e211ec89..e512de025 100644 --- a/docs/source/release_notes.rst +++ b/docs/source/release_notes.rst @@ -6,22 +6,85 @@ Releases ======== -Version 1.6.0-beta.1 +Version 1.7.0-beta.1 ==================== New Modules ----------- +- ``zos_archive`` - archive files, data sets and extend archives on z/OS. Formats include, *bz2*, *gz*, *tar*, *zip*, *terse*, *xmit* and *pax*. +- ``zos_unarchive`` - unarchive files and data sets in z/OS. Formats include, *bz2*, *gz*, *tar*, *zip*, *terse*, *xmit* and *pax*. + +Major Changes +------------- + +-- ``zos_copy`` and ``zos_job_submit`` - supports Jinja2 templating which is essential for handling tasks that require advanced file modifications such as JCL. + +Minor Changes +------------- +- ``zos_copy`` + + - displays the data set attributes when the destination does not exist and was created by the module. + - reverts the logic that would automatically create backups in the event of a module failure leaving it up to the user to decide if a backup is needed. +- ``zos_data_set`` - supports record format *F* (fixed) where one physical block on disk is one logical record and all the blocks and records are the same size. +- ``zos_job_output`` - displays job information *asid*, *creation date*, *creation time*, *job class*, *priority*, *queue position*, *service class* and conditionally *program name* (when ZOAU is v 1.2.4 or later). +- ``zos_job_query`` + - displays job information *asid*, *creation date*, *creation time*, *job class*, *priority*, *queue position*, *service class* and conditionally *program name* (when ZOAU is v 1.2.4 or later). + - removes unnecessary queries to find DDs improving the modules performance. +- ``zos_job_submit`` - displays job information *asid*, *creation date*, *creation time*, *job class*, *priority*, *queue position*, *service class* and conditionally *program name* (when ZOAU is v 1.2.4 or later). + +Bugfixes +-------- +- ``zos_data_set`` - fixes occasionally occurring orphaned VSAM cluster components such as INDEX when `present=absent`. +- ``zos_fetch`` - fixes the warning that appeared about the use of _play_context.verbosity. +- ``zos_copy`` + + - fixes the warning that appeared about the use of _play_context.verbosity. + - fixes an issue where subdirectories would not be encoded. + - fixes an issue where when mode was set, the mode was not applied to existing directories and files. + - displays a error message when copying into a data set that is being accessed by another process and no longer returns with `changed=true`. + +``zos_job_output`` - displays an appropriate error message for a job is not found in the spool. +``zos_operator`` - fixes the false reports that a command failed when keywords such as *error* were seen, the module now acts as a passthrough. + +Availability +------------ + +* `Galaxy`_ +* `GitHub`_ + +Reference +--------- + +* Supported by `z/OS V2R3`_ or later +* Supported by the `z/OS® shell`_ +* Supported by `IBM Open Enterprise SDK for Python`_ `3.9`_ - `3.11`_ +* Supported by IBM `Z Open Automation Utilities 1.2.3`_ (or later) but prior to version 1.3. + +Version 1.6.0 +============= + +New Modules +----------- + - ``zos_volume_init`` - Can initialize volumes or minidisks on target z/OS systems which includes creating a volume label and an entry into the volume table of contents (VTOC). Minor Changes ------------- - ``zos_blockinfile`` - Adds an enhancement to allow double quotes within a block. +- ``zos_copy`` + + - Updates the behavior of the `mode` option so that permissions are applied to existing directories and contents. + - Adds an enhancement to option `restore_backup` to track modified members in a data set in the event of an error, restoring them to their previous state without reallocating the data set. - ``zos_data_set`` - Adds a new option named *force* to enable deletion of a data member in a PDSE that is simultaneously in use by others. - ``zos_job_query`` - Enables embedded positional wild card placement throughout *job_name* and *job_id* parameters. - ``zos_lineinfile`` - Adds a new option named *force* to enable modification of a data member in a data set that is simultaneously in use by others. - ``zos_tso_command`` - Adds a new option named *max_rc* to enable non-zero return codes lower than the specified maximum return as succeeded. +- ``module_utils`` + + - job - Adds support for positional wild card placement for `job_name`` and `job_id`. + - Adds support for import *common.text.converters* over the deprecated *_text* import. Bugfixes -------- @@ -31,12 +94,20 @@ Bugfixes - Fixes a bug where files not encoded in IBM-1047 would trigger an error while computing the record length for a new destination dataset. - Fixes a bug where the module would change the mode for a directory when copying in the contents of another directory. - Fixes a bug where the incorrect encoding would be used during normalization, particularly when processing newlines in files. + - Fixes a bug where binary files were not excluded when normalizing data to remove newlines. + - Fixes a bug where a *_play_context.verbosity* deprecation warning would appear. +- ``zos_fetch`` - Fixes a bug where a *_play_context.verbosity* deprecation warning would appear. - ``zos_encode`` - Fixes a bug where converted files were not tagged with the new code set afterwards. - ``zos_find`` - Fixes a bug where the module would stop searching and exit after the first value in a list was not found. - ``zos_lineinfile`` - Removes use of Python f-string to ensure support for Python 2.7 on the controller. - - Fixes a bug where an incorect error message would be raised when a USS source was not found. + - Fixes a bug where an incorrect error message would be raised when a USS source was not found. +- ``module_utils`` + + - data_set - Fixes an failure caused by cataloging a VSAM data set when the data set is not cataloged. +- ``zos_data_set`` - Fixes a bug that will leave VSAM data set cluster components behind when instructed to delete the data set (`present=absent`). +- ``zos_gather_facts`` - Fixes a bug that prevented the module from executing with newer versions of ZOAU. Availability ------------ @@ -48,12 +119,11 @@ Availability Reference --------- -* Supported by `z/OS Version`_ V2R4 or later +* Supported by `z/OS V2R3`_ or later * Supported by the `z/OS® shell`_ * Supported by `IBM Open Enterprise SDK for Python`_ `3.9`_ - `3.11`_ * Supported by IBM `Z Open Automation Utilities 1.2.2`_ (or later) but prior to version 1.3. - Version 1.5.0 ============= @@ -853,6 +923,8 @@ Reference https://www.ibm.com/docs/en/zoau/1.1.1 .. _Z Open Automation Utilities 1.2.2: https://www.ibm.com/docs/en/zoau/1.2.x +.. _Z Open Automation Utilities 1.2.3: + https://www.ibm.com/docs/en/zoau/1.2.x .. _z/OS® shell: https://www.ibm.com/support/knowledgecenter/en/SSLTBW_2.4.0/com.ibm.zos.v2r4.bpxa400/part1.htm .. _z/OS V2R3: From 991b3a583e866caee5f42841eee52987ac009b55 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Thu, 27 Jul 2023 21:57:24 -0700 Subject: [PATCH 155/495] Delete changelog fragments Signed-off-by: ddimatos <dimatos@gmail.com> --- .../fragments/309-replace-text-zos-encode.yml | 4 --- ...os-job-query-handle-multiple-wildcards.yml | 7 ---- ...8-zos-data-set-support-disposition-shr.yml | 2 -- .../408-restore-members-on-failure.yml | 4 --- ...can-quotes-in-content-can-be-supported.yml | 5 --- changelogs/fragments/439-add-f-recordtype.yml | 4 --- .../574-zos_find_stoppedonnotfound.yml | 4 --- .../584-zos_lineinfile-error-message.yml | 2 -- .../fragments/602-text-converter-import.yml | 6 ---- ...es-is-applied-to-destination-directory.yml | 3 -- .../654-new-module-zos_volume_init.yml | 2 -- .../fragments/659-zos-lineinfile-f-string.yml | 8 ----- .../663-zos_gather_facts-update-docstring.yml | 2 -- .../fragments/666-zos_tso_command_maxrc.yml | 4 --- changelogs/fragments/667-template-support.yml | 4 --- .../fragments/683-zos_job_submit-bugs.yml | 35 ------------------- .../696-zos-copy-remove-emergency-backup.yml | 6 ---- .../727-zos-blockinfile-examples.yml | 5 --- .../729-zos_operator-example-added.yml | 4 --- .../731-zos_linefile-disposition_share.yaml | 6 ---- .../734-copy-loadlib-member-test-case.yml | 4 --- .../740-zos_copy-volume-symbol-test.yml | 5 --- .../fragments/743-zos_copy-encoding-bugs.yml | 9 ----- ...en_the_job_name_was_null_or_not_found.yaml | 5 --- ...re-caused-by-unrelated-error-response.yaml | 4 --- .../766-ac-command-replace-makefile.yml | 4 --- .../fragments/771-update-ansible-version.yaml | 7 ---- ...sively-and-test-case-for-keep-behavior.yml | 5 --- ...return-dynamically-created-dest-attrs.yaml | 6 ---- changelogs/fragments/778-query-new-fields.yml | 5 --- .../fragments/789-ac-command-add-test.yml | 3 -- .../fragments/789-ac-command-updates.yml | 3 -- .../791-zos_data_set-update-vsam-copy.yml | 11 ------ ...or_message_when_concurrent_copy_fails.yaml | 6 ---- .../795_overwrite_permissions_on_copy.yml | 4 --- .../806-zos_copy_fetch-display-verbose.yml | 17 --------- changelogs/fragments/812-ansible-lint.yml | 4 --- ...es_had_hardcoded_type_and_recordformat.yml | 5 --- .../839-Add-Field-to-zos-job-query.yml | 10 ------ .../fragments/840-redesign-test-cases.yml | 7 ---- ..._currently_can_not_be_run_concurrently.yml | 6 ---- .../fragments/906-update-doc-generation.yml | 4 --- .../911-Improve-job-query-performance.yml | 4 --- .../fragments/v1.7.0-beta.1_summary.yml | 6 ---- 44 files changed, 261 deletions(-) delete mode 100644 changelogs/fragments/309-replace-text-zos-encode.yml delete mode 100644 changelogs/fragments/323-zos-job-query-handle-multiple-wildcards.yml delete mode 100644 changelogs/fragments/358-zos-data-set-support-disposition-shr.yml delete mode 100644 changelogs/fragments/408-restore-members-on-failure.yml delete mode 100644 changelogs/fragments/417-can-quotes-in-content-can-be-supported.yml delete mode 100644 changelogs/fragments/439-add-f-recordtype.yml delete mode 100644 changelogs/fragments/574-zos_find_stoppedonnotfound.yml delete mode 100644 changelogs/fragments/584-zos_lineinfile-error-message.yml delete mode 100644 changelogs/fragments/602-text-converter-import.yml delete mode 100644 changelogs/fragments/619-Mode-set-for-files-is-applied-to-destination-directory.yml delete mode 100644 changelogs/fragments/654-new-module-zos_volume_init.yml delete mode 100644 changelogs/fragments/659-zos-lineinfile-f-string.yml delete mode 100644 changelogs/fragments/663-zos_gather_facts-update-docstring.yml delete mode 100644 changelogs/fragments/666-zos_tso_command_maxrc.yml delete mode 100644 changelogs/fragments/667-template-support.yml delete mode 100644 changelogs/fragments/683-zos_job_submit-bugs.yml delete mode 100644 changelogs/fragments/696-zos-copy-remove-emergency-backup.yml delete mode 100644 changelogs/fragments/727-zos-blockinfile-examples.yml delete mode 100644 changelogs/fragments/729-zos_operator-example-added.yml delete mode 100644 changelogs/fragments/731-zos_linefile-disposition_share.yaml delete mode 100644 changelogs/fragments/734-copy-loadlib-member-test-case.yml delete mode 100644 changelogs/fragments/740-zos_copy-volume-symbol-test.yml delete mode 100644 changelogs/fragments/743-zos_copy-encoding-bugs.yml delete mode 100644 changelogs/fragments/747-failed_when_the_job_name_was_null_or_not_found.yaml delete mode 100644 changelogs/fragments/762-zos-operator-reported-failure-caused-by-unrelated-error-response.yaml delete mode 100644 changelogs/fragments/766-ac-command-replace-makefile.yml delete mode 100644 changelogs/fragments/771-update-ansible-version.yaml delete mode 100644 changelogs/fragments/772-Encode-files-recursively-and-test-case-for-keep-behavior.yml delete mode 100644 changelogs/fragments/773-return-dynamically-created-dest-attrs.yaml delete mode 100644 changelogs/fragments/778-query-new-fields.yml delete mode 100644 changelogs/fragments/789-ac-command-add-test.yml delete mode 100644 changelogs/fragments/789-ac-command-updates.yml delete mode 100644 changelogs/fragments/791-zos_data_set-update-vsam-copy.yml delete mode 100644 changelogs/fragments/794-zos_copy_report_error_message_when_concurrent_copy_fails.yaml delete mode 100644 changelogs/fragments/795_overwrite_permissions_on_copy.yml delete mode 100644 changelogs/fragments/806-zos_copy_fetch-display-verbose.yml delete mode 100644 changelogs/fragments/812-ansible-lint.yml delete mode 100644 changelogs/fragments/824_Return_destination_attributes_had_hardcoded_type_and_recordformat.yml delete mode 100644 changelogs/fragments/839-Add-Field-to-zos-job-query.yml delete mode 100644 changelogs/fragments/840-redesign-test-cases.yml delete mode 100644 changelogs/fragments/880-Functional_tso_command_test_cases_currently_can_not_be_run_concurrently.yml delete mode 100644 changelogs/fragments/906-update-doc-generation.yml delete mode 100644 changelogs/fragments/911-Improve-job-query-performance.yml delete mode 100644 changelogs/fragments/v1.7.0-beta.1_summary.yml diff --git a/changelogs/fragments/309-replace-text-zos-encode.yml b/changelogs/fragments/309-replace-text-zos-encode.yml deleted file mode 100644 index b4ba2b53d..000000000 --- a/changelogs/fragments/309-replace-text-zos-encode.yml +++ /dev/null @@ -1,4 +0,0 @@ -bugfixes: -- zos_encode - fixes a bug where converted files were not tagged afterwards - with the new code set. - (https://github.com/ansible-collections/ibm_zos_core/pull/534) \ No newline at end of file diff --git a/changelogs/fragments/323-zos-job-query-handle-multiple-wildcards.yml b/changelogs/fragments/323-zos-job-query-handle-multiple-wildcards.yml deleted file mode 100644 index 060df2fb1..000000000 --- a/changelogs/fragments/323-zos-job-query-handle-multiple-wildcards.yml +++ /dev/null @@ -1,7 +0,0 @@ -minor_changes: -- zos_job_query - ansible module does not support positional wild card placement - for `job_name1 or `job_id`. This enhancement allows embedded wildcards - throughout the `job_name` and `job_id`. - (https://github.com/ansible-collections/ibm_zos_core/pull/721) -- module_utils - job.py utility did not support positional wiled card placement, - this enhancement uses `fnmatch` logic to support wild cards. diff --git a/changelogs/fragments/358-zos-data-set-support-disposition-shr.yml b/changelogs/fragments/358-zos-data-set-support-disposition-shr.yml deleted file mode 100644 index 4102bab0d..000000000 --- a/changelogs/fragments/358-zos-data-set-support-disposition-shr.yml +++ /dev/null @@ -1,2 +0,0 @@ -minor_changes: - - zos_data_set - add force parameter to enable member delete while pdse is in use (https://github.com/ansible-collections/ibm_zos_core/pull/718). \ No newline at end of file diff --git a/changelogs/fragments/408-restore-members-on-failure.yml b/changelogs/fragments/408-restore-members-on-failure.yml deleted file mode 100644 index 3e6c50d12..000000000 --- a/changelogs/fragments/408-restore-members-on-failure.yml +++ /dev/null @@ -1,4 +0,0 @@ -minor_changes: -- zos_copy - was enhanced to keep track of modified members in a destination - dataset, restoring them to their previous state in case of a failure. - (https://github.com/ansible-collections/ibm_zos_core/pull/551) \ No newline at end of file diff --git a/changelogs/fragments/417-can-quotes-in-content-can-be-supported.yml b/changelogs/fragments/417-can-quotes-in-content-can-be-supported.yml deleted file mode 100644 index ebd99af7a..000000000 --- a/changelogs/fragments/417-can-quotes-in-content-can-be-supported.yml +++ /dev/null @@ -1,5 +0,0 @@ -bugfixes: -- zos_blockinfile - was unable to use double quotes which prevented some use - cases and did not display an approriate message. The fix now allows for - double quotes to be used with the module. - (https://github.com/ansible-collections/ibm_zos_core/pull/680) \ No newline at end of file diff --git a/changelogs/fragments/439-add-f-recordtype.yml b/changelogs/fragments/439-add-f-recordtype.yml deleted file mode 100644 index 6c5e72f49..000000000 --- a/changelogs/fragments/439-add-f-recordtype.yml +++ /dev/null @@ -1,4 +0,0 @@ -minor_changes: -- zos_data_set - record format = 'F' has been added to support 'fixed' block records. - This allows records that can use the entire block. - (https://github.com/ansible-collections/ibm_zos_core/pull/821) \ No newline at end of file diff --git a/changelogs/fragments/574-zos_find_stoppedonnotfound.yml b/changelogs/fragments/574-zos_find_stoppedonnotfound.yml deleted file mode 100644 index 48eebe523..000000000 --- a/changelogs/fragments/574-zos_find_stoppedonnotfound.yml +++ /dev/null @@ -1,4 +0,0 @@ -bugfixes: - - zos_find - fixes a bug where find result values stopped being returned after - first value in a list was 'not found'. - (https://github.com/ansible-collections/ibm_zos_core/pull/668) diff --git a/changelogs/fragments/584-zos_lineinfile-error-message.yml b/changelogs/fragments/584-zos_lineinfile-error-message.yml deleted file mode 100644 index fad485765..000000000 --- a/changelogs/fragments/584-zos_lineinfile-error-message.yml +++ /dev/null @@ -1,2 +0,0 @@ -bugfixes: -- Fixed wrong error message when a USS source is not found, aligning with a similar error message from zos_blockinfile "{src} does not exist". diff --git a/changelogs/fragments/602-text-converter-import.yml b/changelogs/fragments/602-text-converter-import.yml deleted file mode 100644 index 24f719c26..000000000 --- a/changelogs/fragments/602-text-converter-import.yml +++ /dev/null @@ -1,6 +0,0 @@ -minor_changes: -- Updated the text converter import from "from ansible.module_utils._text" - to "from ansible.module_utils.common.text.converters" to remove - warning".. warn Use ansible.module_utils.common.text.converters instead.". - (https://github.com/ansible-collections/ibm_zos_core/pull/602) - diff --git a/changelogs/fragments/619-Mode-set-for-files-is-applied-to-destination-directory.yml b/changelogs/fragments/619-Mode-set-for-files-is-applied-to-destination-directory.yml deleted file mode 100644 index 970741107..000000000 --- a/changelogs/fragments/619-Mode-set-for-files-is-applied-to-destination-directory.yml +++ /dev/null @@ -1,3 +0,0 @@ -minor_changes: -- zos_copy - Fixed a bug where the module would change the mode for a directory when copying into it the contents of another. - (https://github.com/ansible-collections/ibm_zos_core/pull/723) \ No newline at end of file diff --git a/changelogs/fragments/654-new-module-zos_volume_init.yml b/changelogs/fragments/654-new-module-zos_volume_init.yml deleted file mode 100644 index 41808d718..000000000 --- a/changelogs/fragments/654-new-module-zos_volume_init.yml +++ /dev/null @@ -1,2 +0,0 @@ -major_changes: -- zos_volume_init - Introduces new module to handle volume (or minidisk) initialization. (https://github.com/ansible-collections/ibm_zos_core/pull/654) \ No newline at end of file diff --git a/changelogs/fragments/659-zos-lineinfile-f-string.yml b/changelogs/fragments/659-zos-lineinfile-f-string.yml deleted file mode 100644 index bd5e0b269..000000000 --- a/changelogs/fragments/659-zos-lineinfile-f-string.yml +++ /dev/null @@ -1,8 +0,0 @@ -bugfixes: -- zos_lineinfile - Fixed a bug where a Python f-string was used and thus removed - to ensure support for Python 2.7 on the controller. - (https://github.com/ansible-collections/ibm_zos_core/pull/659) -trivial: -- Remove changelog fragments no longer needed as they are already recorded in - the prior version of IBM z/OS Core. - (https://github.com/ansible-collections/ibm_zos_core/pull/659) \ No newline at end of file diff --git a/changelogs/fragments/663-zos_gather_facts-update-docstring.yml b/changelogs/fragments/663-zos_gather_facts-update-docstring.yml deleted file mode 100644 index d6ba48dd7..000000000 --- a/changelogs/fragments/663-zos_gather_facts-update-docstring.yml +++ /dev/null @@ -1,2 +0,0 @@ -trivial: -- zos_gather_facts - add sample output to RETURN docstring. (https://github.com/ansible-collections/ibm_zos_core/pull/722) \ No newline at end of file diff --git a/changelogs/fragments/666-zos_tso_command_maxrc.yml b/changelogs/fragments/666-zos_tso_command_maxrc.yml deleted file mode 100644 index c410c00b5..000000000 --- a/changelogs/fragments/666-zos_tso_command_maxrc.yml +++ /dev/null @@ -1,4 +0,0 @@ -minor_changes: - - zos_tso_command - was enhanced to accept `max_rc` as an option. This option - allows a non-zero return code to succeed as a valid return code. - (https://github.com/ansible-collections/ibm_zos_core/pull/666) diff --git a/changelogs/fragments/667-template-support.yml b/changelogs/fragments/667-template-support.yml deleted file mode 100644 index 2ac499a3d..000000000 --- a/changelogs/fragments/667-template-support.yml +++ /dev/null @@ -1,4 +0,0 @@ -minor_changes: - - Add support for Jinja2 templates in zos_copy and zos_job_submit - when using local source files. - (https://github.com/ansible-collections/ibm_zos_core/pull/667) \ No newline at end of file diff --git a/changelogs/fragments/683-zos_job_submit-bugs.yml b/changelogs/fragments/683-zos_job_submit-bugs.yml deleted file mode 100644 index b77fbdbc9..000000000 --- a/changelogs/fragments/683-zos_job_submit-bugs.yml +++ /dev/null @@ -1,35 +0,0 @@ -bugfixes: -- zos_job_submit - Fixes the issue when invalid JCL syntax is submitted that a - stack trace would result in the response, issue 623. - (https://github.com/ansible-collections/ibm_zos_core/pull/683) -- zos_job_submit - Fixes the issue when a job is purged by the system that a - stack trace would result in the response, issue 681. - (https://github.com/ansible-collections/ibm_zos_core/pull/683) -- zos_job_submit - Fixes the issue where the response did not include the - job log when a non-zero return code would occur, issue 655. - (https://github.com/ansible-collections/ibm_zos_core/pull/683) -- zos_job_submit - Fixes the issue when resources (data sets) identified in JCL - did not exist such that a stack trace would result in the response, issue 624. - (https://github.com/ansible-collections/ibm_zos_core/pull/683) -- zos_job_submit - Fixes the issue when `wait_time_s` was set to 0 that would - result in a `type` error that a stack trace would result in the response, - issue 670. (https://github.com/ansible-collections/ibm_zos_core/pull/683) -- zos_job_submit - Fixes the issue when a job encounters a security exception no - job log would would result in the response, issue 684. - (https://github.com/ansible-collections/ibm_zos_core/pull/683) -- zos_job_submit - Fixes the issue when a job is configured for a syntax check - using TYPRUN=SCAN that it would wait the full duration set by `wait_time_s` - to return a response, issue 685. - (https://github.com/ansible-collections/ibm_zos_core/pull/683) -- zos_job_submit - Fixes the issue when a job is configured for a syntax check - using TYPRUN=SCAN that no job log would result in the response, issue 685. - (https://github.com/ansible-collections/ibm_zos_core/pull/683) -trivial: -- zos_job_submit - Update documentation to for deprecated `wait` option and - expand on the `wait_time_s` description, issue 670. - (https://github.com/ansible-collections/ibm_zos_core/pull/683) -- zos_job_submit - Update documentation to describing the significance of '?' - for the 'ret_code' properties 'msg_text', 'msg_code' and 'msg', issue 685. - (https://github.com/ansible-collections/ibm_zos_core/pull/683) -- zos_operator - Update restructured text to include the updated examples. - (https://github.com/ansible-collections/ibm_zos_core/pull/683) \ No newline at end of file diff --git a/changelogs/fragments/696-zos-copy-remove-emergency-backup.yml b/changelogs/fragments/696-zos-copy-remove-emergency-backup.yml deleted file mode 100644 index d9924cb2d..000000000 --- a/changelogs/fragments/696-zos-copy-remove-emergency-backup.yml +++ /dev/null @@ -1,6 +0,0 @@ -major_changes: -- zos_copy - Previously, backups were taken when force was set to false; - whether or not a user specified this operation which caused allocation issues - with space and permissions. This removes the automatic backup performed and - reverts to the original logic in that backups must be initiated by the user. - (https://github.com/ansible-collections/ibm_zos_core/pull/896) diff --git a/changelogs/fragments/727-zos-blockinfile-examples.yml b/changelogs/fragments/727-zos-blockinfile-examples.yml deleted file mode 100644 index f1c94c12b..000000000 --- a/changelogs/fragments/727-zos-blockinfile-examples.yml +++ /dev/null @@ -1,5 +0,0 @@ -trivial: -- zos_blockinfile - was missing examples using Jinja2 and files. This change - adds a Jinja2 example in both the src and block content. It also includes - an example using a file as source. - (https://github.com/ansible-collections/ibm_zos_core/pull/727) \ No newline at end of file diff --git a/changelogs/fragments/729-zos_operator-example-added.yml b/changelogs/fragments/729-zos_operator-example-added.yml deleted file mode 100644 index 46cb6ab84..000000000 --- a/changelogs/fragments/729-zos_operator-example-added.yml +++ /dev/null @@ -1,4 +0,0 @@ -trivial: -- zos_operator - had a need for more command examples. This change adds the - D SYMBOLS example. - (https://github.com/ansible-collections/ibm_zos_core/pull/730) \ No newline at end of file diff --git a/changelogs/fragments/731-zos_linefile-disposition_share.yaml b/changelogs/fragments/731-zos_linefile-disposition_share.yaml deleted file mode 100644 index da6dbc19b..000000000 --- a/changelogs/fragments/731-zos_linefile-disposition_share.yaml +++ /dev/null @@ -1,6 +0,0 @@ -minor_changes: -- zos_lineinfile - would access data sets with exclusive access so no other - task can read the data, this enhancement allows for a data set to be opened - with a disposition set to share so that other tasks can access the data when - option `force` is set to `true`. - (https://github.com/ansible-collections/ibm_zos_core/pull/731) \ No newline at end of file diff --git a/changelogs/fragments/734-copy-loadlib-member-test-case.yml b/changelogs/fragments/734-copy-loadlib-member-test-case.yml deleted file mode 100644 index 4482c61da..000000000 --- a/changelogs/fragments/734-copy-loadlib-member-test-case.yml +++ /dev/null @@ -1,4 +0,0 @@ -trivial: -- zos_copy - Adds a test cases to ensure copying from a PDS/E member containing - a loadlib to another PDS/E member loadlib member for issue 601. - (https://github.com/ansible-collections/ibm_zos_core/pull/734) \ No newline at end of file diff --git a/changelogs/fragments/740-zos_copy-volume-symbol-test.yml b/changelogs/fragments/740-zos_copy-volume-symbol-test.yml deleted file mode 100644 index a30a50869..000000000 --- a/changelogs/fragments/740-zos_copy-volume-symbol-test.yml +++ /dev/null @@ -1,5 +0,0 @@ -trivial: -- zos_copy - prior, there was no test case for symbols on a volume. - This change adds a test case to test a volume which has in it symbols, - issue 738. - (https://github.com/ansible-collections/ibm_zos_core/pull/740) \ No newline at end of file diff --git a/changelogs/fragments/743-zos_copy-encoding-bugs.yml b/changelogs/fragments/743-zos_copy-encoding-bugs.yml deleted file mode 100644 index 1b58ddabe..000000000 --- a/changelogs/fragments/743-zos_copy-encoding-bugs.yml +++ /dev/null @@ -1,9 +0,0 @@ -bugfixes: -- zos_copy - Fixes a bug where files not encoded in IBM-1047 - would trigger an error while computing the record length - for a new destination dataset. Issue 664. - (https://github.com/ansible-collections/ibm_zos_core/pull/743) -- zos_copy - Fixes a bug where the code for fixing an issue with - newlines in files (issue 599) would use the wrong encoding - for normalization. Issue 678. - (https://github.com/ansible-collections/ibm_zos_core/pull/743) diff --git a/changelogs/fragments/747-failed_when_the_job_name_was_null_or_not_found.yaml b/changelogs/fragments/747-failed_when_the_job_name_was_null_or_not_found.yaml deleted file mode 100644 index 0830b8fe3..000000000 --- a/changelogs/fragments/747-failed_when_the_job_name_was_null_or_not_found.yaml +++ /dev/null @@ -1,5 +0,0 @@ -bugfixes: -- zos_job_output - Error message did not specify the job not found. - Fix now specifies the job_id or job_name being searched to ensure more - information is given back to the user. - (https://github.com/ansible-collections/ibm_zos_core/pull/747) diff --git a/changelogs/fragments/762-zos-operator-reported-failure-caused-by-unrelated-error-response.yaml b/changelogs/fragments/762-zos-operator-reported-failure-caused-by-unrelated-error-response.yaml deleted file mode 100644 index d7aae1c14..000000000 --- a/changelogs/fragments/762-zos-operator-reported-failure-caused-by-unrelated-error-response.yaml +++ /dev/null @@ -1,4 +0,0 @@ -bugfixes: - - zos_operator - Reported a failure caused by unrelated error response. - Fix now gives a transparent response of the operator to avoid false negatives. - (https://github.com/ansible-collections/ibm_zos_core/pull/762). \ No newline at end of file diff --git a/changelogs/fragments/766-ac-command-replace-makefile.yml b/changelogs/fragments/766-ac-command-replace-makefile.yml deleted file mode 100644 index ca0d17e0f..000000000 --- a/changelogs/fragments/766-ac-command-replace-makefile.yml +++ /dev/null @@ -1,4 +0,0 @@ -trivial: -- ac - fixed makefile limitations and monolithic design. Command 'ac' performs - similar function only with greater automation and detection and modularity. - (https://github.com/ansible-collections/ibm_zos_core/pull/766) \ No newline at end of file diff --git a/changelogs/fragments/771-update-ansible-version.yaml b/changelogs/fragments/771-update-ansible-version.yaml deleted file mode 100644 index 92354841b..000000000 --- a/changelogs/fragments/771-update-ansible-version.yaml +++ /dev/null @@ -1,7 +0,0 @@ -trivial: -- doc - Updated the documentation in the README and release_notes.rst to reflect - ansible, ansible-core, Automation Hub and z/OS version. - (https://github.com/ansible-collections/ibm_zos_core/pull/771) -- templates - Update the git issue templates with current and - future product versions. - (https://github.com/ansible-collections/ibm_zos_core/pull/771) \ No newline at end of file diff --git a/changelogs/fragments/772-Encode-files-recursively-and-test-case-for-keep-behavior.yml b/changelogs/fragments/772-Encode-files-recursively-and-test-case-for-keep-behavior.yml deleted file mode 100644 index 672c454b7..000000000 --- a/changelogs/fragments/772-Encode-files-recursively-and-test-case-for-keep-behavior.yml +++ /dev/null @@ -1,5 +0,0 @@ -bugfixes: - - zos_copy - Zos_copy did not encode inner content inside subdirectories once the source was copied to the destination. - Fix now encodes all content in a source directory, including - subdirectories. - (https://github.com/ansible-collections/ibm_zos_core/pull/772). \ No newline at end of file diff --git a/changelogs/fragments/773-return-dynamically-created-dest-attrs.yaml b/changelogs/fragments/773-return-dynamically-created-dest-attrs.yaml deleted file mode 100644 index 0a8ce0adb..000000000 --- a/changelogs/fragments/773-return-dynamically-created-dest-attrs.yaml +++ /dev/null @@ -1,6 +0,0 @@ -minor_changes: -- zos_copy - Adds block_size, record_format, record_length, space_primary, - space_secondary, space_type and type in the return output when - the destination data set does not exist and has to be created - by the module. - (https://github.com/ansible-collections/ibm_zos_core/pull/773) \ No newline at end of file diff --git a/changelogs/fragments/778-query-new-fields.yml b/changelogs/fragments/778-query-new-fields.yml deleted file mode 100644 index 9f2c71579..000000000 --- a/changelogs/fragments/778-query-new-fields.yml +++ /dev/null @@ -1,5 +0,0 @@ -minor_changes: -- zos_job_query - Adds new fields job_class, svc_class, priority, asid, - creation_datetime, and queue_position to the return output when querying - or submitting a job. Available when using ZOAU v1.2.3 or greater. - (https://github.com/ansible-collections/ibm_zos_core/pull/778) diff --git a/changelogs/fragments/789-ac-command-add-test.yml b/changelogs/fragments/789-ac-command-add-test.yml deleted file mode 100644 index 56cae6936..000000000 --- a/changelogs/fragments/789-ac-command-add-test.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: -- ac - Adds support to run single test from test suite. - (https://github.com/ansible-collections/ibm_zos_core/pull/793) \ No newline at end of file diff --git a/changelogs/fragments/789-ac-command-updates.yml b/changelogs/fragments/789-ac-command-updates.yml deleted file mode 100644 index c0c60dcf1..000000000 --- a/changelogs/fragments/789-ac-command-updates.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: -- ac - Adds new mounts, targets and ansible 2.15 requirements.env. - (https://github.com/ansible-collections/ibm_zos_core/pull/789) \ No newline at end of file diff --git a/changelogs/fragments/791-zos_data_set-update-vsam-copy.yml b/changelogs/fragments/791-zos_data_set-update-vsam-copy.yml deleted file mode 100644 index 3d29e906e..000000000 --- a/changelogs/fragments/791-zos_data_set-update-vsam-copy.yml +++ /dev/null @@ -1,11 +0,0 @@ -bugfixes: -- zos_copy - Test case for recursive encoding directories reported a - UTF-8 failure. This change ensures proper test coverage for nested - directories and file permissions. - (https://github.com/ansible-collections/ibm_zos_core/pull/806). -- zos_copy - Reported a warning about the use of _play_context.verbosity.This - change corrects the module action to prevent the warning message. - (https://github.com/ansible-collections/ibm_zos_core/pull/806). -- zos_fetch - Reported a warning about the use of _play_context.verbosity.This - change corrects the module action to prevent the warning message. - (https://github.com/ansible-collections/ibm_zos_core/pull/806). \ No newline at end of file diff --git a/changelogs/fragments/794-zos_copy_report_error_message_when_concurrent_copy_fails.yaml b/changelogs/fragments/794-zos_copy_report_error_message_when_concurrent_copy_fails.yaml deleted file mode 100644 index dd5b71220..000000000 --- a/changelogs/fragments/794-zos_copy_report_error_message_when_concurrent_copy_fails.yaml +++ /dev/null @@ -1,6 +0,0 @@ -bugfixes: - - zos_copy - Reported a false positive such that the response would have - `changed=true` when copying from a source (src) or destination (dest) - data set that was in use (DISP=SHR). This change now displays an appropriate - error message and returns `changed=false`. - (https://github.com/ansible-collections/ibm_zos_core/pull/794). \ No newline at end of file diff --git a/changelogs/fragments/795_overwrite_permissions_on_copy.yml b/changelogs/fragments/795_overwrite_permissions_on_copy.yml deleted file mode 100644 index 2a8d826d7..000000000 --- a/changelogs/fragments/795_overwrite_permissions_on_copy.yml +++ /dev/null @@ -1,4 +0,0 @@ -bugfixes: -- zos_copy - kept permissions on target directory when copy overwrote - files. The fix now set permissions when mode is given. - (https://github.com/ansible-collections/ibm_zos_core/pull/795) \ No newline at end of file diff --git a/changelogs/fragments/806-zos_copy_fetch-display-verbose.yml b/changelogs/fragments/806-zos_copy_fetch-display-verbose.yml deleted file mode 100644 index c4ad9901c..000000000 --- a/changelogs/fragments/806-zos_copy_fetch-display-verbose.yml +++ /dev/null @@ -1,17 +0,0 @@ -trivial: -- zos_data_set - when a member is created by the module, the format is type - data which is not suitable for executables. This change describes the - format used when creating member. - (https://github.com/ansible-collections/ibm_zos_core/pull/791) -- ac - Reported an issue when functional tests ran leaving behind files. Fix - now removes the unwanted files. - (https://github.com/ansible-collections/ibm_zos_core/pull/791) -bugfixes: -- zos_data_set - Reported a failure caused when `present=absent` for a VSAM - data set leaving behind cluster components. Fix introduces a new logical - flow that will evaluate the volumes, compare it to the provided value and - if necessary catalog and delete. - (https://github.com/ansible-collections/ibm_zos_core/pull/791). -- module_utils - data_set.py - Reported a failure caused when cataloging a - VSAM data set. Fix now corrects how VSAM data sets are cataloged. - (https://github.com/ansible-collections/ibm_zos_core/pull/791). \ No newline at end of file diff --git a/changelogs/fragments/812-ansible-lint.yml b/changelogs/fragments/812-ansible-lint.yml deleted file mode 100644 index 0cb520884..000000000 --- a/changelogs/fragments/812-ansible-lint.yml +++ /dev/null @@ -1,4 +0,0 @@ -trivial: -- ansible-lint - enabling ansible-lint for 2.15 and Ansible Automation Platform - certification. - (https://github.com/ansible-collections/ibm_zos_core/pull/812) diff --git a/changelogs/fragments/824_Return_destination_attributes_had_hardcoded_type_and_recordformat.yml b/changelogs/fragments/824_Return_destination_attributes_had_hardcoded_type_and_recordformat.yml deleted file mode 100644 index 5b4e14aa8..000000000 --- a/changelogs/fragments/824_Return_destination_attributes_had_hardcoded_type_and_recordformat.yml +++ /dev/null @@ -1,5 +0,0 @@ -bugfixes: -- zos_copy - Module returned the dynamic values created with the same dataset type - and record format. Fix validate the correct dataset type and record format of - target created. - (https://github.com/ansible-collections/ibm_zos_core/pull/824) \ No newline at end of file diff --git a/changelogs/fragments/839-Add-Field-to-zos-job-query.yml b/changelogs/fragments/839-Add-Field-to-zos-job-query.yml deleted file mode 100644 index 52370356c..000000000 --- a/changelogs/fragments/839-Add-Field-to-zos-job-query.yml +++ /dev/null @@ -1,10 +0,0 @@ -minor_changes: -- zos_job_query - zoau added 'program_name' to their field output - starting with v1.2.4. This enhancement checks for that version and passes the extra column through. - (https://github.com/ansible-collections/ibm_zos_core/pull/841) -- zos_job_submit - zoau added 'program_name' to their field output - starting with v1.2.4. This enhancement checks for that version and passes the extra column through. - (https://github.com/ansible-collections/ibm_zos_core/pull/841) -- zos_job_output - zoau added 'program_name' to their field output - starting with v1.2.4. This enhancement checks for that version and passes the extra column through. - (https://github.com/ansible-collections/ibm_zos_core/pull/841) diff --git a/changelogs/fragments/840-redesign-test-cases.yml b/changelogs/fragments/840-redesign-test-cases.yml deleted file mode 100644 index c998eeee4..000000000 --- a/changelogs/fragments/840-redesign-test-cases.yml +++ /dev/null @@ -1,7 +0,0 @@ -trivial: -- zos_lininfile - Adjust test cases to be in one document and clearer to follow. -- zos_blockinfile - Adjust test cases to be in one document and clearer to follow. -bugfixes: -- zos_blockinfile - Test case generate a data set that was not correctly removed. - Changes delete the correct data set not only member. - (https://github.com/ansible-collections/ibm_zos_core/pull/840) \ No newline at end of file diff --git a/changelogs/fragments/880-Functional_tso_command_test_cases_currently_can_not_be_run_concurrently.yml b/changelogs/fragments/880-Functional_tso_command_test_cases_currently_can_not_be_run_concurrently.yml deleted file mode 100644 index 64ab4871c..000000000 --- a/changelogs/fragments/880-Functional_tso_command_test_cases_currently_can_not_be_run_concurrently.yml +++ /dev/null @@ -1,6 +0,0 @@ -trivial: -- zos_tso_command - Test suite was set up to run sequentially such that - certain tests relied on prior test cases. The new changes combine those - inter-dependent test cases into a single test case so that each individual - test case can now be run stand-alone. - (https://github.com/ansible-collections/ibm_zos_core/pull/895) \ No newline at end of file diff --git a/changelogs/fragments/906-update-doc-generation.yml b/changelogs/fragments/906-update-doc-generation.yml deleted file mode 100644 index f2e5ae316..000000000 --- a/changelogs/fragments/906-update-doc-generation.yml +++ /dev/null @@ -1,4 +0,0 @@ -trivial: -- make - Current doc generation requires manual intervention, this change will - allow for doc generation without any manual intervention and removes warnings. - (https://github.com/ansible-collections/ibm_zos_core/pull/906) \ No newline at end of file diff --git a/changelogs/fragments/911-Improve-job-query-performance.yml b/changelogs/fragments/911-Improve-job-query-performance.yml deleted file mode 100644 index a6722636e..000000000 --- a/changelogs/fragments/911-Improve-job-query-performance.yml +++ /dev/null @@ -1,4 +0,0 @@ -minor_changes: -- zos_job_query - unnecessary calls were made to find a jobs DDs that incurred unnecessary overhead. - This change removes those resulting in a performance increase in job related queries. - (https://github.com/ansible-collections/ibm_zos_core/pull/911) diff --git a/changelogs/fragments/v1.7.0-beta.1_summary.yml b/changelogs/fragments/v1.7.0-beta.1_summary.yml deleted file mode 100644 index 727e3da75..000000000 --- a/changelogs/fragments/v1.7.0-beta.1_summary.yml +++ /dev/null @@ -1,6 +0,0 @@ -release_summary: | - Release Date: '2023-07-26' - This changelog describes all changes made to the modules and plugins included - in this collection. The release date is the date the changelog is created. - For additional details such as required dependencies and availability review - the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__ \ No newline at end of file From 23a06330eb7d4cb73669fe4637d3a8ae2218c2f8 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Thu, 27 Jul 2023 23:53:49 -0700 Subject: [PATCH 156/495] Update test with string match Signed-off-by: ddimatos <dimatos@gmail.com> --- tests/functional/modules/test_zos_archive_func.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/functional/modules/test_zos_archive_func.py b/tests/functional/modules/test_zos_archive_func.py index e3b4b4ba7..8ac4f2e9d 100644 --- a/tests/functional/modules/test_zos_archive_func.py +++ b/tests/functional/modules/test_zos_archive_func.py @@ -545,7 +545,7 @@ def test_mvs_archive_single_data_set_remove_target(ansible_zos_module, format, d cmd_result = hosts.all.shell(cmd = "dls {0}.*".format(HLQ)) for c_result in cmd_result.contacted.values(): assert MVS_DEST_ARCHIVE in c_result.get("stdout") - assert data_set.get("name") not in c_result.get("stdout") + assert data_set.get("name") != c_result.get("stdout") finally: hosts.all.zos_data_set(name=data_set.get("name"), state="absent") hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") From c82ceee0034344d1b624b34524b94a48546de40e Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Mon, 31 Jul 2023 22:45:32 -0700 Subject: [PATCH 157/495] Unused changed variable, found by flake8 Signed-off-by: ddimatos <dimatos@gmail.com> --- plugins/modules/zos_archive.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/plugins/modules/zos_archive.py b/plugins/modules/zos_archive.py index 0ace2b608..f00968d62 100644 --- a/plugins/modules/zos_archive.py +++ b/plugins/modules/zos_archive.py @@ -790,7 +790,8 @@ def create_dest_ds(self, name): name {str} - name of the newly created data set. """ record_length = XMIT_RECORD_LENGTH if self.format == "xmit" else AMATERSE_RECORD_LENGTH - changed = data_set.DataSet.ensure_present(name=name, replace=True, type='SEQ', record_format='FB', record_length=record_length) + data_set.DataSet.ensure_present(name=name, replace=True, type='SEQ', record_format='FB', record_length=record_length) + #changed = data_set.DataSet.ensure_present(name=name, replace=True, type='SEQ', record_format='FB', record_length=record_length) # cmd = "dtouch -rfb -tseq -l{0} {1}".format(record_length, name) # rc, out, err = self.module.run_command(cmd) From 5edf8939ef8bea5d42d348bb91136811481b548d Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Mon, 31 Jul 2023 22:46:25 -0700 Subject: [PATCH 158/495] Unused 'normalize_line_endings' functiion found by flake8 Signed-off-by: ddimatos <dimatos@gmail.com> --- plugins/modules/zos_copy.py | 47 ------------------------------------- 1 file changed, 47 deletions(-) diff --git a/plugins/modules/zos_copy.py b/plugins/modules/zos_copy.py index 02f71ab21..6b5e8ab7f 100644 --- a/plugins/modules/zos_copy.py +++ b/plugins/modules/zos_copy.py @@ -2162,53 +2162,6 @@ def data_set_locked(dataset_name): return False -def normalize_line_endings(src, encoding=None): - """ - Normalizes src's encoding to IBM-037 (a dataset's default) and then normalizes - its line endings to LF. - - Arguments: - src (str) -- Path of a USS file. - encoding (dict, optional) -- Encoding options for the module. - - Returns: - str -- Path to the normalized file. - """ - # Before copying into a destination dataset, we'll make sure that - # the source file doesn't contain any carriage returns that would - # result in empty records in the destination. - # Due to the differences between encodings, we'll normalize to IBM-037 - # before checking the EOL sequence. - enc_utils = encode.EncodeUtils() - src_tag = enc_utils.uss_file_tag(src) - copy_handler = CopyHandler(AnsibleModuleHelper(dict())) - - if src_tag == "untagged": - # This should only be true when src is a remote file and no encoding - # was specified by the user. - if not encoding: - encoding = {"from": encode.Defaults.get_default_system_charset()} - src_tag = encoding["from"] - - if src_tag != "IBM-037": - fd, converted_src = tempfile.mkstemp() - os.close(fd) - - enc_utils.uss_convert_encoding( - src, - converted_src, - src_tag, - "IBM-037" - ) - copy_handler._tag_file_encoding(converted_src, "IBM-037") - src = converted_src - - if copy_handler.file_has_crlf_endings(src): - src = copy_handler.create_temp_with_lf_endings(src) - - return src - - def run_module(module, arg_def): # ******************************************************************** # Verify the validity of module args. BetterArgParser raises ValueError From 320caccc627332bd82a77e145c9eb62957732a15 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Mon, 31 Jul 2023 22:56:16 -0700 Subject: [PATCH 159/495] Correct flake8, import 'path' from line 18 shadowed by loop variable Signed-off-by: ddimatos <dimatos@gmail.com> --- plugins/module_utils/template.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/plugins/module_utils/template.py b/plugins/module_utils/template.py index 3f0c95021..308946da2 100644 --- a/plugins/module_utils/template.py +++ b/plugins/module_utils/template.py @@ -298,9 +298,9 @@ def render_dir_template(self, variables): to_native(err) )) - for path, subdirs, files in os.walk(self.template_dir): + for dirpath, subdirs, files in os.walk(self.template_dir): for template_file in files: - relative_dir = os.path.relpath(path, self.template_dir) + relative_dir = os.path.relpath(dirpath, self.template_dir) file_path = os.path.normpath(os.path.join(relative_dir, template_file)) try: From f2f41f53bed265561dfa2370df7c239bc93c1b0f Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Mon, 31 Jul 2023 23:03:18 -0700 Subject: [PATCH 160/495] Correct comment starting at a new line Signed-off-by: ddimatos <dimatos@gmail.com> --- plugins/modules/zos_archive.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/plugins/modules/zos_archive.py b/plugins/modules/zos_archive.py index f00968d62..6b7fcbeb0 100644 --- a/plugins/modules/zos_archive.py +++ b/plugins/modules/zos_archive.py @@ -791,7 +791,7 @@ def create_dest_ds(self, name): """ record_length = XMIT_RECORD_LENGTH if self.format == "xmit" else AMATERSE_RECORD_LENGTH data_set.DataSet.ensure_present(name=name, replace=True, type='SEQ', record_format='FB', record_length=record_length) - #changed = data_set.DataSet.ensure_present(name=name, replace=True, type='SEQ', record_format='FB', record_length=record_length) + # changed = data_set.DataSet.ensure_present(name=name, replace=True, type='SEQ', record_format='FB', record_length=record_length) # cmd = "dtouch -rfb -tseq -l{0} {1}".format(record_length, name) # rc, out, err = self.module.run_command(cmd) From 2f8af15be818fa4ee439f84f143bd97098bdaf7e Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Tue, 1 Aug 2023 09:39:27 -0700 Subject: [PATCH 161/495] Updated new script copyright year Signed-off-by: ddimatos <dimatos@gmail.com> --- docs/scripts/post-zos_apf.sh | 2 +- docs/scripts/pre-doc-gen.sh | 2 +- docs/scripts/pre-template.sh | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/scripts/post-zos_apf.sh b/docs/scripts/post-zos_apf.sh index a74207e48..befcaecfe 100755 --- a/docs/scripts/post-zos_apf.sh +++ b/docs/scripts/post-zos_apf.sh @@ -1,7 +1,7 @@ #!/bin/sh ################################################################################ -# © Copyright IBM Corporation 2020 +# © Copyright IBM Corporation 2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/docs/scripts/pre-doc-gen.sh b/docs/scripts/pre-doc-gen.sh index 31e287c11..e2f4d362c 100755 --- a/docs/scripts/pre-doc-gen.sh +++ b/docs/scripts/pre-doc-gen.sh @@ -1,7 +1,7 @@ #!/bin/sh ################################################################################ -# © Copyright IBM Corporation 2020 +# © Copyright IBM Corporation 2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/docs/scripts/pre-template.sh b/docs/scripts/pre-template.sh index 8c627e0a5..ca35775d9 100755 --- a/docs/scripts/pre-template.sh +++ b/docs/scripts/pre-template.sh @@ -1,7 +1,7 @@ #!/bin/sh ################################################################################ -# © Copyright IBM Corporation 2020 +# © Copyright IBM Corporation 2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at From 36c11dfc8a1751db8470f6cedecfe6604f88ecd0 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Tue, 1 Aug 2023 11:17:11 -0700 Subject: [PATCH 162/495] Fix release notes formatting Signed-off-by: ddimatos <dimatos@gmail.com> --- docs/source/release_notes.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/source/release_notes.rst b/docs/source/release_notes.rst index e512de025..948851218 100644 --- a/docs/source/release_notes.rst +++ b/docs/source/release_notes.rst @@ -29,6 +29,7 @@ Minor Changes - ``zos_data_set`` - supports record format *F* (fixed) where one physical block on disk is one logical record and all the blocks and records are the same size. - ``zos_job_output`` - displays job information *asid*, *creation date*, *creation time*, *job class*, *priority*, *queue position*, *service class* and conditionally *program name* (when ZOAU is v 1.2.4 or later). - ``zos_job_query`` + - displays job information *asid*, *creation date*, *creation time*, *job class*, *priority*, *queue position*, *service class* and conditionally *program name* (when ZOAU is v 1.2.4 or later). - removes unnecessary queries to find DDs improving the modules performance. - ``zos_job_submit`` - displays job information *asid*, *creation date*, *creation time*, *job class*, *priority*, *queue position*, *service class* and conditionally *program name* (when ZOAU is v 1.2.4 or later). From d5b949e71eb5830cc9664d7c840cfc1b06f7b8f4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Thu, 3 Aug 2023 16:22:00 -0600 Subject: [PATCH 163/495] Bugfix/583/zos lineinfile does not behave community (#916) * First iteration * Clean test apf_func * Add test case to validate change in ZOAU 1.2.4 * Change test case for the new change * Change zos_job_query test accord to ZOAU 1.2.4 * Restore test as dev * Return test to originals * Return job_query as original * Add fragment * Add test case for DS and change fragments * Solve check of testing for DS * Change logic of tests --- ...s-lineinfile-does-not-behave-community.yml | 4 ++ .../modules/test_zos_lineinfile_func.py | 56 ++++++++++++++++++- 2 files changed, 57 insertions(+), 3 deletions(-) create mode 100644 changelogs/fragments/916-zos-lineinfile-does-not-behave-community.yml diff --git a/changelogs/fragments/916-zos-lineinfile-does-not-behave-community.yml b/changelogs/fragments/916-zos-lineinfile-does-not-behave-community.yml new file mode 100644 index 000000000..c1639c769 --- /dev/null +++ b/changelogs/fragments/916-zos-lineinfile-does-not-behave-community.yml @@ -0,0 +1,4 @@ +bugfix: +- zos_lineinfile - A duplicate entry was made even if line was already present in the target file. + Fix now prevents a duplicate entry if the line already exists in the target file. + (https://github.com/ansible-collections/ibm_zos_core/pull/916) \ No newline at end of file diff --git a/tests/functional/modules/test_zos_lineinfile_func.py b/tests/functional/modules/test_zos_lineinfile_func.py index 6a29c79b8..754316ff3 100644 --- a/tests/functional/modules/test_zos_lineinfile_func.py +++ b/tests/functional/modules/test_zos_lineinfile_func.py @@ -103,7 +103,7 @@ ZOAU_ROOT=/usr/lpp/zoautil/v100 export ZOAU_ROOT export _BPXK_AUTOCVT -export ZOAU_ROOT""" +export 'ZOAU_ROOT'""" EXPECTED_INSERTBEFORE_BOF="""# this is file is for setting env vars if [ -z STEPLIB ] && tty -s; @@ -310,7 +310,7 @@ def test_uss_line_insertbefore_regex(ansible_zos_module): @pytest.mark.uss def test_uss_line_insertafter_eof(ansible_zos_module): hosts = ansible_zos_module - params = dict(insertafter="EOF", line="export ZOAU_ROOT", state="present") + params = dict(insertafter="EOF", line="export 'ZOAU_ROOT'", state="present") full_path = TEST_FOLDER_LINEINFILE + inspect.stack()[0][3] content = TEST_CONTENT try: @@ -515,6 +515,28 @@ def test_uss_line_replace_quoted_not_escaped(ansible_zos_module): finally: remove_uss_environment(ansible_zos_module) +@pytest.mark.uss +def test_uss_line_does_not_insert_repeated(ansible_zos_module): + hosts = ansible_zos_module + params = dict(path="", line='ZOAU_ROOT=/usr/lpp/zoautil/v100', state="present") + full_path = TEST_FOLDER_LINEINFILE + inspect.stack()[0][3] + content = TEST_CONTENT + try: + set_uss_environment(ansible_zos_module, content, full_path) + params["path"] = full_path + results = hosts.all.zos_lineinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat {0}".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == TEST_CONTENT + # Run lineinfle module with same params again, ensure duplicate entry is not made into file + hosts.all.zos_lineinfile(**params) + results = hosts.all.shell(cmd="""grep -c 'ZOAU_ROOT=/usr/lpp/zoautil/v10' {0} """.format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == '1' + finally: + remove_uss_environment(ansible_zos_module) ######################### # Dataset test cases @@ -573,7 +595,7 @@ def test_ds_line_insertbefore_regex(ansible_zos_module, dstype): def test_ds_line_insertafter_eof(ansible_zos_module, dstype): hosts = ansible_zos_module ds_type = dstype - params = dict(insertafter="EOF", line="export ZOAU_ROOT", state="present") + params = dict(insertafter="EOF", line="export 'ZOAU_ROOT'", state="present") test_name = "DST3" temp_file = "/tmp/{0}".format(test_name) ds_name = test_name.upper() + "." + ds_type @@ -946,6 +968,34 @@ def test_ds_line_force_fail(ansible_zos_module, dstype): hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") +@pytest.mark.ds +@pytest.mark.parametrize("dstype", DS_TYPE) +def test_ds_line_does_not_insert_repeated(ansible_zos_module, dstype): + hosts = ansible_zos_module + ds_type = dstype + params = dict(line='ZOAU_ROOT=/usr/lpp/zoautil/v100', state="present") + test_name = "DST15" + temp_file = "/tmp/{0}".format(test_name) + ds_name = test_name.upper() + "." + ds_type + content = TEST_CONTENT + try: + ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) + params["path"] = ds_full_name + results = hosts.all.zos_lineinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == TEST_CONTENT + # Run lineinfle module with same params again, ensure duplicate entry is not made into file + hosts.all.zos_lineinfile(**params) + results = hosts.all.shell(cmd="""dgrep -c 'ZOAU_ROOT=/usr/lpp/zoautil/v10' "{0}" """.format(params["path"])) + response = params["path"] + " " + "1" + for result in results.contacted.values(): + assert result.get("stdout") == response + finally: + remove_ds_environment(ansible_zos_module, ds_name) + ######################### # Encoding tests ######################### From f77a9f238b997f1503f91a080b6817463744bd0e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Thu, 3 Aug 2023 19:16:27 -0600 Subject: [PATCH 164/495] Deprecate debug=true in zos_blockinfile and set as_json=true (#904) * Change debug for as_json option * Add fragment --------- Co-authored-by: ketankelkar <ktnklkr@gmail.com> --- ..._debug_as_true_in_zos_blockinfile_and_set_json_as_true.yml | 2 ++ plugins/modules/zos_blockinfile.py | 4 ++-- 2 files changed, 4 insertions(+), 2 deletions(-) create mode 100644 changelogs/fragments/904-Deprecate_debug_as_true_in_zos_blockinfile_and_set_json_as_true.yml diff --git a/changelogs/fragments/904-Deprecate_debug_as_true_in_zos_blockinfile_and_set_json_as_true.yml b/changelogs/fragments/904-Deprecate_debug_as_true_in_zos_blockinfile_and_set_json_as_true.yml new file mode 100644 index 000000000..9218a0ed3 --- /dev/null +++ b/changelogs/fragments/904-Deprecate_debug_as_true_in_zos_blockinfile_and_set_json_as_true.yml @@ -0,0 +1,2 @@ +deprecated_features: + - zos_blockinfile debug - is deprecated in favor of 'as_json' (https://github.com/ansible-collections/ibm_zos_core/pull/904). \ No newline at end of file diff --git a/plugins/modules/zos_blockinfile.py b/plugins/modules/zos_blockinfile.py index 7484d93ec..1751c6472 100644 --- a/plugins/modules/zos_blockinfile.py +++ b/plugins/modules/zos_blockinfile.py @@ -386,7 +386,7 @@ def present(src, block, marker, ins_aft, ins_bef, encoding, force): found: {int} -- Number of matching regex pattern changed: {bool} -- Indicates if the destination was modified. """ - return datasets.blockinfile(src, block=block, marker=marker, ins_aft=ins_aft, ins_bef=ins_bef, encoding=encoding, state=True, debug=True, options=force) + return datasets.blockinfile(src, block=block, marker=marker, ins_aft=ins_aft, ins_bef=ins_bef, encoding=encoding, state=True, options=force, as_json=True) def absent(src, marker, encoding, force): @@ -402,7 +402,7 @@ def absent(src, marker, encoding, force): found: {int} -- Number of matching regex pattern changed: {bool} -- Indicates if the destination was modified. """ - return datasets.blockinfile(src, marker=marker, encoding=encoding, state=False, debug=True, options=force) + return datasets.blockinfile(src, marker=marker, encoding=encoding, state=False, options=force, as_json=True) def quotedString(string): From 4e7983e6ccaba382fd39b11e09429074afa5ccfc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Fri, 4 Aug 2023 14:12:59 -0600 Subject: [PATCH 165/495] Add test case to validate response come back complete (#918) * Add test case to validate response come back complete * Add fragment --------- Co-authored-by: ketankelkar <ktnklkr@gmail.com> --- ...918-zos-operator-response-come-back-truncate.yaml | 4 ++++ tests/functional/modules/test_zos_operator_func.py | 12 ++++++++++++ 2 files changed, 16 insertions(+) create mode 100644 changelogs/fragments/918-zos-operator-response-come-back-truncate.yaml diff --git a/changelogs/fragments/918-zos-operator-response-come-back-truncate.yaml b/changelogs/fragments/918-zos-operator-response-come-back-truncate.yaml new file mode 100644 index 000000000..ef5ae8b36 --- /dev/null +++ b/changelogs/fragments/918-zos-operator-response-come-back-truncate.yaml @@ -0,0 +1,4 @@ +bugfix: +- zos_operator: The last line of the operator was missing in the response of the module. + Fix now ensures the presence of the full output of the operator. + (https://github.com/ansible-collections/ibm_zos_core/pull/918) \ No newline at end of file diff --git a/tests/functional/modules/test_zos_operator_func.py b/tests/functional/modules/test_zos_operator_func.py index dbdb4f065..84f593f51 100644 --- a/tests/functional/modules/test_zos_operator_func.py +++ b/tests/functional/modules/test_zos_operator_func.py @@ -118,3 +118,15 @@ def test_zos_operator_positive_verbose_with_quick_delay(ansible_zos_module): assert result.get("content") is not None # Account for slower network assert result.get('elapsed') <= (2 * wait_time_s) + + +def test_response_come_back_complete(ansible_zos_module): + hosts = ansible_zos_module + results = hosts.all.zos_operator(cmd="\$dspl") + res = dict() + res["stdout"] = [] + for result in results.contacted.values(): + stdout = result.get('content') + # HASP646 Only appears in the last line that before did not appears + last_line = len(stdout) + assert "HASP646" in stdout[last_line - 1] \ No newline at end of file From b857fdfdc2111b0936cb76ddb25758ed5aa51444 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Thu, 17 Aug 2023 13:11:14 -0600 Subject: [PATCH 166/495] Remove conditional unnecessary (#934) * Remove conditional unecesary * Add fragment * Correct the conditional --- .../fragments/934-Remove-conditional-unnecessary.yml | 2 ++ tests/functional/modules/test_zos_blockinfile_func.py | 8 ++------ 2 files changed, 4 insertions(+), 6 deletions(-) create mode 100644 changelogs/fragments/934-Remove-conditional-unnecessary.yml diff --git a/changelogs/fragments/934-Remove-conditional-unnecessary.yml b/changelogs/fragments/934-Remove-conditional-unnecessary.yml new file mode 100644 index 000000000..bf07c7f32 --- /dev/null +++ b/changelogs/fragments/934-Remove-conditional-unnecessary.yml @@ -0,0 +1,2 @@ +- trivial: + - zos_blockinfile - remove test conditional unnecessary (https://github.com/ansible-collections/ibm_zos_core/pull/934). \ No newline at end of file diff --git a/tests/functional/modules/test_zos_blockinfile_func.py b/tests/functional/modules/test_zos_blockinfile_func.py index 23982aeec..226f34477 100644 --- a/tests/functional/modules/test_zos_blockinfile_func.py +++ b/tests/functional/modules/test_zos_blockinfile_func.py @@ -1218,12 +1218,8 @@ def test_ds_block_insertafter_eof_with_backup(ansible_zos_module, dstype, backup results = hosts.all.zos_blockinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - if backup_name: - backup_ds_name = result.get("backup_name") - assert backup_ds_name is not None - else: - backup_ds_name = result.get("backup_name") - assert backup_ds_name is not None + backup_ds_name = result.get("backup_name") + assert backup_ds_name is not None results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_INSERTAFTER_EOF From 5d43c7c4f9ad308797177016a418ab21be94cf65 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Mon, 28 Aug 2023 12:32:34 -0600 Subject: [PATCH 167/495] v1.7.0 beta.2 into dev (#953) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Staging v1.7.0 beta.1 (#915) * Added tagging of converted files * Updated tests for zos_encode * Added restoration of PDS/PDSE members when module fails zos_copy can now track which members got overwritten or newly added to a partitioned data set, and restore them accordingly. This commit includes a refactorization of the copy_to_pdse method to simplify the restoration work. * Fixed cleanup of dest when module fails * Removed exception used for debugging * Added pytest markers * solved pep8 issue * Added more information to error when restoration fails * Update make file with new features to manage the nodes (#563) * Update make file with new features to manage the nodes Signed-off-by: ddimatos <dimatos@gmail.com> * Correct typo Signed-off-by: ddimatos <dimatos@gmail.com> * updated makefile to allow for no password in the clear and added a new host Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Fix a bug that failed to copy the cert to the managed node (#577) Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Added apropiate error message in zos_lineinfile when src is not found * Better error message * Added fragment for changelog * Update git issue slection field (#593) * Update git issue slection field * Added some additional future versions * Added rule to ignore python 2.7 compile not supporting f strings * Corrected rule in ignore file * Update 584-zos_lineinfile-error-message.yml * Added missing fragments for issues 309 and 408 * update make file and related artifacts with recent zVM changes (#598) Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Expand what is ingored for venv to all venv's (#613) Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Ansible-core versions with updated ignore file pass (#615) * Validated that ansible-core versions with updated ignore file pass locally Signed-off-by: ddimatos <dimatos@gmail.com> * remove redundant entry on last line Signed-off-by: ddimatos <dimatos@gmail.com> * remove redundant entry on last line Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Update make file to read exported var VENV else default to 'venv' (#614) * Update make file to read exported var VENV else default to 'venv' Signed-off-by: ddimatos <dimatos@gmail.com> * Correct the default from defaultValue to 'venv' Signed-off-by: ddimatos <dimatos@gmail.com> * Bugfix/558/zos copy backup dev (#609) * Create emergency backup only if force is set to False and dest exists * Added Changelog fragment * Updated imports ansible.module_utils._text to ansible.module_utils.common.text.converters (#602) * Updated import in zos_fetch * Updated import in zos_job_submit * Updated import in module_utils/data_set * Updated import in module_utils/System * Updated import in zos_copy * Updated import in zos_fetch * Added changelog fragment * Update enhancement-518-text-converter-import.yml * Updates the makefile and related scripts for the latest mounts (#628) * Update makefile coments and use --ignore for pytest Signed-off-by: ddimatos <dimatos@gmail.com> * lexicographical order targets Signed-off-by: ddimatos <dimatos@gmail.com> * Update makefile and artifacts to support new mounts Signed-off-by: ddimatos <dimatos@gmail.com> * Removing encrypted versions of the scripts, no longer senstive content Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * update profile created for mount points Signed-off-by: ddimatos <dimatos@gmail.com> * Correct f-string usage to remain 2.7 compatible (#659) * Correct f-string usage to remain 2.7 compatible Signed-off-by: ddimatos <dimatos@gmail.com> * Update changelog fragments Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Updated shell scripts for development tooling Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Update collab template Signed-off-by: ddimatos <dimatos@gmail.com> * Update collab template Signed-off-by: ddimatos <dimatos@gmail.com> * Update collab template Signed-off-by: ddimatos <dimatos@gmail.com> * Template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Template updates Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * changed zos_job_query to allow multi/embedded wildcard job_values. job.py/_get_job_status now uses fnmatch to test for wildcarded job_names. added test looking for a wildcard job name. added basic changelog text as 'enhancement'. * expanded wildcard support to job_id as well as job_name corrected error-handling issue in query routine playbook tests complete * cleaned up pep8 issues * Bug fix to correct job log returining and other edge cases (#683) * Update zos_job_submit to handle some edge cases and return the job log nearly always Signed-off-by: ddimatos <dimatos@gmail.com> * Linting correction Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragement for pr 683 Signed-off-by: ddimatos <dimatos@gmail.com> * Clean up comments in code Signed-off-by: ddimatos <dimatos@gmail.com> * Update restructured text for modules and changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Add support to check for security exception to job submit module Signed-off-by: ddimatos <dimatos@gmail.com> * Add additonal logic for jobs that use typerun=scan Signed-off-by: ddimatos <dimatos@gmail.com> * Update chnagelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update zos_job_submit.py Grammar change and correction. --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Added uss_tag_encoding function * Fixing linter issues * removed extraneous comment on query, eliminated unused variable * responding to reviewer comments * Expanded query tests to run a hello jcl file to make sure we are searching for a job id that exists. * Expanded documentation in code to highlight asterisk availability. Added examples showing multi-asterisk search. * Corrected 2 documentation errors * Change to documentation text (indent on multi line string?) * Still trying to get documentation to pass * Looks like '---' was killing documentation block. * Update zos_blockinfile with complex examples (#727) * Update zos_blockinfile with complex examples Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragement Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Added additional D SYMBOLS example to zos_operator documentation (#730) * Update zos_operator with an additional example Signed-off-by: ddimatos <dimatos@gmail.com> * Added changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * zos_gather_facts - add sample output to RETURN docstring (#722) * zos_gather_facts - add sample output to RETURN docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add changelog fragment Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * zos_gather_facts RETURN docstring - add sample sysplex_name and lpar_name values Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * Update generated module doc Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: ddimatos <dimatos@gmail.com> * 574find (#668) * change to allow find loop to continue when one particular data set element is not found This has been validated with a zos-check-find.yaml playbook * added functional test that includes non-existant pds entry, which should be skipped over * added changelog fragment. * zos_blockinfile , can quotes in content can be supported (#680) * Verify coomand of ZOAU support the doble quotes and get better output of message * Verify coomand of ZOAU support the doble quotes and get better output of message * Restore to the one tyme function solving TypeError * Test about cases with quotes supported * Solve comments * Comments in the changelog * Adjust test for working accord the text * A needed space * All in structured * Comments solved * Better order --------- Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> Co-authored-by: Demetri <dimatos@gmail.com> * zos_copy mode is applied to the destination directory, a deviation from the communtiy module behavior. (#723) * Verify coomand of ZOAU support the doble quotes and get better output of message * Verify coomand of ZOAU support the doble quotes and get better output of message * Restore to the one tyme function solving TypeError * Test about cases with quotes supported * Solve comments * Comments in the changelog * Adjust test for working accord the text * Solve the dest functional mode set for the applied to destination directory * Identation and spaces * To work well * To work well * To work well * To work well * Changelogs added * Solved the fragment test and separte the cases --------- Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> * corrected job test case that wanted to extract job id. * changed call to zos_job_query in the functional test. * zos_data_set - add force parameter to enable member delete via disp shr (#718) * add force parameter to enable member delete via disp shr Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update link to PR instead of issue Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * fix minor mistake in func test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update DOCstring for missed items highlighted in PR review Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update copyright years in data set module util Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * Update changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update to doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> * Update to doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> * Update to doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> * update doc, rst Signed-off-by: ddimatos <dimatos@gmail.com> * Update doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> * Added handler for job not found edge cases (None not iterable errors) * corrected pep8 issue (bad indent) * removed tracking text from error/not found messages. * Update zos_job_query.py * Add and automate a load lib test case for module zos_copy (#640) * Modified if statement * Added changelog fragment * Corrected statements Corrected PR in changelog fix lint issue * Adding test case on the work * Added assertion to loadlib test Changed cobol src Added newlines * Correcting cobol src * Removed unnecessary comments * Removed deprint statements for debug * Update 601-copy-loadlib-member.yml * Reduce space primary * Make sure cobol is executable * corrected name * Corrected names * Adding debug statements * modified name in link step * Changing copy name * Removed print statements for debug * Removed previous changelog * Removed unused fragment * Removed test case * Add max_rc support for module zos_tso_command (#666) * rebuilding 565 * fixing pep8 issue * corrected not in code, added functional test to use maxrc * changing maxrc return codes as indicated * updated copy_rexx to incorporate max_rc * pep8 issue on an if statement * changed test to dump intermediate values temporarily * added max_rc to arg params to get it to pass through * removed extra output on test * Added changelog fragment. * Update 565-zos_tsocommand_maxrc.yml * PARTIAL correction based on reviewer's feedback. Will complete later today. * Partial push: change origrc to orig_rc and added documentation. Added experimental loop for multiline results. Changed error case to continue through the commands even after a failure (?!) * Changes based on review feedback: - rc is actual returned value - origrc/orig_rc removed - in a list of commands, first error (considering max_rc) stops the chain - while processing commands, a list of success/failures are kept for error state info - changed listds to a longer, more complete string in example * updated test case for max_rc to use longer, more complete listds command * Minor pep8 and yamllint changes * Updated string builder to use append and join, for efficiency. * Update changelog fragment to correspond to pull request number Signed-off-by: ddimatos <dimatos@gmail.com> * removed old fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Changed output of command-by-command to use max_rc instead of Max RC, for consistency. * clarifying failure message when user hasn't set a max_rc. * corrected date on copyright notice to include current year. * corrected copyright date on test case --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: Demetri <dimatos@gmail.com> * added changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * zos_copy forward port a test case for symbols on a volume #739 (#740) * added test case for symbols on a volume Signed-off-by: ddimatos <dimatos@gmail.com> * Changelog fragment added Signed-off-by: ddimatos <dimatos@gmail.com> * Update test case with added comments Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Volume Initialization Module (#654) * Adding ZOS_INIT plugins The module and action for ZOS_Init command, under zos_ickdsf_command. Designed for version 1.3.1, and example given references removed zos_ssh.py. * Create zos_ickdsf_command.rst Tentative documentation. Does not quite list every return, and program may change to work with overall zos core collection. * Update zos_ickdsf_command.py fixed to pass pep8 sanity test. * Update zos_ickdsf_command.py Edited zos_ickdsf_command.py in action folder to pass pep8 style check. * bugfix - undo hard-coded storagegroup param from command passed to mvs_raw Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update encoding and licensing info Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * edit descriptions, leaving in old to verify messaging with dfsms team Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove references to buildix Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * cleanup examples docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add return docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add some unit tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update dict names, some clean up Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update interface and docs with cleaner descriptions and define default values and required true/false where applicable Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove init param and bring out sub-options one level Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename module from zos_ickdsf_command to zos_ickdsf_init Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove files referring to zos_ickdsf_command; i forgot to untrack these in the previous commit Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove params from intitial definition of results dict which are not always present, remove unused return params, rename message to msg to match ibm_zos_core and ansible style Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add ignore missing gplv3 license warning from sanity testings to existing ignore files Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * style edits to pass sanity tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add in-depth task name to first example docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add minimal params functional test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * convert action plugin to module_util, remove IckdsfCommand and CommandInit class definitions and custom error classes, move convert method to module_util Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename rc param, update func test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add note about disabled verify_offline check Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * removed volume_address checks which can be handled by ansiblemodule parsing validation or ickdsf itself; changed function signature of convert method to include results dicts so it can pass it to fail_json method in the case of module failure Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add invalid volume_address func tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * change param names, organize mvscmd return output Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * consolidate negative func tests, add negative tests for vtoc_tracks param, start list for postive tests to implement Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add func tests for "verify_existing_volid" and "verify_no_data_sets_exist" Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add volid positive and negative func tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add mutual exclusion to no index/sms_managedw with func test, minor edits to comments * add func tests for index param Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add func test for vtoc_tracks param Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove 'addr_range', 'volid_prefix' and 'output_html' params; add example with ansible loop to init multiple serial volumes Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * convert 'content' return param to list for prettier playbook output Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add specific examples to docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add guard rail test with volser check set to ensure target volume is as expected Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * switch target test vol to user02, remove existing data sets in initial test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * start adding author names to docstring, edits of yaml for sanity test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename module to zos_volume_init Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove unit test suite which was not kept up to date with code restructure Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove old/outdated .rst file Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * bring up to speed with v1.5.0-beta.1 which is currently more recent than dev Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add sanity test 2.15 file Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * clean up trailing empty lines/space chars Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * more pep8 style Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * pull in more changes from latest dev branch Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * tweak tests/sanity/ignore-2.10.txt file Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename volume_address to address, rename verify_no_data_sets_exist to verify_volume_empty, rename verify_existing_volid to verify_volid, rename vtoc_tracks to vtoc_size, rename return param command to cmd, make suggested doc changes, switch off check_mode Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * resolve pep8 line too long issue Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add year to copyright, add gh user name to author, restyle true/false with code rather than italics, remove backslashes in docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add changelog fragment Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update zos_fetch test case to create data set instead of using one that gets wiped away by zos_volume_init test cases Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * address PR comments including pep8 styling, docstring changes including add seealso section, add details around authorized usage of ickdsf, some light wordsmithing, and remove cmd from return structure Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add backticks to other special chars in docs for consistency Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * on more formatting tweak to national chars in docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add new param to support tmp_hlq Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * extend tmphlq binding to sysprint(*), disable verbose mode for mvscmd call Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove references to betterargparser - it cannot be used in its current state as it is too restrictive on volume serial and qualifier args Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove soft language ie beware, please; reduce line lengths, fix a spelling error Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * reorganize test case code for more logical grouping and un-nesting from a for loop, also update copyright year that was missed Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update display command check to only check particular lines instead of entire output in test_volid_address_assigned_correctly which would always be positive when tested against the entire output Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: trevor-glassey <Trevor.Glassey@ibm.com> * Ported bugfixes for #664 and #678 from 1.5.0 (#743) * Ported bugfixes for #664 and #678 * Updated file normalization for sequential datasets * Fixed encoding when a remote source is used * Added encoding normalization for record length (#664) This is a port and tweak from the normalization found in the L2 support branch. Now the normalization applies for both local and remote source files. There is still work left to validate how this behaves when copying to partitioned datasets, and see if the normalization needs to handle directories as well. * Refactored normalization for USS files * Updated line endings normalization in PDSEs * Updated create_temp_with_lf_endings * Added tests for line endings normalization * Added changelog fragment * Updated changelog fragment * Updated copyright information * Added comments to record length tests * Added more context to the record length normalization * Add force (disp=shr) option to zos_lineinfile and update zos_blockinfile with the same locking test case. (#731) * Add DISP=SHR by function force to the user * Add helper function for force cases only for EOF inserts * Test cases added * Just to check in the change case * All test cases for force in zos_lineinfile working properly * Test cases for force blockinfile * Fixes for style of code * One identation error * One identation error * One identation error * Documentation error * All test cases for line infile and block infile * Comments * Change the stdout to assert with expected of ds * Error of test case * Fragment added * Solve typo errors * Update to remove uncessary comment Signed-off-by: ddimatos <dimatos@gmail.com> * Update changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update module with force comments Signed-off-by: ddimatos <dimatos@gmail.com> * Solve some test problems * Copyright year * Copyright year * Add cases for other ecodes * Add test for all encodings * Cover all encodes test cases with force * Blockinfile force added for other encodes and DS types --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@mbp-de-andre-2.gdl.mex.ibm.com> Co-authored-by: ddimatos <dimatos@gmail.com> * remove duplicate function (#753) * Update branch production branch Main with release v1.5.0 content (#756) (#758) * Bug fix to correct job log returining and other edge cases (#688) * Update zos_job_submit to handle some edge cases and return the job log nearly always * Linting correction * Add changelog fragement for pr 683 * Clean up comments in code * Update restructured text for modules and changelog fragment * Add support to check for security exception to job submit module * Add additonal logic for jobs that use typerun=scan * Update chnagelog fragment * Correct run on sentence --------- * Added additional `D SYMBOLS` example to zos_operator documentation (#729) * Update zos_operator with an additional example * Added change fragment --------- * Ported bugfixes for #664 and #678 * Updated file normalization for sequential datasets * Fixed encoding when a remote source is used * Added encoding normalization for record length (#664) This is a port and tweak from the normalization found in the L2 support branch. Now the normalization applies for both local and remote source files. There is still work left to validate how this behaves when copying to partitioned datasets, and see if the normalization needs to handle directories as well. * Refactored normalization for USS files * Updated line endings normalization in PDSEs * Updated create_temp_with_lf_endings * Added tests for line endings normalization * Added changelog fragment * Add copy lib member test case * Corrected changelog fragment * Add conflict resolution * Update 641-copy-loadlib-member.yml * zos_copy forward port a test case for symbols on a volume (#739) * Add a test case for symbols using an existing volume on our test system * Add changelog fragment * Update 739-zos_copy-volume-symbol-test.yml Update description to use symbols overs special chars. * Update test case with added comments --------- * bring in updates to zos_gather_facts RETURN docstring (#744) * Solve problem 619 about set mode on dest file (#746) * Solve problem 619 about set mode on dest file * Add fragment of 746 PR --------- * Merge release tasks for v1.5.0 into release branch (#752) * Updated metadata * Updated copyright information * Updated copyright in README * Generated changelog * Added previous versions to changelog * Changed a minor_change fragment to bugfix * Updated release notes * Update dependency finder to allow for all tests minus skip tests * Update release notes and changelog with full date formats --------- * remove changelog fragments no longer needed (#754) --------- Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: Ivan Moreno <iamorenosoto@gmail.com> Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> Co-authored-by: Rich Parker <richp405@gmail.com> Co-authored-by: ketankelkar <ktnklkr@gmail.com> Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> * Merge master to dev for 1.6.0 beta.1 (#763) * Update branch production branch Main with release v1.5.0 content (#756) * Bug fix to correct job log returining and other edge cases (#688) * Update zos_job_submit to handle some edge cases and return the job log nearly always Signed-off-by: ddimatos <dimatos@gmail.com> * Linting correction Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragement for pr 683 Signed-off-by: ddimatos <dimatos@gmail.com> * Clean up comments in code Signed-off-by: ddimatos <dimatos@gmail.com> * Update restructured text for modules and changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Add support to check for security exception to job submit module Signed-off-by: ddimatos <dimatos@gmail.com> * Add additonal logic for jobs that use typerun=scan Signed-off-by: ddimatos <dimatos@gmail.com> * Update chnagelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Correct run on sentence Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Added additional `D SYMBOLS` example to zos_operator documentation (#729) * Update zos_operator with an additional example Signed-off-by: ddimatos <dimatos@gmail.com> * Added change fragment Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Ported bugfixes for #664 and #678 * Updated file normalization for sequential datasets * Fixed encoding when a remote source is used * Added encoding normalization for record length (#664) This is a port and tweak from the normalization found in the L2 support branch. Now the normalization applies for both local and remote source files. There is still work left to validate how this behaves when copying to partitioned datasets, and see if the normalization needs to handle directories as well. * Refactored normalization for USS files * Updated line endings normalization in PDSEs * Updated create_temp_with_lf_endings * Added tests for line endings normalization * Added changelog fragment * Add copy lib member test case * Corrected changelog fragment * Add conflict resolution * Update 641-copy-loadlib-member.yml * zos_copy forward port a test case for symbols on a volume (#739) * Add a test case for symbols using an existing volume on our test system Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update 739-zos_copy-volume-symbol-test.yml Update description to use symbols overs special chars. * Update test case with added comments Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * bring in updates to zos_gather_facts RETURN docstring (#744) Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * Solve problem 619 about set mode on dest file (#746) * Solve problem 619 about set mode on dest file * Add fragment of 746 PR --------- Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> * Merge release tasks for v1.5.0 into release branch (#752) * Updated metadata * Updated copyright information * Updated copyright in README * Generated changelog * Added previous versions to changelog * Changed a minor_change fragment to bugfix * Updated release notes * Update dependency finder to allow for all tests minus skip tests Signed-off-by: ddimatos <dimatos@gmail.com> * Update release notes and changelog with full date formats Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: ddimatos <dimatos@gmail.com> * remove changelog fragments no longer needed (#754) Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: Ivan Moreno <iamorenosoto@gmail.com> Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> Co-authored-by: Rich Parker <richp405@gmail.com> Co-authored-by: ketankelkar <ktnklkr@gmail.com> Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> * v1.6.0-beta.1 release (#759) * Added tagging of converted files * Updated tests for zos_encode * Added restoration of PDS/PDSE members when module fails zos_copy can now track which members got overwritten or newly added to a partitioned data set, and restore them accordingly. This commit includes a refactorization of the copy_to_pdse method to simplify the restoration work. * Fixed cleanup of dest when module fails * Removed exception used for debugging * Added pytest markers * solved pep8 issue * Added more information to error when restoration fails * Update make file with new features to manage the nodes (#563) * Update make file with new features to manage the nodes Signed-off-by: ddimatos <dimatos@gmail.com> * Correct typo Signed-off-by: ddimatos <dimatos@gmail.com> * updated makefile to allow for no password in the clear and added a new host Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Fix a bug that failed to copy the cert to the managed node (#577) Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Added apropiate error message in zos_lineinfile when src is not found * Better error message * Added fragment for changelog * Update git issue slection field (#593) * Update git issue slection field * Added some additional future versions * Added rule to ignore python 2.7 compile not supporting f strings * Corrected rule in ignore file * Update 584-zos_lineinfile-error-message.yml * Added missing fragments for issues 309 and 408 * update make file and related artifacts with recent zVM changes (#598) Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Expand what is ingored for venv to all venv's (#613) Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Ansible-core versions with updated ignore file pass (#615) * Validated that ansible-core versions with updated ignore file pass locally Signed-off-by: ddimatos <dimatos@gmail.com> * remove redundant entry on last line Signed-off-by: ddimatos <dimatos@gmail.com> * remove redundant entry on last line Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Update make file to read exported var VENV else default to 'venv' (#614) * Update make file to read exported var VENV else default to 'venv' Signed-off-by: ddimatos <dimatos@gmail.com> * Correct the default from defaultValue to 'venv' Signed-off-by: ddimatos <dimatos@gmail.com> * Bugfix/558/zos copy backup dev (#609) * Create emergency backup only if force is set to False and dest exists * Added Changelog fragment * Updated imports ansible.module_utils._text to ansible.module_utils.common.text.converters (#602) * Updated import in zos_fetch * Updated import in zos_job_submit * Updated import in module_utils/data_set * Updated import in module_utils/System * Updated import in zos_copy * Updated import in zos_fetch * Added changelog fragment * Update enhancement-518-text-converter-import.yml * Updates the makefile and related scripts for the latest mounts (#628) * Update makefile coments and use --ignore for pytest Signed-off-by: ddimatos <dimatos@gmail.com> * lexicographical order targets Signed-off-by: ddimatos <dimatos@gmail.com> * Update makefile and artifacts to support new mounts Signed-off-by: ddimatos <dimatos@gmail.com> * Removing encrypted versions of the scripts, no longer senstive content Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * update profile created for mount points Signed-off-by: ddimatos <dimatos@gmail.com> * Correct f-string usage to remain 2.7 compatible (#659) * Correct f-string usage to remain 2.7 compatible Signed-off-by: ddimatos <dimatos@gmail.com> * Update changelog fragments Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Updated shell scripts for development tooling Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Update collab template Signed-off-by: ddimatos <dimatos@gmail.com> * Update collab template Signed-off-by: ddimatos <dimatos@gmail.com> * Update collab template Signed-off-by: ddimatos <dimatos@gmail.com> * Template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Template updates Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * changed zos_job_query to allow multi/embedded wildcard job_values. job.py/_get_job_status now uses fnmatch to test for wildcarded job_names. added test looking for a wildcard job name. added basic changelog text as 'enhancement'. * expanded wildcard support to job_id as well as job_name corrected error-handling issue in query routine playbook tests complete * cleaned up pep8 issues * Bug fix to correct job log returining and other edge cases (#683) * Update zos_job_submit to handle some edge cases and return the job log nearly always Signed-off-by: ddimatos <dimatos@gmail.com> * Linting correction Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragement for pr 683 Signed-off-by: ddimatos <dimatos@gmail.com> * Clean up comments in code Signed-off-by: ddimatos <dimatos@gmail.com> * Update restructured text for modules and changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Add support to check for security exception to job submit module Signed-off-by: ddimatos <dimatos@gmail.com> * Add additonal logic for jobs that use typerun=scan Signed-off-by: ddimatos <dimatos@gmail.com> * Update chnagelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update zos_job_submit.py Grammar change and correction. --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Added uss_tag_encoding function * Fixing linter issues * removed extraneous comment on query, eliminated unused variable * responding to reviewer comments * Expanded query tests to run a hello jcl file to make sure we are searching for a job id that exists. * Expanded documentation in code to highlight asterisk availability. Added examples showing multi-asterisk search. * Corrected 2 documentation errors * Change to documentation text (indent on multi line string?) * Still trying to get documentation to pass * Looks like '---' was killing documentation block. * Update zos_blockinfile with complex examples (#727) * Update zos_blockinfile with complex examples Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragement Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Added additional D SYMBOLS example to zos_operator documentation (#730) * Update zos_operator with an additional example Signed-off-by: ddimatos <dimatos@gmail.com> * Added changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * zos_gather_facts - add sample output to RETURN docstring (#722) * zos_gather_facts - add sample output to RETURN docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add changelog fragment Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * zos_gather_facts RETURN docstring - add sample sysplex_name and lpar_name values Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * Update generated module doc Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: ddimatos <dimatos@gmail.com> * 574find (#668) * change to allow find loop to continue when one particular data set element is not found This has been validated with a zos-check-find.yaml playbook * added functional test that includes non-existant pds entry, which should be skipped over * added changelog fragment. * zos_blockinfile , can quotes in content can be supported (#680) * Verify coomand of ZOAU support the doble quotes and get better output of message * Verify coomand of ZOAU support the doble quotes and get better output of message * Restore to the one tyme function solving TypeError * Test about cases with quotes supported * Solve comments * Comments in the changelog * Adjust test for working accord the text * A needed space * All in structured * Comments solved * Better order --------- Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> Co-authored-by: Demetri <dimatos@gmail.com> * zos_copy mode is applied to the destination directory, a deviation from the communtiy module behavior. (#723) * Verify coomand of ZOAU support the doble quotes and get better output of message * Verify coomand of ZOAU support the doble quotes and get better output of message * Restore to the one tyme function solving TypeError * Test about cases with quotes supported * Solve comments * Comments in the changelog * Adjust test for working accord the text * Solve the dest functional mode set for the applied to destination directory * Identation and spaces * To work well * To work well * To work well * To work well * Changelogs added * Solved the fragment test and separte the cases --------- Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> * corrected job test case that wanted to extract job id. * changed call to zos_job_query in the functional test. * zos_data_set - add force parameter to enable member delete via disp shr (#718) * add force parameter to enable member delete via disp shr Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update link to PR instead of issue Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * fix minor mistake in func test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update DOCstring for missed items highlighted in PR review Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update copyright years in data set module util Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * Update changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update to doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> * Update to doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> * Update to doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> * update doc, rst Signed-off-by: ddimatos <dimatos@gmail.com> * Update doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> * Added handler for job not found edge cases (None not iterable errors) * corrected pep8 issue (bad indent) * removed tracking text from error/not found messages. * Update zos_job_query.py * Add and automate a load lib test case for module zos_copy (#640) * Modified if statement * Added changelog fragment * Corrected statements Corrected PR in changelog fix lint issue * Adding test case on the work * Added assertion to loadlib test Changed cobol src Added newlines * Correcting cobol src * Removed unnecessary comments * Removed deprint statements for debug * Update 601-copy-loadlib-member.yml * Reduce space primary * Make sure cobol is executable * corrected name * Corrected names * Adding debug statements * modified name in link step * Changing copy name * Removed print statements for debug * Removed previous changelog * Removed unused fragment * Removed test case * Add max_rc support for module zos_tso_command (#666) * rebuilding 565 * fixing pep8 issue * corrected not in code, added functional test to use maxrc * changing maxrc return codes as indicated * updated copy_rexx to incorporate max_rc * pep8 issue on an if statement * changed test to dump intermediate values temporarily * added max_rc to arg params to get it to pass through * removed extra output on test * Added changelog fragment. * Update 565-zos_tsocommand_maxrc.yml * PARTIAL correction based on reviewer's feedback. Will complete later today. * Partial push: change origrc to orig_rc and added documentation. Added experimental loop for multiline results. Changed error case to continue through the commands even after a failure (?!) * Changes based on review feedback: - rc is actual returned value - origrc/orig_rc removed - in a list of commands, first error (considering max_rc) stops the chain - while processing commands, a list of success/failures are kept for error state info - changed listds to a longer, more complete string in example * updated test case for max_rc to use longer, more complete listds command * Minor pep8 and yamllint changes * Updated string builder to use append and join, for efficiency. * Update changelog fragment to correspond to pull request number Signed-off-by: ddimatos <dimatos@gmail.com> * removed old fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Changed output of command-by-command to use max_rc instead of Max RC, for consistency. * clarifying failure message when user hasn't set a max_rc. * corrected date on copyright notice to include current year. * corrected copyright date on test case --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: Demetri <dimatos@gmail.com> * added changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * zos_copy forward port a test case for symbols on a volume #739 (#740) * added test case for symbols on a volume Signed-off-by: ddimatos <dimatos@gmail.com> * Changelog fragment added Signed-off-by: ddimatos <dimatos@gmail.com> * Update test case with added comments Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Volume Initialization Module (#654) * Adding ZOS_INIT plugins The module and action for ZOS_Init command, under zos_ickdsf_command. Designed for version 1.3.1, and example given references removed zos_ssh.py. * Create zos_ickdsf_command.rst Tentative documentation. Does not quite list every return, and program may change to work with overall zos core collection. * Update zos_ickdsf_command.py fixed to pass pep8 sanity test. * Update zos_ickdsf_command.py Edited zos_ickdsf_command.py in action folder to pass pep8 style check. * bugfix - undo hard-coded storagegroup param from command passed to mvs_raw Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update encoding and licensing info Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * edit descriptions, leaving in old to verify messaging with dfsms team Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove references to buildix Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * cleanup examples docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add return docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add some unit tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update dict names, some clean up Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update interface and docs with cleaner descriptions and define default values and required true/false where applicable Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove init param and bring out sub-options one level Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename module from zos_ickdsf_command to zos_ickdsf_init Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove files referring to zos_ickdsf_command; i forgot to untrack these in the previous commit Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove params from intitial definition of results dict which are not always present, remove unused return params, rename message to msg to match ibm_zos_core and ansible style Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add ignore missing gplv3 license warning from sanity testings to existing ignore files Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * style edits to pass sanity tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add in-depth task name to first example docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add minimal params functional test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * convert action plugin to module_util, remove IckdsfCommand and CommandInit class definitions and custom error classes, move convert method to module_util Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename rc param, update func test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add note about disabled verify_offline check Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * removed volume_address checks which can be handled by ansiblemodule parsing validation or ickdsf itself; changed function signature of convert method to include results dicts so it can pass it to fail_json method in the case of module failure Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add invalid volume_address func tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * change param names, organize mvscmd return output Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * consolidate negative func tests, add negative tests for vtoc_tracks param, start list for postive tests to implement Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add func tests for "verify_existing_volid" and "verify_no_data_sets_exist" Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add volid positive and negative func tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add mutual exclusion to no index/sms_managedw with func test, minor edits to comments * add func tests for index param Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add func test for vtoc_tracks param Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove 'addr_range', 'volid_prefix' and 'output_html' params; add example with ansible loop to init multiple serial volumes Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * convert 'content' return param to list for prettier playbook output Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add specific examples to docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add guard rail test with volser check set to ensure target volume is as expected Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * switch target test vol to user02, remove existing data sets in initial test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * start adding author names to docstring, edits of yaml for sanity test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename module to zos_volume_init Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove unit test suite which was not kept up to date with code restructure Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove old/outdated .rst file Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * bring up to speed with v1.5.0-beta.1 which is currently more recent than dev Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add sanity test 2.15 file Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * clean up trailing empty lines/space chars Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * more pep8 style Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * pull in more changes from latest dev branch Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * tweak tests/sanity/ignore-2.10.txt file Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename volume_address to address, rename verify_no_data_sets_exist to verify_volume_empty, rename verify_existing_volid to verify_volid, rename vtoc_tracks to vtoc_size, rename return param command to cmd, make suggested doc changes, switch off check_mode Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * resolve pep8 line too long issue Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add year to copyright, add gh user name to author, restyle true/false with code rather than italics, remove backslashes in docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add changelog fragment Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update zos_fetch test case to create data set instead of using one that gets wiped away by zos_volume_init test cases Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * address PR comments including pep8 styling, docstring changes including add seealso section, add details around authorized usage of ickdsf, some light wordsmithing, and remove cmd from return structure Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add backticks to other special chars in docs for consistency Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * on more formatting tweak to national chars in docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add new param to support tmp_hlq Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * extend tmphlq binding to sysprint(*), disable verbose mode for mvscmd call Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove references to betterargparser - it cannot be used in its current state as it is too restrictive on volume serial and qualifier args Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove soft language ie beware, please; reduce line lengths, fix a spelling error Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * reorganize test case code for more logical grouping and un-nesting from a for loop, also update copyright year that was missed Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update display command check to only check particular lines instead of entire output in test_volid_address_assigned_correctly which would always be positive when tested against the entire output Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: trevor-glassey <Trevor.Glassey@ibm.com> * Ported bugfixes for #664 and #678 from 1.5.0 (#743) * Ported bugfixes for #664 and #678 * Updated file normalization for sequential datasets * Fixed encoding when a remote source is used * Added encoding normalization for record length (#664) This is a port and tweak from the normalization found in the L2 support branch. Now the normalization applies for both local and remote source files. There is still work left to validate how this behaves when copying to partitioned datasets, and see if the normalization needs to handle directories as well. * Refactored normalization for USS files * Updated line endings normalization in PDSEs * Updated create_temp_with_lf_endings * Added tests for line endings normalization * Added changelog fragment * Updated changelog fragment * Updated copyright information * Added comments to record length tests * Added more context to the record length normalization * Add force (disp=shr) option to zos_lineinfile and update zos_blockinfile with the same locking test case. (#731) * Add DISP=SHR by function force to the user * Add helper function for force cases only for EOF inserts * Test cases added * Just to check in the change case * All test cases for force in zos_lineinfile working properly * Test cases for force blockinfile * Fixes for style of code * One identation error * One identation error * One identation error * Documentation error * All test cases for line infile and block infile * Comments * Change the stdout to assert with expected of ds * Error of test case * Fragment added * Solve typo errors * Update to remove uncessary comment Signed-off-by: ddimatos <dimatos@gmail.com> * Update changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update module with force comments Signed-off-by: ddimatos <dimatos@gmail.com> * Solve some test problems * Copyright year * Copyright year * Add cases for other ecodes * Add test for all encodings * Cover all encodes test cases with force * Blockinfile force added for other encodes and DS types --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@mbp-de-andre-2.gdl.mex.ibm.com> Co-authored-by: ddimatos <dimatos@gmail.com> * remove duplicate function (#753) * Update branch production branch Main with release v1.5.0 content (#756) (#758) * Bug fix to correct job log returining and other edge cases (#688) * Update zos_job_submit to handle some edge cases and return the job log nearly always * Linting correction * Add changelog fragement for pr 683 * Clean up comments in code * Update restructured text for modules and changelog fragment * Add support to check for security exception to job submit module * Add additonal logic for jobs that use typerun=scan * Update chnagelog fragment * Correct run on sentence --------- * Added additional `D SYMBOLS` example to zos_operator documentation (#729) * Update zos_operator with an additional example * Added change fragment --------- * Ported bugfixes for #664 and #678 * Updated file normalization for sequential datasets * Fixed encoding when a remote source is used * Added encoding normalization for record length (#664) This is a port and tweak from the normalization found in the L2 support branch. Now the normalization applies for both local and remote source files. There is still work left to validate how this behaves when copying to partitioned datasets, and see if the normalization needs to handle directories as well. * Refactored normalization for USS files * Updated line endings normalization in PDSEs * Updated create_temp_with_lf_endings * Added tests for line endings normalization * Added changelog fragment * Add copy lib member test case * Corrected changelog fragment * Add conflict resolution * Update 641-copy-loadlib-member.yml * zos_copy forward port a test case for symbols on a volume (#739) * Add a test case for symbols using an existing volume on our test system * Add changelog fragment * Update 739-zos_copy-volume-symbol-test.yml Update description to use symbols overs special chars. * Update test case with added comments --------- * bring in updates to zos_gather_facts RETURN docstring (#744) * Solve problem 619 about set mode on dest file (#746) * Solve problem 619 about set mode on dest file * Add fragment of 746 PR --------- * Merge release tasks for v1.5.0 into release branch (#752) * Updated metadata * Updated copyright information * Updated copyright in README * Generated changelog * Added previous versions to changelog * Changed a minor_change fragment to bugfix * Updated release notes * Update dependency finder to allow for all tests minus skip tests * Update release notes and changelog with full date formats --------- * remove changelog fragments no longer needed (#754) --------- Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: Ivan Moreno <iamorenosoto@gmail.com> Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> Co-authored-by: Rich Parker <richp405@gmail.com> Co-authored-by: ketankelkar <ktnklkr@gmail.com> Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> * update galaxy and meta/ files Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update copyright years Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update rst doc files Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add 1.6.0-beta.1 release summary fragment Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * generate changelog Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add release notes Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add additional stylizing Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * Delete fragements once changelog is generated Signed-off-by: ddimatos <dimatos@gmail.com> * update stylizing around *none* in zos_volume_init docs Signed-off-by: Ketan… * Enhancement/866 archive (#930) * Added action plugin zos_unarchive * Added zos_archive changes * Added zos_unarchive changes * Added zos_archive tests changes * Added test zos_unarchive changes * Added zos_archive changes * fixed pep8 issues * Changed source to src in docs * Added correct copyright year * Updated docs * Added changelog fragments * Updated docs * Updated galaxy.yml * Updated meta * Updated docs * Added zos_gather_facts rst * Added changelog * Added release notes * Changed variable name to avoid shadowing import * Delete 930-archive-post-beta.yml * Delete v1.7.0-beta.2_summary.yml * Resolve conflicts --------- Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: Demetri <dimatos@gmail.com> Co-authored-by: Ivan Moreno <iamorenosoto@gmail.com> Co-authored-by: Rich Parker <richp405@gmail.com> Co-authored-by: ketankelkar <ktnklkr@gmail.com> Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> Co-authored-by: trevor-glassey <Trevor.Glassey@ibm.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@mbp-de-andre-2.gdl.mex.ibm.com> --- CHANGELOG.rst | 25 +++ changelogs/.plugin-cache.yaml | 2 +- changelogs/changelog.yaml | 28 +++ docs/source/modules/zos_archive.rst | 16 +- docs/source/modules/zos_gather_facts.rst | 5 + docs/source/release_notes.rst | 34 +++- galaxy.yml | 4 +- meta/ibm_zos_core_meta.yml | 2 +- plugins/action/zos_unarchive.py | 38 ++-- plugins/modules/zos_archive.py | 188 +++++++++++++----- .../modules/test_zos_archive_func.py | 15 +- .../modules/test_zos_unarchive_func.py | 30 ++- 12 files changed, 297 insertions(+), 90 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 826161e56..c6b3a91e0 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -5,6 +5,31 @@ ibm.ibm_zos_core Release Notes .. contents:: Topics +v1.7.0-beta.2 +============= + +Release Summary +--------------- + +Release Date: '2023-08-21' +This changelog describes all changes made to the modules and plugins included +in this collection. The release date is the date the changelog is created. +For additional details such as required dependencies and availability review +the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__ + +Minor Changes +------------- + +- zos_archive - If destination data set space is not provided then the module computes it based on the src list and/or expanded src list based on pattern provided. (https://github.com/ansible-collections/ibm_zos_core/pull/930). +- zos_archive - When xmit faces a space error in xmit operation because of dest or log data set are filled raises an appropriate error hint. (https://github.com/ansible-collections/ibm_zos_core/pull/930). +- zos_unarchive - When copying to remote fails now a proper error message is displayed. (https://github.com/ansible-collections/ibm_zos_core/pull/930). +- zos_unarchive - When copying to remote if space_primary is not defined, then is defaulted to 5M. (https://github.com/ansible-collections/ibm_zos_core/pull/930). + +Bugfixes +-------- + +- zos_archive - Module did not return the proper src state after archiving. Fix now displays the status of the src after the operation. (https://github.com/ansible-collections/ibm_zos_core/pull/930). + v1.7.0-beta.1 ============= diff --git a/changelogs/.plugin-cache.yaml b/changelogs/.plugin-cache.yaml index 3520dc55a..c07ea8e62 100644 --- a/changelogs/.plugin-cache.yaml +++ b/changelogs/.plugin-cache.yaml @@ -126,4 +126,4 @@ plugins: strategy: {} test: {} vars: {} -version: 1.7.0-beta.1 +version: 1.7.0-beta.2 diff --git a/changelogs/changelog.yaml b/changelogs/changelog.yaml index 5f4da9de0..753c8e318 100644 --- a/changelogs/changelog.yaml +++ b/changelogs/changelog.yaml @@ -981,3 +981,31 @@ releases: name: zos_unarchive namespace: '' release_date: '2023-07-26' + 1.7.0-beta.2: + changes: + bugfixes: + - zos_archive - Module did not return the proper src state after archiving. + Fix now displays the status of the src after the operation. (https://github.com/ansible-collections/ibm_zos_core/pull/930). + minor_changes: + - zos_archive - If destination data set space is not provided then the module + computes it based on the src list and/or expanded src list based on pattern + provided. (https://github.com/ansible-collections/ibm_zos_core/pull/930). + - zos_archive - When xmit faces a space error in xmit operation because of dest + or log data set are filled raises an appropriate error hint. (https://github.com/ansible-collections/ibm_zos_core/pull/930). + - zos_unarchive - When copying to remote fails now a proper error message is + displayed. (https://github.com/ansible-collections/ibm_zos_core/pull/930). + - zos_unarchive - When copying to remote if space_primary is not defined, then + is defaulted to 5M. (https://github.com/ansible-collections/ibm_zos_core/pull/930). + release_summary: 'Release Date: ''2023-08-21'' + + This changelog describes all changes made to the modules and plugins included + + in this collection. The release date is the date the changelog is created. + + For additional details such as required dependencies and availability review + + the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__' + fragments: + - 930-archive-post-beta.yml + - v1.7.0-beta.2_summary.yml + release_date: '2023-08-21' diff --git a/docs/source/modules/zos_archive.rst b/docs/source/modules/zos_archive.rst index 221de41ec..03eaafbae 100644 --- a/docs/source/modules/zos_archive.rst +++ b/docs/source/modules/zos_archive.rst @@ -112,14 +112,20 @@ dest If *dest* is a nonexistent USS file, it will be created. + If *dest* is an existing file or data set and *force=true*, the existing *dest* will be deleted and recreated with attributes defined in the *dest_data_set* option or computed by the module. + + If *dest* is an existing file or data set and *force=false* or not specified, the module exits with a note to the user. + Destination data set attributes can be set using *dest_data_set*. + Destination data set space will be calculated based on space of source data sets provided and/or found by expanding the pattern name. Calculating space can impact module performance. Specifying space attributes in the *dest_data_set* option will improve performance. + | **required**: True | **type**: str exclude - Remote absolute path, glob, or list of paths, globs or data set name patterns for the file, files or data sets to exclude from path list and glob expansion. + Remote absolute path, glob, or list of paths, globs or data set name patterns for the file, files or data sets to exclude from src list and glob expansion. Patterns (wildcards) can contain one of the following, `?`, `*`. @@ -152,7 +158,7 @@ mode The mode may also be specified as a symbolic mode (for example, 'u+rwx' or 'u=rw,g=r,o=r') or a special string 'preserve'. - *mode=preserve* means that the file will be given the same permissions as the source file. + *mode=preserve* means that the file will be given the same permissions as the src file. | **required**: False | **type**: str @@ -170,7 +176,7 @@ owner remove - Remove any added source files , trees or data sets after module `zos_archive <./zos_archive.html>`_ adds them to the archive. Source files, trees and data sets are identified with option *path*. + Remove any added source files , trees or data sets after module `zos_archive <./zos_archive.html>`_ adds them to the archive. Source files, trees and data sets are identified with option *src*. | **required**: False | **type**: bool @@ -387,6 +393,8 @@ Notes When packing and using ``use_adrdssu`` flag the module will take up to two times the space indicated in ``dest_data_set``. + tar, zip, bz2 and pax are archived using python ``tarfile`` library which uses the latest version available for each format, for compatibility when opening from system make sure to use the latest available version for the intended format. + See Also @@ -425,7 +433,7 @@ dest_state ``compress`` when the file is compressed, but not an archive. - ``incomplete`` when the file is an archive, but some files under *path* were not found. + ``incomplete`` when the file is an archive, but some files under *src* were not found. | **returned**: success | **type**: str diff --git a/docs/source/modules/zos_gather_facts.rst b/docs/source/modules/zos_gather_facts.rst index 63bd22701..232cc26ba 100644 --- a/docs/source/modules/zos_gather_facts.rst +++ b/docs/source/modules/zos_gather_facts.rst @@ -22,6 +22,11 @@ Synopsis - Note, the module will fail fast if any unsupported options are provided. This is done to raise awareness of a failure in an automation setting. +Requirements +------------ + +- ZOAU 1.2.1 or later. + diff --git a/docs/source/release_notes.rst b/docs/source/release_notes.rst index 948851218..9a7bdb059 100644 --- a/docs/source/release_notes.rst +++ b/docs/source/release_notes.rst @@ -6,6 +6,39 @@ Releases ======== +Version 1.7.0-beta.2 +==================== + +Minor Changes +------------- +- ``zos_archive`` + + - When xmit faces a space error in xmit operation because of dest or log data set being filled raises an appropriate error hint. + - If destination data set space is not provided then the module computes it based on the src list and/or expanded src list based on pattern provided. + +- ``zos_unarchive`` + + - When copying to remote fails now a proper error message is displayed. + - When copying to remote if space_primary is not defined, then is defaulted to 5M. + +Bugfixes +-------- +- ``zos_archive`` - Module did not return the proper src state after archiving. Fix now displays the status of the src after the operation. + +Availability +------------ + +* `Galaxy`_ +* `GitHub`_ + +Reference +--------- + +* Supported by `z/OS V2R3`_ or later +* Supported by the `z/OS® shell`_ +* Supported by `IBM Open Enterprise SDK for Python`_ `3.9`_ - `3.11`_ +* Supported by IBM `Z Open Automation Utilities 1.2.3`_ (or later) but prior to version 1.3. + Version 1.7.0-beta.1 ==================== @@ -29,7 +62,6 @@ Minor Changes - ``zos_data_set`` - supports record format *F* (fixed) where one physical block on disk is one logical record and all the blocks and records are the same size. - ``zos_job_output`` - displays job information *asid*, *creation date*, *creation time*, *job class*, *priority*, *queue position*, *service class* and conditionally *program name* (when ZOAU is v 1.2.4 or later). - ``zos_job_query`` - - displays job information *asid*, *creation date*, *creation time*, *job class*, *priority*, *queue position*, *service class* and conditionally *program name* (when ZOAU is v 1.2.4 or later). - removes unnecessary queries to find DDs improving the modules performance. - ``zos_job_submit`` - displays job information *asid*, *creation date*, *creation time*, *job class*, *priority*, *queue position*, *service class* and conditionally *program name* (when ZOAU is v 1.2.4 or later). diff --git a/galaxy.yml b/galaxy.yml index 87f10f272..b1090564c 100644 --- a/galaxy.yml +++ b/galaxy.yml @@ -6,7 +6,7 @@ namespace: ibm name: ibm_zos_core # The collection version -version: 1.7.0-beta.1 +version: 1.7.0-beta.2 # Collection README file readme: README.md @@ -19,7 +19,7 @@ authors: - Ivan Moreno <ivan.moreno.soto@ibm.com> - Oscar Fernando Flores Garcia <fernando.flores@ibm.com> - Jenny Huang <jennyhuang@ibm.com> - - Marcel Guitierrez <andre.marcel.gutierrez@ibm.com> + - Marcel Gutierrez <andre.marcel.gutierrez@ibm.com> # Description description: The IBM z/OS core collection includes connection plugins, action plugins, modules, filters and ansible-doc to automate tasks on z/OS. diff --git a/meta/ibm_zos_core_meta.yml b/meta/ibm_zos_core_meta.yml index 5e265309f..f659df786 100644 --- a/meta/ibm_zos_core_meta.yml +++ b/meta/ibm_zos_core_meta.yml @@ -1,5 +1,5 @@ name: ibm_zos_core -version: "1.7.0-beta.1" +version: "1.7.0-beta.2" managed_requirements: - name: "IBM Open Enterprise SDK for Python" diff --git a/plugins/action/zos_unarchive.py b/plugins/action/zos_unarchive.py index 7c310a4a3..19cbf5ead 100644 --- a/plugins/action/zos_unarchive.py +++ b/plugins/action/zos_unarchive.py @@ -61,6 +61,8 @@ def run(self, tmp=None, task_vars=None): format_name = format.get("name") copy_module_args = dict() dest_data_set = format.get("dest_data_set") + if dest_data_set is None: + dest_data_set = dict() dest = "" if source.startswith('~'): source = os.path.expanduser(source) @@ -80,14 +82,12 @@ def run(self, tmp=None, task_vars=None): task_vars=task_vars, ) dest = cmd_res.get("stdout") - if dest_data_set is None: - if format_name == 'terse': - dest_data_set = dict(type='SEQ', record_format='FB', record_length=1024) - if format_name == 'xmit': - dest_data_set = dict(type='SEQ', record_format='FB', record_length=80) - else: - # Raise unsupported format name - None + if dest_data_set.get("space_primary") is None: + dest_data_set.update(space_primary=5, space_type="M") + if format_name == 'terse': + dest_data_set.update(type='SEQ', record_format='FB', record_length=1024) + if format_name == 'xmit': + dest_data_set.update(type='SEQ', record_format='FB', record_length=80) copy_module_args.update( dict( @@ -107,15 +107,17 @@ def run(self, tmp=None, task_vars=None): templar=self._templar, shared_loader_obj=self._shared_loader_obj) result.update(zos_copy_action_module.run(task_vars=task_vars)) - - module_args["src"] = dest - display.vvv(u"Copy args {0}".format(result), host=self._play_context.remote_addr) - - result.update( - self._execute_module( - module_name="ibm.ibm_zos_core.zos_unarchive", - module_args=module_args, - task_vars=task_vars, + display.vvv(u"Copy result {0}".format(result), host=self._play_context.remote_addr) + if result.get("msg") is None: + module_args["src"] = dest + + result.update( + self._execute_module( + module_name="ibm.ibm_zos_core.zos_unarchive", + module_args=module_args, + task_vars=task_vars, + ) ) - ) + else: + result.update(dict(failed=True)) return result diff --git a/plugins/modules/zos_archive.py b/plugins/modules/zos_archive.py index 6b7fcbeb0..c48fd767e 100644 --- a/plugins/modules/zos_archive.py +++ b/plugins/modules/zos_archive.py @@ -109,13 +109,22 @@ - I(dest) can be a USS file or MVS data set name. - If I(dest) has missing parent directories, they will be created. - If I(dest) is a nonexistent USS file, it will be created. + - If I(dest) is an existing file or data set and I(force=true), + the existing I(dest) will be deleted and recreated with attributes + defined in the I(dest_data_set) option or computed by the module. + - If I(dest) is an existing file or data set and I(force=false) or not + specified, the module exits with a note to the user. - Destination data set attributes can be set using I(dest_data_set). + - Destination data set space will be calculated based on space of + source data sets provided and/or found by expanding the pattern name. + Calculating space can impact module performance. Specifying space attributes + in the I(dest_data_set) option will improve performance. type: str required: true exclude: description: - Remote absolute path, glob, or list of paths, globs or data set name - patterns for the file, files or data sets to exclude from path list + patterns for the file, files or data sets to exclude from src list and glob expansion. - "Patterns (wildcards) can contain one of the following, `?`, `*`." - "* matches everything." @@ -148,7 +157,7 @@ (for example, 'u+rwx' or 'u=rw,g=r,o=r') or a special string 'preserve'. - I(mode=preserve) means that the file will be given the same permissions - as the source file. + as the src file. type: str required: false owner: @@ -164,7 +173,7 @@ description: - Remove any added source files , trees or data sets after module L(zos_archive,./zos_archive.html) adds them to the archive. - Source files, trees and data sets are identified with option I(path). + Source files, trees and data sets are identified with option I(src). type: bool required: false default: false @@ -301,6 +310,10 @@ respectively. - When packing and using C(use_adrdssu) flag the module will take up to two times the space indicated in C(dest_data_set). + - tar, zip, bz2 and pax are archived using python C(tarfile) library which + uses the latest version available for each format, for compatibility when + opening from system make sure to use the latest available version for the + intended format. seealso: @@ -373,7 +386,7 @@ - C(archive) when the file is an archive. - C(compress) when the file is compressed, but not an archive. - C(incomplete) when the file is an archive, but some files under - I(path) were not found. + I(src) were not found. type: str returned: success missing: @@ -403,6 +416,7 @@ ''' from ansible.module_utils.basic import AnsibleModule +from ansible.module_utils._text import to_bytes from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import ( better_arg_parser, data_set, @@ -416,6 +430,8 @@ import abc import glob import re +import math +from hashlib import sha256 try: @@ -427,6 +443,7 @@ AMATERSE_RECORD_LENGTH = 1024 STATE_ABSENT = 'absent' +STATE_PRESENT = 'present' STATE_ARCHIVE = 'archive' STATE_COMPRESSED = 'compressed' STATE_INCOMPLETE = 'incomplete' @@ -488,6 +505,8 @@ def __init__(self, module): self.expanded_sources = "" self.expanded_exclude_sources = "" self.dest_state = STATE_ABSENT + self.state = STATE_PRESENT + self.xmit_log_data_set = "" def targets_exist(self): return bool(self.targets) @@ -509,7 +528,7 @@ def find_targets(self): pass @abc.abstractmethod - def _get_checksums(self, path): + def _get_checksums(self, src): pass @abc.abstractmethod @@ -524,17 +543,23 @@ def is_different_from_original(self): def remove_targets(self): pass + @abc.abstractmethod + def compute_dest_size(self): + pass + @property def result(self): return { 'archived': self.archived, 'dest': self.dest, + 'state': self.state, 'arcroot': self.arcroot, 'dest_state': self.dest_state, 'changed': self.changed, 'missing': self.not_found, 'expanded_sources': list(self.expanded_sources), 'expanded_exclude_sources': list(self.expanded_exclude_sources), + 'xmit_log_data_set': self.xmit_log_data_set, } @@ -569,11 +594,29 @@ def find_targets(self): else: self.not_found.append(path) - def _get_checksums(self, path): - md5_cmd = "md5 -r \"{0}\"".format(path) - rc, out, err = self.module.run_command(md5_cmd) - checksums = out.split(" ")[0] - return checksums + def _get_checksums(self, src): + """Calculate SHA256 hash for a given file + + Arguments: + src {str} -- The absolute path of the file + + Returns: + str -- The SHA256 hash of the contents of input file + """ + b_src = to_bytes(src) + if not os.path.exists(b_src) or os.path.isdir(b_src): + return None + blksize = 64 * 1024 + hash_digest = sha256() + try: + with open(to_bytes(src, errors="surrogate_or_strict"), "rb") as infile: + block = infile.read(blksize) + while block: + hash_digest.update(block) + block = infile.read(blksize) + except Exception: + raise + return hash_digest.hexdigest() def dest_checksums(self): if self.dest_exists(): @@ -586,11 +629,18 @@ def is_different_from_original(self): return True def remove_targets(self): + self.state = STATE_ABSENT for target in self.archived: if os.path.isdir(target): - os.removedirs(target) + try: + os.removedirs(target) + except Exception: + self.state = STATE_INCOMPLETE else: - os.remove(target) + try: + os.remove(target) + except PermissionError: + self.state = STATE_INCOMPLETE def archive_targets(self): self.file = self.open(self.dest) @@ -699,34 +749,6 @@ def find_targets(self): else: self.not_found.append(path) - def _compute_dest_data_set_size(self): - """ - Computes the attributes that the destination data set or temporary destination - data set should have in terms of size, record_length, etc. - """ - - """ - - Size of temporary DS for archive handling. - - If remote_src then we can get the source_size from archive on the system. - - If not remote_src then we can get the source_size from temporary_ds. - Both are named src so no problemo. - - If format is xmit, dest_data_set size is the same as source_size. - - If format is terse, dest_data_set size is different than the source_size, has to be greater, - but how much? In this case we can add dest_data_set option. - - Apparently the only problem is when format name is terse. - """ - - # Get the size from the system - default_size = 5 - dest_space_type = 'M' - dest_primary_space = int(default_size) - return dest_primary_space, dest_space_type - def _create_dest_data_set( self, name=None, @@ -833,11 +855,13 @@ def dump_into_temp_ds(self, temp_ds): ) return rc - def _get_checksums(self, path): - md5_cmd = "md5 -r \"//'{0}'\"".format(path) - rc, out, err = self.module.run_command(md5_cmd) - checksums = out.split(" ")[0] - return checksums + def _get_checksums(self, src): + sha256_cmd = "sha256 \"//'{0}'\"".format(src) + rc, out, err = self.module.run_command(sha256_cmd) + checksums = out.split("= ") + if len(checksums) > 0: + return checksums[1] + return None def dest_checksums(self): if self.dest_exists(): @@ -856,8 +880,14 @@ def dest_exists(self): return data_set.DataSet.data_set_exists(self.dest) def remove_targets(self): + self.state = STATE_ABSENT for target in self.archived: - data_set.DataSet.ensure_absent(target) + try: + changed = data_set.DataSet.ensure_absent(target) + except Exception: + self.state = STATE_INCOMPLETE + if not changed: + self.state = STATE_INCOMPLETE return def expand_mvs_paths(self, paths): @@ -892,10 +922,30 @@ def clean_environment(self, data_sets=None, uss_files=None, remove_targets=False data_set.DataSet.ensure_absent(ds) if uss_files is not None: for file in uss_files: - os.remove(file) + try: + os.remove(file) + except PermissionError: + self.state = STATE_INCOMPLETE if remove_targets: + self.remove_targets() + + def compute_dest_size(self): + """ + Calculate the destination data set based on targets found. + Arguments: + + Returns: + {int} - Destination computed space in kilobytes. + """ + if self.dest_data_set.get("space_primary") is None: + dest_space = 0 for target in self.targets: - data_set.DataSet.ensure_absent(target) + data_sets = datasets.listing(target) + for ds in data_sets: + dest_space += int(ds.to_dict().get("total_space")) + # space unit returned from listings is bytes + dest_space = math.ceil(dest_space / 1024) + self.dest_data_set.update(space_primary=dest_space, space_type="K") class AMATerseArchive(MVSArchive): @@ -972,15 +1022,20 @@ def add(self, src, archive): archive: {str} """ log_option = "LOGDSNAME({0})".format(self.xmit_log_data_set) if self.xmit_log_data_set else "NOLOG" - xmit_cmd = """ XMIT A.B - + xmit_cmd = """ + PROFILE NOPREFIX + XMIT A.B - FILE(SYSUT1) OUTFILE(SYSUT2) - {0} - """.format(log_option) dds = {"SYSUT1": "{0},shr".format(src), "SYSUT2": archive} rc, out, err = mvs_cmd.ikjeft01(cmd=xmit_cmd, authorized=True, dds=dds) if rc != 0: + # self.get_error_hint handles the raw output of XMIT executed through TSO, contains different + # error hints based on the abend code returned. + error_hint = self.get_error_hint(out) self.module.fail_json( - msg="An error occurred while executing 'TSO XMIT' to archive {0} into {1}".format(src, archive), + msg="An error occurred while executing 'TSO XMIT' to archive {0} into {1}.{2}".format(src, archive, error_hint), stdout=out, stderr=err, rc=rc, @@ -1022,6 +1077,38 @@ def archive_targets(self): self.add(source, dest) self.clean_environment(data_sets=self.tmp_data_sets) + def get_error_hint(self, output): + """ + Takes a raw TSO XMIT output and parses the abend code and return code to provide an + appropriate error hint for the failure. + If parsing is not possible then return an empty string. + + Arguments: + output (str): Raw TSO XMIT output returned from ikjeft01 when the command fails. + """ + error_messages = dict(D37={"00000004": "There appears to be a space issue. Ensure that there is adequate space and log data sets are not full."}) + + sys_abend, reason_code, error_hint = "", "", "" + find_abend = re.findall(r"ABEND CODE.*REASON", output) + if find_abend: + try: + sys_abend = find_abend[0].split("ABEND CODE ")[1].split(" ")[0] + except IndexError: + return "" + + find_reason_code = re.findall(r"REASON CODE.*", output) + if find_reason_code: + try: + reason_code = find_reason_code[0].split("REASON CODE ")[1].split(" ")[0] + except IndexError: + return "" + + msg = "Operation failed with abend code {0} and reason code {1}. {2}" + if sys_abend in error_messages: + if reason_code in error_messages[sys_abend]: + error_hint = error_messages[sys_abend][reason_code] + return msg.format(sys_abend, reason_code, error_hint) + def run_module(): module = AnsibleModule( @@ -1196,6 +1283,7 @@ def run_module(): archive.find_targets() if archive.targets_exist(): + archive.compute_dest_size() archive.archive_targets() if archive.remove: archive.remove_targets() diff --git a/tests/functional/modules/test_zos_archive_func.py b/tests/functional/modules/test_zos_archive_func.py index 8ac4f2e9d..9d92134e5 100644 --- a/tests/functional/modules/test_zos_archive_func.py +++ b/tests/functional/modules/test_zos_archive_func.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2020, 2022 +# Copyright (c) IBM Corporation 2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -344,10 +344,9 @@ def test_uss_archive_remove_targets(ansible_zos_module, format): ] ) @pytest.mark.parametrize( - "record_length", [80, 120, 1024] + "record_length", [80, 120] ) @pytest.mark.parametrize( - # "record_format", ["FB", "VB", "FBA", "VBA", "U"], "record_format", ["FB", "VB",], ) def test_mvs_archive_single_dataset(ansible_zos_module, format, data_set, record_length, record_format): @@ -417,10 +416,9 @@ def test_mvs_archive_single_dataset(ansible_zos_module, format, data_set, record ] ) @pytest.mark.parametrize( - "record_length", [80, 120, 1024] + "record_length", [80, 120] ) @pytest.mark.parametrize( - # "record_format", ["FB", "VB", "FBA", "VBA", "U"], "record_format", ["FB", "VB",], ) def test_mvs_archive_single_dataset_use_adrdssu(ansible_zos_module, format, data_set, record_length, record_format): @@ -492,10 +490,7 @@ def test_mvs_archive_single_dataset_use_adrdssu(ansible_zos_module, format, data @pytest.mark.parametrize( "record_length", [80], ) -@pytest.mark.parametrize( - "record_format", ["FB", "VB",], -) -def test_mvs_archive_single_data_set_remove_target(ansible_zos_module, format, data_set, record_length, record_format): +def test_mvs_archive_single_data_set_remove_target(ansible_zos_module, format, data_set, record_length): try: hosts = ansible_zos_module # Clean env @@ -507,7 +502,7 @@ def test_mvs_archive_single_data_set_remove_target(ansible_zos_module, format, d type=data_set.get("dstype"), state="present", record_length=record_length, - record_format=record_format, + record_format="FB", replace=True, ) # Create members if needed diff --git a/tests/functional/modules/test_zos_unarchive_func.py b/tests/functional/modules/test_zos_unarchive_func.py index a4bf5e007..831724f21 100644 --- a/tests/functional/modules/test_zos_unarchive_func.py +++ b/tests/functional/modules/test_zos_unarchive_func.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2020, 2022 +# Copyright (c) IBM Corporation 2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -415,7 +415,7 @@ def test_mvs_unarchive_single_data_set(ansible_zos_module, format, data_set, rec ] ) @pytest.mark.parametrize( - "record_length", [80, 120, 1024] + "record_length", [80, 120] ) @pytest.mark.parametrize( "record_format", ["FB", "VB",], @@ -902,7 +902,7 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_force(ansible_zos_module, f ] ) @pytest.mark.parametrize( - "record_length", [80, 120, 1024] + "record_length", [80, 120] ) @pytest.mark.parametrize( "record_format", ["FB", "VB",], @@ -986,3 +986,27 @@ def test_mvs_unarchive_single_data_set_remote_src(ansible_zos_module, format, da hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") tmp_folder.cleanup() + +def test_mvs_unarchive_fail_copy_remote_src(ansible_zos_module): + try: + hosts = ansible_zos_module + tmp_folder = tempfile.TemporaryDirectory(prefix="tmpfetch") + # False path + source_path = "/tmp/OMVSADM.NULL" + + format_dict = dict(name='terse') + format_dict["format_options"] = dict(use_adrdssu=True) + + # Unarchive action + unarchive_result = hosts.all.zos_unarchive( + src=source_path, + format=format_dict, + remote_src=False, + ) + + for result in unarchive_result.contacted.values(): + assert result.get("changed") is False + assert result.get("failed", False) is True + print(result) + finally: + tmp_folder.cleanup() From 9d6704daa5ab73623d63d84a19f906cae01d5db1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Tue, 29 Aug 2023 10:19:10 -0600 Subject: [PATCH 168/495] Enhancement, improve load module and program object support in zos_copy (#804) * Add is_executable as option * Changes lines on zos_copy to cover as many cases as possible * Delete unecesary print function and solve issue of definition * Add correct dataset type and record created * Ignore the sequetial that create errors * Delete unnrelated cases of executable copy * Cover cases of datasets memebers and USS * Remove the SEQ not supported and support all USS exe files * Test case to cover USS copy, delte unecesary print and add fragment * Solve long line and Unexpected spaces * Solve long line and Unexpected spaces * Remove other solution of bugfix * Solve details in zo * Remove is_ * Add test case to ensure behaviour for uss cases * Solve references of jcl calls * Change description option and test * Remove line in test module and add case of executable for is_compatible function * Solve identation problems * Solve ds backup assignation * Change requesteds * Remove white spaces * Add assignation * Change documents and ensrues proper work with objects in members already created * Remove spaces * Correct conditional * Check the size of correct way * Change corrections --------- Co-authored-by: Demetri <dimatos@gmail.com> --- ...load_module_and_program_object_support.yml | 6 + plugins/action/zos_copy.py | 1 + plugins/module_utils/copy.py | 2 +- plugins/modules/zos_copy.py | 143 ++++++++++++---- .../functional/modules/test_zos_copy_func.py | 154 +++++++++++++++--- 5 files changed, 253 insertions(+), 53 deletions(-) create mode 100644 changelogs/fragments/804-improved_load_module_and_program_object_support.yml diff --git a/changelogs/fragments/804-improved_load_module_and_program_object_support.yml b/changelogs/fragments/804-improved_load_module_and_program_object_support.yml new file mode 100644 index 000000000..07379c1e3 --- /dev/null +++ b/changelogs/fragments/804-improved_load_module_and_program_object_support.yml @@ -0,0 +1,6 @@ +minor_changes: +- zos_copy - includes a new option `executable` that enables copying of executables such + as load modules or program objects to both USS and partitioned data sets. When + the `dest` option contains a non-existent data set, `zos_copy` will create a data set with + the appropriate attributes for an executable. + (https://github.com/ansible-collections/ibm_zos_core/pull/804) \ No newline at end of file diff --git a/plugins/action/zos_copy.py b/plugins/action/zos_copy.py index b557e8605..c6273132c 100644 --- a/plugins/action/zos_copy.py +++ b/plugins/action/zos_copy.py @@ -59,6 +59,7 @@ def run(self, tmp=None, task_vars=None): local_follow = _process_boolean(task_args.get('local_follow'), default=False) remote_src = _process_boolean(task_args.get('remote_src'), default=False) is_binary = _process_boolean(task_args.get('is_binary'), default=False) + executable = _process_boolean(task_args.get('executable'), default=False) ignore_sftp_stderr = _process_boolean(task_args.get("ignore_sftp_stderr"), default=False) backup_name = task_args.get("backup_name", None) encoding = task_args.get("encoding", None) diff --git a/plugins/module_utils/copy.py b/plugins/module_utils/copy.py index b4ebaacc7..7edd8a49c 100644 --- a/plugins/module_utils/copy.py +++ b/plugins/module_utils/copy.py @@ -1,4 +1,4 @@ -# Copyright (c) IBM Corporation 2020 +# Copyright (c) IBM Corporation 2019-2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/plugins/modules/zos_copy.py b/plugins/modules/zos_copy.py index 6b5e8ab7f..c50fe8c64 100644 --- a/plugins/modules/zos_copy.py +++ b/plugins/modules/zos_copy.py @@ -77,12 +77,13 @@ - If C(dest) is a nonexistent USS file, it will be created. - If C(dest) is a nonexistent data set, it will be created following the process outlined here and in the C(volume) option. - - If C(dest) is a nonexistent data set, the attributes assigned will depend - on the type of C(src). If C(src) is a USS file, C(dest) will have a - Fixed Block (FB) record format and the remaining attributes will be computed. - If C(src) is binary, C(dest) will have a Fixed Block (FB) record format - with a record length of 80, block size of 32760, and the remaining - attributes will be computed. + - If C(dest) is a nonexistent data set, the attributes assigned will depend on the type of + C(src). If C(src) is a USS file, C(dest) will have a Fixed Block (FB) record format and the + remaining attributes will be computed. If I(is_binary=true), C(dest) will have a Fixed Block + (FB) record format with a record length of 80, block size of 32760, and the remaining + attributes will be computed. If I(executable=true),C(dest) will have an Undefined (U) record + format with a record length of 0, block size of 32760, and the remaining attributes will be + computed. - When C(dest) is a data set, precedence rules apply. If C(dest_data_set) is set, this will take precedence over an existing data set. If C(dest) is an empty data set, the empty data set will be written with the @@ -172,6 +173,20 @@ type: bool default: false required: false + executable: + description: + - If set to C(true), indicates that the file or library to be copied is an executable. + - If the C(src) executable has an alias, the alias information is also copied. If the + C(dest) is Unix, the alias is not visible in Unix, even though the information is there and + will be visible if copied to a library. + - If I(executable=true), and C(dest) is a data set, it must be a PDS or PDSE (library). + - If C(dest) is a nonexistent data set, the library attributes assigned will be + Undefined (U) record format with a record length of 0, block size of 32760 and the + remaining attributes will be computed. + - If C(dest) is a file, execute permission for the user will be added to the file (``u+x``). + type: bool + default: false + required: false local_follow: description: - This flag indicates that any existing filesystem links in the source tree @@ -386,6 +401,11 @@ transfer protocol; Co:Z SFTP is not supported. In the case of Co:z SFTP, you can exempt the Ansible userid on z/OS from using Co:Z thus falling back to using standard SFTP. + - Beginning in version 1.8.x, zos_copy will no longer attempt to autocorrect a copy of a data type member + into a PDSE that contains program objects. You can control this behavior using module option + executable that will signify an executable is being copied into a PDSE with other + executables. Mixing data type members with program objects will be responded with a + (FSUM8976,./zos_copy.html) error. seealso: - module: zos_fetch - module: zos_data_set @@ -553,6 +573,13 @@ space_type: K record_format: VB record_length: 150 + +- name: Copy a Program Object on remote system to a new PDSE member MYCOBOL. + zos_copy: + src: HLQ.COBOLSRC.PDSE(TESTPGM) + dest: HLQ.NEW.PDSE(MYCOBOL) + remote_src: true + executable: true """ RETURN = r""" @@ -750,6 +777,7 @@ def __init__( self, module, is_binary=False, + executable=False, backup_name=None ): """Utility class to handle copying data between two targets @@ -761,11 +789,14 @@ def __init__( Keyword Arguments: is_binary {bool} -- Whether the file or data set to be copied contains binary data + executable {bool} -- Whether the file or data set to be copied + is executable backup_name {str} -- The USS path or data set name of destination backup """ self.module = module self.is_binary = is_binary + self.executable = executable self.backup_name = backup_name def run_command(self, cmd, **kwargs): @@ -1037,6 +1068,7 @@ def __init__( self, module, is_binary=False, + executable=False, common_file_args=None, backup_name=None, ): @@ -1054,7 +1086,7 @@ def __init__( backup_name {str} -- The USS path or data set name of destination backup """ super().__init__( - module, is_binary=is_binary, backup_name=backup_name + module, is_binary=is_binary, executable=executable, backup_name=backup_name ) self.common_file_args = common_file_args @@ -1089,6 +1121,9 @@ def copy_to_uss( self._mvs_copy_to_uss( src, dest, src_ds_type, src_member, member_name=member_name ) + if self.executable: + status = os.stat(dest) + os.chmod(dest, status.st_mode | stat.S_IEXEC) else: norm_dest = os.path.normpath(dest) dest_parent_dir, tail = os.path.split(norm_dest) @@ -1157,6 +1192,9 @@ def _copy_to_file(self, src, dest, conv_path, temp_path): copy.copy_uss2uss_binary(new_src, dest) else: shutil.copy(new_src, dest) + if self.executable: + status = os.stat(dest) + os.chmod(dest, status.st_mode | stat.S_IEXEC) except OSError as err: raise CopyOperationError( msg="Destination {0} is not writable".format(dest), @@ -1331,9 +1369,16 @@ def _mvs_copy_to_uss( os.mkdir(dest) except FileExistsError: pass + opts = dict() + if self.executable: + opts["options"] = "-IX" + try: if src_member or src_ds_type in data_set.DataSet.MVS_SEQ: - response = datasets._copy(src, dest) + if self.executable: + response = datasets._copy(src, dest, None, **opts) + else: + response = datasets._copy(src, dest) if response.rc != 0: raise CopyOperationError( msg="Error while copying source {0} to {1}".format(src, dest), @@ -1352,6 +1397,7 @@ def __init__( self, module, is_binary=False, + executable=False, backup_name=None ): """ Utility class to handle copying to partitioned data sets or @@ -1369,6 +1415,7 @@ def __init__( super().__init__( module, is_binary=is_binary, + executable=executable, backup_name=backup_name ) @@ -1404,6 +1451,7 @@ def copy_to_pdse( dest_members = [] if src_ds_type == "USS": + if os.path.isfile(new_src): path = os.path.dirname(new_src) files = [os.path.basename(new_src)] @@ -1411,7 +1459,7 @@ def copy_to_pdse( path, dirs, files = next(os.walk(new_src)) src_members = [ - os.path.normpath("{0}/{1}".format(path, file)) if self.is_binary + os.path.normpath("{0}/{1}".format(path, file)) if (self.is_binary or self.executable) else normalize_line_endings("{0}/{1}".format(path, file), encoding) for file in files ] @@ -1493,21 +1541,12 @@ def copy_to_member( if self.is_binary: opts["options"] = "-B" + if self.executable: + opts["options"] = "-IX" + response = datasets._copy(src, dest, None, **opts) rc, out, err = response.rc, response.stdout_response, response.stderr_response - if rc != 0: - # ***************************************************************** - # An error occurs while attempting to write a data set member to a - # PDSE containing program object members, a PDSE cannot contain - # both program object members and data members. This can be - # resolved by copying the program object with a "-X" flag. - # ***************************************************************** - if ("FSUM8976" in err and "EDC5091I" in err) or ("FSUM8976" in out and "EDC5091I" in out): - opts["options"] = "-X" - response = datasets._copy(src, dest, None, **opts) - rc, out, err = response.rc, response.stdout_response, response.stderr_response - return dict( rc=rc, out=out, @@ -1710,7 +1749,8 @@ def is_compatible( copy_member, src_member, is_src_dir, - is_src_inline + is_src_inline, + executable ): """Determine whether the src and dest are compatible and src can be copied to dest. @@ -1722,6 +1762,7 @@ def is_compatible( src_member {bool} -- Whether src is a data set member. is_src_dir {bool} -- Whether the src is a USS directory. is_src_inline {bool} -- Whether the src comes from inline content. + executable {bool} -- Whether the src is a executable to be copied. Returns: {bool} -- Whether src can be copied to dest. @@ -1733,6 +1774,14 @@ def is_compatible( if dest_type is None: return True + # ******************************************************************** + # If source or destination is a sequential data set and executable as true + # is incompatible to execute the copy. + # ******************************************************************** + if executable: + if src_type in data_set.DataSet.MVS_SEQ or dest_type in data_set.DataSet.MVS_SEQ: + return False + # ******************************************************************** # If source is a sequential data set, then destination must be # partitioned data set member, other sequential data sets or USS files. @@ -1968,6 +2017,7 @@ def allocate_destination_data_set( dest_exists, force, is_binary, + executable, dest_data_set=None, volume=None ): @@ -1983,6 +2033,7 @@ def allocate_destination_data_set( dest_exists (bool) -- Whether the destination data set already exists. force (bool) -- Whether to replace an existent data set. is_binary (bool) -- Whether the data set will contain binary data. + executable (bool) -- Whether the data to copy is an executable dataset or file. dest_data_set (dict, optional) -- Parameters containing a full definition of the new data set; they will take precedence over any other allocation logic. volume (str, optional) -- Volume where the data set should be allocated into. @@ -2007,6 +2058,7 @@ def allocate_destination_data_set( return False, dest_params # Giving more priority to the parameters given by the user. + # Cover case the user set executable to true to create dataset valid. if dest_data_set: dest_params = dest_data_set dest_params["name"] = dest @@ -2033,33 +2085,59 @@ def allocate_destination_data_set( elif dest_ds_type in data_set.DataSet.MVS_PARTITIONED and not dest_exists: # Taking the src as model if it's also a PDSE. if src_ds_type in data_set.DataSet.MVS_PARTITIONED: - data_set.DataSet.allocate_model_data_set(ds_name=dest, model=src_name, vol=volume) + if executable: + src_attributes = datasets.listing(src_name)[0] + size = int(src_attributes.total_space) + record_format = "U" + record_length = 0 + + dest_params = get_data_set_attributes( + dest, + size, + is_binary, + record_format=record_format, + record_length=record_length, + type="LIBRARY", + volume=volume + ) + data_set.DataSet.ensure_present(replace=force, **dest_params) + else: + data_set.DataSet.allocate_model_data_set(ds_name=dest, model=src_name, vol=volume) elif src_ds_type in data_set.DataSet.MVS_SEQ: src_attributes = datasets.listing(src_name)[0] # The size returned by listing is in bytes. size = int(src_attributes.total_space) record_format = src_attributes.recfm record_length = int(src_attributes.lrecl) - - dest_params = get_data_set_attributes(dest, size, is_binary, record_format=record_format, record_length=record_length, type="PDSE", volume=volume) + dest_params = get_data_set_attributes(dest, size, is_binary, record_format=record_format, record_length=record_length, type="PDSE", + volume=volume) data_set.DataSet.ensure_present(replace=force, **dest_params) elif src_ds_type == "USS": if os.path.isfile(src): # This is almost the same as allocating a sequential dataset. size = os.stat(src).st_size record_format = record_length = None + type_ds = "PDSE" - if not is_binary: + if is_binary: + record_format = "FB" + record_length = 80 + else: record_format = "FB" record_length = get_file_record_length(src) + if executable: + record_format = "U" + record_length = 0 + type_ds = "LIBRARY" + dest_params = get_data_set_attributes( dest, size, is_binary, record_format=record_format, record_length=record_length, - type="PDSE", + type=type_ds, volume=volume ) else: @@ -2182,6 +2260,7 @@ def run_module(module, arg_def): dest = module.params.get('dest') remote_src = module.params.get('remote_src') is_binary = module.params.get('is_binary') + executable = module.params.get('executable') backup = module.params.get('backup') backup_name = module.params.get('backup_name') validate = module.params.get('validate') @@ -2362,7 +2441,8 @@ def run_module(module, arg_def): copy_member, src_member, is_src_dir, - (src_ds_type == "USS" and src is None) + (src_ds_type == "USS" and src is None), + executable ): module.fail_json( msg="Incompatible target type '{0}' for source '{1}'".format( @@ -2465,6 +2545,7 @@ def run_module(module, arg_def): dest_exists, force, is_binary, + executable, dest_data_set=dest_data_set, volume=volume ) @@ -2492,6 +2573,7 @@ def run_module(module, arg_def): copy_handler = CopyHandler( module, is_binary=is_binary, + executable=executable, backup_name=backup_name ) @@ -2510,6 +2592,7 @@ def run_module(module, arg_def): uss_copy_handler = USSCopyHandler( module, is_binary=is_binary, + executable=executable, common_file_args=dict(mode=mode, group=group, owner=owner), backup_name=backup_name, ) @@ -2573,7 +2656,7 @@ def run_module(module, arg_def): temp_path = os.path.join(temp_path, os.path.basename(src)) pdse_copy_handler = PDSECopyHandler( - module, is_binary=is_binary, backup_name=backup_name + module, is_binary=is_binary, executable=executable, backup_name=backup_name ) pdse_copy_handler.copy_to_pdse( @@ -2618,6 +2701,7 @@ def main(): src=dict(type='path'), dest=dict(required=True, type='str'), is_binary=dict(type='bool', default=False), + executable=dict(type='bool', default=False), encoding=dict( type='dict', required=False, @@ -2718,6 +2802,7 @@ def main(): src=dict(arg_type='data_set_or_path', required=False), dest=dict(arg_type='data_set_or_path', required=True), is_binary=dict(arg_type='bool', required=False, default=False), + executable=dict(arg_type='bool', required=False, default=False), content=dict(arg_type='str', required=False), backup=dict(arg_type='bool', default=False, required=False), backup_name=dict(arg_type='data_set_or_path', required=False), diff --git a/tests/functional/modules/test_zos_copy_func.py b/tests/functional/modules/test_zos_copy_func.py index 5604527a3..dd0114fae 100644 --- a/tests/functional/modules/test_zos_copy_func.py +++ b/tests/functional/modules/test_zos_copy_func.py @@ -150,6 +150,23 @@ """ +hello_world = """#include <stdio.h> +int main() +{ + printf("Hello World!"); + return 0; +} +""" + +call_c_hello_jcl="""//PDSELOCK JOB MSGCLASS=A,MSGLEVEL=(1,1),NOTIFY=&SYSUID,REGION=0M +//LOCKMEM EXEC PGM=BPXBATCH +//STDPARM DD * +SH /tmp/c/hello_world +//STDIN DD DUMMY +//STDOUT DD SYSOUT=* +//STDERR DD SYSOUT=* +//""" + c_pgm="""#include <stdio.h> #include <stdlib.h> #include <string.h> @@ -298,8 +315,6 @@ def link_loadlib_from_cobol(hosts, ds_name, cobol_pds): dest="/tmp/link.jcl", force=True, ) - for res in cp_res.contacted.values(): - print("copy link program result {0}".format(res)) # Link the temp ds with ds_name job_result = hosts.all.zos_job_submit( src="/tmp/link.jcl", @@ -307,7 +322,7 @@ def link_loadlib_from_cobol(hosts, ds_name, cobol_pds): wait_time_s=60 ) for result in job_result.contacted.values(): - print("link job submit result {0}".format(result)) + #print("link job submit result {0}".format(result)) rc = result.get("jobs")[0].get("ret_code").get("code") finally: hosts.all.file(path=temp_jcl, state="absent") @@ -1551,8 +1566,7 @@ def test_copy_dest_lock(ansible_zos_module): results = hosts.all.zos_copy( src = DATASET_2 + "({0})".format(MEMBER_1), dest = DATASET_1 + "({0})".format(MEMBER_1), - remote_src = True, - force = True + remote_src = True ) for result in results.contacted.values(): print(result) @@ -2493,13 +2507,15 @@ def test_copy_pds_to_existing_pds(ansible_zos_module, args): @pytest.mark.pdse -def test_copy_pds_loadlib_member_to_pds_loadlib_member(ansible_zos_module,): +@pytest.mark.parametrize("is_created", ["true", "false"]) +def test_copy_pds_loadlib_member_to_pds_loadlib_member(ansible_zos_module, is_created): hosts = ansible_zos_module # The volume for this dataset should use a system symbol. # This dataset and member should be available on any z/OS system. src = "USER.LOAD.SRC" dest = "USER.LOAD.DEST" cobol_pds = "USER.COBOL.SRC" + uss_dest = "/tmp/HELLO" try: hosts.all.zos_data_set( name=src, @@ -2512,18 +2528,18 @@ def test_copy_pds_loadlib_member_to_pds_loadlib_member(ansible_zos_module,): space_type="M", replace=True ) - - hosts.all.zos_data_set( - name=dest, - state="present", - type="pdse", - record_format="U", - record_length=0, - block_size=32760, - space_primary=2, - space_type="M", - replace=True - ) + if is_created: + hosts.all.zos_data_set( + name=dest, + state="present", + type="pdse", + record_format="U", + record_length=0, + block_size=32760, + space_primary=2, + space_type="M", + replace=True + ) hosts.all.zos_data_set( name=cobol_pds, @@ -2539,7 +2555,7 @@ def test_copy_pds_loadlib_member_to_pds_loadlib_member(ansible_zos_module,): cobol_pds = "{0}({1})".format(cobol_pds, member) rc = hosts.all.zos_copy( content=COBOL_SRC, - dest=cobol_pds, + dest=cobol_pds ) dest_name = "{0}({1})".format(dest, member) src_name = "{0}({1})".format(src, member) @@ -2561,11 +2577,12 @@ def test_copy_pds_loadlib_member_to_pds_loadlib_member(ansible_zos_module,): ) for result in exec_res.contacted.values(): assert result.get("rc") == 0 - + # Execute the copy from pdse to another with executable and validate it copy_res = hosts.all.zos_copy( - src="{0}({1})".format(src, member), - dest="{0}({1})".format(dest, "MEM1"), - remote_src=True) + src="{0}({1})".format(src, member), + dest="{0}({1})".format(dest, "MEM1"), + remote_src=True, + executable=True) verify_copy = hosts.all.shell( cmd="mls {0}".format(dest), @@ -2583,11 +2600,102 @@ def test_copy_pds_loadlib_member_to_pds_loadlib_member(ansible_zos_module,): assert stdout is not None # number of members assert len(stdout.splitlines()) == 2 + # Copy to a uss file executable from the library execute and validate + copy_uss_res = hosts.all.zos_copy( + src="{0}({1})".format(dest, "MEM1"), + dest=uss_dest, + remote_src=True, + executable=True, + force=True) + + for result in copy_uss_res.contacted.values(): + assert result.get("msg") is None + assert result.get("changed") is True + + verify_exe_uss = hosts.all.shell( + cmd="{0}".format(uss_dest) + ) + + for v_cp_u in verify_exe_uss.contacted.values(): + assert v_cp_u.get("rc") == 0 + stdout = v_cp_u.get("stdout") + assert "SIMPLE HELLO WORLD" in str(stdout) finally: hosts.all.zos_data_set(name=dest, state="absent") hosts.all.zos_data_set(name=src, state="absent") hosts.all.zos_data_set(name=cobol_pds, state="absent") + hosts.all.file(name=uss_dest, state="absent") + + +@pytest.mark.pdse +@pytest.mark.uss +@pytest.mark.parametrize("is_created", ["true", "false"]) +def test_copy_executables_uss_to_member(ansible_zos_module, is_created): + hosts= ansible_zos_module + src= "/tmp/c/hello_world.c" + src_jcl_call= "/tmp/c/call_hw_pgm.jcl" + dest_uss="/tmp/c/hello_world_2" + dest = "USER.LOAD.DEST" + member = "HELLOSRC" + try: + hosts.all.zos_copy(content=hello_world, dest=src, force=True) + hosts.all.zos_copy(content=call_c_hello_jcl, dest=src_jcl_call, force=True) + hosts.all.shell(cmd="xlc -o hello_world hello_world.c", chdir="/tmp/c/") + hosts.all.shell(cmd="submit {0}".format(src_jcl_call)) + verify_exe_src = hosts.all.shell(cmd="/tmp/c/hello_world") + for res in verify_exe_src.contacted.values(): + assert res.get("rc") == 0 + stdout = res.get("stdout") + assert "Hello World" in str(stdout) + copy_uss_res = hosts.all.zos_copy( + src="/tmp/c/hello_world", + dest=dest_uss, + remote_src=True, + executable=True, + force=True + ) + verify_exe_dst = hosts.all.shell(cmd="/tmp/c/hello_world_2") + for result in copy_uss_res.contacted.values(): + assert result.get("msg") is None + assert result.get("changed") is True + for res in verify_exe_dst.contacted.values(): + assert res.get("rc") == 0 + stdout = res.get("stdout") + assert "Hello World" in str(stdout) + if is_created: + hosts.all.zos_data_set( + name=dest, + state="present", + type="pdse", + record_format="U", + record_length=0, + block_size=32760, + space_primary=2, + space_type="M", + replace=True + ) + copy_uss_to_mvs_res = hosts.all.zos_copy( + src="/tmp/c/hello_world", + dest="{0}({1})".format(dest, member), + remote_src=True, + executable=True, + force=True + ) + cmd = "mvscmd --pgm={0} --steplib={1} --sysprint=* --stderr=* --stdout=*" + exec_res = hosts.all.shell( + cmd=cmd.format(member, dest) + ) + for result in copy_uss_to_mvs_res.contacted.values(): + assert result.get("msg") is None + assert result.get("changed") is True + for res in exec_res.contacted.values(): + assert res.get("rc") == 0 + stdout = res.get("stdout") + assert "Hello World" in str(stdout) + finally: + hosts.all.shell(cmd='rm -r /tmp/c') + hosts.all.zos_data_set(name=dest, state="absent") @pytest.mark.pdse From 21b5008da8e041cfb42ab8efef2aac9621c0c35a Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Tue, 29 Aug 2023 15:45:48 -0600 Subject: [PATCH 169/495] Updated mounts.evn with latests zoau mounts --- scripts/mounts.env | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/scripts/mounts.env b/scripts/mounts.env index 876876cd3..aa325383c 100644 --- a/scripts/mounts.env +++ b/scripts/mounts.env @@ -39,7 +39,9 @@ zoau_mount_list_str="1:1.2.0:/zoau/v1.2.0:IMSTESTU.ZOAU.V120.ZFS "\ "11:1.2.1:/zoau/v1.2.1:IMSTESTU.ZOAU.V121.ZFS "\ "12:1.2.2:/zoau/v1.2.2:IMSTESTU.ZOAU.V122.ZFS "\ "13:1.2.3:/zoau/v1.2.3:IMSTESTU.ZOAU.V123.ZFS "\ -"14:latest:/zoau/latest:IMSTESTU.ZOAU.LATEST.ZFS " +"14:1.2.4:/zoau/v1.2.4:IMSTESTU.ZOAU.V124.ZFS "\ +"15:1.2.5:/zoau/v1.2.5:IMSTESTU.ZOAU.V125.ZFS "\ +"16:latest:/zoau/latest:IMSTESTU.ZOAU.LATEST.ZFS " # ------------------------------------------------------------------------------ # PYTHON MOUNT TABLE From 3a8c32e36487269e0fd020d6966ebbcced8c212f Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Tue, 29 Aug 2023 15:52:27 -0600 Subject: [PATCH 170/495] Added changelog --- changelogs/fragments/959-ac-tool-update-mounts.yml | 3 +++ 1 file changed, 3 insertions(+) create mode 100644 changelogs/fragments/959-ac-tool-update-mounts.yml diff --git a/changelogs/fragments/959-ac-tool-update-mounts.yml b/changelogs/fragments/959-ac-tool-update-mounts.yml new file mode 100644 index 000000000..4eb90122d --- /dev/null +++ b/changelogs/fragments/959-ac-tool-update-mounts.yml @@ -0,0 +1,3 @@ +trivial: +- ac - Add ZOAU 1.2.4 and 1.2.5 mounts. + (https://github.com/ansible-collections/ibm_zos_core/pull/959) \ No newline at end of file From 8506b623d0039a7dd03787637a15fdf69c96a41d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Wed, 30 Aug 2023 13:16:55 -0600 Subject: [PATCH 171/495] Bugfix/815/zos job submit truncates final character of input (#952) * Add first version of the test * Check it * Remove line * Add fragment --- ...os-job-submit-truncate-final-character.yml | 4 ++++ .../modules/test_zos_job_submit_func.py | 22 +++++++++++++++++++ 2 files changed, 26 insertions(+) create mode 100644 changelogs/fragments/952-zos-job-submit-truncate-final-character.yml diff --git a/changelogs/fragments/952-zos-job-submit-truncate-final-character.yml b/changelogs/fragments/952-zos-job-submit-truncate-final-character.yml new file mode 100644 index 000000000..b9413e31b --- /dev/null +++ b/changelogs/fragments/952-zos-job-submit-truncate-final-character.yml @@ -0,0 +1,4 @@ +bugfix: +- zos_job_submit: The last line of the jcl was missing in the input. + Fix now ensures the presence of the full input in job_submit. + (https://github.com/ansible-collections/ibm_zos_core/pull/952) \ No newline at end of file diff --git a/tests/functional/modules/test_zos_job_submit_func.py b/tests/functional/modules/test_zos_job_submit_func.py index b7b1ec5f0..b93b448c7 100644 --- a/tests/functional/modules/test_zos_job_submit_func.py +++ b/tests/functional/modules/test_zos_job_submit_func.py @@ -254,6 +254,10 @@ // """ +JCL_FULL_INPUT="""//HLQ0 JOB MSGLEVEL=(1,1), +// MSGCLASS=A,CLASS=A,NOTIFY=&SYSUID +//STEP1 EXEC PGM=BPXBATCH,PARM='PGM /bin/sleep 5'""" + TEMP_PATH = "/tmp/jcl" DATA_SET_NAME = "imstestl.ims1.test05" DATA_SET_NAME_SPECIAL_CHARS = "imstestl.im@1.xxx05" @@ -612,6 +616,24 @@ def test_job_submit_jinja_template(ansible_zos_module, args): os.remove(tmp_file.name) +def test_job_submit_full_input(ansible_zos_module): + try: + hosts = ansible_zos_module + hosts.all.file(path=TEMP_PATH, state="directory") + hosts.all.shell( + cmd="echo {0} > {1}/SAMPLE".format(quote(JCL_FULL_INPUT), TEMP_PATH) + ) + results = hosts.all.zos_job_submit( + src="{0}/SAMPLE".format(TEMP_PATH), location="USS", wait=True, volume=None + ) + for result in results.contacted.values(): + print(result) + assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" + assert result.get("jobs")[0].get("ret_code").get("code") == 0 + assert result.get("changed") is True + finally: + hosts.all.file(path=TEMP_PATH, state="absent") + def test_negative_job_submit_local_jcl_no_dsn(ansible_zos_module): tmp_file = tempfile.NamedTemporaryFile(delete=True) with open(tmp_file.name, "w") as f: From 2d92df5b8366007de1b8dea15bd4db0223d67035 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Mon, 4 Sep 2023 14:59:00 -0600 Subject: [PATCH 172/495] =?UTF-8?q?Add=20fix=20for=20change=20copy=20built?= =?UTF-8?q?-in=20for=20zos=5Fcopy=20and=20remove=20remain=20files=E2=80=A6?= =?UTF-8?q?=20(#951)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Add fix for change copy built-in for zos_copy and remove remain files in ansible/temp * Add fragment * Change spaces * Remove deletes * Remove temp files * Change fragment and update if zos_copy fails --- ...or-zos-copy-and-remove-temporary-files.yml | 7 +++ plugins/action/zos_job_submit.py | 48 +++++++++++++------ 2 files changed, 40 insertions(+), 15 deletions(-) create mode 100644 changelogs/fragments/951-Change-copy-for-zos-copy-and-remove-temporary-files.yml diff --git a/changelogs/fragments/951-Change-copy-for-zos-copy-and-remove-temporary-files.yml b/changelogs/fragments/951-Change-copy-for-zos-copy-and-remove-temporary-files.yml new file mode 100644 index 000000000..c90921c9f --- /dev/null +++ b/changelogs/fragments/951-Change-copy-for-zos-copy-and-remove-temporary-files.yml @@ -0,0 +1,7 @@ +bugfixes: + - zos_job_submit: Temporary files were created in tmp directory. + Fix now ensures the deletion of files every time the module run. + (https://github.com/ansible-collections/ibm_zos_core/pull/951) +minor_changes: + - zos_job_submit: Change action plugin call from copy to zos_copy. + (https://github.com/ansible-collections/ibm_zos_core/pull/951) \ No newline at end of file diff --git a/plugins/action/zos_job_submit.py b/plugins/action/zos_job_submit.py index 715ce57ed..db3fb1fd7 100644 --- a/plugins/action/zos_job_submit.py +++ b/plugins/action/zos_job_submit.py @@ -15,12 +15,18 @@ from ansible.plugins.action import ActionBase from ansible.errors import AnsibleError, AnsibleFileNotFound +from ansible.utils.display import Display # from ansible.module_utils._text import to_bytes, to_text from ansible.module_utils.common.text.converters import to_bytes, to_text from ansible.module_utils.parsing.convert_bool import boolean import os +import copy from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import template +from ansible_collections.ibm.ibm_zos_core.plugins.action.zos_copy import ActionModule as ZosCopyActionModule + + +display = Display() class ActionModule(ActionBase): @@ -148,26 +154,38 @@ def run(self, tmp=None, task_vars=None): src=tmp_src, dest=dest_path, mode="0600", - _original_basename=source_rel, - ) - ) - result.update( - self._execute_module( - module_name="copy", - module_args=copy_module_args, - task_vars=task_vars, + force=True, + remote_src=True, ) ) - result.update( - self._execute_module( - module_name="ibm.ibm_zos_core.zos_job_submit", - module_args=module_args, - task_vars=task_vars, + copy_task = copy.deepcopy(self._task) + copy_task.args = copy_module_args + zos_copy_action_module = ZosCopyActionModule(task=copy_task, + connection=self._connection, + play_context=self._play_context, + loader=self._loader, + templar=self._templar, + shared_loader_obj=self._shared_loader_obj) + result.update(zos_copy_action_module.run(task_vars=task_vars)) + if result.get("msg") is None: + module_args["src"] = dest_path + result.update( + self._execute_module( + module_name="ibm.ibm_zos_core.zos_job_submit", + module_args=module_args, + task_vars=task_vars, + ) ) - ) - + else: + result.update(dict(failed=True)) if rendered_file: os.remove(rendered_file) + if os.path.isfile(tmp_src): + self._connection.exec_command("rm -rf {0}".format(tmp_src)) + if os.path.isfile(dest_file): + self._connection.exec_command("rm -rf {0}".format(dest_file)) + if os.path.isfile(source_full): + self._connection.exec_command("rm -rf {0}".format(source_full)) else: result.update( From f5ec02009f4caa8c90d1d7fb66ee4ddbacbb5afe Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Fri, 8 Sep 2023 18:23:06 -0600 Subject: [PATCH 173/495] Corrected changelog tag from bugfix to bugfixes (#963) --- .../fragments/916-zos-lineinfile-does-not-behave-community.yml | 2 +- .../fragments/918-zos-operator-response-come-back-truncate.yaml | 2 +- .../fragments/952-zos-job-submit-truncate-final-character.yml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/changelogs/fragments/916-zos-lineinfile-does-not-behave-community.yml b/changelogs/fragments/916-zos-lineinfile-does-not-behave-community.yml index c1639c769..9b13df055 100644 --- a/changelogs/fragments/916-zos-lineinfile-does-not-behave-community.yml +++ b/changelogs/fragments/916-zos-lineinfile-does-not-behave-community.yml @@ -1,4 +1,4 @@ -bugfix: +bugfixes: - zos_lineinfile - A duplicate entry was made even if line was already present in the target file. Fix now prevents a duplicate entry if the line already exists in the target file. (https://github.com/ansible-collections/ibm_zos_core/pull/916) \ No newline at end of file diff --git a/changelogs/fragments/918-zos-operator-response-come-back-truncate.yaml b/changelogs/fragments/918-zos-operator-response-come-back-truncate.yaml index ef5ae8b36..58900fc01 100644 --- a/changelogs/fragments/918-zos-operator-response-come-back-truncate.yaml +++ b/changelogs/fragments/918-zos-operator-response-come-back-truncate.yaml @@ -1,4 +1,4 @@ -bugfix: +bugfixes: - zos_operator: The last line of the operator was missing in the response of the module. Fix now ensures the presence of the full output of the operator. (https://github.com/ansible-collections/ibm_zos_core/pull/918) \ No newline at end of file diff --git a/changelogs/fragments/952-zos-job-submit-truncate-final-character.yml b/changelogs/fragments/952-zos-job-submit-truncate-final-character.yml index b9413e31b..aca865791 100644 --- a/changelogs/fragments/952-zos-job-submit-truncate-final-character.yml +++ b/changelogs/fragments/952-zos-job-submit-truncate-final-character.yml @@ -1,4 +1,4 @@ -bugfix: +bugfixes: - zos_job_submit: The last line of the jcl was missing in the input. Fix now ensures the presence of the full input in job_submit. (https://github.com/ansible-collections/ibm_zos_core/pull/952) \ No newline at end of file From 39b439c204f9adbfcdf4ddc4921f6b41cd1dd9f7 Mon Sep 17 00:00:00 2001 From: ketankelkar <ktnklkr@gmail.com> Date: Tue, 12 Sep 2023 12:04:32 -0700 Subject: [PATCH 174/495] modify get_data_set_attributes function (#964) * modify get_data_set_attributes function to honor incoming param values instead of overwriting in the case of is_binary=True Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add changelog fragment Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> --- ...odify-get_data_set_attributes-function.yml | 3 +++ plugins/modules/zos_copy.py | 22 ++++++++++++++----- 2 files changed, 19 insertions(+), 6 deletions(-) create mode 100644 changelogs/fragments/964-modify-get_data_set_attributes-function.yml diff --git a/changelogs/fragments/964-modify-get_data_set_attributes-function.yml b/changelogs/fragments/964-modify-get_data_set_attributes-function.yml new file mode 100644 index 000000000..da384c77b --- /dev/null +++ b/changelogs/fragments/964-modify-get_data_set_attributes-function.yml @@ -0,0 +1,3 @@ +trivial: +- zos_copy - modify get_data_set_attributes helper function to no longer overwrite caller-defined attributes. + (https://github.com/ansible-collections/ibm_zos_core/pull/964) \ No newline at end of file diff --git a/plugins/modules/zos_copy.py b/plugins/modules/zos_copy.py index c50fe8c64..aabd5447e 100644 --- a/plugins/modules/zos_copy.py +++ b/plugins/modules/zos_copy.py @@ -1613,8 +1613,8 @@ def get_data_set_attributes( name, size, is_binary, - record_format="VB", - record_length=1028, + record_format=None, + record_length=None, type="SEQ", volume=None ): @@ -1649,11 +1649,21 @@ def get_data_set_attributes( space_primary = space_primary + int(math.ceil(space_primary * 0.05)) space_secondary = int(math.ceil(space_primary * 0.10)) - # Overwriting record_format and record_length when the data set has binary data. - if is_binary: - record_format = "FB" - record_length = 80 + # set default value - record_format + if record_format is None: + if is_binary: + record_format = "FB" + else: + record_format = "VB" + + # set default value - record_length + if record_length is None: + if is_binary: + record_length = 80 + else: + record_length = 1028 + # compute block size max_block_size = 32760 if record_format == "FB": # Computing the biggest possible block size that doesn't exceed From 81c1f88ee0dbc7ea119dc4bd6579525c93942c26 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Tue, 12 Sep 2023 14:14:51 -0600 Subject: [PATCH 175/495] Add python 3.11-3 to ac mount tables (#966) * Add python 3.11-3 to mount tables Signed-off-by: Fernando Flores <fernandofloresdev@gmail.com> * Added changelog fragment Signed-off-by: Fernando Flores <fernandofloresdev@gmail.com> --------- Signed-off-by: Fernando Flores <fernandofloresdev@gmail.com> --- changelogs/fragments/966-ac-tool-add-python-311-3.yml | 3 +++ scripts/mounts.env | 6 ++++-- 2 files changed, 7 insertions(+), 2 deletions(-) create mode 100644 changelogs/fragments/966-ac-tool-add-python-311-3.yml diff --git a/changelogs/fragments/966-ac-tool-add-python-311-3.yml b/changelogs/fragments/966-ac-tool-add-python-311-3.yml new file mode 100644 index 000000000..231d3e2be --- /dev/null +++ b/changelogs/fragments/966-ac-tool-add-python-311-3.yml @@ -0,0 +1,3 @@ +trivial: +- ac - Add python 3.11-3 mount table. + (https://github.com/ansible-collections/ibm_zos_core/pull/966) \ No newline at end of file diff --git a/scripts/mounts.env b/scripts/mounts.env index aa325383c..050887102 100644 --- a/scripts/mounts.env +++ b/scripts/mounts.env @@ -58,7 +58,8 @@ python_mount_list_str="1:3.8.2:/allpython/3.8.2/usr/lpp/IBM/cyp/v3r8/pyz:/allpyt "3:3.9:/allpython/3.9/usr/lpp/IBM/cyp/v3r9/pyz:/allpython/3.9:IMSTESTU.PYZ.V39016.ZFS "\ "4:3.10:/allpython/3.10/usr/lpp/IBM/cyp/v3r10/pyz:/allpython/3.10:IMSTESTU.PYZ.V3A09.ZFS "\ "5:3.11:/allpython/3.11/usr/lpp/IBM/cyp/v3r11/pyz:/allpython/3.11:IMSTESTU.PYZ.V3B02.ZFS "\ -"6:3.11-ga:/allpython/3.11-ga/usr/lpp/IBM/cyp/v3r11/pyz:/allpython/3.11-ga:IMSTESTU.PYZ.V311GA.ZFS " +"6:3.11-ga:/allpython/3.11-ga/usr/lpp/IBM/cyp/v3r11/pyz:/allpython/3.11-ga:IMSTESTU.PYZ.V311GA.ZFS "\ +"7:3.11-3:/allpython/3.11-3/usr/lpp/IBM/cyp/v3r11/pyz:/allpython/3.11-3:IMSTESTU.PYZ.V3B03.ZFS " # ------------------------------------------------------------------------------ # PYTHON PATH POINTS @@ -75,4 +76,5 @@ python_path_list_str="1:3.8.2:/allpython/3.8.2/usr/lpp/IBM/cyp/v3r8/pyz "\ "3:3.9:/allpython/3.9/usr/lpp/IBM/cyp/v3r9/pyz "\ "4:3.10:/allpython/3.10/usr/lpp/IBM/cyp/v3r10/pyz "\ "5:3.11:/allpython/3.11/usr/lpp/IBM/cyp/v3r11/pyz "\ -"6:3.11:/allpython/3.11-ga/usr/lpp/IBM/cyp/v3r11/pyz " \ No newline at end of file +"6:3.11:/allpython/3.11-ga/usr/lpp/IBM/cyp/v3r11/pyz "\ +"7:3.11-3:/allpython/3.11-3/usr/lpp/IBM/cyp/v3r11/pyz " \ No newline at end of file From 0ce455c484dccc90ada9582957baa1561743e097 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Wed, 13 Sep 2023 16:36:03 -0600 Subject: [PATCH 176/495] Enhance zos_archive and zos_unarchive test cases (#965) * Added test_lines specific to the record length * Modified test to add characters length to test Signed-off-by: Fernando Flores <fernandofloresdev@gmail.com> * Enhanced zos_unarchive test cases Signed-off-by: Fernando Flores <fernandofloresdev@gmail.com> * Added changelog Signed-off-by: Fernando Flores <fernandofloresdev@gmail.com> * Added data integrity check --------- Signed-off-by: Fernando Flores <fernandofloresdev@gmail.com> --- .../fragments/965-enhance-archive-tests.yml | 5 +++ .../modules/test_zos_archive_func.py | 26 ++++++++------ .../modules/test_zos_unarchive_func.py | 36 +++++++++++++++---- 3 files changed, 50 insertions(+), 17 deletions(-) create mode 100644 changelogs/fragments/965-enhance-archive-tests.yml diff --git a/changelogs/fragments/965-enhance-archive-tests.yml b/changelogs/fragments/965-enhance-archive-tests.yml new file mode 100644 index 000000000..b86bf22bf --- /dev/null +++ b/changelogs/fragments/965-enhance-archive-tests.yml @@ -0,0 +1,5 @@ +minor_changes: + - zos_archive: Enhanced test cases to use test lines the same length of the record length. + (https://github.com/ansible-collections/ibm_zos_core/pull/965) + - zos_unarchive: Enhanced test cases to use test lines the same length of the record length. + (https://github.com/ansible-collections/ibm_zos_core/pull/965) \ No newline at end of file diff --git a/tests/functional/modules/test_zos_archive_func.py b/tests/functional/modules/test_zos_archive_func.py index 9d92134e5..2705a7137 100644 --- a/tests/functional/modules/test_zos_archive_func.py +++ b/tests/functional/modules/test_zos_archive_func.py @@ -347,7 +347,7 @@ def test_uss_archive_remove_targets(ansible_zos_module, format): "record_length", [80, 120] ) @pytest.mark.parametrize( - "record_format", ["FB", "VB",], + "record_format", ["FB", "VB"], ) def test_mvs_archive_single_dataset(ansible_zos_module, format, data_set, record_length, record_format): try: @@ -372,8 +372,12 @@ def test_mvs_archive_single_dataset(ansible_zos_module, format, data_set, record type="member", state="present" ) - # Write some content into src - test_line = "this is a test line" + # Write some content into src the same size of the record, + # need to reduce 4 from V and VB due to RDW + if record_format in ["V", "VB"]: + test_line = "a" * (record_length - 4) + else: + test_line = "a" * record_length for member in data_set.get("members"): if member == "": ds_to_write = f"{data_set.get('name')}" @@ -419,7 +423,7 @@ def test_mvs_archive_single_dataset(ansible_zos_module, format, data_set, record "record_length", [80, 120] ) @pytest.mark.parametrize( - "record_format", ["FB", "VB",], + "record_format", ["FB", "VB"], ) def test_mvs_archive_single_dataset_use_adrdssu(ansible_zos_module, format, data_set, record_length, record_format): try: @@ -444,8 +448,12 @@ def test_mvs_archive_single_dataset_use_adrdssu(ansible_zos_module, format, data type="member", state="present" ) - # Write some content into src - test_line = "this is a test line" + # Write some content into src the same size of the record, + # need to reduce 4 from V and VB due to RDW + if record_format in ["V", "VB"]: + test_line = "a" * (record_length - 4) + else: + test_line = "a" * record_length for member in data_set.get("members"): if member == "": ds_to_write = f"{data_set.get('name')}" @@ -487,10 +495,7 @@ def test_mvs_archive_single_dataset_use_adrdssu(ansible_zos_module, format, data dict(name=TEST_PDS, dstype="PDSE", members=["MEM1", "MEM2", "MEM3"]), ] ) -@pytest.mark.parametrize( - "record_length", [80], -) -def test_mvs_archive_single_data_set_remove_target(ansible_zos_module, format, data_set, record_length): +def test_mvs_archive_single_data_set_remove_target(ansible_zos_module, format, data_set): try: hosts = ansible_zos_module # Clean env @@ -501,7 +506,6 @@ def test_mvs_archive_single_data_set_remove_target(ansible_zos_module, format, d name=data_set.get("name"), type=data_set.get("dstype"), state="present", - record_length=record_length, record_format="FB", replace=True, ) diff --git a/tests/functional/modules/test_zos_unarchive_func.py b/tests/functional/modules/test_zos_unarchive_func.py index 831724f21..46a1e8534 100644 --- a/tests/functional/modules/test_zos_unarchive_func.py +++ b/tests/functional/modules/test_zos_unarchive_func.py @@ -344,8 +344,12 @@ def test_mvs_unarchive_single_data_set(ansible_zos_module, format, data_set, rec type="member", state="present" ) - # Write some content into src - test_line = "this is a test line" + # Write some content into src the same size of the record, + # need to reduce 4 from V and VB due to RDW + if record_format in ["V", "VB"]: + test_line = "a" * (record_length - 4) + else: + test_line = "a" * record_length for member in data_set.get("members"): if member == "": ds_to_write = f"{data_set.get('name')}" @@ -397,6 +401,11 @@ def test_mvs_unarchive_single_data_set(ansible_zos_module, format, data_set, rec cmd_result = hosts.all.shell(cmd = "dls {0}.*".format(HLQ)) for c_result in cmd_result.contacted.values(): assert data_set.get("name") in c_result.get("stdout") + + # Check data integrity after unarchive + cat_result = hosts.all.shell(cmd=f"dcat \"{ds_to_write}\"") + for result in cat_result.contacted.values(): + assert result.get("stdout") == test_line finally: hosts.all.zos_data_set(name=data_set.get("name"), state="absent") hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") @@ -442,8 +451,12 @@ def test_mvs_unarchive_single_data_set_use_adrdssu(ansible_zos_module, format, d type="member", state="present" ) - # Write some content into src - test_line = "this is a test line" + # Write some content into src the same size of the record, + # need to reduce 4 from V and VB due to RDW + if record_format in ["V", "VB"]: + test_line = "a" * (record_length - 4) + else: + test_line = "a" * record_length for member in data_set.get("members"): if member == "": ds_to_write = f"{data_set.get('name')}" @@ -930,8 +943,12 @@ def test_mvs_unarchive_single_data_set_remote_src(ansible_zos_module, format, da type="member", state="present" ) - # Write some content into src - test_line = "this is a test line" + # Write some content into src the same size of the record, + # need to reduce 4 from V and VB due to RDW + if record_format in ["V", "VB"]: + test_line = "a" * (record_length - 4) + else: + test_line = "a" * record_length for member in data_set.get("members"): if member == "": ds_to_write = f"{data_set.get('name')}" @@ -981,6 +998,13 @@ def test_mvs_unarchive_single_data_set_remote_src(ansible_zos_module, format, da cmd_result = hosts.all.shell(cmd = "dls {0}.*".format(HLQ)) for c_result in cmd_result.contacted.values(): assert data_set.get("name") in c_result.get("stdout") + + # Check data integrity after unarchive + cat_result = hosts.all.shell(cmd=f"dcat \"{ds_to_write}\"") + for result in cat_result.contacted.values(): + assert result.get("stdout") == test_line + + finally: hosts.all.shell(cmd="drm {0}*".format(data_set.get("name"))) hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") From e2ad0ee14c9a4132563b262431af0602ff19fe30 Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Thu, 14 Sep 2023 14:56:55 -0400 Subject: [PATCH 177/495] Initial commit to add LIBRARY to choices and docs --- changelogs/fragments/920-zos-copy-add-library-choice.yml | 4 ++++ plugins/modules/zos_copy.py | 3 ++- 2 files changed, 6 insertions(+), 1 deletion(-) create mode 100644 changelogs/fragments/920-zos-copy-add-library-choice.yml diff --git a/changelogs/fragments/920-zos-copy-add-library-choice.yml b/changelogs/fragments/920-zos-copy-add-library-choice.yml new file mode 100644 index 000000000..cb30191c1 --- /dev/null +++ b/changelogs/fragments/920-zos-copy-add-library-choice.yml @@ -0,0 +1,4 @@ +bugfixes: +- zos_copy - Update option limit to include LIBRARY as dest_dataset/suboption value. + Documentation updated to reflect this change. + (https://github.com/ansible-collections/ibm_zos_core/pull/). \ No newline at end of file diff --git a/plugins/modules/zos_copy.py b/plugins/modules/zos_copy.py index aabd5447e..b87845fab 100644 --- a/plugins/modules/zos_copy.py +++ b/plugins/modules/zos_copy.py @@ -283,6 +283,7 @@ - PDSE - MEMBER - BASIC + - LIBRARY space_primary: description: - If the destination I(dest) data set does not exist , this sets the @@ -2741,7 +2742,7 @@ def main(): type=dict( type='str', choices=['BASIC', 'KSDS', 'ESDS', 'RRDS', - 'LDS', 'SEQ', 'PDS', 'PDSE', 'MEMBER'], + 'LDS', 'SEQ', 'PDS', 'PDSE', 'MEMBER', 'LIBRARY'], required=True, ), space_primary=dict( From 7515e8ce4a2354fc6eb3b441a9cc469909db6328 Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Thu, 14 Sep 2023 15:01:50 -0400 Subject: [PATCH 178/495] added PR value to fragment --- changelogs/fragments/920-zos-copy-add-library-choice.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/changelogs/fragments/920-zos-copy-add-library-choice.yml b/changelogs/fragments/920-zos-copy-add-library-choice.yml index cb30191c1..2d339227b 100644 --- a/changelogs/fragments/920-zos-copy-add-library-choice.yml +++ b/changelogs/fragments/920-zos-copy-add-library-choice.yml @@ -1,4 +1,4 @@ bugfixes: - zos_copy - Update option limit to include LIBRARY as dest_dataset/suboption value. Documentation updated to reflect this change. - (https://github.com/ansible-collections/ibm_zos_core/pull/). \ No newline at end of file + (https://github.com/ansible-collections/ibm_zos_core/pull/968). \ No newline at end of file From 0be6c693be3bfb06f84e8f54d16f6df2b71a3812 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Tue, 19 Sep 2023 10:09:37 -0600 Subject: [PATCH 179/495] Simplify loaldlib test cases (#969) * Simplify loaldlib test cases * Add fragment * Add link for PR * Remove identation --- .../969-Simplify_loadlib_test_cases.yml | 3 + .../functional/modules/test_zos_copy_func.py | 184 ++++++++++++------ 2 files changed, 124 insertions(+), 63 deletions(-) create mode 100644 changelogs/fragments/969-Simplify_loadlib_test_cases.yml diff --git a/changelogs/fragments/969-Simplify_loadlib_test_cases.yml b/changelogs/fragments/969-Simplify_loadlib_test_cases.yml new file mode 100644 index 000000000..ce2060ed8 --- /dev/null +++ b/changelogs/fragments/969-Simplify_loadlib_test_cases.yml @@ -0,0 +1,3 @@ +trivial: +- zos_copy - Divide large test case for loadlibs and simplify functions. + (https://github.com/ansible-collections/ibm_zos_core/pull/969) \ No newline at end of file diff --git a/tests/functional/modules/test_zos_copy_func.py b/tests/functional/modules/test_zos_copy_func.py index dd0114fae..2bcf59a21 100644 --- a/tests/functional/modules/test_zos_copy_func.py +++ b/tests/functional/modules/test_zos_copy_func.py @@ -328,6 +328,32 @@ def link_loadlib_from_cobol(hosts, ds_name, cobol_pds): hosts.all.file(path=temp_jcl, state="absent") return rc +def generate_executable_ds(hosts, src, dest, cobol): + member = "HELLOSRC" + hosts.all.zos_copy(content=COBOL_SRC, dest=cobol) + dest_name = "{0}({1})".format(dest, member) + src_name = "{0}({1})".format(src, member) + rc = link_loadlib_from_cobol(hosts, dest_name, cobol) + assert rc == 0 + cmd = "mvscmd --pgm={0} --steplib={1} --sysprint=* --stderr=* --stdout=*" + hosts.all.shell(cmd=cmd.format(member, dest)) + rc = link_loadlib_from_cobol(hosts, src_name, cobol) + hosts.all.shell(cmd=cmd.format(member, src)) + assert rc == 0 + exec_res = hosts.all.shell(cmd=cmd.format(member, src)) + for result in exec_res.contacted.values(): + assert result.get("rc") == 0 + +def generate_executable_uss(hosts, src, src_jcl_call): + hosts.all.zos_copy(content=hello_world, dest=src, force=True) + hosts.all.zos_copy(content=call_c_hello_jcl, dest=src_jcl_call, force=True) + hosts.all.shell(cmd="xlc -o hello_world hello_world.c", chdir="/tmp/c/") + hosts.all.shell(cmd="submit {0}".format(src_jcl_call)) + verify_exe_src = hosts.all.shell(cmd="/tmp/c/hello_world") + for res in verify_exe_src.contacted.values(): + assert res.get("rc") == 0 + stdout = res.get("stdout") + assert "Hello World" in str(stdout) @pytest.mark.uss @pytest.mark.parametrize("src", [ @@ -2515,7 +2541,7 @@ def test_copy_pds_loadlib_member_to_pds_loadlib_member(ansible_zos_module, is_cr src = "USER.LOAD.SRC" dest = "USER.LOAD.DEST" cobol_pds = "USER.COBOL.SRC" - uss_dest = "/tmp/HELLO" + dest_exe = "USER.LOAD.EXE" try: hosts.all.zos_data_set( name=src, @@ -2528,19 +2554,17 @@ def test_copy_pds_loadlib_member_to_pds_loadlib_member(ansible_zos_module, is_cr space_type="M", replace=True ) - if is_created: - hosts.all.zos_data_set( - name=dest, - state="present", - type="pdse", - record_format="U", - record_length=0, - block_size=32760, - space_primary=2, - space_type="M", - replace=True - ) - + hosts.all.zos_data_set( + name=dest, + state="present", + type="pdse", + record_format="U", + record_length=0, + block_size=32760, + space_primary=2, + space_type="M", + replace=True + ) hosts.all.zos_data_set( name=cobol_pds, state="present", @@ -2553,61 +2577,94 @@ def test_copy_pds_loadlib_member_to_pds_loadlib_member(ansible_zos_module, is_cr ) member = "HELLOSRC" cobol_pds = "{0}({1})".format(cobol_pds, member) - rc = hosts.all.zos_copy( - content=COBOL_SRC, - dest=cobol_pds - ) - dest_name = "{0}({1})".format(dest, member) - src_name = "{0}({1})".format(src, member) - # both src and dest need to be a loadlib - rc = link_loadlib_from_cobol(hosts, dest_name, cobol_pds) - assert rc == 0 - # make sure is executable - cmd = "mvscmd --pgm={0} --steplib={1} --sysprint=* --stderr=* --stdout=*" - exec_res = hosts.all.shell( - cmd=cmd.format(member, dest) - ) - for result in exec_res.contacted.values(): - assert result.get("rc") == 0 - rc = link_loadlib_from_cobol(hosts, src_name, cobol_pds) - assert rc == 0 - - exec_res = hosts.all.shell( - cmd=cmd.format(member, src) - ) - for result in exec_res.contacted.values(): - assert result.get("rc") == 0 - # Execute the copy from pdse to another with executable and validate it + generate_executable_ds(hosts, src, dest, cobol_pds) + if is_created: + hosts.all.zos_data_set( + name=dest_exe, + state="present", + type="pdse", + record_format="U", + record_length=0, + block_size=32760, + space_primary=2, + space_type="M", + replace=True + ) copy_res = hosts.all.zos_copy( src="{0}({1})".format(src, member), - dest="{0}({1})".format(dest, "MEM1"), + dest="{0}({1})".format(dest_exe, "MEM1"), remote_src=True, executable=True) verify_copy = hosts.all.shell( - cmd="mls {0}".format(dest), + cmd="mls {0}".format(dest_exe), executable=SHELL_EXECUTABLE ) for result in copy_res.contacted.values(): assert result.get("msg") is None assert result.get("changed") is True - assert result.get("dest") == "{0}({1})".format(dest, "MEM1") + assert result.get("dest") == "{0}({1})".format(dest_exe, "MEM1") for v_cp in verify_copy.contacted.values(): assert v_cp.get("rc") == 0 stdout = v_cp.get("stdout") assert stdout is not None - # number of members - assert len(stdout.splitlines()) == 2 - # Copy to a uss file executable from the library execute and validate + finally: + hosts.all.zos_data_set(name=dest, state="absent") + hosts.all.zos_data_set(name=src, state="absent") + hosts.all.zos_data_set(name=cobol_pds, state="absent") + +@pytest.mark.pdse +@pytest.mark.uss +def test_copy_pds_loadlib_member_to_uss(ansible_zos_module): + hosts = ansible_zos_module + src = "USER.LOAD.SRC" + dest = "USER.LOAD.DEST" + cobol_pds = "USER.COBOL.SRC" + uss_dest = "/tmp/HELLO" + try: + hosts.all.zos_data_set( + name=src, + state="present", + type="pdse", + record_format="U", + record_length=0, + block_size=32760, + space_primary=2, + space_type="M", + replace=True + ) + hosts.all.zos_data_set( + name=dest, + state="present", + type="pdse", + record_format="U", + record_length=0, + block_size=32760, + space_primary=2, + space_type="M", + replace=True + ) + hosts.all.zos_data_set( + name=cobol_pds, + state="present", + type="pds", + space_primary=2, + record_format="FB", + record_length=80, + block_size=3120, + replace=True, + ) + member = "HELLOSRC" + cobol_pds = "{0}({1})".format(cobol_pds, member) + generate_executable_ds(hosts, src, dest, cobol_pds) copy_uss_res = hosts.all.zos_copy( - src="{0}({1})".format(dest, "MEM1"), + src="{0}({1})".format(src, member), dest=uss_dest, remote_src=True, executable=True, force=True) - for result in copy_uss_res.contacted.values(): assert result.get("msg") is None assert result.get("changed") is True @@ -2615,12 +2672,10 @@ def test_copy_pds_loadlib_member_to_pds_loadlib_member(ansible_zos_module, is_cr verify_exe_uss = hosts.all.shell( cmd="{0}".format(uss_dest) ) - for v_cp_u in verify_exe_uss.contacted.values(): assert v_cp_u.get("rc") == 0 stdout = v_cp_u.get("stdout") assert "SIMPLE HELLO WORLD" in str(stdout) - finally: hosts.all.zos_data_set(name=dest, state="absent") hosts.all.zos_data_set(name=src, state="absent") @@ -2628,26 +2683,14 @@ def test_copy_pds_loadlib_member_to_pds_loadlib_member(ansible_zos_module, is_cr hosts.all.file(name=uss_dest, state="absent") -@pytest.mark.pdse @pytest.mark.uss -@pytest.mark.parametrize("is_created", ["true", "false"]) -def test_copy_executables_uss_to_member(ansible_zos_module, is_created): +def test_copy_executables_uss_to_uss(ansible_zos_module): hosts= ansible_zos_module src= "/tmp/c/hello_world.c" src_jcl_call= "/tmp/c/call_hw_pgm.jcl" dest_uss="/tmp/c/hello_world_2" - dest = "USER.LOAD.DEST" - member = "HELLOSRC" try: - hosts.all.zos_copy(content=hello_world, dest=src, force=True) - hosts.all.zos_copy(content=call_c_hello_jcl, dest=src_jcl_call, force=True) - hosts.all.shell(cmd="xlc -o hello_world hello_world.c", chdir="/tmp/c/") - hosts.all.shell(cmd="submit {0}".format(src_jcl_call)) - verify_exe_src = hosts.all.shell(cmd="/tmp/c/hello_world") - for res in verify_exe_src.contacted.values(): - assert res.get("rc") == 0 - stdout = res.get("stdout") - assert "Hello World" in str(stdout) + generate_executable_uss(hosts, src, src_jcl_call) copy_uss_res = hosts.all.zos_copy( src="/tmp/c/hello_world", dest=dest_uss, @@ -2663,6 +2706,21 @@ def test_copy_executables_uss_to_member(ansible_zos_module, is_created): assert res.get("rc") == 0 stdout = res.get("stdout") assert "Hello World" in str(stdout) + finally: + hosts.all.shell(cmd='rm -r /tmp/c') + + +@pytest.mark.pdse +@pytest.mark.uss +@pytest.mark.parametrize("is_created", ["true", "false"]) +def test_copy_executables_uss_to_member(ansible_zos_module, is_created): + hosts= ansible_zos_module + src= "/tmp/c/hello_world.c" + src_jcl_call= "/tmp/c/call_hw_pgm.jcl" + dest = "USER.LOAD.DEST" + member = "HELLOSRC" + try: + generate_executable_uss(hosts, src, src_jcl_call) if is_created: hosts.all.zos_data_set( name=dest, From dfe1ba8abc883cfb259825d8167d07077c4cb67d Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Tue, 19 Sep 2023 13:13:23 -0400 Subject: [PATCH 180/495] Change implemented in zos_operator Working on zos_operator_action_query Added initial changelog fragment --- ...nhance-Add-wait-zos-operator-and-query.yml | 5 +++++ plugins/modules/zos_operator.py | 22 +++++++++---------- plugins/modules/zos_operator_action_query.py | 2 ++ 3 files changed, 18 insertions(+), 11 deletions(-) create mode 100644 changelogs/fragments/943-enhance-Add-wait-zos-operator-and-query.yml diff --git a/changelogs/fragments/943-enhance-Add-wait-zos-operator-and-query.yml b/changelogs/fragments/943-enhance-Add-wait-zos-operator-and-query.yml new file mode 100644 index 000000000..4067471dc --- /dev/null +++ b/changelogs/fragments/943-enhance-Add-wait-zos-operator-and-query.yml @@ -0,0 +1,5 @@ +enhancements: + - zos_operator: Added the 'wait' parameter back in to use the new -w operator. + (https://github.com/ansible-collections/ibm_zos_core/pull/xxx) + - zos_operator_action_query: Add wait_time_s and 'wait' parameters in the operator_action_query. + (https://github.com/ansible-collections/ibm_zos_core/pull/xxx) \ No newline at end of file diff --git a/plugins/modules/zos_operator.py b/plugins/modules/zos_operator.py index 5bd04ba50..29fc25817 100644 --- a/plugins/modules/zos_operator.py +++ b/plugins/modules/zos_operator.py @@ -57,14 +57,11 @@ default: 1 wait: description: - - Configuring wait used by the L(zos_operator,./zos_operator.html) module - has been deprecated and will be removed in a future ibm.ibm_zos_core - collection. - - Setting this option will yield no change, it is deprecated. - - Review option I(wait_time_s) to instruct operator commands to wait. + - Setting this option will tell opercmd to wait the full wait_time, instead + of returning on first data received type: bool required: false - default: true + default: false """ EXAMPLES = r""" @@ -81,12 +78,13 @@ zos_operator: cmd: "\\$PJ(*)" -- name: Execute operator command to show jobs, waiting up to 5 seconds for response +- name: Execute operator command to show jobs, always waiting 8 seconds for response zos_operator: cmd: 'd a,all' wait_time_s: 5 + wait: true -- name: Execute operator command to show jobs, always waiting 7 seconds for response +- name: Execute operator command to show jobs, waiting up to 7 seconds for response zos_operator: cmd: 'd a,all' wait_time_s: 7 @@ -195,7 +193,7 @@ def run_module(): cmd=dict(type="str", required=True), verbose=dict(type="bool", required=False, default=False), wait_time_s=dict(type="int", required=False, default=1), - wait=dict(type="bool", required=False, default=True), + wait=dict(type="bool", required=False, default=False), ) result = dict(changed=False) @@ -266,8 +264,7 @@ def parse_params(params): cmd=dict(arg_type="str", required=True), verbose=dict(arg_type="bool", required=False), wait_time_s=dict(arg_type="int", required=False), - wait=dict(arg_type="bool", required=False, removed_at_date='2022-11-30', - removed_from_collection='ibm.ibm_zos_core'), + wait=dict(arg_type="bool", required=False), ) parser = BetterArgParser(arg_defs) new_params = parser.parse_args(params) @@ -286,6 +283,9 @@ def run_operator_command(params): wait_s = params.get("wait_time_s") cmdtxt = params.get("cmd") + if params.get("wait"): + kwargs.update({"wait_arg": True}) + args = [] rc, stdout, stderr, elapsed = execute_command(cmdtxt, timeout=wait_s, *args, **kwargs) diff --git a/plugins/modules/zos_operator_action_query.py b/plugins/modules/zos_operator_action_query.py index 10d096b48..0211f8a4b 100644 --- a/plugins/modules/zos_operator_action_query.py +++ b/plugins/modules/zos_operator_action_query.py @@ -398,6 +398,8 @@ def handle_conditions(list, condition_type, value): def execute_command(operator_cmd): response = opercmd.execute(operator_cmd) +# response = opercmd.execute(operator_cmd, timeout, *args, **kwargs) + rc = response.rc stdout = response.stdout_response stderr = response.stderr_response From 4961784de3b409eaa319d2de42db91c97a096354 Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Wed, 20 Sep 2023 11:43:15 -0400 Subject: [PATCH 181/495] cleaned up note in zos_operator Added wait time and wait values to zoaq --- plugins/modules/zos_operator.py | 2 +- plugins/modules/zos_operator_action_query.py | 62 ++++++++++++++++++-- 2 files changed, 58 insertions(+), 6 deletions(-) diff --git a/plugins/modules/zos_operator.py b/plugins/modules/zos_operator.py index 29fc25817..2dfa12fdb 100644 --- a/plugins/modules/zos_operator.py +++ b/plugins/modules/zos_operator.py @@ -57,7 +57,7 @@ default: 1 wait: description: - - Setting this option will tell opercmd to wait the full wait_time, instead + - Setting this option will tell the system to wait the full wait_time, instead of returning on first data received type: bool required: false diff --git a/plugins/modules/zos_operator_action_query.py b/plugins/modules/zos_operator_action_query.py index 0211f8a4b..ddef406eb 100644 --- a/plugins/modules/zos_operator_action_query.py +++ b/plugins/modules/zos_operator_action_query.py @@ -57,6 +57,26 @@ - A trailing asterisk, (*) wildcard is supported. type: str required: false + wait_time_s: + description: + - Set maximum time in seconds to wait for the commands to execute. + - When set to 0, the system default is used. + - This option is helpful on a busy system requiring more time to execute + commands. + - Setting I(wait) can instruct if execution should wait the + full I(wait_time_s). + - Because 2 functions are called, potential time delay is doubled. + type: int + required: false + default: 1 + wait: + description: + - Setting this option will tell the system to wait the full wait_time, instead + of returning on first data received + - Because 2 functions are called, potential time delay is doubled. + type: bool + required: false + default: false message_filter: description: - Return outstanding messages requiring operator action awaiting a @@ -101,6 +121,19 @@ zos_operator_action_query: job_name: im5* +- name: Display all outstanding messages whose job name begin with im7, + wait up to 10 seconds per call (20 seconds overall) for data + zos_operator_action_query: + job_name: im7* + wait_time_s: 10 + +- name: Display all outstanding messages whose job name begin with im9, + wait up a full 15 seconds per call (30 seconds overall) for data + zos_operator_action_query: + job_name: im9* + wait_time_s: 15 + wait: True + - name: Display all outstanding messages whose message id begin with dsi* zos_operator_action_query: message_id: dsi* @@ -235,6 +268,8 @@ def run_module(): system=dict(type="str", required=False), message_id=dict(type="str", required=False), job_name=dict(type="str", required=False), + wait_time_s=dict(type="int", required=False, default=1), + wait=dict(type="bool", required=False, default=False), message_filter=dict( type="dict", required=False, @@ -251,7 +286,19 @@ def run_module(): try: new_params = parse_params(module.params) - cmd_result_a = execute_command("d r,a,s") + kwargs = {} + + wait_s = params.get("wait_time_s") + + if new_params.get("wait"): + kwargs.update({"wait_arg": True}) + + args = [] + + cmdtxt = "d r,a,s" + + cmd_result_a = execute_command(cmdtxt, timeout=wait_s, *args, **kwargs) + if cmd_result_a.rc > 0: module.fail_json( msg="A non-zero return code was received while querying the operator.", @@ -263,7 +310,10 @@ def run_module(): cmd="d r,a,s", ) - cmd_result_b = execute_command("d r,a,jn") + cmdtxt = new_params.get("d r,a,jn") + + cmd_result_b = execute_command(cmdtxt, timeout=wait_s, *args, **kwargs) + if cmd_result_b.rc > 0: module.fail_json( msg="A non-zero return code was received while querying the operator.", @@ -295,6 +345,8 @@ def parse_params(params): system=dict(arg_type=system_type, required=False), message_id=dict(arg_type=message_id_type, required=False), job_name=dict(arg_type=job_name_type, required=False), + wait_time_s=dict(arg_type="int", required=False), + wait=dict(arg_type="bool", required=False), message_filter=dict(arg_type=message_filter_type, required=False) ) parser = BetterArgParser(arg_defs) @@ -395,10 +447,10 @@ def handle_conditions(list, condition_type, value): return newlist -def execute_command(operator_cmd): +def execute_command(operator_cmd, timeout=1, *args, **kwargs): - response = opercmd.execute(operator_cmd) -# response = opercmd.execute(operator_cmd, timeout, *args, **kwargs) + # response = opercmd.execute(operator_cmd) + response = opercmd.execute(operator_cmd, timeout, *args, **kwargs) rc = response.rc stdout = response.stdout_response From 9136715656a6511581278105c30e3dc4850d8ad7 Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Wed, 20 Sep 2023 11:59:55 -0400 Subject: [PATCH 182/495] corrected pep8/pylint errors --- plugins/modules/zos_operator.py | 2 +- plugins/modules/zos_operator_action_query.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/plugins/modules/zos_operator.py b/plugins/modules/zos_operator.py index 2dfa12fdb..c34d64818 100644 --- a/plugins/modules/zos_operator.py +++ b/plugins/modules/zos_operator.py @@ -284,7 +284,7 @@ def run_operator_command(params): cmdtxt = params.get("cmd") if params.get("wait"): - kwargs.update({"wait_arg": True}) + kwargs.update({"wait_arg": True}) args = [] rc, stdout, stderr, elapsed = execute_command(cmdtxt, timeout=wait_s, *args, **kwargs) diff --git a/plugins/modules/zos_operator_action_query.py b/plugins/modules/zos_operator_action_query.py index ddef406eb..9abd8e493 100644 --- a/plugins/modules/zos_operator_action_query.py +++ b/plugins/modules/zos_operator_action_query.py @@ -288,7 +288,7 @@ def run_module(): kwargs = {} - wait_s = params.get("wait_time_s") + wait_s = new_params.get("wait_time_s") if new_params.get("wait"): kwargs.update({"wait_arg": True}) From ac8acd5d38095d6bbb435a87916b1766a6edd95e Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Wed, 20 Sep 2023 12:48:51 -0400 Subject: [PATCH 183/495] added output to failing test --- .../modules/test_zos_operator_action_query_func.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/tests/functional/modules/test_zos_operator_action_query_func.py b/tests/functional/modules/test_zos_operator_action_query_func.py index 4872a2a02..ce60e9588 100644 --- a/tests/functional/modules/test_zos_operator_action_query_func.py +++ b/tests/functional/modules/test_zos_operator_action_query_func.py @@ -23,14 +23,19 @@ def test_zos_operator_action_query_no_options(ansible_zos_module): hosts.all.zos_operator(cmd="DUMP COMM=('test dump')") results = hosts.all.zos_operator_action_query() try: + print( "\n\n=========== in no-options loop 1") for action in results.get("actions"): + print( action.get("message_text", "-no-")) if "SPECIFY OPERAND(S) FOR DUMP" in action.get("message_text", ""): hosts.all.zos_operator( cmd="{0}cancel".format(action.get("number"))) except Exception: pass + + print( "\n\n=============== in no-options loop 2") for result in results.contacted.values(): - assert result.get("actions") + print( result ) + # assert result.get("actions") def test_zos_operator_action_query_option_message_id(ansible_zos_module): hosts = ansible_zos_module From bb190304e6928dd1d23cf1f0fb684a3ca3c8ce34 Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Wed, 20 Sep 2023 13:00:40 -0400 Subject: [PATCH 184/495] added another test print, since the test error moved --- tests/functional/modules/test_zos_operator_func.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tests/functional/modules/test_zos_operator_func.py b/tests/functional/modules/test_zos_operator_func.py index 84f593f51..6843a5678 100644 --- a/tests/functional/modules/test_zos_operator_func.py +++ b/tests/functional/modules/test_zos_operator_func.py @@ -41,6 +41,8 @@ def test_zos_operator_various_command(ansible_zos_module): hosts = ansible_zos_module results = hosts.all.zos_operator(cmd=command) for result in results.contacted.values(): + print( "\n\n===result:" ) + print( result ) assert result["rc"] == expected_rc assert result.get("changed") is changed From e7f02a2974274cae243e0fb5e4ec8a165e949817 Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Wed, 20 Sep 2023 13:22:38 -0400 Subject: [PATCH 185/495] added print and un-commented upper assertion --- .../modules/test_zos_operator_action_query_func.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/tests/functional/modules/test_zos_operator_action_query_func.py b/tests/functional/modules/test_zos_operator_action_query_func.py index ce60e9588..76dbc5d83 100644 --- a/tests/functional/modules/test_zos_operator_action_query_func.py +++ b/tests/functional/modules/test_zos_operator_action_query_func.py @@ -35,7 +35,7 @@ def test_zos_operator_action_query_no_options(ansible_zos_module): print( "\n\n=============== in no-options loop 2") for result in results.contacted.values(): print( result ) - # assert result.get("actions") + assert result.get("actions") def test_zos_operator_action_query_option_message_id(ansible_zos_module): hosts = ansible_zos_module @@ -48,7 +48,10 @@ def test_zos_operator_action_query_option_message_id(ansible_zos_module): cmd="{0}cancel".format(action.get("number"))) except Exception: pass + + print( "\n\n=============== in msgid loop 2") for result in results.contacted.values(): + print( result ) assert result.get("actions") def test_zos_operator_action_query_option_message_id_invalid_abbreviation( From 15371f669dd1d76a5011b64720d3b328b30a58c8 Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Wed, 20 Sep 2023 16:14:19 -0400 Subject: [PATCH 186/495] removed embedded print statements, changed \$ to '$ to eliminate deprecation warning --- .../modules/test_zos_operator_action_query_func.py | 9 +-------- tests/functional/modules/test_zos_operator_func.py | 5 +---- 2 files changed, 2 insertions(+), 12 deletions(-) diff --git a/tests/functional/modules/test_zos_operator_action_query_func.py b/tests/functional/modules/test_zos_operator_action_query_func.py index 76dbc5d83..30f5175e4 100644 --- a/tests/functional/modules/test_zos_operator_action_query_func.py +++ b/tests/functional/modules/test_zos_operator_action_query_func.py @@ -16,25 +16,21 @@ __metaclass__ = type import pytest -import unittest + def test_zos_operator_action_query_no_options(ansible_zos_module): hosts = ansible_zos_module hosts.all.zos_operator(cmd="DUMP COMM=('test dump')") results = hosts.all.zos_operator_action_query() try: - print( "\n\n=========== in no-options loop 1") for action in results.get("actions"): - print( action.get("message_text", "-no-")) if "SPECIFY OPERAND(S) FOR DUMP" in action.get("message_text", ""): hosts.all.zos_operator( cmd="{0}cancel".format(action.get("number"))) except Exception: pass - print( "\n\n=============== in no-options loop 2") for result in results.contacted.values(): - print( result ) assert result.get("actions") def test_zos_operator_action_query_option_message_id(ansible_zos_module): @@ -49,9 +45,7 @@ def test_zos_operator_action_query_option_message_id(ansible_zos_module): except Exception: pass - print( "\n\n=============== in msgid loop 2") for result in results.contacted.values(): - print( result ) assert result.get("actions") def test_zos_operator_action_query_option_message_id_invalid_abbreviation( @@ -275,7 +269,6 @@ def test_zos_operator_action_query_option_message_filter_multiple_matches( except Exception: pass for result in results.contacted.values(): - print(result.get("actions")) assert result.get("actions") assert len(result.get("actions")) > 1 diff --git a/tests/functional/modules/test_zos_operator_func.py b/tests/functional/modules/test_zos_operator_func.py index 6843a5678..5ce87370d 100644 --- a/tests/functional/modules/test_zos_operator_func.py +++ b/tests/functional/modules/test_zos_operator_func.py @@ -41,8 +41,6 @@ def test_zos_operator_various_command(ansible_zos_module): hosts = ansible_zos_module results = hosts.all.zos_operator(cmd=command) for result in results.contacted.values(): - print( "\n\n===result:" ) - print( result ) assert result["rc"] == expected_rc assert result.get("changed") is changed @@ -114,7 +112,6 @@ def test_zos_operator_positive_verbose_with_quick_delay(ansible_zos_module): # assert timediff < 15 for result in results.contacted.values(): - pprint(result) assert result["rc"] == 0 assert result.get("changed") is True assert result.get("content") is not None @@ -124,7 +121,7 @@ def test_zos_operator_positive_verbose_with_quick_delay(ansible_zos_module): def test_response_come_back_complete(ansible_zos_module): hosts = ansible_zos_module - results = hosts.all.zos_operator(cmd="\$dspl") + results = hosts.all.zos_operator(cmd='$dspl') # \$ triggers warning res = dict() res["stdout"] = [] for result in results.contacted.values(): From 93573e1a02765cddda81148948aaf1e74bdf4414 Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Wed, 20 Sep 2023 16:28:16 -0400 Subject: [PATCH 187/495] switch test back to "\$ --- tests/functional/modules/test_zos_operator_func.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/functional/modules/test_zos_operator_func.py b/tests/functional/modules/test_zos_operator_func.py index 5ce87370d..4ad07d882 100644 --- a/tests/functional/modules/test_zos_operator_func.py +++ b/tests/functional/modules/test_zos_operator_func.py @@ -121,7 +121,7 @@ def test_zos_operator_positive_verbose_with_quick_delay(ansible_zos_module): def test_response_come_back_complete(ansible_zos_module): hosts = ansible_zos_module - results = hosts.all.zos_operator(cmd='$dspl') # \$ triggers warning + results = hosts.all.zos_operator(cmd="\$dspl") res = dict() res["stdout"] = [] for result in results.contacted.values(): From 959e0c3c4cdde6e5b05829e5f1c0be151fbcf947 Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Wed, 20 Sep 2023 17:43:53 -0400 Subject: [PATCH 188/495] correction to zos_operator_action_query to pass the second query cmd correctly --- plugins/modules/zos_operator_action_query.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/plugins/modules/zos_operator_action_query.py b/plugins/modules/zos_operator_action_query.py index 9abd8e493..4270e33c8 100644 --- a/plugins/modules/zos_operator_action_query.py +++ b/plugins/modules/zos_operator_action_query.py @@ -310,7 +310,7 @@ def run_module(): cmd="d r,a,s", ) - cmdtxt = new_params.get("d r,a,jn") + cmdtxt = "d r,a,jn" cmd_result_b = execute_command(cmdtxt, timeout=wait_s, *args, **kwargs) From a97234d40a2a6987ce7e5e49e59a24e0c414caa3 Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Thu, 21 Sep 2023 09:48:21 -0400 Subject: [PATCH 189/495] Added PR# to changelog tweaked description of new feature in both affected functions. --- .../943-enhance-Add-wait-zos-operator-and-query.yml | 5 +++-- plugins/modules/zos_operator.py | 2 +- plugins/modules/zos_operator_action_query.py | 2 +- 3 files changed, 5 insertions(+), 4 deletions(-) diff --git a/changelogs/fragments/943-enhance-Add-wait-zos-operator-and-query.yml b/changelogs/fragments/943-enhance-Add-wait-zos-operator-and-query.yml index 4067471dc..59547c8d4 100644 --- a/changelogs/fragments/943-enhance-Add-wait-zos-operator-and-query.yml +++ b/changelogs/fragments/943-enhance-Add-wait-zos-operator-and-query.yml @@ -1,5 +1,6 @@ enhancements: - zos_operator: Added the 'wait' parameter back in to use the new -w operator. - (https://github.com/ansible-collections/ibm_zos_core/pull/xxx) + (https://github.com/ansible-collections/ibm_zos_core/pull/976) - zos_operator_action_query: Add wait_time_s and 'wait' parameters in the operator_action_query. - (https://github.com/ansible-collections/ibm_zos_core/pull/xxx) \ No newline at end of file + (https://github.com/ansible-collections/ibm_zos_core/pull/976) + diff --git a/plugins/modules/zos_operator.py b/plugins/modules/zos_operator.py index c34d64818..9df17799f 100644 --- a/plugins/modules/zos_operator.py +++ b/plugins/modules/zos_operator.py @@ -78,7 +78,7 @@ zos_operator: cmd: "\\$PJ(*)" -- name: Execute operator command to show jobs, always waiting 8 seconds for response +- name: Execute operator command to show jobs, always waiting 5 seconds for response zos_operator: cmd: 'd a,all' wait_time_s: 5 diff --git a/plugins/modules/zos_operator_action_query.py b/plugins/modules/zos_operator_action_query.py index 4270e33c8..026a8343a 100644 --- a/plugins/modules/zos_operator_action_query.py +++ b/plugins/modules/zos_operator_action_query.py @@ -128,7 +128,7 @@ wait_time_s: 10 - name: Display all outstanding messages whose job name begin with im9, - wait up a full 15 seconds per call (30 seconds overall) for data + wait a full 15 seconds per call (30 seconds overall) for data zos_operator_action_query: job_name: im9* wait_time_s: 15 From e6321bfb91e0ac815072358c23cf56ccc4fc7b97 Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Thu, 21 Sep 2023 10:08:03 -0400 Subject: [PATCH 190/495] added changelog for ticket --- .../fragments/920-bug-add-library-feature-documentation.yml | 4 ++++ 1 file changed, 4 insertions(+) create mode 100644 changelogs/fragments/920-bug-add-library-feature-documentation.yml diff --git a/changelogs/fragments/920-bug-add-library-feature-documentation.yml b/changelogs/fragments/920-bug-add-library-feature-documentation.yml new file mode 100644 index 000000000..efafd82bc --- /dev/null +++ b/changelogs/fragments/920-bug-add-library-feature-documentation.yml @@ -0,0 +1,4 @@ +bugfixes: + - zos_copy: Add 'LIBRARY' option as a destination dataset suboption. + Update documentation to show this option. + (https://github.com/ansible-collections/ibm_zos_core/pull/968) From 77dae086ecc472bf5948c79931e4c8c282b77f4b Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Fri, 22 Sep 2023 09:04:37 -0600 Subject: [PATCH 191/495] Modified versions in bug issue template to avoid users picking a non-existing version Signed-off-by: Fernando Flores <fernandofloresdev@gmail.com> --- .github/ISSUE_TEMPLATE/bug_issue.yml | 5 ----- 1 file changed, 5 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/bug_issue.yml b/.github/ISSUE_TEMPLATE/bug_issue.yml index 8a1cd3ccd..d50883065 100644 --- a/.github/ISSUE_TEMPLATE/bug_issue.yml +++ b/.github/ISSUE_TEMPLATE/bug_issue.yml @@ -45,8 +45,6 @@ body: description: Which version of IBM Enterprise Python are you using? multiple: false options: - - v3.14.x - - v3.13.x - v3.12.x - v3.11.x - v3.10.x @@ -61,9 +59,6 @@ body: description: Which version of z/OS Ansible core collection are you using. If you are unsure, review the [documentation](https://ibm.github.io/z_ansible_collections_doc/faqs/faqs.html#how-do-i-update-a-collection-to-the-latest-version). multiple: false options: - - v1.9.0 - - v1.9.0-beta.1 - - v1.8.0 - v1.8.0-beta.1 - v1.7.0 - v1.7.0-beta.1 From ab780d2decc46a43d077046e155aa587319b69f8 Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Fri, 22 Sep 2023 12:25:32 -0400 Subject: [PATCH 192/495] Added zoau_api_version logic to check for 1.2.5 or later as a condition for wait_arg Added mention of this to documentation of interface --- plugins/modules/zos_operator.py | 24 ++++++++++++++++++-- plugins/modules/zos_operator_action_query.py | 24 ++++++++++++++++++-- 2 files changed, 44 insertions(+), 4 deletions(-) diff --git a/plugins/modules/zos_operator.py b/plugins/modules/zos_operator.py index 9df17799f..76b894425 100644 --- a/plugins/modules/zos_operator.py +++ b/plugins/modules/zos_operator.py @@ -59,6 +59,7 @@ description: - Setting this option will tell the system to wait the full wait_time, instead of returning on first data received + - This option is only available with zoau 1.2.5 or later type: bool required: false default: false @@ -176,6 +177,11 @@ except Exception: opercmd = MissingZOAUImport() +try: + from zoautil_py import ZOAU_API_VERSION +except Exception: + ZOAU_API_VERSION = "1.2.0" + def execute_command(operator_cmd, timeout=1, *args, **kwargs): start = timer() @@ -283,8 +289,22 @@ def run_operator_command(params): wait_s = params.get("wait_time_s") cmdtxt = params.get("cmd") - if params.get("wait"): - kwargs.update({"wait_arg": True}) + zv = ZOAU_API_VERSION.split(".") + getit = False + if( zv[0] > "1"): + getit = True + elif( zv[0] == "1" and zv[1] > "2"): + getit = True + elif( zv[0] == "1" and zv[1] == "2" and zv[2] > "4"): + getit = True + + if getit: + if params.get("wait"): + kwargs.update({"wait_arg": True}) + else: + kwargs.pop("wait_arg", "0") + else: + kwargs.pop("wait_arg", "0") args = [] rc, stdout, stderr, elapsed = execute_command(cmdtxt, timeout=wait_s, *args, **kwargs) diff --git a/plugins/modules/zos_operator_action_query.py b/plugins/modules/zos_operator_action_query.py index 026a8343a..0b340f936 100644 --- a/plugins/modules/zos_operator_action_query.py +++ b/plugins/modules/zos_operator_action_query.py @@ -74,6 +74,7 @@ - Setting this option will tell the system to wait the full wait_time, instead of returning on first data received - Because 2 functions are called, potential time delay is doubled. + - This option is only available with zoau 1.2.5 or later type: bool required: false default: false @@ -262,6 +263,11 @@ except Exception: opercmd = MissingZOAUImport() +try: + from zoautil_py import ZOAU_API_VERSION +except Exception: + ZOAU_API_VERSION = "1.2.0" + def run_module(): module_args = dict( @@ -290,8 +296,22 @@ def run_module(): wait_s = new_params.get("wait_time_s") - if new_params.get("wait"): - kwargs.update({"wait_arg": True}) + zv = ZOAU_API_VERSION.split(".") + getit = False + if( zv[0] > "1"): + getit = True + elif( zv[0] == "1" and zv[1] > "2"): + getit = True + elif( zv[0] == "1" and zv[1] == "2" and zv[2] > "4"): + getit = True + + if getit: + if new_params.get("wait"): + kwargs.update({"wait_arg": True}) + else: + kwargs.pop("wait_arg", "0") + else: + kwargs.pop("wait_arg", "0") args = [] From a8f74262389e181ddb961e7d6f31df7407467085 Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Fri, 22 Sep 2023 12:55:37 -0400 Subject: [PATCH 193/495] corrected pep8 errors --- plugins/modules/zos_operator.py | 14 +++++++------- plugins/modules/zos_operator_action_query.py | 6 +++--- 2 files changed, 10 insertions(+), 10 deletions(-) diff --git a/plugins/modules/zos_operator.py b/plugins/modules/zos_operator.py index 76b894425..35f155e65 100644 --- a/plugins/modules/zos_operator.py +++ b/plugins/modules/zos_operator.py @@ -291,18 +291,18 @@ def run_operator_command(params): zv = ZOAU_API_VERSION.split(".") getit = False - if( zv[0] > "1"): + if zv[0] > "1": getit = True - elif( zv[0] == "1" and zv[1] > "2"): + elif zv[0] == "1" and zv[1] > "2": getit = True - elif( zv[0] == "1" and zv[1] == "2" and zv[2] > "4"): + elif zv[0] == "1" and zv[1] == "2" and zv[2] > "4": getit = True if getit: - if params.get("wait"): - kwargs.update({"wait_arg": True}) - else: - kwargs.pop("wait_arg", "0") + if params.get("wait"): + kwargs.update({"wait_arg": True}) + else: + kwargs.pop("wait_arg", "0") else: kwargs.pop("wait_arg", "0") diff --git a/plugins/modules/zos_operator_action_query.py b/plugins/modules/zos_operator_action_query.py index 0b340f936..77d130697 100644 --- a/plugins/modules/zos_operator_action_query.py +++ b/plugins/modules/zos_operator_action_query.py @@ -298,11 +298,11 @@ def run_module(): zv = ZOAU_API_VERSION.split(".") getit = False - if( zv[0] > "1"): + if zv[0] > "1": getit = True - elif( zv[0] == "1" and zv[1] > "2"): + elif zv[0] == "1" and zv[1] > "2": getit = True - elif( zv[0] == "1" and zv[1] == "2" and zv[2] > "4"): + elif zv[0] == "1" and zv[1] == "2" and zv[2] > "4": getit = True if getit: From 4d033385b0d1be275623c30ae45fcf0c9ad13628 Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Wed, 4 Oct 2023 10:11:18 -0400 Subject: [PATCH 194/495] removed redundant changelog fragment --- .../fragments/920-bug-add-library-feature-documentation.yml | 4 ---- 1 file changed, 4 deletions(-) delete mode 100644 changelogs/fragments/920-bug-add-library-feature-documentation.yml diff --git a/changelogs/fragments/920-bug-add-library-feature-documentation.yml b/changelogs/fragments/920-bug-add-library-feature-documentation.yml deleted file mode 100644 index efafd82bc..000000000 --- a/changelogs/fragments/920-bug-add-library-feature-documentation.yml +++ /dev/null @@ -1,4 +0,0 @@ -bugfixes: - - zos_copy: Add 'LIBRARY' option as a destination dataset suboption. - Update documentation to show this option. - (https://github.com/ansible-collections/ibm_zos_core/pull/968) From 0e2fb96338e0457f9095619f41b6a7f9fbda63ab Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Wed, 4 Oct 2023 11:21:00 -0400 Subject: [PATCH 195/495] Changed Enhancements to minor_changes --- .../fragments/943-enhance-Add-wait-zos-operator-and-query.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/changelogs/fragments/943-enhance-Add-wait-zos-operator-and-query.yml b/changelogs/fragments/943-enhance-Add-wait-zos-operator-and-query.yml index 59547c8d4..71e24fc14 100644 --- a/changelogs/fragments/943-enhance-Add-wait-zos-operator-and-query.yml +++ b/changelogs/fragments/943-enhance-Add-wait-zos-operator-and-query.yml @@ -1,4 +1,4 @@ -enhancements: +minor_changes: - zos_operator: Added the 'wait' parameter back in to use the new -w operator. (https://github.com/ansible-collections/ibm_zos_core/pull/976) - zos_operator_action_query: Add wait_time_s and 'wait' parameters in the operator_action_query. From 36e6368a96c963309f91e44db9a4330db7fc5250 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Thu, 5 Oct 2023 22:46:11 -0700 Subject: [PATCH 196/495] push updated module doc Signed-off-by: ddimatos <dimatos@gmail.com> --- docs/source/modules/zos_copy.rst | 28 ++++++++++++++++++++++++++-- 1 file changed, 26 insertions(+), 2 deletions(-) diff --git a/docs/source/modules/zos_copy.rst b/docs/source/modules/zos_copy.rst index 45dee10a7..71cd094fc 100644 --- a/docs/source/modules/zos_copy.rst +++ b/docs/source/modules/zos_copy.rst @@ -74,7 +74,7 @@ dest If ``dest`` is a nonexistent data set, it will be created following the process outlined here and in the ``volume`` option. - If ``dest`` is a nonexistent data set, the attributes assigned will depend on the type of ``src``. If ``src`` is a USS file, ``dest`` will have a Fixed Block (FB) record format and the remaining attributes will be computed. If ``src`` is binary, ``dest`` will have a Fixed Block (FB) record format with a record length of 80, block size of 32760, and the remaining attributes will be computed. + If ``dest`` is a nonexistent data set, the attributes assigned will depend on the type of ``src``. If ``src`` is a USS file, ``dest`` will have a Fixed Block (FB) record format and the remaining attributes will be computed. If *is_binary=true*, ``dest`` will have a Fixed Block (FB) record format with a record length of 80, block size of 32760, and the remaining attributes will be computed. If *executable=true*,``dest`` will have an Undefined (U) record format with a record length of 0, block size of 32760, and the remaining attributes will be computed. When ``dest`` is a data set, precedence rules apply. If ``dest_data_set`` is set, this will take precedence over an existing data set. If ``dest`` is an empty data set, the empty data set will be written with the expectation its attributes satisfy the copy. Lastly, if no precendent rule has been exercised, ``dest`` will be created with the same attributes of ``src``. @@ -156,6 +156,21 @@ is_binary | **type**: bool +executable + If set to ``true``, indicates that the file or library to be copied is an executable. + + If the ``src`` executable has an alias, the alias information is also copied. If the ``dest`` is Unix, the alias is not visible in Unix, even though the information is there and will be visible if copied to a library. + + If *executable=true*, and ``dest`` is a data set, it must be a PDS or PDSE (library). + + If ``dest`` is a nonexistent data set, the library attributes assigned will be Undefined (U) record format with a record length of 0, block size of 32760 and the remaining attributes will be computed. + + If ``dest`` is a file, execute permission for the user will be added to the file (``u+x``). + + | **required**: False + | **type**: bool + + local_follow This flag indicates that any existing filesystem links in the source tree should be followed. @@ -247,7 +262,7 @@ dest_data_set | **required**: True | **type**: str - | **choices**: KSDS, ESDS, RRDS, LDS, SEQ, PDS, PDSE, MEMBER, BASIC + | **choices**: KSDS, ESDS, RRDS, LDS, SEQ, PDS, PDSE, MEMBER, BASIC, LIBRARY space_primary @@ -672,6 +687,13 @@ Examples record_format: VB record_length: 150 + - name: Copy a Program Object on remote system to a new PDSE member MYCOBOL. + zos_copy: + src: HLQ.COBOLSRC.PDSE(TESTPGM) + dest: HLQ.NEW.PDSE(MYCOBOL) + remote_src: true + executable: true + @@ -691,6 +713,8 @@ Notes `zos_copy <./zos_copy.html>`_ uses SFTP (Secure File Transfer Protocol) for the underlying transfer protocol; Co:Z SFTP is not supported. In the case of Co:z SFTP, you can exempt the Ansible userid on z/OS from using Co:Z thus falling back to using standard SFTP. + Beginning in version 1.8.x, zos_copy will no longer attempt to autocorrect a copy of a data type member into a PDSE that contains program objects. You can control this behavior using module option executable that will signify an executable is being copied into a PDSE with other executables. Mixing data type members with program objects will be responded with a (FSUM8976,./zos_copy.html) error. + See Also From 94985a39ff1702dc763028f2caca6a237f8fc581 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Fri, 6 Oct 2023 10:04:42 -0600 Subject: [PATCH 197/495] Enabler/validate path join (#962) * Added real path fetch to base * Sec changes * Updated changelog and template * Added validation to zos_copy and zos_fetch Signed-off-by: Fernando Flores <fernandofloresdev@gmail.com> * Corrected positional argument * Added validation changes Signed-off-by: Fernando Flores <fernandofloresdev@gmail.com> * Removed f-string * Fixed path join for copy_to_file --------- Signed-off-by: Fernando Flores <fernandofloresdev@gmail.com> --- .../fragments/963-validate-path-join.yml | 5 +++ plugins/action/zos_copy.py | 4 +- plugins/action/zos_fetch.py | 8 ++-- plugins/module_utils/encode.py | 12 ++--- plugins/module_utils/template.py | 4 +- plugins/module_utils/validation.py | 44 +++++++++++++++++++ plugins/modules/zos_copy.py | 30 ++++++++----- plugins/modules/zos_fetch.py | 3 +- 8 files changed, 84 insertions(+), 26 deletions(-) create mode 100644 changelogs/fragments/963-validate-path-join.yml create mode 100644 plugins/module_utils/validation.py diff --git a/changelogs/fragments/963-validate-path-join.yml b/changelogs/fragments/963-validate-path-join.yml new file mode 100644 index 000000000..017c793cc --- /dev/null +++ b/changelogs/fragments/963-validate-path-join.yml @@ -0,0 +1,5 @@ +minor_changes: + - zos_fetch: Add validation into path joins to detect unauthorized path traversals. + (https://github.com/ansible-collections/ibm_zos_core/pull/962) + - zos_copy: Add validation into path joins to detect unauthorized path traversals. + (https://github.com/ansible-collections/ibm_zos_core/pull/962) \ No newline at end of file diff --git a/plugins/action/zos_copy.py b/plugins/action/zos_copy.py index c6273132c..afc454359 100644 --- a/plugins/action/zos_copy.py +++ b/plugins/action/zos_copy.py @@ -33,7 +33,7 @@ is_data_set ) -from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import encode +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import encode, validation from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import template @@ -186,7 +186,7 @@ def run(self, tmp=None, task_vars=None): src = rendered_dir task_args["size"] = sum( - os.stat(os.path.join(path, f)).st_size + os.stat(os.path.join(validation.validate_safe_path(path), validation.validate_safe_path(f))).st_size for path, dirs, files in os.walk(src) for f in files ) diff --git a/plugins/action/zos_fetch.py b/plugins/action/zos_fetch.py index e10dbd75f..087c70953 100644 --- a/plugins/action/zos_fetch.py +++ b/plugins/action/zos_fetch.py @@ -26,7 +26,7 @@ from ansible.utils.display import Display from ansible import cli -from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import encode +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import encode, validation SUPPORTED_DS_TYPES = frozenset({"PS", "PO", "VSAM", "USS"}) @@ -182,10 +182,12 @@ def run(self, tmp=None, task_vars=None): if dest.endswith(os.sep): if fetch_member: base = os.path.dirname(dest) - dest = os.path.join(base, member_name) + dest = os.path.join(validation.validate_safe_path(base), validation.validate_safe_path(member_name)) + display.vvv(u"This is how dest looks {0}".format(dest), host=self._play_context.remote_addr) else: base = os.path.basename(source_local) - dest = os.path.join(dest, base) + dest = os.path.join(validation.validate_safe_path(dest), validation.validate_safe_path(base)) + display.vvv(u"This is how dest looks {0}".format(dest), host=self._play_context.remote_addr) if not dest.startswith("/"): dest = self._loader.path_dwim(dest) else: diff --git a/plugins/module_utils/encode.py b/plugins/module_utils/encode.py index a96bf46d5..047aa654c 100644 --- a/plugins/module_utils/encode.py +++ b/plugins/module_utils/encode.py @@ -31,7 +31,7 @@ from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.better_arg_parser import ( BetterArgParser, ) -from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import copy, system +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import copy, system, validation from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.ansible_module import ( AnsibleModuleHelper, ) @@ -327,7 +327,7 @@ def uss_convert_encoding_prev(self, src, dest, from_code, to_code): if path.isdir(src): for (dir, subdir, files) in walk(src): for file in files: - file_list.append(path.join(dir, file)) + file_list.append(path.join(validation.validate_safe_path(dir), validation.validate_safe_path(file))) if len(file_list) == 0: raise EncodeError( "Directory {0} is empty. Please check the path.".format(src) @@ -335,8 +335,8 @@ def uss_convert_encoding_prev(self, src, dest, from_code, to_code): elif len(file_list) == 1: if path.isdir(dest): file_name = path.basename(file_list[0]) - src_f = path.join(src, file_name) - dest_f = path.join(dest, file_name) + src_f = path.join(validation.validate_safe_path(src), validation.validate_safe_path(file_name)) + dest_f = path.join(validation.validate_safe_path(dest), validation.validate_safe_path(file_name)) convert_rc = self.uss_convert_encoding( src_f, dest_f, from_code, to_code ) @@ -361,7 +361,7 @@ def uss_convert_encoding_prev(self, src, dest, from_code, to_code): else: if path.isdir(dest): file_name = path.basename(path.abspath(src)) - dest = path.join(dest, file_name) + dest = path.join(validation.validate_safe_path(dest), validation.validate_safe_path(file_name)) convert_rc = self.uss_convert_encoding(src, dest, from_code, to_code) return convert_rc @@ -433,7 +433,7 @@ def mvs_convert_encoding( elif dest_type == "PO": for (dir, subdir, files) in walk(temp_dest): for file in files: - temp_file = path.join(dir, file) + temp_file = path.join(validation.validate_safe_path(dir), validation.validate_safe_path(file)) rc, out, err = copy.copy_uss2mvs(temp_file, dest, "PO") convert_rc = True else: diff --git a/plugins/module_utils/template.py b/plugins/module_utils/template.py index 308946da2..407a231c6 100644 --- a/plugins/module_utils/template.py +++ b/plugins/module_utils/template.py @@ -32,7 +32,7 @@ except Exception: jinja2 = MissingImport("jinja2") -from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import encode +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import encode, validation def _process_boolean(arg, default=False): @@ -283,7 +283,7 @@ def render_dir_template(self, variables): try: temp_parent_dir = tempfile.mkdtemp() last_dir = os.path.basename(self.template_dir) - temp_template_dir = os.path.join(temp_parent_dir, last_dir) + temp_template_dir = os.path.join(validation.validate_safe_path(temp_parent_dir), validation.validate_safe_path(last_dir)) os.makedirs(temp_template_dir, exist_ok=True) except FileExistsError as err: raise FileExistsError("Unable to create directory for rendered templates: {0}".format( diff --git a/plugins/module_utils/validation.py b/plugins/module_utils/validation.py new file mode 100644 index 000000000..c08847503 --- /dev/null +++ b/plugins/module_utils/validation.py @@ -0,0 +1,44 @@ +# Copyright (c) IBM Corporation 2023 +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +""" +Class implemented for common validations that are not specific to z/OS but rather system or +security related. + +""" +import os + + +def validate_safe_path(path): + """ + This function is implemented to validate against path traversal attack + when using os.path.join function. + + In this action plugin, path is on the controller. + """ + if not os.path.isabs(path): + real_path = os.path.realpath(path) + if not os.path.exists(real_path) and not real_path.endswith(os.sep): + # if path doesn't exist and does not contain separator then is likely a member. + return path + if not os.access(path=real_path, mode=os.F_OK): + raise DirectoryTraversalError(real_path) + return path + + +class DirectoryTraversalError(Exception): + def __init__(self, path): + self.msg = "Detected directory traversal, user does not have access to {0}".format(path) + super().__init__(self.msg) diff --git a/plugins/modules/zos_copy.py b/plugins/modules/zos_copy.py index aabd5447e..625e2e6b2 100644 --- a/plugins/modules/zos_copy.py +++ b/plugins/modules/zos_copy.py @@ -743,7 +743,7 @@ idcams ) from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import ( - better_arg_parser, data_set, encode, backup, copy + better_arg_parser, data_set, encode, backup, copy, validation, ) from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.ansible_module import ( AnsibleModuleHelper, @@ -958,7 +958,7 @@ def _convert_encoding_dir(self, dir_path, from_code_set, to_code_set): enc_utils = encode.EncodeUtils() for path, dirs, files in os.walk(dir_path): for file_path in files: - full_file_path = os.path.join(path, file_path) + full_file_path = os.path.join(validation.validate_safe_path(path), validation.validate_safe_path(file_path)) rc = enc_utils.uss_convert_encoding( full_file_path, full_file_path, from_code_set, to_code_set ) @@ -1159,7 +1159,9 @@ def copy_to_uss( self.module.set_mode_if_different(dest, mode, False) if changed_files: for filepath in changed_files: - self.module.set_mode_if_different(os.path.join(dest, filepath), mode, False) + self.module.set_mode_if_different( + os.path.join(validation.validate_safe_path(dest), validation.validate_safe_path(filepath)), mode, False + ) if group is not None: self.module.set_group_if_different(dest, group, False) if owner is not None: @@ -1182,9 +1184,9 @@ def _copy_to_file(self, src, dest, conv_path, temp_path): Returns: {str} -- Destination where the file was copied to """ + src_path = os.path.basename(src) if src else "inline_copy" if os.path.isdir(dest): - dest = os.path.join(dest, os.path.basename(src) - if src else "inline_copy") + dest = os.path.join(validation.validate_safe_path(dest), validation.validate_safe_path(src_path)) new_src = temp_path or conv_path or src try: @@ -1250,13 +1252,13 @@ def _copy_to_dir( try: if copy_directory: - dest = os.path.join(dest_dir, os.path.basename(os.path.normpath(src_dir))) + dest = os.path.join(validation.validate_safe_path(dest_dir), validation.validate_safe_path(os.path.basename(os.path.normpath(src_dir)))) dest = shutil.copytree(new_src_dir, dest, dirs_exist_ok=force) # Restoring permissions for preexisting files and subdirectories. for filepath, permissions in original_permissions: mode = "0{0:o}".format(stat.S_IMODE(permissions)) - self.module.set_mode_if_different(os.path.join(dest, filepath), mode, False) + self.module.set_mode_if_different(os.path.join(validation.validate_safe_path(dest), validation.validate_safe_path(filepath)), mode, False) except Exception as err: raise CopyOperationError( msg="Error while copying data to destination directory {0}".format(dest_dir), @@ -1291,7 +1293,9 @@ def _get_changed_files(self, src, dest, copy_directory): files_to_change = [] existing_files = [] for relative_path in files_to_copy: - if os.path.exists(os.path.join(dest, parent_dir, relative_path)): + if os.path.exists( + os.path.join(validation.validate_safe_path(dest), validation.validate_safe_path(parent_dir), validation.validate_safe_path(relative_path)) + ): existing_files.append(relative_path) else: files_to_change.append(relative_path) @@ -1301,7 +1305,9 @@ def _get_changed_files(self, src, dest, copy_directory): files_to_change.extend(existing_files) # Creating tuples with (filename, permissions). original_permissions = [ - (filepath, os.stat(os.path.join(dest, parent_dir, filepath)).st_mode) + (filepath, os.stat( + os.path.join(validation.validate_safe_path(dest), validation.validate_safe_path(parent_dir), validation.validate_safe_path(filepath)) + ).st_mode) for filepath in existing_files ] @@ -1323,11 +1329,11 @@ def _walk_uss_tree(self, dir): for dirpath, subdirs, files in os.walk(".", True): paths += [ - os.path.join(dirpath, subdir).replace("./", "") + os.path.join(validation.validate_safe_path(dirpath), validation.validate_safe_path(subdir)).replace("./", "") for subdir in subdirs ] paths += [ - os.path.join(dirpath, filepath).replace("./", "") + os.path.join(validation.validate_safe_path(dirpath), validation.validate_safe_path(filepath)).replace("./", "") for filepath in files ] @@ -2663,7 +2669,7 @@ def run_module(module, arg_def): # --------------------------------------------------------------------- elif dest_ds_type in data_set.DataSet.MVS_PARTITIONED: if not remote_src and not copy_member and os.path.isdir(temp_path): - temp_path = os.path.join(temp_path, os.path.basename(src)) + temp_path = os.path.join(validation.validate_safe_path(temp_path), validation.validate_safe_path(os.path.basename(src))) pdse_copy_handler = PDSECopyHandler( module, is_binary=is_binary, executable=executable, backup_name=backup_name diff --git a/plugins/modules/zos_fetch.py b/plugins/modules/zos_fetch.py index ca6359c55..d8b15c0d9 100644 --- a/plugins/modules/zos_fetch.py +++ b/plugins/modules/zos_fetch.py @@ -282,6 +282,7 @@ better_arg_parser, data_set, encode, + validation, ) from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import ( MissingZOAUImport, @@ -511,7 +512,7 @@ def _fetch_pdse(self, src, is_binary, encoding=None): root, dirs, files = next(os.walk(dir_path)) try: for file in files: - file_path = os.path.join(root, file) + file_path = os.path.join(validation.validate_safe_path(root), validation.validate_safe_path(file)) enc_utils.uss_convert_encoding( file_path, file_path, from_code_set, to_code_set ) From dd10d0d9065fb27eefcefda11eccba1b1b8cfa97 Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Fri, 6 Oct 2023 15:15:06 -0400 Subject: [PATCH 198/495] Removed 'wait' as an option, and pass wait_arg=true to zoau --- ...nhance-Add-wait-zos-operator-and-query.yml | 4 ++-- plugins/modules/zos_operator.py | 23 +------------------ plugins/modules/zos_operator_action_query.py | 23 ++----------------- 3 files changed, 5 insertions(+), 45 deletions(-) diff --git a/changelogs/fragments/943-enhance-Add-wait-zos-operator-and-query.yml b/changelogs/fragments/943-enhance-Add-wait-zos-operator-and-query.yml index 71e24fc14..dd1829148 100644 --- a/changelogs/fragments/943-enhance-Add-wait-zos-operator-and-query.yml +++ b/changelogs/fragments/943-enhance-Add-wait-zos-operator-and-query.yml @@ -1,6 +1,6 @@ minor_changes: - - zos_operator: Added the 'wait' parameter back in to use the new -w operator. + - zos_operator: Changed system to call 'wait=true' parameter to zoau call. (https://github.com/ansible-collections/ibm_zos_core/pull/976) - - zos_operator_action_query: Add wait_time_s and 'wait' parameters in the operator_action_query. + - zos_operator_action_query: Add wait_time_s parameter in the operator_action_query. (https://github.com/ansible-collections/ibm_zos_core/pull/976) diff --git a/plugins/modules/zos_operator.py b/plugins/modules/zos_operator.py index 35f155e65..ab34aa0cc 100644 --- a/plugins/modules/zos_operator.py +++ b/plugins/modules/zos_operator.py @@ -55,14 +55,6 @@ type: int required: false default: 1 - wait: - description: - - Setting this option will tell the system to wait the full wait_time, instead - of returning on first data received - - This option is only available with zoau 1.2.5 or later - type: bool - required: false - default: false """ EXAMPLES = r""" @@ -83,12 +75,6 @@ zos_operator: cmd: 'd a,all' wait_time_s: 5 - wait: true - -- name: Execute operator command to show jobs, waiting up to 7 seconds for response - zos_operator: - cmd: 'd a,all' - wait_time_s: 7 - name: Display the system symbols and associated substitution texts. zos_operator: @@ -199,7 +185,6 @@ def run_module(): cmd=dict(type="str", required=True), verbose=dict(type="bool", required=False, default=False), wait_time_s=dict(type="int", required=False, default=1), - wait=dict(type="bool", required=False, default=False), ) result = dict(changed=False) @@ -270,7 +255,6 @@ def parse_params(params): cmd=dict(arg_type="str", required=True), verbose=dict(arg_type="bool", required=False), wait_time_s=dict(arg_type="int", required=False), - wait=dict(arg_type="bool", required=False), ) parser = BetterArgParser(arg_defs) new_params = parser.parse_args(params) @@ -299,12 +283,7 @@ def run_operator_command(params): getit = True if getit: - if params.get("wait"): - kwargs.update({"wait_arg": True}) - else: - kwargs.pop("wait_arg", "0") - else: - kwargs.pop("wait_arg", "0") + kwargs.update({"wait_arg": True}) args = [] rc, stdout, stderr, elapsed = execute_command(cmdtxt, timeout=wait_s, *args, **kwargs) diff --git a/plugins/modules/zos_operator_action_query.py b/plugins/modules/zos_operator_action_query.py index 77d130697..877b265e5 100644 --- a/plugins/modules/zos_operator_action_query.py +++ b/plugins/modules/zos_operator_action_query.py @@ -63,21 +63,10 @@ - When set to 0, the system default is used. - This option is helpful on a busy system requiring more time to execute commands. - - Setting I(wait) can instruct if execution should wait the - full I(wait_time_s). - Because 2 functions are called, potential time delay is doubled. type: int required: false default: 1 - wait: - description: - - Setting this option will tell the system to wait the full wait_time, instead - of returning on first data received - - Because 2 functions are called, potential time delay is doubled. - - This option is only available with zoau 1.2.5 or later - type: bool - required: false - default: false message_filter: description: - Return outstanding messages requiring operator action awaiting a @@ -123,7 +112,7 @@ job_name: im5* - name: Display all outstanding messages whose job name begin with im7, - wait up to 10 seconds per call (20 seconds overall) for data + waiting 10 seconds per call (20 seconds overall) for data zos_operator_action_query: job_name: im7* wait_time_s: 10 @@ -133,7 +122,6 @@ zos_operator_action_query: job_name: im9* wait_time_s: 15 - wait: True - name: Display all outstanding messages whose message id begin with dsi* zos_operator_action_query: @@ -275,7 +263,6 @@ def run_module(): message_id=dict(type="str", required=False), job_name=dict(type="str", required=False), wait_time_s=dict(type="int", required=False, default=1), - wait=dict(type="bool", required=False, default=False), message_filter=dict( type="dict", required=False, @@ -306,12 +293,7 @@ def run_module(): getit = True if getit: - if new_params.get("wait"): - kwargs.update({"wait_arg": True}) - else: - kwargs.pop("wait_arg", "0") - else: - kwargs.pop("wait_arg", "0") + kwargs.update({"wait_arg": True}) args = [] @@ -366,7 +348,6 @@ def parse_params(params): message_id=dict(arg_type=message_id_type, required=False), job_name=dict(arg_type=job_name_type, required=False), wait_time_s=dict(arg_type="int", required=False), - wait=dict(arg_type="bool", required=False), message_filter=dict(arg_type=message_filter_type, required=False) ) parser = BetterArgParser(arg_defs) From 9690f487639071576ad31e986ecc1dfafea9a88c Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Sat, 7 Oct 2023 09:49:21 -0600 Subject: [PATCH 199/495] Added latest to allow member copy when disp=shr (#980) * Added latest to allow member copy when disp=shr * Added changelog fragment * Added new force option and test for locked data sets non VSAM * Fixed pep8 issue * Added new option force lock * Modified test case with new option * Added force option * Added doc and warning * Updated changelog fragment * Update 980-zos-copy-disp-shr.yml * Updated changelog fragment * Removed unused comments Signed-off-by: Fernando Flores <fernandofloresdev@gmail.com> * Added message * Added force_lock to all CopyHandlers * Modified test case * Changed use of dataset vs data set --------- Signed-off-by: Fernando Flores <fernandofloresdev@gmail.com> --- .../fragments/980-zos-copy-disp-shr.yml | 5 ++ plugins/action/zos_copy.py | 4 ++ plugins/modules/zos_copy.py | 66 +++++++++++++++---- .../functional/modules/test_zos_copy_func.py | 47 +++++++++---- 4 files changed, 99 insertions(+), 23 deletions(-) create mode 100644 changelogs/fragments/980-zos-copy-disp-shr.yml diff --git a/changelogs/fragments/980-zos-copy-disp-shr.yml b/changelogs/fragments/980-zos-copy-disp-shr.yml new file mode 100644 index 000000000..541e611c1 --- /dev/null +++ b/changelogs/fragments/980-zos-copy-disp-shr.yml @@ -0,0 +1,5 @@ +minor_changes: +- zos_copy - Add new option `force_lock` that can copy into data sets that are + already in use by other processes (DISP=SHR). User needs to use with caution + because this is subject to race conditions and can lead to data loss. + (https://github.com/ansible-collections/ibm_zos_core/pull/980). diff --git a/plugins/action/zos_copy.py b/plugins/action/zos_copy.py index afc454359..6b86d24a3 100644 --- a/plugins/action/zos_copy.py +++ b/plugins/action/zos_copy.py @@ -59,6 +59,7 @@ def run(self, tmp=None, task_vars=None): local_follow = _process_boolean(task_args.get('local_follow'), default=False) remote_src = _process_boolean(task_args.get('remote_src'), default=False) is_binary = _process_boolean(task_args.get('is_binary'), default=False) + force_lock = _process_boolean(task_args.get('force_lock'), default=False) executable = _process_boolean(task_args.get('executable'), default=False) ignore_sftp_stderr = _process_boolean(task_args.get("ignore_sftp_stderr"), default=False) backup_name = task_args.get("backup_name", None) @@ -126,6 +127,9 @@ def run(self, tmp=None, task_vars=None): msg = "Cannot specify 'mode', 'owner' or 'group' for MVS destination" return self._exit_action(result, msg, failed=True) + if force_lock: + display.warning( + msg="Using force_lock uses operations that are subject to race conditions and can lead to data loss, use with caution.") template_dir = None if not remote_src: diff --git a/plugins/modules/zos_copy.py b/plugins/modules/zos_copy.py index 625e2e6b2..073e11688 100644 --- a/plugins/modules/zos_copy.py +++ b/plugins/modules/zos_copy.py @@ -151,6 +151,22 @@ type: bool default: false required: false + force_lock: + description: + - By default, when c(dest) is a MVS data set and is being used by another + process with DISP=SHR or DISP=OLD the module will fail. Use C(force_lock) + to bypass this check and continue with copy. + - If set to C(true) and destination is a MVS data set opened by another + process then zos_copy will try to copy using DISP=SHR. + - Using C(force_lock) uses operations that are subject to race conditions + and can lead to data loss, use with caution. + - If a data set member has aliases, and is not a program + object, copying that member to a dataset that is in use will result in + the aliases not being preserved in the target dataset. When this scenario + occurs the module will fail. + type: bool + default: false + required: false ignore_sftp_stderr: description: - During data transfer through SFTP, the module fails if the SFTP command @@ -778,7 +794,8 @@ def __init__( module, is_binary=False, executable=False, - backup_name=None + backup_name=None, + force_lock=False, ): """Utility class to handle copying data between two targets @@ -793,11 +810,15 @@ def __init__( is executable backup_name {str} -- The USS path or data set name of destination backup + force_lock {str} -- Whether the dest data set should be copied into + using disp=shr when is opened by another + process. """ self.module = module self.is_binary = is_binary self.executable = executable self.backup_name = backup_name + self.force_lock = force_lock def run_command(self, cmd, **kwargs): """ Wrapper for AnsibleModule.run_command """ @@ -824,10 +845,14 @@ def copy_to_seq( """ new_src = conv_path or temp_path or src copy_args = dict() + copy_args["options"] = "" if self.is_binary: copy_args["options"] = "-B" + if self.force_lock: + copy_args["options"] += " -f" + response = datasets._copy(new_src, dest, None, **copy_args) if response.rc != 0: raise CopyOperationError( @@ -847,10 +872,12 @@ def copy_to_vsam(self, src, dest): src {str} -- The name of the source VSAM dest {str} -- The name of the destination VSAM """ + out_dsp = "shr" if self.force_lock else "old" + dds = {"OUT": "{0},{1}".format(dest.upper(), out_dsp)} repro_cmd = """ REPRO - INDATASET('{0}') - - OUTDATASET('{1}')""".format(src.upper(), dest.upper()) - rc, out, err = idcams(repro_cmd, authorized=True) + OUTFILE(OUT)""".format(src.upper()) + rc, out, err = idcams(repro_cmd, dds=dds, authorized=True) if rc != 0: raise CopyOperationError( msg=("IDCAMS REPRO encountered a problem while " @@ -1404,7 +1431,8 @@ def __init__( module, is_binary=False, executable=False, - backup_name=None + backup_name=None, + force_lock=False, ): """ Utility class to handle copying to partitioned data sets or partitioned data set members. @@ -1422,7 +1450,8 @@ def __init__( module, is_binary=is_binary, executable=executable, - backup_name=backup_name + backup_name=backup_name, + force_lock=force_lock, ) def copy_to_pdse( @@ -1543,6 +1572,7 @@ def copy_to_member( src = src.replace("$", "\\$") dest = dest.replace("$", "\\$").upper() opts = dict() + opts["options"] = "" if self.is_binary: opts["options"] = "-B" @@ -1550,6 +1580,9 @@ def copy_to_member( if self.executable: opts["options"] = "-IX" + if self.force_lock: + opts["options"] += " -f" + response = datasets._copy(src, dest, None, **opts) rc, out, err = response.rc, response.stdout_response, response.stderr_response @@ -2234,7 +2267,7 @@ def data_set_locked(dataset_name): dataset_name (str) - the data set name used to check if there is a lock. Returns: - bool -- rue if the data set is locked, or False if the data set is not locked. + bool -- True if the data set is locked, or False if the data set is not locked. """ # Using operator command "D GRS,RES=(*,{dataset_name})" to detect if a data set # is in use, when a data set is in use it will have "EXC/SHR and SHARE" @@ -2294,6 +2327,7 @@ def run_module(module, arg_def): copy_member = module.params.get('copy_member') tmphlq = module.params.get('tmp_hlq') force = module.params.get('force') + force_lock = module.params.get('force_lock') dest_data_set = module.params.get('dest_data_set') if dest_data_set: @@ -2472,10 +2506,11 @@ def run_module(module, arg_def): # for try to write in dest and if both src and dest are in lock. # ******************************************************************** if dest_ds_type != "USS": - is_dest_lock = data_set_locked(dest_name) - if is_dest_lock: - module.fail_json( - msg="Unable to write to dest '{0}' because a task is accessing the data set.".format(dest_name)) + if not force_lock: + is_dest_lock = data_set_locked(dest_name) + if is_dest_lock: + module.fail_json( + msg="Unable to write to dest '{0}' because a task is accessing the data set.".format(dest_name)) # ******************************************************************** # Backup should only be performed if dest is an existing file or # data set. Otherwise ignored. @@ -2590,7 +2625,8 @@ def run_module(module, arg_def): module, is_binary=is_binary, executable=executable, - backup_name=backup_name + backup_name=backup_name, + force_lock=force_lock, ) try: @@ -2672,7 +2708,11 @@ def run_module(module, arg_def): temp_path = os.path.join(validation.validate_safe_path(temp_path), validation.validate_safe_path(os.path.basename(src))) pdse_copy_handler = PDSECopyHandler( - module, is_binary=is_binary, executable=executable, backup_name=backup_name + module, + is_binary=is_binary, + executable=executable, + backup_name=backup_name, + force_lock=force_lock, ) pdse_copy_handler.copy_to_pdse( @@ -2808,6 +2848,7 @@ def main(): src_member=dict(type='bool'), local_charset=dict(type='str'), force=dict(type='bool', default=False), + force_lock=dict(type='bool', default=False), mode=dict(type='str', required=False), tmp_hlq=dict(type='str', required=False, default=None), ), @@ -2827,6 +2868,7 @@ def main(): checksum=dict(arg_type='str', required=False), validate=dict(arg_type='bool', required=False), volume=dict(arg_type='str', required=False), + force_lock=dict(type='bool', default=False), dest_data_set=dict( arg_type='dict', diff --git a/tests/functional/modules/test_zos_copy_func.py b/tests/functional/modules/test_zos_copy_func.py index 2bcf59a21..9c8aa9f9b 100644 --- a/tests/functional/modules/test_zos_copy_func.py +++ b/tests/functional/modules/test_zos_copy_func.py @@ -185,7 +185,7 @@ call_c_jcl="""//PDSELOCK JOB MSGCLASS=A,MSGLEVEL=(1,1),NOTIFY=&SYSUID,REGION=0M //LOCKMEM EXEC PGM=BPXBATCH //STDPARM DD * -SH /tmp/disp_shr/pdse-lock '{0}({1})' +SH /tmp/disp_shr/pdse-lock '{0}' //STDIN DD DUMMY //STDOUT DD SYSOUT=* //STDERR DD SYSOUT=* @@ -1565,22 +1565,30 @@ def test_ensure_copy_file_does_not_change_permission_on_dest(ansible_zos_module, @pytest.mark.seq -def test_copy_dest_lock(ansible_zos_module): +@pytest.mark.parametrize("ds_type", ["PDS", "PDSE", "SEQ"]) +def test_copy_dest_lock(ansible_zos_module, ds_type): DATASET_1 = "USER.PRIVATE.TESTDS" DATASET_2 = "ADMI.PRIVATE.TESTDS" MEMBER_1 = "MEM1" + if ds_type == "PDS" or ds_type == "PDSE": + src_data_set = DATASET_1 + "({0})".format(MEMBER_1) + dest_data_set = DATASET_2 + "({0})".format(MEMBER_1) + else: + src_data_set = DATASET_1 + dest_data_set = DATASET_2 try: hosts = ansible_zos_module hosts.all.zos_data_set(name=DATASET_1, state="present", type="pdse", replace=True) hosts.all.zos_data_set(name=DATASET_2, state="present", type="pdse", replace=True) - hosts.all.zos_data_set(name=DATASET_1 + "({0})".format(MEMBER_1), state="present", type="member", replace=True) - hosts.all.zos_data_set(name=DATASET_2 + "({0})".format(MEMBER_1), state="present", type="member", replace=True) + if ds_type == "PDS" or ds_type == "PDSE": + hosts.all.zos_data_set(name=src_data_set, state="present", type="member", replace=True) + hosts.all.zos_data_set(name=dest_data_set, state="present", type="member", replace=True) # copy text_in source - hosts.all.shell(cmd="echo \"{0}\" > {1}".format(DUMMY_DATA, DATASET_2+"({0})".format(MEMBER_1))) + hosts.all.shell(cmd="decho \"{0}\" \"{1}\"".format(DUMMY_DATA, src_data_set)) # copy/compile c program and copy jcl to hold data set lock for n seconds in background(&) hosts.all.zos_copy(content=c_pgm, dest='/tmp/disp_shr/pdse-lock.c', force=True) hosts.all.zos_copy( - content=call_c_jcl.format(DATASET_1, MEMBER_1), + content=call_c_jcl.format(dest_data_set), dest='/tmp/disp_shr/call_c_pgm.jcl', force=True ) @@ -1590,14 +1598,31 @@ def test_copy_dest_lock(ansible_zos_module): # pause to ensure c code acquires lock time.sleep(5) results = hosts.all.zos_copy( - src = DATASET_2 + "({0})".format(MEMBER_1), - dest = DATASET_1 + "({0})".format(MEMBER_1), - remote_src = True + src = src_data_set, + dest = dest_data_set, + remote_src = True, + force=True, + force_lock=True, ) for result in results.contacted.values(): print(result) - assert result.get("changed") == False - assert result.get("msg") is not None + assert result.get("changed") == True + assert result.get("msg") is None + # verify that the content is the same + verify_copy = hosts.all.shell( + cmd="dcat \"{0}\"".format(dest_data_set), + executable=SHELL_EXECUTABLE, + ) + for vp_result in verify_copy.contacted.values(): + print(vp_result) + verify_copy_2 = hosts.all.shell( + cmd="dcat \"{0}\"".format(src_data_set), + executable=SHELL_EXECUTABLE, + ) + for vp_result_2 in verify_copy_2.contacted.values(): + print(vp_result_2) + assert vp_result_2.get("stdout") == vp_result.get("stdout") + finally: # extract pid ps_list_res = hosts.all.shell(cmd="ps -e | grep -i 'pdse-lock'") From 60250ee2065a361b2a491f2fc98157f473931e53 Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Mon, 9 Oct 2023 14:16:49 -0400 Subject: [PATCH 200/495] Changed operator_action_query to wait=false time = 5 Renamed vague variable name to "use_wait_arg" Reflected changes and 1.2.5 dependancy in the changelog fragment --- ...nhance-Add-wait-zos-operator-and-query.yml | 4 +- plugins/modules/zos_operator.py | 10 ++--- plugins/modules/zos_operator_action_query.py | 40 +++++-------------- 3 files changed, 17 insertions(+), 37 deletions(-) diff --git a/changelogs/fragments/943-enhance-Add-wait-zos-operator-and-query.yml b/changelogs/fragments/943-enhance-Add-wait-zos-operator-and-query.yml index dd1829148..5a8202c34 100644 --- a/changelogs/fragments/943-enhance-Add-wait-zos-operator-and-query.yml +++ b/changelogs/fragments/943-enhance-Add-wait-zos-operator-and-query.yml @@ -1,6 +1,8 @@ minor_changes: - zos_operator: Changed system to call 'wait=true' parameter to zoau call. + Requires zoau 1.2.5 or later. (https://github.com/ansible-collections/ibm_zos_core/pull/976) - - zos_operator_action_query: Add wait_time_s parameter in the operator_action_query. + - zos_operator_action_query: Add a max delay of 5 seconds on each part of the operator_action_query. + Requires zoau 1.2.5 or later. (https://github.com/ansible-collections/ibm_zos_core/pull/976) diff --git a/plugins/modules/zos_operator.py b/plugins/modules/zos_operator.py index ab34aa0cc..2d1fb807f 100644 --- a/plugins/modules/zos_operator.py +++ b/plugins/modules/zos_operator.py @@ -274,15 +274,15 @@ def run_operator_command(params): cmdtxt = params.get("cmd") zv = ZOAU_API_VERSION.split(".") - getit = False + use_wait_arg = False if zv[0] > "1": - getit = True + use_wait_arg = True elif zv[0] == "1" and zv[1] > "2": - getit = True + use_wait_arg = True elif zv[0] == "1" and zv[1] == "2" and zv[2] > "4": - getit = True + use_wait_arg = True - if getit: + if use_wait_arg: kwargs.update({"wait_arg": True}) args = [] diff --git a/plugins/modules/zos_operator_action_query.py b/plugins/modules/zos_operator_action_query.py index 877b265e5..ddf895eb9 100644 --- a/plugins/modules/zos_operator_action_query.py +++ b/plugins/modules/zos_operator_action_query.py @@ -29,6 +29,8 @@ - "Ping Xiao (@xiaoping8385)" - "Demetrios Dimatos (@ddimatos)" - "Ivan Moreno (@rexemin)" + - "Rich Parker (@richp405)" + options: system: description: @@ -57,16 +59,6 @@ - A trailing asterisk, (*) wildcard is supported. type: str required: false - wait_time_s: - description: - - Set maximum time in seconds to wait for the commands to execute. - - When set to 0, the system default is used. - - This option is helpful on a busy system requiring more time to execute - commands. - - Because 2 functions are called, potential time delay is doubled. - type: int - required: false - default: 1 message_filter: description: - Return outstanding messages requiring operator action awaiting a @@ -111,18 +103,6 @@ zos_operator_action_query: job_name: im5* -- name: Display all outstanding messages whose job name begin with im7, - waiting 10 seconds per call (20 seconds overall) for data - zos_operator_action_query: - job_name: im7* - wait_time_s: 10 - -- name: Display all outstanding messages whose job name begin with im9, - wait a full 15 seconds per call (30 seconds overall) for data - zos_operator_action_query: - job_name: im9* - wait_time_s: 15 - - name: Display all outstanding messages whose message id begin with dsi* zos_operator_action_query: message_id: dsi* @@ -262,7 +242,6 @@ def run_module(): system=dict(type="str", required=False), message_id=dict(type="str", required=False), job_name=dict(type="str", required=False), - wait_time_s=dict(type="int", required=False, default=1), message_filter=dict( type="dict", required=False, @@ -281,19 +260,19 @@ def run_module(): kwargs = {} - wait_s = new_params.get("wait_time_s") + wait_s = 5 zv = ZOAU_API_VERSION.split(".") - getit = False + use_wait_arg = False if zv[0] > "1": - getit = True + use_wait_arg = True elif zv[0] == "1" and zv[1] > "2": - getit = True + use_wait_arg = True elif zv[0] == "1" and zv[1] == "2" and zv[2] > "4": - getit = True + use_wait_arg = True - if getit: - kwargs.update({"wait_arg": True}) + if use_wait_arg: + kwargs.update({"wait_arg": False}) args = [] @@ -347,7 +326,6 @@ def parse_params(params): system=dict(arg_type=system_type, required=False), message_id=dict(arg_type=message_id_type, required=False), job_name=dict(arg_type=job_name_type, required=False), - wait_time_s=dict(arg_type="int", required=False), message_filter=dict(arg_type=message_filter_type, required=False) ) parser = BetterArgParser(arg_defs) From 4377ac2eadf269706c259e1eea013b5ea6554314 Mon Sep 17 00:00:00 2001 From: ketankelkar <ktnklkr@gmail.com> Date: Wed, 11 Oct 2023 14:01:34 -0700 Subject: [PATCH 201/495] Enhancement/423/zos copy add data set member alias support (#1014) * add aliases option and enable text-based member copy w alias to an existing pds Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * disable alias included in data set member listing when collecting src members from pds Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * switch 'alias' option introduced in zoau1.2.5 to '-H' flag available in zoau1.2.4. also enable alias copying of executables Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * implement aliases for copy to/from USS, add guard rail for non-executable copy to USS with aliases Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add exception handler for executable PDS copy, handle non-existent library pds for executable USS src, add error message for PDS copy attempt to USS file Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * clean up init functions, break up long lines Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * refactor executable member to member copy for alias work, this commit refactors some helpers which break a select few loadlib tests, but those will be refactored in upcoming comimts Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * refactor and expand test_copy_pds_loadlib_member_to_uss test case to copy to a new loadlib Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add aliases error raised check to text-based pds member copy to uss Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add aliases error raised check to text-based pds member copy to uss Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * parametrize cobol program to pass in custom output string, create helper method around running and validating loadlib pgms, refactor executable tests to use helper method, add helper method to create loadlib w multiple members, add test case for loadlib to loadlib copy w and w/o aliases. Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add test case for copying entire loadlib to uss dir and then to another loadlib. refactor other loadlib test case to reduce loc Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add new pytest markers for aliases and loadlib test cases Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * pull in a sneak preview version of bug #920 addressed in PR #968 which adds LIBRARY as a valid value to the dest_data_set option Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * alter placement of aliases option to go after executable options Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add docs and examples for aliases option Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * minor tweaks to doc Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * addres santiy check issues Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * missed a sanity check issue Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add changelog fragment Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * resolve remaining merge conflicts Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * resolve pep8 style issues Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * cleanup spacing issue in examples docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add updated rst file Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add comments for explaning logic/code flow around full pds copy Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> --- ...4-zos-copy-add-data-set-member-aliases.yml | 5 + docs/source/modules/zos_copy.rst | 22 +- plugins/modules/zos_copy.py | 107 ++- .../functional/modules/test_zos_copy_func.py | 766 ++++++++++++++++-- tests/pytest.ini | 4 +- 5 files changed, 805 insertions(+), 99 deletions(-) create mode 100644 changelogs/fragments/1014-zos-copy-add-data-set-member-aliases.yml diff --git a/changelogs/fragments/1014-zos-copy-add-data-set-member-aliases.yml b/changelogs/fragments/1014-zos-copy-add-data-set-member-aliases.yml new file mode 100644 index 000000000..4122ea878 --- /dev/null +++ b/changelogs/fragments/1014-zos-copy-add-data-set-member-aliases.yml @@ -0,0 +1,5 @@ +minor_changes: +- zos_copy - introduces a new option 'aliases' to enable preservation of member aliases + when copying data to partitioned data sets (PDS) destinations from USS or other PDS sources. + Copying aliases of text based members to/from USS is not supported. + (https://github.com/ansible-collections/ibm_zos_core/pull/1014) \ No newline at end of file diff --git a/docs/source/modules/zos_copy.rst b/docs/source/modules/zos_copy.rst index 71cd094fc..191570bae 100644 --- a/docs/source/modules/zos_copy.rst +++ b/docs/source/modules/zos_copy.rst @@ -171,6 +171,17 @@ executable | **type**: bool +aliases + If set to ``true``, indicates that any aliases found in the source (USS file, USS dir, PDS/E library or member) are to be preserved during the copy operation. + + Aliases are implicitly preserved when libraries are copied over to USS destinations. That is, when ``executable=True`` and ``dest`` is a USS file or directory, this option will be ignored. + + Copying of aliases for text-based data sets from USS sources or to USS destinations is not currently supported. + + | **required**: False + | **type**: bool + + local_follow This flag indicates that any existing filesystem links in the source tree should be followed. @@ -687,12 +698,21 @@ Examples record_format: VB record_length: 150 - - name: Copy a Program Object on remote system to a new PDSE member MYCOBOL. + - name: Copy a Program Object and its aliases on a remote system to a new PDSE member MYCOBOL zos_copy: src: HLQ.COBOLSRC.PDSE(TESTPGM) dest: HLQ.NEW.PDSE(MYCOBOL) remote_src: true executable: true + aliases: true + + - name: Copy a Load Library from a USS directory /home/loadlib to a new PDSE + zos_copy: + src: '/home/loadlib/' + dest: HLQ.LOADLIB.NEW + remote_src: true + executable: true + aliases: true diff --git a/plugins/modules/zos_copy.py b/plugins/modules/zos_copy.py index 10b35ea22..c671d87a0 100644 --- a/plugins/modules/zos_copy.py +++ b/plugins/modules/zos_copy.py @@ -203,6 +203,16 @@ type: bool default: false required: false + aliases: + description: + - If set to C(true), indicates that any aliases found in the source + (USS file, USS dir, PDS/E library or member) are to be preserved during the copy operation. + - Aliases are implicitly preserved when libraries are copied over to USS destinations. + That is, when C(executable=True) and C(dest) is a USS file or directory, this option will be ignored. + - Copying of aliases for text-based data sets from USS sources or to USS destinations is not currently supported. + type: bool + default: false + required: false local_follow: description: - This flag indicates that any existing filesystem links in the source tree @@ -591,12 +601,21 @@ record_format: VB record_length: 150 -- name: Copy a Program Object on remote system to a new PDSE member MYCOBOL. +- name: Copy a Program Object and its aliases on a remote system to a new PDSE member MYCOBOL zos_copy: src: HLQ.COBOLSRC.PDSE(TESTPGM) dest: HLQ.NEW.PDSE(MYCOBOL) remote_src: true executable: true + aliases: true + +- name: Copy a Load Library from a USS directory /home/loadlib to a new PDSE + zos_copy: + src: '/home/loadlib/' + dest: HLQ.LOADLIB.NEW + remote_src: true + executable: true + aliases: true """ RETURN = r""" @@ -795,6 +814,7 @@ def __init__( module, is_binary=False, executable=False, + aliases=False, backup_name=None, force_lock=False, ): @@ -818,6 +838,7 @@ def __init__( self.module = module self.is_binary = is_binary self.executable = executable + self.aliases = aliases self.backup_name = backup_name self.force_lock = force_lock @@ -1097,6 +1118,7 @@ def __init__( module, is_binary=False, executable=False, + aliases=False, common_file_args=None, backup_name=None, ): @@ -1114,7 +1136,7 @@ def __init__( backup_name {str} -- The USS path or data set name of destination backup """ super().__init__( - module, is_binary=is_binary, executable=executable, backup_name=backup_name + module, is_binary=is_binary, executable=executable, aliases=aliases, backup_name=backup_name ) self.common_file_args = common_file_args @@ -1149,6 +1171,7 @@ def copy_to_uss( self._mvs_copy_to_uss( src, dest, src_ds_type, src_member, member_name=member_name ) + if self.executable: status = os.stat(dest) os.chmod(dest, status.st_mode | stat.S_IEXEC) @@ -1393,6 +1416,7 @@ def _mvs_copy_to_uss( Keyword Arguments: member_name {str} -- The name of the source data set member """ + if os.path.isdir(dest): # If source is a data set member, destination file should have # the same name as the member. @@ -1403,9 +1427,10 @@ def _mvs_copy_to_uss( os.mkdir(dest) except FileExistsError: pass + opts = dict() if self.executable: - opts["options"] = "-IX" + opts["options"] = "-IX " try: if src_member or src_ds_type in data_set.DataSet.MVS_SEQ: @@ -1421,7 +1446,17 @@ def _mvs_copy_to_uss( stderr=response.stderr_response ) else: - copy.copy_pds2uss(src, dest, is_binary=self.is_binary) + if self.executable: + response = datasets._copy(src, dest, None, **opts) + if response.rc != 0: + raise CopyOperationError( + msg="Error while copying source {0} to {1}".format(src, dest), + rc=response.rc, + stdout=response.stdout_response, + stderr=response.stderr_response + ) + else: + copy.copy_pds2uss(src, dest, is_binary=self.is_binary) except Exception as err: raise CopyOperationError(msg=str(err)) @@ -1432,6 +1467,7 @@ def __init__( module, is_binary=False, executable=False, + aliases=False, backup_name=None, force_lock=False, ): @@ -1451,6 +1487,7 @@ def __init__( module, is_binary=is_binary, executable=executable, + aliases=aliases, backup_name=backup_name, force_lock=force_lock, ) @@ -1516,7 +1553,13 @@ def copy_to_pdse( if src_member: members.append(data_set.extract_member_name(new_src)) else: - members = datasets.list_members(new_src) + # The 'members' variable below is used to store a list of members in the src PDS/E. + # Items in the list are passed to the copy_to_member function. + # Aliases are included in the output by list_members unless the alias option is disabled. + # The logic for preserving/copying aliases is contained in the copy_to_member function. + opts = {} + opts['options'] = '-H ' # mls option to hide aliases + members = datasets.list_members(new_src, **opts) src_members = ["{0}({1})".format(src_data_set_name, member) for member in members] dest_members = [ @@ -1525,7 +1568,7 @@ def copy_to_pdse( for member in members ] - existing_members = datasets.list_members(dest) + existing_members = datasets.list_members(dest) # fyi - this list includes aliases overwritten_members = [] new_members = [] @@ -1578,8 +1621,14 @@ def copy_to_member( if self.is_binary: opts["options"] = "-B" + if self.aliases and not self.executable: + # lower case 'i' for text-based copy (dcp) + opts["options"] = "-i" + if self.executable: - opts["options"] = "-IX" + opts["options"] = "-X" + if self.aliases: + opts["options"] = "-IX" if self.force_lock: opts["options"] += " -f" @@ -1817,6 +1866,7 @@ def is_compatible( Returns: {bool} -- Whether src can be copied to dest. """ + # ******************************************************************** # If the destination does not exist, then obviously it will need # to be created. As a result, target is compatible. @@ -2194,7 +2244,17 @@ def allocate_destination_data_set( # TODO: decide on whether to compute the longest file record length and use that for the whole PDSE. size = sum(os.stat("{0}/{1}".format(src, member)).st_size for member in os.listdir(src)) # This PDSE will be created with record format VB and a record length of 1028. - dest_params = get_data_set_attributes(dest, size, is_binary, type="PDSE", volume=volume) + + if executable: + dest_params = get_data_set_attributes( + dest, size, is_binary, + record_format='U', + record_length=0, + type="LIBRARY", + volume=volume + ) + else: + dest_params = get_data_set_attributes(dest, size, is_binary, type="PDSE", volume=volume) data_set.DataSet.ensure_present(replace=force, **dest_params) elif dest_ds_type in data_set.DataSet.MVS_VSAM: @@ -2311,6 +2371,7 @@ def run_module(module, arg_def): remote_src = module.params.get('remote_src') is_binary = module.params.get('is_binary') executable = module.params.get('executable') + aliases = module.params.get('aliases') backup = module.params.get('backup') backup_name = module.params.get('backup_name') validate = module.params.get('validate') @@ -2502,7 +2563,7 @@ def run_module(module, arg_def): ) # ******************************************************************** - # To validate the source and dest are not lock in a batch process by + # To validate the source and dest are not locked in a batch process by # the machine and not generate a false positive check the disposition # for try to write in dest and if both src and dest are in lock. # ******************************************************************** @@ -2512,6 +2573,29 @@ def run_module(module, arg_def): if is_dest_lock: module.fail_json( msg="Unable to write to dest '{0}' because a task is accessing the data set.".format(dest_name)) + + # ******************************************************************** + # Alias support is not avaiable to and from USS for text-based data sets. + # ******************************************************************** + if aliases: + if (src_ds_type == 'USS' or dest_ds_type == 'USS') and not executable: + module.fail_json( + msg="Alias support for text-based data sets is not available " + + "for USS sources (src) or targets (dest). " + + "Try setting executable=True or aliases=False." + ) + + # ******************************************************************** + # Attempt to write PDS (not member) to USS file (i.e. a non-directory) + # ******************************************************************** + if ( + src_ds_type in data_set.DataSet.MVS_PARTITIONED and not src_member + and dest_ds_type == 'USS' and not os.path.isdir(dest) + ): + module.fail_json( + msg="Cannot write a partitioned data set (PDS) to a USS file." + ) + # ******************************************************************** # Backup should only be performed if dest is an existing file or # data set. Otherwise ignored. @@ -2523,6 +2607,7 @@ def run_module(module, arg_def): res_args["note"] = "Destination is empty, backup request ignored" else: backup_name = backup_data(dest, dest_ds_type, backup_name, tmphlq) + # ******************************************************************** # If destination does not exist, it must be created. To determine # what type of data set destination must be, a couple of simple checks @@ -2646,6 +2731,7 @@ def run_module(module, arg_def): module, is_binary=is_binary, executable=executable, + aliases=aliases, common_file_args=dict(mode=mode, group=group, owner=owner), backup_name=backup_name, ) @@ -2712,6 +2798,7 @@ def run_module(module, arg_def): module, is_binary=is_binary, executable=executable, + aliases=aliases, backup_name=backup_name, force_lock=force_lock, ) @@ -2759,6 +2846,7 @@ def main(): dest=dict(required=True, type='str'), is_binary=dict(type='bool', default=False), executable=dict(type='bool', default=False), + aliases=dict(type='bool', default=False, required=False), encoding=dict( type='dict', required=False, @@ -2861,6 +2949,7 @@ def main(): dest=dict(arg_type='data_set_or_path', required=True), is_binary=dict(arg_type='bool', required=False, default=False), executable=dict(arg_type='bool', required=False, default=False), + aliases=dict(arg_type='bool', required=False, default=False), content=dict(arg_type='str', required=False), backup=dict(arg_type='bool', default=False, required=False), backup_name=dict(arg_type='data_set_or_path', required=False), diff --git a/tests/functional/modules/test_zos_copy_func.py b/tests/functional/modules/test_zos_copy_func.py index 9c8aa9f9b..1fa6397e2 100644 --- a/tests/functional/modules/test_zos_copy_func.py +++ b/tests/functional/modules/test_zos_copy_func.py @@ -97,21 +97,33 @@ TEST_PDSE = "SYS1.NFSLIBE" TEST_PDSE_MEMBER = "SYS1.NFSLIBE(GFSAMAIN)" +COBOL_PRINT_STR = "HELLO WORLD ONE" +COBOL_PRINT_STR2 = "HELLO WORLD TWO" + COBOL_SRC = """ IDENTIFICATION DIVISION.\n PROGRAM-ID. HELLOWRD.\n \n PROCEDURE DIVISION.\n - DISPLAY "SIMPLE HELLO WORLD".\n + DISPLAY "{0}".\n STOP RUN.\n """ + + + +# format params for LINK_JCL: +# {0} - cobol src pds dsn +# {1} - cobol src pds member +# {2} - candidate loadlib dsn +# {3} - candidate loadlib member +# {4} - alias member name LINK_JCL = """ //COMPLINK JOB MSGCLASS=H,MSGLEVEL=(1,1),NOTIFY=&SYSUID,REGION=0M //STEP1 EXEC PGM=IGYCRCTL //STEPLIB DD DSN=IGYV5R10.SIGYCOMP,DISP=SHR // DD DSN=IGYV5R10.SIGYMAC,DISP=SHR -//SYSIN DD DISP=SHR,DSN={0} +//SYSIN DD DISP=SHR,DSN={0}({1}) //SYSPRINT DD SYSOUT=* //SYSLIN DD UNIT=SYSDA,DISP=(MOD), // SPACE=(CYL,(1,1)), @@ -138,15 +150,18 @@ //SYSPRINT DD SYSOUT=* //SYSLIB DD DSN=CEE.SCEELKED,DISP=SHR // DD DSN=CEE.SCEELKEX,DISP=SHR -//SYSLMOD DD DSN={1}, +//SYSLMOD DD DSN={2}({3}), // DISP=SHR //SYSUT1 DD UNIT=SYSDA,DCB=BLKSIZE=1024, // SPACE=(TRK,(3,3)) //SYSTERM DD SYSOUT=* //SYSPRINT DD SYSOUT=* //SYSLIN DD DSN=&&LOADSET,DISP=(OLD,KEEP) -//SYSIN DD DUMMY +// DD * + ALIAS {4} + NAME {3} //* +//SYSIN DD DUMMY """ @@ -295,54 +310,87 @@ def create_vsam_data_set(hosts, name, ds_type, add_data=False, key_length=None, hosts.all.file(path=record_src, state="absent") -def link_loadlib_from_cobol(hosts, ds_name, cobol_pds): +def validate_loadlib_pgm(hosts, steplib, pgm_name, expected_output_str): + + mvscmd_str = "mvscmd --steplib='{0}' --pgm='{1}' --sysout='*' --sysprint='*'" + verify_copy_exec_pgm = hosts.all.shell( + cmd=mvscmd_str.format(steplib, pgm_name) + ) + + for v_cp_pgm in verify_copy_exec_pgm.contacted.values(): + assert v_cp_pgm.get("rc") == 0 + assert v_cp_pgm.get("stdout").strip() == expected_output_str + + +def link_loadlib_from_cobol(hosts, cobol_src_pds, cobol_src_mem, loadlib_pds, loadlib_mem, loadlib_alias_mem='ALIAS1'): """ - Given a PDSE, links a cobol program making allocated in a temp ds resulting in ds_name - as a loadlib. + Given a PDSE, links a cobol program (allocated in a temp ds) resulting in a loadlib. Arguments: - ds_name (str) -- PDS/E to be linked with the cobol program. - cobol_src (str) -- Cobol source code to be used as the program. - - Notes: PDS names are in the format of SOME.PDSNAME(MEMBER) + cobol_src_pds (str) - cobol src pds dsn containing members containing cobol src code. + cobol_src_mem (str) - cobol src pds member containing cobol src code. + loadlib_pds (str) - candidate loadlib dsn + loadlib_mem (str) - candidate loadlib member + loadlib_alias_mem (str) - alias member name """ - # Copy the Link program - temp_jcl = "/tmp/link.jcl" + temp_jcl_uss_path = "/tmp/link.jcl" rc = 0 try: + # Copy over the Link program to USS cp_res = hosts.all.zos_copy( - content=LINK_JCL.format(cobol_pds, ds_name), - dest="/tmp/link.jcl", + content=LINK_JCL.format(cobol_src_pds, cobol_src_mem, loadlib_pds, loadlib_mem, loadlib_alias_mem), + dest=temp_jcl_uss_path, force=True, ) - # Link the temp ds with ds_name + # Submit link JCL. job_result = hosts.all.zos_job_submit( src="/tmp/link.jcl", location="USS", wait_time_s=60 ) for result in job_result.contacted.values(): - #print("link job submit result {0}".format(result)) rc = result.get("jobs")[0].get("ret_code").get("code") finally: - hosts.all.file(path=temp_jcl, state="absent") + hosts.all.file(path=temp_jcl_uss_path, state="absent") return rc -def generate_executable_ds(hosts, src, dest, cobol): - member = "HELLOSRC" - hosts.all.zos_copy(content=COBOL_SRC, dest=cobol) - dest_name = "{0}({1})".format(dest, member) - src_name = "{0}({1})".format(src, member) - rc = link_loadlib_from_cobol(hosts, dest_name, cobol) - assert rc == 0 - cmd = "mvscmd --pgm={0} --steplib={1} --sysprint=* --stderr=* --stdout=*" - hosts.all.shell(cmd=cmd.format(member, dest)) - rc = link_loadlib_from_cobol(hosts, src_name, cobol) - hosts.all.shell(cmd=cmd.format(member, src)) - assert rc == 0 - exec_res = hosts.all.shell(cmd=cmd.format(member, src)) - for result in exec_res.contacted.values(): - assert result.get("rc") == 0 + +def generate_executable_ds(hosts, cobol_src_pds, cobol_src_mem, loadlib_pds, loadlib_mem, loadlib_alias_mem="ALIAS1"): + + # copy COBOL src string to pds. + hosts.all.zos_copy(content=COBOL_SRC.format(COBOL_PRINT_STR), dest='{0}({1})'.format(cobol_src_pds, cobol_src_mem)) + + # run link-edit to create loadlib. + link_rc = link_loadlib_from_cobol(hosts, cobol_src_pds, cobol_src_mem, loadlib_pds, loadlib_mem, loadlib_alias_mem) + assert link_rc == 0 + + # execute pgm to test loadlib + validate_loadlib_pgm(hosts, steplib=loadlib_pds, pgm_name=loadlib_mem, expected_output_str=COBOL_PRINT_STR) + validate_loadlib_pgm(hosts, steplib=loadlib_pds, pgm_name=loadlib_alias_mem, expected_output_str=COBOL_PRINT_STR) + + +def generate_loadlib(hosts, cobol_src_pds, cobol_src_mems, loadlib_pds, loadlib_mems, loadlib_alias_mems): + # copy cobol src + hosts.all.zos_copy(content=COBOL_SRC.format(COBOL_PRINT_STR), dest='{0}({1})'.format(cobol_src_pds, cobol_src_mems[0])) + # copy cobol2 src + hosts.all.zos_copy(content=COBOL_SRC.format(COBOL_PRINT_STR2), dest='{0}({1})'.format(cobol_src_pds, cobol_src_mems[1])) + + # run link-edit for pgm1 + link_rc = link_loadlib_from_cobol(hosts, cobol_src_pds, cobol_src_mems[0], loadlib_pds, loadlib_mems[0], loadlib_alias_mems[0]) + assert link_rc == 0 + # run link-edit for pgm2 + link_rc = link_loadlib_from_cobol(hosts, cobol_src_pds, cobol_src_mems[1], loadlib_pds, loadlib_mems[1], loadlib_alias_mems[1]) + assert link_rc == 0 + + # execute pgm to test pgm1 + validate_loadlib_pgm(hosts, steplib=loadlib_pds, pgm_name=loadlib_mems[0], expected_output_str=COBOL_PRINT_STR) + # execute pgm to test alias of pgm1 + validate_loadlib_pgm(hosts, steplib=loadlib_pds, pgm_name=loadlib_alias_mems[0], expected_output_str=COBOL_PRINT_STR) + # execute pgm to test pgm2 + validate_loadlib_pgm(hosts, steplib=loadlib_pds, pgm_name=loadlib_mems[1], expected_output_str=COBOL_PRINT_STR2) + # execute pgm to test alias of pgm2 + validate_loadlib_pgm(hosts, steplib=loadlib_pds, pgm_name=loadlib_alias_mems[1], expected_output_str=COBOL_PRINT_STR2) + def generate_executable_uss(hosts, src, src_jcl_call): hosts.all.zos_copy(content=hello_world, dest=src, force=True) @@ -355,6 +403,7 @@ def generate_executable_uss(hosts, src, src_jcl_call): stdout = res.get("stdout") assert "Hello World" in str(stdout) + @pytest.mark.uss @pytest.mark.parametrize("src", [ dict(src="/etc/profile", is_file=True, is_binary=False, is_remote=False), @@ -2558,18 +2607,34 @@ def test_copy_pds_to_existing_pds(ansible_zos_module, args): @pytest.mark.pdse +@pytest.mark.loadlib +@pytest.mark.aliases @pytest.mark.parametrize("is_created", ["true", "false"]) def test_copy_pds_loadlib_member_to_pds_loadlib_member(ansible_zos_module, is_created): hosts = ansible_zos_module - # The volume for this dataset should use a system symbol. # This dataset and member should be available on any z/OS system. - src = "USER.LOAD.SRC" - dest = "USER.LOAD.DEST" - cobol_pds = "USER.COBOL.SRC" - dest_exe = "USER.LOAD.EXE" + cobol_src_pds = "USER.COBOL.SRC" + cobol_src_mem = "HELLOCBL" + src_lib = "USER.LOAD.SRC" + dest_lib = "USER.LOAD.DEST" + dest_lib_aliases = "USER.LOAD.DEST.ALIASES" + pgm_mem = "HELLO" + pgm_mem_alias = "ALIAS1" try: + # allocate pds for cobol src code + hosts.all.zos_data_set( + name=cobol_src_pds, + state="present", + type="pds", + space_primary=2, + record_format="FB", + record_length=80, + block_size=3120, + replace=True, + ) + # allocate pds for src loadlib hosts.all.zos_data_set( - name=src, + name=src_lib, state="present", type="pdse", record_format="U", @@ -2579,8 +2644,124 @@ def test_copy_pds_loadlib_member_to_pds_loadlib_member(ansible_zos_module, is_cr space_type="M", replace=True ) + + # generate loadlib into src_pds + generate_executable_ds(hosts, cobol_src_pds, cobol_src_mem, src_lib, pgm_mem, pgm_mem_alias) + + # tests existent/non-existent destination data set code path. + if not is_created: + # ensure dest data sets NOT present + hosts.all.zos_data_set(name=dest_lib, state="absent") + hosts.all.zos_data_set(name=dest_lib_aliases, state="absent") + else: + # pre-allocate dest loadlib to copy over without an alias. + hosts.all.zos_data_set( + name=dest_lib, + state="present", + type="pdse", + record_format="U", + record_length=0, + block_size=32760, + space_primary=2, + space_type="M", + replace=True + ) + # pre-allocate dest loadlib to copy over with an alias. + hosts.all.zos_data_set( + name=dest_lib_aliases, + state="present", + type="pdse", + record_format="U", + record_length=0, + block_size=32760, + space_primary=2, + space_type="M", + replace=True + ) + + # zos_copy w an executable: + copy_res = hosts.all.zos_copy( + src="{0}({1})".format(src_lib, pgm_mem), + dest="{0}({1})".format(dest_lib, pgm_mem), + remote_src=True, + executable=True, + aliases=False + ) + # zos_copy w an executables and its alias: + copy_res_aliases = hosts.all.zos_copy( + src="{0}({1})".format(src_lib, pgm_mem), + dest="{0}({1})".format(dest_lib_aliases, pgm_mem), + remote_src=True, + executable=True, + aliases=True + ) + + for result in copy_res.contacted.values(): + assert result.get("msg") is None + assert result.get("changed") is True + assert result.get("dest") == "{0}({1})".format(dest_lib, pgm_mem) + + for result in copy_res_aliases.contacted.values(): + assert result.get("msg") is None + assert result.get("changed") is True + assert result.get("dest") == "{0}({1})".format(dest_lib_aliases, pgm_mem) + + # check ALIAS keyword and name in mls output + verify_copy_mls = hosts.all.shell( + cmd="mls {0}".format(dest_lib), + executable=SHELL_EXECUTABLE + ) + verify_copy_mls_aliases = hosts.all.shell( + cmd="mls {0}".format(dest_lib_aliases), + executable=SHELL_EXECUTABLE + ) + + for v_cp in verify_copy_mls.contacted.values(): + assert v_cp.get("rc") == 0 + stdout = v_cp.get("stdout") + assert stdout is not None + mls_alias_str = "ALIAS({0})".format(pgm_mem_alias) + assert mls_alias_str not in stdout + + for v_cp in verify_copy_mls_aliases.contacted.values(): + assert v_cp.get("rc") == 0 + stdout = v_cp.get("stdout") + assert stdout is not None + expected_mls_str = "{0} ALIAS({1})".format(pgm_mem, pgm_mem_alias) + assert expected_mls_str in stdout + + # execute pgms to validate copy + validate_loadlib_pgm(hosts, steplib=dest_lib, pgm_name=pgm_mem, expected_output_str=COBOL_PRINT_STR) + validate_loadlib_pgm(hosts, steplib=dest_lib_aliases, pgm_name=pgm_mem, expected_output_str=COBOL_PRINT_STR) + validate_loadlib_pgm(hosts, steplib=dest_lib_aliases, pgm_name=pgm_mem_alias, expected_output_str=COBOL_PRINT_STR) + + finally: + hosts.all.zos_data_set(name=cobol_src_pds, state="absent") + hosts.all.zos_data_set(name=src_lib, state="absent") + hosts.all.zos_data_set(name=dest_lib, state="absent") + hosts.all.zos_data_set(name=dest_lib_aliases, state="absent") + +@pytest.mark.pdse +@pytest.mark.loadlib +@pytest.mark.aliases +@pytest.mark.uss +def test_copy_pds_loadlib_member_to_uss_to_loadlib(ansible_zos_module): + hosts = ansible_zos_module + + cobol_src_pds = "USER.COBOL.SRC" + cobol_src_mem = "HELLOCBL" + src_lib = "USER.LOAD.SRC" + dest_lib = "USER.LOAD.DEST" + pgm_mem = "HELLO" + + dest_lib_aliases = "USER.LOAD.DEST.ALIASES" + pgm_mem_alias = "ALIAS1" + + uss_dest = "/tmp/HELLO" + try: + # allocate data sets hosts.all.zos_data_set( - name=dest, + name=src_lib, state="present", type="pdse", record_format="U", @@ -2591,7 +2772,7 @@ def test_copy_pds_loadlib_member_to_pds_loadlib_member(ansible_zos_module, is_cr replace=True ) hosts.all.zos_data_set( - name=cobol_pds, + name=cobol_src_pds, state="present", type="pds", space_primary=2, @@ -2600,12 +2781,190 @@ def test_copy_pds_loadlib_member_to_pds_loadlib_member(ansible_zos_module, is_cr block_size=3120, replace=True, ) - member = "HELLOSRC" - cobol_pds = "{0}({1})".format(cobol_pds, member) - generate_executable_ds(hosts, src, dest, cobol_pds) - if is_created: + hosts.all.zos_data_set( + name=dest_lib, + state="present", + type="pdse", + record_format="U", + record_length=0, + block_size=32760, + space_primary=2, + space_type="M", + replace=True + ) + hosts.all.zos_data_set( + name=dest_lib_aliases, + state="present", + type="pdse", + record_format="U", + record_length=0, + block_size=32760, + space_primary=2, + space_type="M", + replace=True + ) + + # generate loadlib into src_pds + generate_executable_ds(hosts, cobol_src_pds, cobol_src_mem, src_lib, pgm_mem, pgm_mem_alias) + + # zos_copy an executable to USS file: + copy_uss_res = hosts.all.zos_copy( + src="{0}({1})".format(src_lib, pgm_mem), + dest=uss_dest, + remote_src=True, + executable=True, + force=True) + for result in copy_uss_res.contacted.values(): + assert result.get("msg") is None + assert result.get("changed") is True + + # run executable on USS + verify_exe_uss = hosts.all.shell( + cmd="{0}".format(uss_dest) + ) + for v_cp_u in verify_exe_uss.contacted.values(): + assert v_cp_u.get("rc") == 0 + assert COBOL_PRINT_STR == v_cp_u.get("stdout").strip() + + + # zos_copy from USS file w an executable: + copy_res = hosts.all.zos_copy( + src="{0}".format(uss_dest), + dest="{0}({1})".format(dest_lib, pgm_mem), + remote_src=True, + executable=True, + aliases=False + ) + # zos_copy from USS file w an executables and its alias: + copy_res_aliases = hosts.all.zos_copy( + src="{0}".format(uss_dest), + dest="{0}({1})".format(dest_lib_aliases, pgm_mem), + remote_src=True, + executable=True, + aliases=True + ) + + for result in copy_res.contacted.values(): + assert result.get("msg") is None + assert result.get("changed") is True + assert result.get("dest") == "{0}({1})".format(dest_lib, pgm_mem) + for result in copy_res_aliases.contacted.values(): + assert result.get("msg") is None + assert result.get("changed") is True + assert result.get("dest") == "{0}({1})".format(dest_lib_aliases, pgm_mem) + + # check ALIAS keyword and name in mls output + verify_copy_mls = hosts.all.shell( + cmd="mls {0}".format(dest_lib), + executable=SHELL_EXECUTABLE + ) + verify_copy_mls_aliases = hosts.all.shell( + cmd="mls {0}".format(dest_lib_aliases), + executable=SHELL_EXECUTABLE + ) + + for v_cp in verify_copy_mls.contacted.values(): + assert v_cp.get("rc") == 0 + stdout = v_cp.get("stdout") + assert stdout is not None + mls_alias_str = "ALIAS({0})".format(pgm_mem_alias) + assert mls_alias_str not in stdout + + for v_cp in verify_copy_mls_aliases.contacted.values(): + assert v_cp.get("rc") == 0 + stdout = v_cp.get("stdout") + assert stdout is not None + expected_mls_str = "{0} ALIAS({1})".format(pgm_mem, pgm_mem_alias) + assert expected_mls_str in stdout + + # execute pgms to validate copy + validate_loadlib_pgm(hosts, steplib=dest_lib, pgm_name=pgm_mem, expected_output_str=COBOL_PRINT_STR) + validate_loadlib_pgm(hosts, steplib=dest_lib, pgm_name=pgm_mem, expected_output_str=COBOL_PRINT_STR) + validate_loadlib_pgm(hosts, steplib=dest_lib_aliases, pgm_name=pgm_mem_alias, expected_output_str=COBOL_PRINT_STR) + + finally: + hosts.all.zos_data_set(name=cobol_src_pds, state="absent") + hosts.all.zos_data_set(name=src_lib, state="absent") + hosts.all.zos_data_set(name=dest_lib, state="absent") + hosts.all.zos_data_set(name=dest_lib_aliases, state="absent") + hosts.all.file(name=uss_dest, state="absent") + + +@pytest.mark.pdse +@pytest.mark.loadlib +@pytest.mark.aliases +@pytest.mark.parametrize("is_created", ["false", "true"]) +def test_copy_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): + + hosts = ansible_zos_module + + cobol_src_pds = "USER.COBOL.SRC" + cobol_src_mem = "HELLOCBL" + cobol_src_mem2 = "HICBL2" + src_lib = "USER.LOAD.SRC" + dest_lib = "USER.LOAD.DEST" + dest_lib_aliases = "USER.LOAD.DEST.ALIASES" + pgm_mem = "HELLO" + pgm2_mem = "HELLO2" + pgm_mem_alias = "ALIAS1" + pgm2_mem_alias = "ALIAS2" + + + try: + # allocate pds for cobol src code + hosts.all.zos_data_set( + name=cobol_src_pds, + state="present", + type="pds", + space_primary=2, + record_format="FB", + record_length=80, + block_size=3120, + replace=True, + ) + # allocate pds for src loadlib + hosts.all.zos_data_set( + name=src_lib, + state="present", + type="pdse", + record_format="U", + record_length=0, + block_size=32760, + space_primary=2, + space_type="M", + replace=True + ) + + # generate loadlib w 2 members w 1 alias each + generate_loadlib( + hosts=hosts, + cobol_src_pds=cobol_src_pds, + cobol_src_mems=[cobol_src_mem, cobol_src_mem2], + loadlib_pds=src_lib, + loadlib_mems=[pgm_mem, pgm2_mem], + loadlib_alias_mems=[pgm_mem_alias, pgm2_mem_alias] + ) + + if not is_created: + # ensure dest data sets absent for this variation of the test case. + hosts.all.zos_data_set(name=dest_lib, state="absent") + hosts.all.zos_data_set(name=dest_lib_aliases, state="absent") + else: + # allocate dest loadlib to copy over without an alias. hosts.all.zos_data_set( - name=dest_exe, + name=dest_lib, + state="present", + type="pdse", + record_format="U", + record_length=0, + block_size=32760, + space_primary=2, + space_type="M", + replace=True + ) + # allocate dest loadlib to copy over with an alias. + hosts.all.zos_data_set( + name=dest_lib_aliases, state="present", type="pdse", record_format="U", @@ -2615,42 +2974,153 @@ def test_copy_pds_loadlib_member_to_pds_loadlib_member(ansible_zos_module, is_cr space_type="M", replace=True ) - copy_res = hosts.all.zos_copy( - src="{0}({1})".format(src, member), - dest="{0}({1})".format(dest_exe, "MEM1"), - remote_src=True, - executable=True) - verify_copy = hosts.all.shell( - cmd="mls {0}".format(dest_exe), - executable=SHELL_EXECUTABLE - ) + if not is_created: + # dest data set does not exist, specify it in dest_dataset param. + # copy src loadlib to dest library pds w/o aliases + copy_res = hosts.all.zos_copy( + src="{0}".format(src_lib), + dest="{0}".format(dest_lib), + remote_src=True, + executable=True, + aliases=False, + dest_data_set={ + 'type': "LIBRARY", + 'record_format': "U", + 'record_length': 0, + 'block_size': 32760, + 'space_primary': 2, + 'space_type': "M", + } + ) + # copy src loadlib to dest library pds w aliases + copy_res_aliases = hosts.all.zos_copy( + src="{0}".format(src_lib), + dest="{0}".format(dest_lib_aliases), + remote_src=True, + executable=True, + aliases=True, + dest_data_set={ + 'type': "LIBRARY", + 'record_format': "U", + 'record_length': 0, + 'block_size': 32760, + 'space_primary': 2, + 'space_type': "M", + } + ) + + else: + # copy src loadlib to dest library pds w/o aliases + copy_res = hosts.all.zos_copy( + src="{0}".format(src_lib), + dest="{0}".format(dest_lib), + remote_src=True, + executable=True, + aliases=False + ) + # copy src loadlib to dest library pds w aliases + copy_res_aliases = hosts.all.zos_copy( + src="{0}".format(src_lib), + dest="{0}".format(dest_lib_aliases), + remote_src=True, + executable=True, + aliases=True + ) for result in copy_res.contacted.values(): assert result.get("msg") is None assert result.get("changed") is True - assert result.get("dest") == "{0}({1})".format(dest_exe, "MEM1") + assert result.get("dest") == "{0}".format(dest_lib) - for v_cp in verify_copy.contacted.values(): + for result in copy_res_aliases.contacted.values(): + assert result.get("msg") is None + assert result.get("changed") is True + assert result.get("dest") == "{0}".format(dest_lib_aliases) + + # check ALIAS keyword and name in mls output + verify_copy_mls = hosts.all.shell( + cmd="mls {0}".format(dest_lib), + executable=SHELL_EXECUTABLE + ) + verify_copy_mls_aliases = hosts.all.shell( + cmd="mls {0}".format(dest_lib_aliases), + executable=SHELL_EXECUTABLE + ) + + for v_cp in verify_copy_mls.contacted.values(): assert v_cp.get("rc") == 0 stdout = v_cp.get("stdout") assert stdout is not None + mls_alias_str = "ALIAS({0})".format(pgm_mem_alias) + mls_alias_str2 = "ALIAS({0})".format(pgm2_mem_alias) + assert mls_alias_str not in stdout + assert mls_alias_str2 not in stdout + + for v_cp in verify_copy_mls_aliases.contacted.values(): + assert v_cp.get("rc") == 0 + stdout = v_cp.get("stdout") + assert stdout is not None + expected_mls_str = "{0} ALIAS({1})".format(pgm_mem, pgm_mem_alias) + expected_mls_str2 = "{0} ALIAS({1})".format(pgm2_mem, pgm2_mem_alias) + assert expected_mls_str in stdout + assert expected_mls_str2 in stdout + + # verify pgms remain executable + pgm_output_map = { + (dest_lib, pgm_mem, COBOL_PRINT_STR), + (dest_lib_aliases, pgm_mem, COBOL_PRINT_STR), + (dest_lib_aliases, pgm_mem_alias, COBOL_PRINT_STR), + (dest_lib, pgm2_mem, COBOL_PRINT_STR2), + (dest_lib_aliases, pgm2_mem, COBOL_PRINT_STR2), + (dest_lib_aliases, pgm2_mem_alias, COBOL_PRINT_STR2) + } + for steplib, pgm, output in pgm_output_map: + validate_loadlib_pgm(hosts, steplib=steplib, pgm_name=pgm, expected_output_str=output) + finally: - hosts.all.zos_data_set(name=dest, state="absent") - hosts.all.zos_data_set(name=src, state="absent") - hosts.all.zos_data_set(name=cobol_pds, state="absent") + hosts.all.zos_data_set(name=cobol_src_pds, state="absent") + hosts.all.zos_data_set(name=src_lib, state="absent") + hosts.all.zos_data_set(name=dest_lib, state="absent") + hosts.all.zos_data_set(name=dest_lib_aliases, state="absent") @pytest.mark.pdse +@pytest.mark.loadlib +@pytest.mark.aliases @pytest.mark.uss -def test_copy_pds_loadlib_member_to_uss(ansible_zos_module): +def test_copy_pds_loadlib_to_uss_to_pds_loadlib(ansible_zos_module): + hosts = ansible_zos_module - src = "USER.LOAD.SRC" - dest = "USER.LOAD.DEST" - cobol_pds = "USER.COBOL.SRC" - uss_dest = "/tmp/HELLO" + + cobol_src_pds = "USER.COBOL.SRC" + cobol_src_mem = "HELLOCBL" + cobol_src_mem2 = "HICBL2" + src_lib = "USER.LOAD.SRC" + dest_lib = "USER.LOAD.DEST" + dest_lib_aliases = "USER.LOAD.DEST.ALIASES" + pgm_mem = "HELLO" + pgm2_mem = "HELLO2" + pgm_mem_alias = "ALIAS1" + pgm2_mem_alias = "ALIAS2" + + # note - aliases for executables are implicitly copied over (by module design) for USS targets. + uss_dir_path = '/tmp/uss-loadlib/' + try: + # allocate pds for cobol src code + hosts.all.zos_data_set( + name=cobol_src_pds, + state="present", + type="pds", + space_primary=2, + record_format="FB", + record_length=80, + block_size=3120, + replace=True, + ) + # allocate pds for src loadlib hosts.all.zos_data_set( - name=src, + name=src_lib, state="present", type="pdse", record_format="U", @@ -2660,8 +3130,22 @@ def test_copy_pds_loadlib_member_to_uss(ansible_zos_module): space_type="M", replace=True ) + + # generate loadlib w 2 members w 1 alias each + generate_loadlib( + hosts=hosts, + cobol_src_pds=cobol_src_pds, + cobol_src_mems=[cobol_src_mem, cobol_src_mem2], + loadlib_pds=src_lib, + loadlib_mems=[pgm_mem, pgm2_mem], + loadlib_alias_mems=[pgm_mem_alias, pgm2_mem_alias] + ) + + # make dest USS dir + hosts.all.file(path=uss_dir_path, state="directory") + # allocate dest loadlib to copy over without an alias. hosts.all.zos_data_set( - name=dest, + name=dest_lib, state="present", type="pdse", record_format="U", @@ -2671,41 +3155,129 @@ def test_copy_pds_loadlib_member_to_uss(ansible_zos_module): space_type="M", replace=True ) + # allocate dest loadlib to copy over with an alias. hosts.all.zos_data_set( - name=cobol_pds, + name=dest_lib_aliases, state="present", - type="pds", + type="pdse", + record_format="U", + record_length=0, + block_size=32760, space_primary=2, - record_format="FB", - record_length=80, - block_size=3120, - replace=True, + space_type="M", + replace=True ) - member = "HELLOSRC" - cobol_pds = "{0}({1})".format(cobol_pds, member) - generate_executable_ds(hosts, src, dest, cobol_pds) - copy_uss_res = hosts.all.zos_copy( - src="{0}({1})".format(src, member), - dest=uss_dest, + + # copy src lib to USS dir + copy_res_uss = hosts.all.zos_copy( + src="{0}".format(src_lib), + dest="{0}".format(uss_dir_path), remote_src=True, executable=True, - force=True) - for result in copy_uss_res.contacted.values(): + ) + for result in copy_res_uss.contacted.values(): assert result.get("msg") is None assert result.get("changed") is True + assert result.get("dest") == "{0}".format(uss_dir_path) + + # inspect USS dir contents + verify_exe_uss_ls = hosts.all.shell( + cmd='ls {0}/{1}'.format(uss_dir_path, src_lib.upper()) + ) + for v_exe_u_ls in verify_exe_uss_ls.contacted.values(): + assert v_exe_u_ls.get("rc") == 0 + assert "{0}\n{1}".format(src_lib.upper(), pgm_mem) + # run executables on USS verify_exe_uss = hosts.all.shell( - cmd="{0}".format(uss_dest) + cmd="{0}/{1}/{2}".format(uss_dir_path, src_lib.upper(), pgm_mem.lower()) ) for v_cp_u in verify_exe_uss.contacted.values(): assert v_cp_u.get("rc") == 0 - stdout = v_cp_u.get("stdout") - assert "SIMPLE HELLO WORLD" in str(stdout) + assert v_cp_u.get("stdout").strip() == COBOL_PRINT_STR + + verify_exe_uss = hosts.all.shell( + cmd="{0}/{1}/{2}".format(uss_dir_path, src_lib.upper(), pgm2_mem.lower()) + ) + for v_cp_u in verify_exe_uss.contacted.values(): + assert v_cp_u.get("rc") == 0 + assert v_cp_u.get("stdout").strip() == COBOL_PRINT_STR2 + + + # copy USS dir to dest library pds w/o aliases + copy_res = hosts.all.zos_copy( + src="{0}/{1}".format(uss_dir_path, src_lib.upper()), + dest="{0}".format(dest_lib), + remote_src=True, + executable=True, + aliases=False + ) + # copy USS dir to dest library pds w aliases + copy_res_aliases = hosts.all.zos_copy( + src="{0}{1}".format(uss_dir_path, src_lib.upper()), + dest="{0}".format(dest_lib_aliases), + remote_src=True, + executable=True, + aliases=True + ) + + for result in copy_res.contacted.values(): + assert result.get("msg") is None + assert result.get("changed") is True + assert result.get("dest") == "{0}".format(dest_lib) + + for result in copy_res_aliases.contacted.values(): + assert result.get("msg") is None + assert result.get("changed") is True + assert result.get("dest") == "{0}".format(dest_lib_aliases) + + # check ALIAS keyword and name in mls output + verify_copy_mls = hosts.all.shell( + cmd="mls {0}".format(dest_lib), + executable=SHELL_EXECUTABLE + ) + verify_copy_mls_aliases = hosts.all.shell( + cmd="mls {0}".format(dest_lib_aliases), + executable=SHELL_EXECUTABLE + ) + + for v_cp in verify_copy_mls.contacted.values(): + assert v_cp.get("rc") == 0 + stdout = v_cp.get("stdout") + assert stdout is not None + mls_alias_str = "ALIAS({0})".format(pgm_mem_alias) + mls_alias_str2 = "ALIAS({0})".format(pgm2_mem_alias) + assert mls_alias_str not in stdout + assert mls_alias_str2 not in stdout + + for v_cp in verify_copy_mls_aliases.contacted.values(): + assert v_cp.get("rc") == 0 + stdout = v_cp.get("stdout") + assert stdout is not None + expected_mls_str = "{0} ALIAS({1})".format(pgm_mem, pgm_mem_alias) + expected_mls_str2 = "{0} ALIAS({1})".format(pgm2_mem, pgm2_mem_alias) + assert expected_mls_str in stdout + assert expected_mls_str2 in stdout + + # verify pgms remain executable + pgm_output_map = { + (dest_lib, pgm_mem, COBOL_PRINT_STR), + (dest_lib_aliases, pgm_mem, COBOL_PRINT_STR), + (dest_lib_aliases, pgm_mem_alias, COBOL_PRINT_STR), + (dest_lib, pgm2_mem, COBOL_PRINT_STR2), + (dest_lib_aliases, pgm2_mem, COBOL_PRINT_STR2), + (dest_lib_aliases, pgm2_mem_alias, COBOL_PRINT_STR2) + } + + for steplib, pgm, output in pgm_output_map: + validate_loadlib_pgm(hosts, steplib=steplib, pgm_name=pgm, expected_output_str=output) + finally: - hosts.all.zos_data_set(name=dest, state="absent") - hosts.all.zos_data_set(name=src, state="absent") - hosts.all.zos_data_set(name=cobol_pds, state="absent") - hosts.all.file(name=uss_dest, state="absent") + hosts.all.zos_data_set(name=cobol_src_pds, state="absent") + hosts.all.zos_data_set(name=src_lib, state="absent") + hosts.all.zos_data_set(name=dest_lib, state="absent") + hosts.all.zos_data_set(name=dest_lib_aliases, state="absent") + hosts.all.file(path=uss_dir_path, state="absent") @pytest.mark.uss @@ -3001,6 +3573,7 @@ def test_copy_member_to_existing_uss_file(ansible_zos_module, args): @pytest.mark.uss @pytest.mark.pdse +@pytest.mark.aliases @pytest.mark.parametrize("src_type", ["pds", "pdse"]) def test_copy_pdse_to_uss_dir(ansible_zos_module, src_type): hosts = ansible_zos_module @@ -3020,6 +3593,14 @@ def test_copy_pdse_to_uss_dir(ansible_zos_module, src_type): hosts.all.file(path=dest_path, state="directory") + # ensure aliases:True errors out for non-text member copy + copy_aliases_res = hosts.all.zos_copy(src=src_ds, dest=dest, remote_src=True, aliases=True) + for result in copy_aliases_res.contacted.values(): + error_msg = "Alias support for text-based data sets is not available" + assert result.get("failed") is True + assert result.get("changed") is False + assert error_msg in result.get("msg") + copy_res = hosts.all.zos_copy(src=src_ds, dest=dest, remote_src=True) stat_res = hosts.all.stat(path=dest_path) @@ -3037,6 +3618,7 @@ def test_copy_pdse_to_uss_dir(ansible_zos_module, src_type): @pytest.mark.uss @pytest.mark.pdse +@pytest.mark.aliases @pytest.mark.parametrize("src_type", ["pds", "pdse"]) def test_copy_member_to_uss_dir(ansible_zos_module, src_type): hosts = ansible_zos_module @@ -3052,6 +3634,14 @@ def test_copy_member_to_uss_dir(ansible_zos_module, src_type): executable=SHELL_EXECUTABLE ) + # ensure aliases:True errors out for non-text member copy + copy_aliases_res = hosts.all.zos_copy(src=src_ds, dest=dest, remote_src=True, aliases=True) + for result in copy_aliases_res.contacted.values(): + error_msg = "Alias support for text-based data sets is not available" + assert result.get("failed") is True + assert result.get("changed") is False + assert error_msg in result.get("msg") + copy_res = hosts.all.zos_copy(src=src, dest=dest, remote_src=True) stat_res = hosts.all.stat(path=dest_path) verify_copy = hosts.all.shell( diff --git a/tests/pytest.ini b/tests/pytest.ini index a9324aaae..4226de838 100644 --- a/tests/pytest.ini +++ b/tests/pytest.ini @@ -8,4 +8,6 @@ markers = seq: sequential data sets test cases. pdse: partitioned data sets test cases. vsam: VSAM data sets test cases. - template: Jinja2 templating test cases. \ No newline at end of file + template: Jinja2 templating test cases. + aliases: aliases option test cases. + loadlib: executable copy test cases. \ No newline at end of file From bad20a85eb0f5cd805eed4f1398daf7f0a380446 Mon Sep 17 00:00:00 2001 From: Ivan Moreno <iamorenosoto@gmail.com> Date: Fri, 13 Oct 2023 22:01:29 -0700 Subject: [PATCH 202/495] New module zos_script (#961) * First version of the action plugin and module doc * Added remote execution * Fixed handling and cleanup of temp files * Fixed mode setting for scripts * Fixed undefined variable error in action plugin * Fixed chdir when running command * Added creates and removes options * Changed encoding validation * Filled out docs for the module * Added examples * Filled out RETURN block for module * Enhanced error messages * Generated docs for zos_script * Added first tests for zos_script * Reordered args * Added mode check to remote script test * Fixed template rendering * Fixed tests * Added tests for error handling and templates * Fixed a sanity error when returning a failure JSON * Updated ignore files * Updated module docs * Updated repository templates for issues * Fixed whitespace in docs * Updated tmp_path description * Updated notes in documentation * Removed use of local_charset * Removed private args This commit finishes the work needed to remove two sanity tests exceptions. * Fixed permissions for remote scripts * Updated module documentation * Updated documentation for tmp_path --- .github/ISSUE_TEMPLATE/bug_issue.yml | 1 + .../ISSUE_TEMPLATE/collaboration_issue.yml | 1 + .github/ISSUE_TEMPLATE/doc_issue.yml | 1 + .github/ISSUE_TEMPLATE/enabler_issue.yml | 1 + .../enhancement_feature.issue.yml | 1 + docs/source/modules/zos_script.rst | 391 +++++++++++++++ plugins/action/zos_script.py | 161 ++++++ plugins/modules/zos_script.py | 397 +++++++++++++++ .../modules/test_zos_script_func.py | 458 ++++++++++++++++++ tests/sanity/ignore-2.10.txt | 3 + tests/sanity/ignore-2.11.txt | 3 + tests/sanity/ignore-2.12.txt | 3 + tests/sanity/ignore-2.13.txt | 1 + tests/sanity/ignore-2.14.txt | 1 + tests/sanity/ignore-2.15.txt | 1 + tests/sanity/ignore-2.16.txt | 1 + tests/sanity/ignore-2.9.txt | 3 + 17 files changed, 1428 insertions(+) create mode 100644 docs/source/modules/zos_script.rst create mode 100644 plugins/action/zos_script.py create mode 100644 plugins/modules/zos_script.py create mode 100644 tests/functional/modules/test_zos_script_func.py diff --git a/.github/ISSUE_TEMPLATE/bug_issue.yml b/.github/ISSUE_TEMPLATE/bug_issue.yml index d50883065..e03266e7b 100644 --- a/.github/ISSUE_TEMPLATE/bug_issue.yml +++ b/.github/ISSUE_TEMPLATE/bug_issue.yml @@ -127,6 +127,7 @@ body: - zos_operator - zos_operator_action_query - zos_ping + - zos_script - zos_tso_command validations: required: false diff --git a/.github/ISSUE_TEMPLATE/collaboration_issue.yml b/.github/ISSUE_TEMPLATE/collaboration_issue.yml index c9ac9f151..f601ce1e1 100644 --- a/.github/ISSUE_TEMPLATE/collaboration_issue.yml +++ b/.github/ISSUE_TEMPLATE/collaboration_issue.yml @@ -144,6 +144,7 @@ body: - zos_operator - zos_operator_action_query - zos_ping + - zos_script - zos_tso_command validations: required: false diff --git a/.github/ISSUE_TEMPLATE/doc_issue.yml b/.github/ISSUE_TEMPLATE/doc_issue.yml index 5583ce5c1..38a8f1818 100644 --- a/.github/ISSUE_TEMPLATE/doc_issue.yml +++ b/.github/ISSUE_TEMPLATE/doc_issue.yml @@ -75,6 +75,7 @@ body: - zos_operator - zos_operator_action_query - zos_ping + - zos_script - zos_tso_command validations: required: false diff --git a/.github/ISSUE_TEMPLATE/enabler_issue.yml b/.github/ISSUE_TEMPLATE/enabler_issue.yml index abc9f16c2..d520148dc 100644 --- a/.github/ISSUE_TEMPLATE/enabler_issue.yml +++ b/.github/ISSUE_TEMPLATE/enabler_issue.yml @@ -40,6 +40,7 @@ body: - zos_operator - zos_operator_action_query - zos_ping + - zos_script - zos_tso_command validations: required: false diff --git a/.github/ISSUE_TEMPLATE/enhancement_feature.issue.yml b/.github/ISSUE_TEMPLATE/enhancement_feature.issue.yml index 3e1763091..f190ee70c 100644 --- a/.github/ISSUE_TEMPLATE/enhancement_feature.issue.yml +++ b/.github/ISSUE_TEMPLATE/enhancement_feature.issue.yml @@ -39,6 +39,7 @@ body: - zos_operator - zos_operator_action_query - zos_ping + - zos_script - zos_tso_command - zos_unarchive validations: diff --git a/docs/source/modules/zos_script.rst b/docs/source/modules/zos_script.rst new file mode 100644 index 000000000..6fc9a0ece --- /dev/null +++ b/docs/source/modules/zos_script.rst @@ -0,0 +1,391 @@ + +:github_url: https://github.com/ansible-collections/ibm_zos_core/blob/dev/plugins/modules/zos_script.py + +.. _zos_script_module: + + +zos_script -- Run scripts in z/OS +================================= + + + +.. contents:: + :local: + :depth: 1 + + +Synopsis +-------- +- The `zos_script <./zos_script.html>`_ module runs a local or remote script in the remote machine. + + + + + +Parameters +---------- + + +chdir + Change the script's working directory to this path. + + When not specified, the script will run in the user's home directory on the remote machine. + + | **required**: False + | **type**: str + + +cmd + Path to the local or remote script followed by optional arguments. + + If the script path contains spaces, make sure to enclose it in two pairs of quotes. + + Arguments may need to be escaped so the shell in the remote machine handles them correctly. + + | **required**: True + | **type**: str + + +creates + Path to a file in the remote machine. If it exists, the script will not be executed. + + | **required**: False + | **type**: str + + +encoding + Specifies which encodings the script should be converted from and to. + + If ``encoding`` is not provided, the module determines which local and remote charsets to convert the data from and to. + + | **required**: False + | **type**: dict + + + from + The encoding to be converted from. + + | **required**: True + | **type**: str + + + to + The encoding to be converted to. + + | **required**: True + | **type**: str + + + +executable + Path of an executable in the remote machine to invoke the script with. + + When not specified, the system will assume the script is interpreted REXX and try to run it as such. Make sure to include a comment identifying the script as REXX at the start of the file in this case. + + | **required**: False + | **type**: str + + +remote_src + If set to ``false``, the module will search the script in the controller. + + If set to ``true``, the module will search the script in the remote machine. + + | **required**: False + | **type**: bool + + +removes + Path to a file in the remote machine. If it does not exist, the script will not be executed. + + | **required**: False + | **type**: str + + +tmp_path + Path in the remote machine where local scripts will be temporarily copied to. + + When not specified, the module will copy local scripts to the default temporary path for the user. + + If ``tmp_path`` does not exist in the remote machine, the module will not create it. + + | **required**: False + | **type**: str + + +use_template + Whether the module should treat ``src`` as a Jinja2 template and render it before continuing with the rest of the module. + + Only valid when ``src`` is a local file or directory. + + All variables defined in inventory files, vars files and the playbook will be passed to the template engine, as well as `Ansible special variables <https://docs.ansible.com/ansible/latest/reference_appendices/special_variables.html#special-variables>`_, such as ``playbook_dir``, ``ansible_version``, etc. + + If variables defined in different scopes share the same name, Ansible will apply variable precedence to them. You can see the complete precedence order `in Ansible's documentation <https://docs.ansible.com/ansible/latest/playbook_guide/playbooks_variables.html#understanding-variable-precedence>`_ + + | **required**: False + | **type**: bool + + +template_parameters + Options to set the way Jinja2 will process templates. + + Jinja2 already sets defaults for the markers it uses, you can find more information at its `official documentation <https://jinja.palletsprojects.com/en/latest/templates/>`_. + + These options are ignored unless ``use_template`` is true. + + | **required**: False + | **type**: dict + + + variable_start_string + Marker for the beginning of a statement to print a variable in Jinja2. + + | **required**: False + | **type**: str + | **default**: {{ + + + variable_end_string + Marker for the end of a statement to print a variable in Jinja2. + + | **required**: False + | **type**: str + | **default**: }} + + + block_start_string + Marker for the beginning of a block in Jinja2. + + | **required**: False + | **type**: str + | **default**: {% + + + block_end_string + Marker for the end of a block in Jinja2. + + | **required**: False + | **type**: str + | **default**: %} + + + comment_start_string + Marker for the beginning of a comment in Jinja2. + + | **required**: False + | **type**: str + | **default**: {# + + + comment_end_string + Marker for the end of a comment in Jinja2. + + | **required**: False + | **type**: str + | **default**: #} + + + line_statement_prefix + Prefix used by Jinja2 to identify line-based statements. + + | **required**: False + | **type**: str + + + line_comment_prefix + Prefix used by Jinja2 to identify comment lines. + + | **required**: False + | **type**: str + + + lstrip_blocks + Whether Jinja2 should strip leading spaces from the start of a line to a block. + + | **required**: False + | **type**: bool + + + trim_blocks + Whether Jinja2 should remove the first newline after a block is removed. + + Setting this option to ``False`` will result in newlines being added to the rendered template. This could create invalid code when working with JCL templates or empty records in destination data sets. + + | **required**: False + | **type**: bool + | **default**: True + + + keep_trailing_newline + Whether Jinja2 should keep the first trailing newline at the end of a template after rendering. + + | **required**: False + | **type**: bool + + + newline_sequence + Sequence that starts a newline in a template. + + | **required**: False + | **type**: str + | **default**: \\n + | **choices**: \\n, \\r, \\r\\n + + + auto_reload + Whether to reload a template file when it has changed after the task has started. + + | **required**: False + | **type**: bool + + + + + +Examples +-------- + +.. code-block:: yaml+jinja + + + - name: Run a local REXX script on the managed z/OS node. + zos_script: + cmd: ./scripts/HELLO + + - name: Run a local REXX script with args on the managed z/OS node. + zos_script: + cmd: ./scripts/ARGS "1,2" + + - name: Run a remote REXX script while changing its working directory. + zos_script: + cmd: /u/user/scripts/ARGS "1,2" + remote_src: true + chdir: /u/user/output_dir + + - name: Run a local Python script that uses a custom tmp_path. + zos_script: + cmd: ./scripts/program.py + executable: /usr/bin/python3 + tmp_path: /usr/tmp/ibm_zos_core + + - name: Run a local script made from a template. + zos_script: + cmd: ./templates/PROGRAM + use_template: true + + - name: Run a script only when a file is not present. + zos_script: + cmd: ./scripts/PROGRAM + creates: /u/user/pgm_result.txt + + - name: Run a script only when a file is already present on the remote machine. + zos_script: + cmd: ./scripts/PROGRAM + removes: /u/user/pgm_input.txt + + + + +Notes +----- + +.. note:: + When executing local scripts, temporary storage will be used on the remote z/OS system. The size of the temporary storage will correspond to the size of the file being copied. + + Execution permissions for the group assigned to the script will be added to remote scripts. The original permissions for the script will be restored by the module before the task ends. + + If executing REXX scripts, make sure to include a newline character on each line of the file. Otherwise, the interpreter may fail and return error ``BPXW0003I``. + + For supported character sets used to encode data, refer to the `documentation <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/resources/character_set.html>`_. + + This module uses `zos_copy <./zos_copy.html>`_ to copy local scripts to the remote machine. + + `zos_copy <./zos_copy.html>`_ uses SFTP (Secure File Transfer Protocol) for the underlying transfer protocol; Co:Z SFTP is not supported. In the case of Co:z SFTP, you can exempt the Ansible userid on z/OS from using Co:Z thus falling back to using standard SFTP. + + This module executes scripts inside z/OS UNIX System Services. For running REXX scripts contained in data sets, consider issuing a TSO command with `zos_tso_command <./zos_tso_command.html>`_. + + + +See Also +-------- + +.. seealso:: + + - :ref:`zos_copy_module` + - :ref:`zos_tso_command_module` + + + + +Return Values +------------- + + +cmd + Original command issued by the user. + + | **returned**: changed + | **type**: str + | **sample**: ./scripts/PROGRAM + +remote_cmd + Command executed on the remote machine. Will show the executable path used, and when running local scripts, will also show the temporary file used. + + | **returned**: changed + | **type**: str + | **sample**: /tmp/zos_script.jycqqfny.ARGS 1,2 + +msg + Failure or skip message returned by the module. + + | **returned**: failure or skipped + | **type**: str + | **sample**: File /u/user/file.txt is already missing on the system, skipping script + +rc + Return code of the script. + + | **returned**: changed + | **type**: int + | **sample**: 16 + +stdout + The STDOUT from the script, may be empty. + + | **returned**: changed + | **type**: str + | **sample**: Allocation to SYSEXEC completed. + +stderr + The STDERR from the script, may be empty. + + | **returned**: changed + | **type**: str + | **sample**: An error has ocurred. + +stdout_lines + List of strings containing individual lines from STDOUT. + + | **returned**: changed + | **type**: list + | **sample**: + + .. code-block:: json + + [ + "Allocation to SYSEXEC completed." + ] + +stderr_lines + List of strings containing individual lines from STDERR. + + | **returned**: changed + | **type**: list + | **sample**: + + .. code-block:: json + + [ + "An error has ocurred" + ] + diff --git a/plugins/action/zos_script.py b/plugins/action/zos_script.py new file mode 100644 index 000000000..a17934ac4 --- /dev/null +++ b/plugins/action/zos_script.py @@ -0,0 +1,161 @@ +# Copyright (c) IBM Corporation 2023 +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import, division, print_function +__metaclass__ = type + +import copy +import shlex +from os import path + +from ansible.plugins.action import ActionBase +from ansible.module_utils.parsing.convert_bool import boolean +from ansible_collections.ibm.ibm_zos_core.plugins.action.zos_copy import ActionModule as ZosCopyActionModule + +from ansible.utils.display import Display +display = Display() + + +class ActionModule(ActionBase): + def run(self, tmp=None, task_vars=None): + if task_vars is None: + task_vars = dict() + + result = super(ActionModule, self).run(tmp, task_vars) + if result.get("skipped"): + return result + + module_args = self._task.args.copy() + + # First separating the command into the script path and its args + # if they are present. + cmd_parts = shlex.split(module_args.get('cmd')) + if len(cmd_parts) == 0: + result.update(dict( + changed=False, + failed=True, + invocation=dict(module_args=self._task.args), + msg="The command could not be validated, please check that it conforms to shell syntax." + )) + return result + + script_path = cmd_parts[0] + script_args = cmd_parts[1] if len(cmd_parts) > 1 else "" + remote_src = self._process_boolean(module_args.get('remote_src')) + user_cmd = tempfile_path = None + + # Copying the script when it's a local file. + if not remote_src: + script_path = path.abspath(path.normpath(script_path)) + script_name = path.basename(script_path) + tmp_path = module_args.get('tmp_path') + + # Getting a temporary path for the script. + tempfile_args = dict( + state="file", + path=tmp_path, + prefix="zos_script.", + suffix=".{0}".format(script_name) + ) + + tempfile_result = self._execute_module( + module_name="ansible.builtin.tempfile", + module_args=tempfile_args, + task_vars=task_vars + ) + result.update(tempfile_result) + + if not result.get("changed") or result.get("failed"): + result.update(dict( + changed=False, + failed=True, + invocation=dict( + module_args=self._task.args, + tempfile_args=tempfile_result.get('invocation', dict()).get('module_args') + ), + msg="An error ocurred while trying to create a tempfile for the script." + )) + return result + + tempfile_path = tempfile_result.get('path') + + # Letting zos_copy handle the transfer of the script. + zos_copy_args = dict( + src=script_path, + dest=tempfile_path, + force=True, + is_binary=False, + encoding=module_args.get('encoding'), + use_template=module_args.get('use_template', False), + template_parameters=module_args.get('template_parameters', dict()) + ) + copy_task = copy.deepcopy(self._task) + copy_task.args = zos_copy_args + zos_copy_action_plugin = ZosCopyActionModule( + task=copy_task, + connection=self._connection, + play_context=self._play_context, + loader=self._loader, + templar=self._templar, + shared_loader_obj=self._shared_loader_obj + ) + + zos_copy_result = zos_copy_action_plugin.run(task_vars=task_vars) + result.update(zos_copy_result) + + if not result.get("changed") or result.get("failed"): + result.update(dict( + changed=False, + failed=True, + invocation=dict( + module_args=self._task.args, + tempfile_args=tempfile_result.get('invocation', dict()).get('module_args'), + zos_copy_args=zos_copy_result.get('invocation', dict()).get('module_args') + ), + msg="An error ocurred while trying to copy the script to the managed node: {0}.".format( + zos_copy_result.get('msg') + ) + )) + return result + + # We're going to shadow the command supplied by the user with the remote + # tempfile we just created. + user_cmd = module_args.get('cmd') + module_args['cmd'] = '{0} {1}'.format(tempfile_path, script_args) + + module_result = self._execute_module( + module_name='ibm.ibm_zos_core.zos_script', + module_args=module_args, + task_vars=task_vars + ) + + result = module_result + if result.get('changed') and tempfile_path: + result['tempfile_path'] = tempfile_path + # The cmd field will return using the tempfile created, so we + # restore it to what the user supplied. + result['cmd'] = user_cmd + + if not remote_src: + self._remote_cleanup(tempfile_path) + + return result + + def _remote_cleanup(self, tempfile_path): + """Removes the temporary file in a managed node created for a local + script.""" + self._connection.exec_command("rm -f {0}".format(tempfile_path)) + + def _process_boolean(self, arg, default=False): + try: + return boolean(arg) + except TypeError: + return default diff --git a/plugins/modules/zos_script.py b/plugins/modules/zos_script.py new file mode 100644 index 000000000..15699c4a1 --- /dev/null +++ b/plugins/modules/zos_script.py @@ -0,0 +1,397 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- + +# Copyright (c) IBM Corporation 2023 +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import, division, print_function +__metaclass__ = type + + +DOCUMENTATION = r""" +--- +module: zos_script +version_added: '1.8.0' +author: + - "Ivan Moreno (@rexemin)" +short_description: Run scripts in z/OS +description: + - The L(zos_script,./zos_script.html) module runs a local or remote script + in the remote machine. + +options: + chdir: + description: + - Change the script's working directory to this path. + - When not specified, the script will run in the user's + home directory on the remote machine. + type: str + required: false + cmd: + description: + - Path to the local or remote script followed by optional arguments. + - If the script path contains spaces, make sure to enclose it in two + pairs of quotes. + - Arguments may need to be escaped so the shell in the remote machine + handles them correctly. + type: str + required: true + creates: + description: + - Path to a file in the remote machine. If it exists, the + script will not be executed. + type: str + required: false + encoding: + description: + - Specifies which encodings the script should be converted from and to. + - If C(encoding) is not provided, the module determines which local + and remote charsets to convert the data from and to. + type: dict + required: false + suboptions: + from: + description: + - The encoding to be converted from. + required: true + type: str + to: + description: + - The encoding to be converted to. + required: true + type: str + executable: + description: + - Path of an executable in the remote machine to invoke the + script with. + - When not specified, the system will assume the script is + interpreted REXX and try to run it as such. Make sure to + include a comment identifying the script as REXX at the + start of the file in this case. + type: str + required: false + remote_src: + description: + - If set to C(false), the module will search the script in the + controller. + - If set to C(true), the module will search the script in the + remote machine. + type: bool + required: false + removes: + description: + - Path to a file in the remote machine. If it does not exist, the + script will not be executed. + type: str + required: false + tmp_path: + description: + - Directory path in the remote machine where local scripts will be + temporarily copied to. + - When not specified, the module will copy local scripts to + the default temporary path for the user. + - If C(tmp_path) does not exist in the remote machine, the + module will not create it. + - All scripts copied to C(tmp_path) will be removed from the managed + node before the module finishes executing. + type: str + required: false + +extends_documentation_fragment: + - ibm.ibm_zos_core.template + +notes: + - When executing local scripts, temporary storage will be used + on the remote z/OS system. The size of the temporary storage will + correspond to the size of the file being copied. + - Execution permissions for the group assigned to the script will be + added to remote scripts. The original permissions for remote scripts will + be restored by the module before the task ends. + - The module will only add execution permissions for the file owner. + - If executing REXX scripts, make sure to include a newline character on + each line of the file. Otherwise, the interpreter may fail and return + error C(BPXW0003I). + - For supported character sets used to encode data, refer to the + L(documentation,https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/resources/character_set.html). + - This module uses L(zos_copy,./zos_copy.html) to copy local scripts to + the remote machine. + - L(zos_copy,./zos_copy.html) uses SFTP (Secure File Transfer Protocol) + for the underlying transfer protocol; Co:Z SFTP is not supported. In + the case of Co:z SFTP, you can exempt the Ansible userid on z/OS from + using Co:Z thus falling back to using standard SFTP. + - This module executes scripts inside z/OS UNIX System Services. For + running REXX scripts contained in data sets or CLISTs, consider issuing a TSO + command with L(zos_tso_command,./zos_tso_command.html). + - The community script module does not rely on Python to execute scripts on a + managed node, while this module does. Python must be present on the + remote machine. + +seealso: + - module: zos_copy + - module: zos_tso_command +""" + +EXAMPLES = r""" +- name: Run a local REXX script on the managed z/OS node. + zos_script: + cmd: ./scripts/HELLO + +- name: Run a local REXX script with args on the managed z/OS node. + zos_script: + cmd: ./scripts/ARGS "1,2" + +- name: Run a remote REXX script while changing its working directory. + zos_script: + cmd: /u/user/scripts/ARGS "1,2" + remote_src: true + chdir: /u/user/output_dir + +- name: Run a local Python script that uses a custom tmp_path. + zos_script: + cmd: ./scripts/program.py + executable: /usr/bin/python3 + tmp_path: /usr/tmp/ibm_zos_core + +- name: Run a local script made from a template. + zos_script: + cmd: ./templates/PROGRAM + use_template: true + +- name: Run a script only when a file is not present. + zos_script: + cmd: ./scripts/PROGRAM + creates: /u/user/pgm_result.txt + +- name: Run a script only when a file is already present on the remote machine. + zos_script: + cmd: ./scripts/PROGRAM + removes: /u/user/pgm_input.txt +""" + +RETURN = r""" +cmd: + description: Original command issued by the user. + returned: changed + type: str + sample: ./scripts/PROGRAM +remote_cmd: + description: + Command executed on the remote machine. Will show the executable + path used, and when running local scripts, will also show the + temporary file used. + returned: changed + type: str + sample: /tmp/zos_script.jycqqfny.ARGS 1,2 +msg: + description: Failure or skip message returned by the module. + returned: failure or skipped + type: str + sample: + File /u/user/file.txt is already missing on the system, skipping script +rc: + description: Return code of the script. + returned: changed + type: int + sample: 16 +stdout: + description: The STDOUT from the script, may be empty. + returned: changed + type: str + sample: Allocation to SYSEXEC completed. +stderr: + description: The STDERR from the script, may be empty. + returned: changed + type: str + sample: An error has ocurred. +stdout_lines: + description: List of strings containing individual lines from STDOUT. + returned: changed + type: list + sample: ["Allocation to SYSEXEC completed."] +stderr_lines: + description: List of strings containing individual lines from STDERR. + returned: changed + type: list + sample: ["An error has ocurred"] +""" + + +import os +import stat +import shlex + +from ansible.module_utils.basic import AnsibleModule +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import ( + better_arg_parser +) + + +def run_module(): + module = AnsibleModule( + argument_spec=dict( + chdir=dict(type='str', required=False), + cmd=dict(type='str', required=True), + creates=dict(type='str', required=False), + encoding=dict( + type='dict', + required=False, + options={ + 'from': dict(type='str', required=True,), + 'to': dict(type='str', required=True,) + } + ), + executable=dict(type='str', required=False), + remote_src=dict(type='bool', required=False), + removes=dict(type='str', required=False), + tmp_path=dict(type='str', required=False), + use_template=dict(type='bool', default=False), + template_parameters=dict( + type='dict', + required=False, + options=dict( + variable_start_string=dict(type='str', default='{{'), + variable_end_string=dict(type='str', default='}}'), + block_start_string=dict(type='str', default='{%'), + block_end_string=dict(type='str', default='%}'), + comment_start_string=dict(type='str', default='{#'), + comment_end_string=dict(type='str', default='#}'), + line_statement_prefix=dict(type='str', required=False), + line_comment_prefix=dict(type='str', required=False), + lstrip_blocks=dict(type='bool', default=False), + trim_blocks=dict(type='bool', default=True), + keep_trailing_newline=dict(type='bool', default=False), + newline_sequence=dict( + type='str', + default='\n', + choices=['\n', '\r', '\r\n'] + ), + auto_reload=dict(type='bool', default=False), + ) + ), + ), + supports_check_mode=False + ) + + args_def = dict( + chdir=dict(arg_type='path', required=False), + cmd=dict(arg_type='str', required=True), + creates=dict(arg_type='path', required=False), + executable=dict(arg_type='path', required=False), + remote_src=dict(arg_type='bool', required=False), + removes=dict(arg_type='path', required=False), + tmp_path=dict(arg_type='path', required=False), + use_template=dict(arg_type='bool', required=False), + template_parameters=dict( + arg_type='dict', + required=False, + options=dict( + variable_start_string=dict(arg_type='str', required=False), + variable_end_string=dict(arg_type='str', required=False), + block_start_string=dict(arg_type='str', required=False), + block_end_string=dict(arg_type='str', required=False), + comment_start_string=dict(arg_type='str', required=False), + comment_end_string=dict(arg_type='str', required=False), + line_statement_prefix=dict(arg_type='str', required=False), + line_comment_prefix=dict(arg_type='str', required=False), + lstrip_blocks=dict(arg_type='bool', required=False), + trim_blocks=dict(arg_type='bool', required=False), + keep_trailing_newline=dict(arg_type='bool', required=False), + newline_sequence=dict(arg_type='str', required=False), + auto_reload=dict(arg_type='bool', required=False), + ) + ), + ) + + try: + parser = better_arg_parser.BetterArgParser(args_def) + parsed_args = parser.parse_args(module.params) + module.params = parsed_args + except ValueError as err: + module.fail_json( + msg='Parameter verification failed.', + stderr=str(err) + ) + + cmd_str = module.params.get('cmd') + cmd_parts = shlex.split(cmd_str) + script_path = cmd_parts[0] + chdir = module.params.get('chdir') + executable = module.params.get('executable') + creates = module.params.get('creates') + removes = module.params.get('removes') + + if creates and os.path.exists(creates): + result = dict( + changed=False, + skipped=True, + msg='File {0} already exists on the system, skipping script'.format(creates) + ) + module.exit_json(**result) + + if removes and not os.path.exists(removes): + result = dict( + changed=False, + skipped=True, + msg='File {0} is already missing on the system, skipping script'.format(removes) + ) + module.exit_json(**result) + + if chdir and not os.path.exists(chdir): + module.fail_json( + msg='The given chdir {0} does not exist on the system.'.format(chdir) + ) + + # Adding owner execute permissions to the script. + # The module will fail if the Ansible user is not the owner! + script_permissions = os.lstat(script_path).st_mode + os.chmod( + script_path, + script_permissions | stat.S_IXUSR + ) + + if executable: + cmd_str = "{0} {1}".format(executable, cmd_str) + + cmd_str = cmd_str.strip() + script_rc, stdout, stderr = module.run_command( + cmd_str, + cwd=chdir + ) + + result = dict( + changed=True, + cmd=module.params.get('cmd'), + remote_cmd=cmd_str, + rc=script_rc, + stdout=stdout, + stderr=stderr, + stdout_lines=stdout.split('\n'), + stderr_lines=stderr.split('\n'), + ) + + # Reverting script's permissions. + os.chmod(script_path, script_permissions) + + if script_rc != 0 or stderr: + result['msg'] = 'The script terminated with an error' + module.fail_json( + **result + ) + + module.exit_json(**result) + + +def main(): + run_module() + + +if __name__ == '__main__': + main() diff --git a/tests/functional/modules/test_zos_script_func.py b/tests/functional/modules/test_zos_script_func.py new file mode 100644 index 000000000..2bdae2a66 --- /dev/null +++ b/tests/functional/modules/test_zos_script_func.py @@ -0,0 +1,458 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- + +# Copyright (c) IBM Corporation 2023 +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from __future__ import absolute_import, division, print_function + +import pytest +__metaclass__ = type + + +# Using || to concatenate strings without extra spaces. +rexx_script_args = """/* REXX */ +parse arg A ',' B +say 'args are ' || A || ',' || B +return 0 + +""" + +# For validating that chdir gets honored by the module. +rexx_script_chdir = """/* REXX */ +address syscall 'getcwd cwd' +say cwd +return 0 + +""" + +# For testing a default template. Note that the Jinja variable is static +# and it's always called playbook_msg. +rexx_script_template_default = """/* REXX */ +say '{{ playbook_msg }}' +return 0 + +""" + +# For testing templates with custom markers. Here the markers are static +# too (always '((', '))', '&$' and '$&'). +rexx_script_template_custom = """/* REXX */ +&$ This is a comment that should create problems if not substituted $& +say '(( playbook_msg ))' +return 0 + +""" + + +def create_script_content(msg, script_type): + """Returns a string containing either a valid REXX script or a valid + Python script. The script will print the given message.""" + if script_type == 'rexx': + # Without the comment in the first line, the interpreter will not be + # able to run the script. + # Without the last blank line, the REXX interpreter will throw + # an error. + return """/* REXX */ +say '{0}' +return 0 + +""".format(msg) + elif script_type == 'python': + return """msg = "{0}" +print(msg) +""".format(msg) + else: + raise Exception('Type {0} is not valid.'.format(script_type)) + + +def create_python_script_stderr(msg, rc): + """Returns a Python script that will write out to STDERR and return + a given RC. The RC can be 0, but for testing it would be better if it + was something else.""" + return """import sys +print('{0}', file=sys.stderr) +exit({1}) +""".format(msg, rc) + + +def create_local_file(content, suffix): + """Creates a tempfile that has the given content.""" + import os + import tempfile + + fd, file_path = tempfile.mkstemp( + prefix='zos_script', + suffix=suffix + ) + os.close(fd) + + with open(file_path, 'w') as f: + f.write(content) + + return file_path + + +def test_rexx_script_without_args(ansible_zos_module): + import os + + hosts = ansible_zos_module + + try: + msg = 'Success' + rexx_script = create_script_content(msg, 'rexx') + script_path = create_local_file(rexx_script, 'rexx') + + zos_script_result = hosts.all.zos_script( + cmd=script_path + ) + + for result in zos_script_result.contacted.values(): + assert result.get('changed') is True + assert result.get('failed', False) is False + assert result.get('rc') == 0 + assert result.get('stdout', '').strip() == msg + assert result.get('stderr', '') == '' + finally: + if os.path.exists(script_path): + os.remove(script_path) + + +def test_rexx_remote_script(ansible_zos_module): + import os + + hosts = ansible_zos_module + + try: + msg = 'Success' + rexx_script = create_script_content(msg, 'rexx') + local_script = create_local_file(rexx_script, 'rexx') + + # Using zos_copy instead of doing an echo with shell to avoid trouble + # with how single quotes are handled. + script_path = '/tmp/zos_script_test_script' + copy_result = hosts.all.zos_copy( + src=local_script, + dest=script_path, + mode='600' + ) + for result in copy_result.contacted.values(): + assert result.get('changed') is True + + pre_stat_info = hosts.all.stat(path=script_path) + + zos_script_result = hosts.all.zos_script( + cmd=script_path, + remote_src=True + ) + + post_stat_info = hosts.all.stat(path=script_path) + + for result in zos_script_result.contacted.values(): + assert result.get('changed') is True + assert result.get('failed', False) is False + assert result.get('rc') == 0 + assert result.get('stdout', '').strip() == msg + assert result.get('stderr', '') == '' + # Checking that permissions remained unchanged after executing + # zos_script. + for pre_stat, post_stat in zip( + pre_stat_info.contacted.values(), + post_stat_info.contacted.values() + ): + assert pre_stat.get('mode') == post_stat.get('mode') + finally: + if os.path.exists(local_script): + os.remove(local_script) + hosts.all.file(path=script_path, state='absent') + + +def test_rexx_script_with_args(ansible_zos_module): + import os + + hosts = ansible_zos_module + + try: + rexx_script = rexx_script_args + script_path = create_local_file(rexx_script, 'rexx') + + args = '1,2' + cmd = "{0} '{1}'".format(script_path, args) + + zos_script_result = hosts.all.zos_script( + cmd=cmd + ) + + for result in zos_script_result.contacted.values(): + assert result.get('changed') is True + assert result.get('failed', False) is False + assert result.get('rc') == 0 + assert result.get('stdout', '').strip() == 'args are {0}'.format(args) + assert result.get('stderr', '') == '' + finally: + if os.path.exists(script_path): + os.remove(script_path) + + +def test_rexx_script_chdir(ansible_zos_module): + import os + + hosts = ansible_zos_module + + try: + rexx_script = rexx_script_chdir + script_path = create_local_file(rexx_script, 'rexx') + + tmp_remote_dir = '/zos_script_tests' + file_result = hosts.all.file( + path=tmp_remote_dir, + state='directory' + ) + + for result in file_result.contacted.values(): + assert result.get('changed') is True + + zos_script_result = hosts.all.zos_script( + cmd=script_path, + chdir=tmp_remote_dir + ) + + for result in zos_script_result.contacted.values(): + assert result.get('changed') is True + assert result.get('failed', False) is False + assert result.get('rc') == 0 + assert result.get('stdout', '').strip() == tmp_remote_dir + assert result.get('stderr', '') == '' + finally: + if os.path.exists(script_path): + os.remove(script_path) + hosts.all.file(path=tmp_remote_dir, state='absent') + + +def test_rexx_script_tmp_path(ansible_zos_module): + import os + + hosts = ansible_zos_module + + try: + rexx_script = create_script_content('tmp_path test', 'rexx') + script_path = create_local_file(rexx_script, 'rexx') + + tmp_remote_dir = '/tmp/zos_script_tests' + file_result = hosts.all.file( + path=tmp_remote_dir, + state='directory' + ) + + for result in file_result.contacted.values(): + assert result.get('changed') is True + + zos_script_result = hosts.all.zos_script( + cmd=script_path, + tmp_path=tmp_remote_dir + ) + + for result in zos_script_result.contacted.values(): + assert result.get('changed') is True + assert result.get('failed', False) is False + assert result.get('rc') == 0 + assert result.get('stderr', '') == '' + assert tmp_remote_dir in result.get('remote_cmd', '') + finally: + if os.path.exists(script_path): + os.remove(script_path) + hosts.all.file(path=tmp_remote_dir, state='absent') + + +def test_python_script(ansible_zos_module): + import os + + hosts = ansible_zos_module + + try: + msg = "Success" + python_script = create_script_content(msg, 'python') + script_path = create_local_file(python_script, 'python') + + python_executable = hosts['options']['ansible_python_path'] + zos_script_result = hosts.all.zos_script( + cmd=script_path, + executable=python_executable + ) + + for result in zos_script_result.contacted.values(): + assert result.get('changed') is True + assert result.get('failed', False) is False + assert result.get('rc') == 0 + assert result.get('stdout', '').strip() == msg + assert result.get('stderr', '') == '' + finally: + if os.path.exists(script_path): + os.remove(script_path) + + +def test_rexx_script_creates_option(ansible_zos_module): + import os + + hosts = ansible_zos_module + + try: + msg = 'Success' + rexx_script = create_script_content(msg, 'rexx') + script_path = create_local_file(rexx_script, 'rexx') + + remote_file = '/tmp/zos_script_test_creates.txt' + file_result = hosts.all.file( + path=remote_file, + state='touch' + ) + + for result in file_result.contacted.values(): + assert result.get('changed') is True + + zos_script_result = hosts.all.zos_script( + cmd=script_path, + creates=remote_file + ) + + for result in zos_script_result.contacted.values(): + assert result.get('changed') is False + assert result.get('skipped') is True + assert result.get('failed', False) is False + finally: + if os.path.exists(script_path): + os.remove(script_path) + hosts.all.file(path=remote_file, state='absent') + + +def test_rexx_script_removes_option(ansible_zos_module): + import os + + hosts = ansible_zos_module + + try: + msg = 'Success' + rexx_script = create_script_content(msg, 'rexx') + script_path = create_local_file(rexx_script, 'rexx') + + # Not actually creating this file on the remote hosts. + remote_file = '/tmp/zos_script_test_removes.txt' + + zos_script_result = hosts.all.zos_script( + cmd=script_path, + removes=remote_file + ) + + for result in zos_script_result.contacted.values(): + assert result.get('changed') is False + assert result.get('skipped') is True + assert result.get('failed', False) is False + finally: + if os.path.exists(script_path): + os.remove(script_path) + + +def test_script_template_with_default_markers(ansible_zos_module): + import os + + hosts = ansible_zos_module + + try: + rexx_script = rexx_script_template_default + script_path = create_local_file(rexx_script, 'rexx') + + # Updating the vars available to the tasks. + template_vars = dict( + playbook_msg='Success' + ) + for host in hosts['options']['inventory_manager']._inventory.hosts.values(): + host.vars.update(template_vars) + + zos_script_result = hosts.all.zos_script( + cmd=script_path, + use_template=True + ) + + for result in zos_script_result.contacted.values(): + assert result.get('changed') is True + assert result.get('failed', False) is False + assert result.get('rc') == 0 + assert result.get('stdout', '').strip() == template_vars['playbook_msg'] + assert result.get('stderr', '') == '' + finally: + if os.path.exists(script_path): + os.remove(script_path) + + +def test_script_template_with_custom_markers(ansible_zos_module): + import os + + hosts = ansible_zos_module + + try: + rexx_script = rexx_script_template_custom + script_path = create_local_file(rexx_script, 'rexx') + + # Updating the vars available to the tasks. + template_vars = dict( + playbook_msg='Success' + ) + for host in hosts['options']['inventory_manager']._inventory.hosts.values(): + host.vars.update(template_vars) + + zos_script_result = hosts.all.zos_script( + cmd=script_path, + use_template=True, + template_parameters=dict( + variable_start_string='((', + variable_end_string='))', + comment_start_string='&$', + comment_end_string='$&', + ) + ) + + for result in zos_script_result.contacted.values(): + assert result.get('changed') is True + assert result.get('failed', False) is False + assert result.get('rc') == 0 + assert result.get('stdout', '').strip() == template_vars['playbook_msg'] + assert result.get('stderr', '') == '' + finally: + if os.path.exists(script_path): + os.remove(script_path) + + +def test_python_script_with_stderr(ansible_zos_module): + import os + + hosts = ansible_zos_module + + try: + msg = 'Error' + rc = 1 + python_script = create_python_script_stderr(msg, rc) + script_path = create_local_file(python_script, 'python') + + python_executable = hosts['options']['ansible_python_path'] + zos_script_result = hosts.all.zos_script( + cmd=script_path, + executable=python_executable + ) + + for result in zos_script_result.contacted.values(): + assert result.get('changed') is True + assert result.get('failed') is True + assert result.get('rc') == rc + assert result.get('stdout', '') == '' + assert result.get('stderr', '').strip() == msg + finally: + if os.path.exists(script_path): + os.remove(script_path) diff --git a/tests/sanity/ignore-2.10.txt b/tests/sanity/ignore-2.10.txt index 74db3a282..8778d80f9 100644 --- a/tests/sanity/ignore-2.10.txt +++ b/tests/sanity/ignore-2.10.txt @@ -74,6 +74,9 @@ plugins/modules/zos_operator_action_query.py import-2.6!skip # Python 2.6 is uns plugins/modules/zos_ping.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_ping.py compile-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_ping.py import-2.6!skip # Python 2.6 is unsupported +plugins/modules/zos_script.py compile-2.6!skip # Python 2.6 is unsupported +plugins/modules/zos_script.py import-2.6!skip # Python 2.6 is unsupported +plugins/modules/zos_script.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_tso_command.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_tso_command.py compile-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_tso_command.py import-2.6!skip # Python 2.6 is unsupported diff --git a/tests/sanity/ignore-2.11.txt b/tests/sanity/ignore-2.11.txt index 420528c74..9ceaf3c97 100644 --- a/tests/sanity/ignore-2.11.txt +++ b/tests/sanity/ignore-2.11.txt @@ -74,6 +74,9 @@ plugins/modules/zos_operator_action_query.py import-2.6!skip # Python 2.6 is uns plugins/modules/zos_ping.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_ping.py compile-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_ping.py import-2.6!skip # Python 2.6 is unsupported +plugins/modules/zos_script.py compile-2.6!skip # Python 2.6 is unsupported +plugins/modules/zos_script.py import-2.6!skip # Python 2.6 is unsupported +plugins/modules/zos_script.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_tso_command.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_tso_command.py compile-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_tso_command.py import-2.6!skip # Python 2.6 is unsupported diff --git a/tests/sanity/ignore-2.12.txt b/tests/sanity/ignore-2.12.txt index 420528c74..9ceaf3c97 100644 --- a/tests/sanity/ignore-2.12.txt +++ b/tests/sanity/ignore-2.12.txt @@ -74,6 +74,9 @@ plugins/modules/zos_operator_action_query.py import-2.6!skip # Python 2.6 is uns plugins/modules/zos_ping.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_ping.py compile-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_ping.py import-2.6!skip # Python 2.6 is unsupported +plugins/modules/zos_script.py compile-2.6!skip # Python 2.6 is unsupported +plugins/modules/zos_script.py import-2.6!skip # Python 2.6 is unsupported +plugins/modules/zos_script.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_tso_command.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_tso_command.py compile-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_tso_command.py import-2.6!skip # Python 2.6 is unsupported diff --git a/tests/sanity/ignore-2.13.txt b/tests/sanity/ignore-2.13.txt index a496e3ac8..70d4764e1 100644 --- a/tests/sanity/ignore-2.13.txt +++ b/tests/sanity/ignore-2.13.txt @@ -30,6 +30,7 @@ plugins/modules/zos_operator_action_query.py validate-modules:missing-gplv3-lice plugins/modules/zos_ping.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_ping.rexx validate-modules:invalid-extension # Opened issue https://github.com/ansible/ansible/issues/79784 plugins/modules/zos_ping.rexx validate-modules:python-syntax-error # Opened issue https://github.com/ansible/ansible/issues/79784 +plugins/modules/zos_script.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_tso_command.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_gather_facts.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_volume_init.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 diff --git a/tests/sanity/ignore-2.14.txt b/tests/sanity/ignore-2.14.txt index a496e3ac8..70d4764e1 100644 --- a/tests/sanity/ignore-2.14.txt +++ b/tests/sanity/ignore-2.14.txt @@ -30,6 +30,7 @@ plugins/modules/zos_operator_action_query.py validate-modules:missing-gplv3-lice plugins/modules/zos_ping.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_ping.rexx validate-modules:invalid-extension # Opened issue https://github.com/ansible/ansible/issues/79784 plugins/modules/zos_ping.rexx validate-modules:python-syntax-error # Opened issue https://github.com/ansible/ansible/issues/79784 +plugins/modules/zos_script.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_tso_command.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_gather_facts.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_volume_init.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 diff --git a/tests/sanity/ignore-2.15.txt b/tests/sanity/ignore-2.15.txt index a496e3ac8..70d4764e1 100644 --- a/tests/sanity/ignore-2.15.txt +++ b/tests/sanity/ignore-2.15.txt @@ -30,6 +30,7 @@ plugins/modules/zos_operator_action_query.py validate-modules:missing-gplv3-lice plugins/modules/zos_ping.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_ping.rexx validate-modules:invalid-extension # Opened issue https://github.com/ansible/ansible/issues/79784 plugins/modules/zos_ping.rexx validate-modules:python-syntax-error # Opened issue https://github.com/ansible/ansible/issues/79784 +plugins/modules/zos_script.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_tso_command.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_gather_facts.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_volume_init.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 diff --git a/tests/sanity/ignore-2.16.txt b/tests/sanity/ignore-2.16.txt index 8b4540038..a4835475f 100644 --- a/tests/sanity/ignore-2.16.txt +++ b/tests/sanity/ignore-2.16.txt @@ -30,6 +30,7 @@ plugins/modules/zos_operator_action_query.py validate-modules:missing-gplv3-lice plugins/modules/zos_ping.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_ping.rexx validate-modules:invalid-extension # Opened issue https://github.com/ansible/ansible/issues/79784 plugins/modules/zos_ping.rexx validate-modules:python-syntax-error # Opened issue https://github.com/ansible/ansible/issues/79784 +plugins/modules/zos_script.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_tso_command.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_gather_facts.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_volume_init.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 diff --git a/tests/sanity/ignore-2.9.txt b/tests/sanity/ignore-2.9.txt index 4a6c8a778..992ec6099 100644 --- a/tests/sanity/ignore-2.9.txt +++ b/tests/sanity/ignore-2.9.txt @@ -73,6 +73,9 @@ plugins/modules/zos_operator_action_query.py import-2.6!skip # Python 2.6 is uns plugins/modules/zos_ping.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_ping.py compile-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_ping.py import-2.6!skip # Python 2.6 is unsupported +plugins/modules/zos_script.py compile-2.6!skip # Python 2.6 is unsupported +plugins/modules/zos_script.py import-2.6!skip # Python 2.6 is unsupported +plugins/modules/zos_script.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_tso_command.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_tso_command.py compile-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_tso_command.py import-2.6!skip # Python 2.6 is unsupported From d41f438e52fea1be692e4413efed65b1d074b53d Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Mon, 16 Oct 2023 10:58:31 -0400 Subject: [PATCH 203/495] main change to version checker for consolidation --- plugins/module_utils/job.py | 11 ++++--- plugins/module_utils/zoau_version_checker.py | 31 +++++++++++++------- 2 files changed, 26 insertions(+), 16 deletions(-) diff --git a/plugins/module_utils/job.py b/plugins/module_utils/job.py index 3a9c3b35e..390405d33 100644 --- a/plugins/module_utils/job.py +++ b/plugins/module_utils/job.py @@ -31,10 +31,9 @@ list_dds = MissingZOAUImport() listing = MissingZOAUImport() -try: - from zoautil_py import ZOAU_API_VERSION -except Exception: - ZOAU_API_VERSION = "1.2.0" +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import ( + zoau_version_checker +) def job_output(job_id=None, owner=None, job_name=None, dd_name=None, duration=0, timeout=0, start_time=timer()): @@ -255,7 +254,7 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=T # this section only works on zoau 1.2.3/+ vvv - if ZOAU_API_VERSION > "1.2.2": + if zoau_version_checker.is_zoau_version_higher_than("1.2.2"): job["job_class"] = entry.job_class job["svc_class"] = entry.svc_class job["priority"] = entry.priority @@ -263,7 +262,7 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=T job["creation_date"] = str(entry.creation_datetime)[0:10] job["creation_time"] = str(entry.creation_datetime)[12:] job["queue_position"] = entry.queue_position - if ZOAU_API_VERSION >= "1.2.4": + if zoau_version_checker.is_zoau_version_higher_than("1.2.3"): job["program_name"] = entry.program_name # this section only works on zoau 1.2.3/+ ^^^ diff --git a/plugins/module_utils/zoau_version_checker.py b/plugins/module_utils/zoau_version_checker.py index c88dac481..601ab6f16 100644 --- a/plugins/module_utils/zoau_version_checker.py +++ b/plugins/module_utils/zoau_version_checker.py @@ -13,8 +13,12 @@ from __future__ import absolute_import, division, print_function -import subprocess +# import subprocess +try: + from zoautil_py import ZOAU_API_VERSION +except Exception: + ZOAU_API_VERSION = "1.2.0" __metaclass__ = type @@ -96,16 +100,23 @@ def get_zoau_version_str(): Returns: { [int, int, int] } -- ZOAU version found in format [#,#,#]. There is a provision for a 4th level eg "v1.2.0.1". + """ - zoaversion_out = subprocess.run( - 'zoaversion', shell=True, capture_output=True, check=False - ) + # zoaversion_out = subprocess.run( + # 'zoaversion', shell=True, capture_output=True, check=False + # ) + # version_list = ( + # zoaversion_out + # .stdout + # .decode('UTF-8') + # .strip() + # .split(' ')[3][1:] + # .split('.') + #) + zoaversion_out = ZOAU_API_VERSION.split('.') + version_list = ( - zoaversion_out - .stdout - .decode('UTF-8') - .strip() - .split(' ')[3][1:] - .split('.') + ZOAU_API_VERSION.split('.') ) + return version_list From e547d27cc8111c6a46dfdf5005ff09e03c88080c Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Mon, 16 Oct 2023 11:12:20 -0400 Subject: [PATCH 204/495] Removed old version checker/shell call, added changelog entry --- .../1018-internal-consolidate-version-checks.yml | 9 +++++++++ plugins/module_utils/zoau_version_checker.py | 13 ------------- 2 files changed, 9 insertions(+), 13 deletions(-) create mode 100644 changelogs/fragments/1018-internal-consolidate-version-checks.yml diff --git a/changelogs/fragments/1018-internal-consolidate-version-checks.yml b/changelogs/fragments/1018-internal-consolidate-version-checks.yml new file mode 100644 index 000000000..3698ed510 --- /dev/null +++ b/changelogs/fragments/1018-internal-consolidate-version-checks.yml @@ -0,0 +1,9 @@ +trivial: +- zoau_version_check - Change shell call to include call, for higher responsivity. + (https://github.com/ansible-collections/ibm_zos_core/pull/1027) +- zos_operator - Use new version check + (https://github.com/ansible-collections/ibm_zos_core/pull/1027) +- zos_operator_action_query - Use new version check + (https://github.com/ansible-collections/ibm_zos_core/pull/1027) +- utils/job.py - Use new version check + (https://github.com/ansible-collections/ibm_zos_core/pull/1027) diff --git a/plugins/module_utils/zoau_version_checker.py b/plugins/module_utils/zoau_version_checker.py index 601ab6f16..41dd35276 100644 --- a/plugins/module_utils/zoau_version_checker.py +++ b/plugins/module_utils/zoau_version_checker.py @@ -102,19 +102,6 @@ def get_zoau_version_str(): provision for a 4th level eg "v1.2.0.1". """ - # zoaversion_out = subprocess.run( - # 'zoaversion', shell=True, capture_output=True, check=False - # ) - # version_list = ( - # zoaversion_out - # .stdout - # .decode('UTF-8') - # .strip() - # .split(' ')[3][1:] - # .split('.') - #) - zoaversion_out = ZOAU_API_VERSION.split('.') - version_list = ( ZOAU_API_VERSION.split('.') ) From c3d9bd12137a42daa7aebd354a44e46796ae4965 Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Mon, 16 Oct 2023 15:00:37 -0400 Subject: [PATCH 205/495] Changed location of duration value set, so it will always be populated. --- plugins/module_utils/job.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/plugins/module_utils/job.py b/plugins/module_utils/job.py index 390405d33..30cb47bb9 100644 --- a/plugins/module_utils/job.py +++ b/plugins/module_utils/job.py @@ -280,6 +280,8 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=T sleep(1) list_of_dds = list_dds(entry.id) + job["duration"] = duration + for single_dd in list_of_dds: dd = {} @@ -354,11 +356,13 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=T job["ret_code"]["msg"] = tmptext.strip() job["ret_code"]["msg_code"] = None job["ret_code"]["code"] = None - if len(list_of_dds) > 0: + + # if len(list_of_dds) > 0: # The duration should really only be returned for job submit but the code # is used job_output as well, for now we can ignore this point unless # we want to offer a wait_time_s for job output which might be reasonable. - job["duration"] = duration + # Note: Moved this to the upper time loop, so it should always be populated. + # job["duration"] = duration final_entries.append(job) if not final_entries: From faa129ed25ec938ceb86e7fb04fe1cb5d0ba233a Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Mon, 16 Oct 2023 15:31:55 -0400 Subject: [PATCH 206/495] found another edge case where duration was not being returned. --- plugins/module_utils/job.py | 1 + 1 file changed, 1 insertion(+) diff --git a/plugins/module_utils/job.py b/plugins/module_utils/job.py index 30cb47bb9..cfe8c4a67 100644 --- a/plugins/module_utils/job.py +++ b/plugins/module_utils/job.py @@ -271,6 +271,7 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=T job["content_type"] = "" job["ret_code"]["steps"] = [] job["ddnames"] = [] + job["duration"] = duration if dd_scan: list_of_dds = list_dds(entry.id) From 08fbae69b8b7d2cd7e9c9e988b3d24808d6d99c7 Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Mon, 16 Oct 2023 16:08:58 -0400 Subject: [PATCH 207/495] added tolerance to test on max_rc, where duration is not always returned. --- tests/functional/modules/test_zos_job_submit_func.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/tests/functional/modules/test_zos_job_submit_func.py b/tests/functional/modules/test_zos_job_submit_func.py index b93b448c7..44dfdbf01 100644 --- a/tests/functional/modules/test_zos_job_submit_func.py +++ b/tests/functional/modules/test_zos_job_submit_func.py @@ -529,7 +529,12 @@ def test_job_submit_max_rc(ansible_zos_module, args): #Expecting: - "The job return code 8 was non-zero in the job output, this job has failed" # - Consider using module zos_job_query to poll for a long running job or # increase option \\'wait_times_s` to a value greater than 10.", - if result.get('duration') >= args["wait_time_s"]: + if result.get('duration'): + duration = result.get('duration') + else: + duration = 0 + + if duration >= args["wait_time_s"]: re.search(r'long running job', repr(result.get("msg"))) else: assert re.search(r'non-zero', repr(result.get("msg"))) From 1de66d5895297540fe21da252736023ecba51dd1 Mon Sep 17 00:00:00 2001 From: Demetri <dimatos@gmail.com> Date: Mon, 16 Oct 2023 16:06:42 -0700 Subject: [PATCH 208/495] Staging v1.7.0 merge to main (#1019) (#1023) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Galaxy 1.7 updates * Update meta runtime to support ansible-core 2.14 or later * Update ibm_zos_core_meta.yml with updated version * Update readme to align to supported ansible versions and new urls * Added additional sanity ignore files to the exclude list * Added additional sanity ignore files to the exclude list for ansible-lint. * Update copyright yrs for source files that were overlooked * Remove requirements from module doc, rely on offerings minimum requirements, also zoau 1.2.1 never was supported * Add changelog summary for 1.7 * Adding generated antsibull-changelog release changelog and artifacts * Remove v1.7.0_summary, its no longer needed * Update release notes for ac 1.7.0 * Remove unsupported collection versions requiring a version of zoau that is EOS --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> --- .ansible-lint | 4 + CHANGELOG.rst | 36 +--- README.md | 6 +- changelogs/.plugin-cache.yaml | 2 +- changelogs/changelog.yaml | 14 ++ docs/source/modules/zos_gather_facts.rst | 5 - docs/source/release_notes.rst | 202 +++++------------------ galaxy.yml | 6 +- meta/ibm_zos_core_meta.yml | 2 +- meta/runtime.yml | 2 +- plugins/action/zos_copy.py | 2 +- plugins/modules/zos_apf.py | 2 +- plugins/modules/zos_encode.py | 2 +- plugins/modules/zos_gather_facts.py | 2 - 14 files changed, 78 insertions(+), 209 deletions(-) diff --git a/.ansible-lint b/.ansible-lint index ac0ca0b26..7325803a2 100644 --- a/.ansible-lint +++ b/.ansible-lint @@ -25,7 +25,11 @@ exclude_paths: - tests/helpers - tests/requirements.txt - tests/unit + - tests/sanity/ignore-2.9.txt - tests/sanity/ignore-2.10.txt + - tests/sanity/ignore-2.11.txt + - tests/sanity/ignore-2.12.txt + - tests/sanity/ignore-2.13.txt - venv* parseable: true quiet: false diff --git a/CHANGELOG.rst b/CHANGELOG.rst index c6b3a91e0..a7c787d05 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -5,38 +5,13 @@ ibm.ibm_zos_core Release Notes .. contents:: Topics -v1.7.0-beta.2 -============= - -Release Summary ---------------- - -Release Date: '2023-08-21' -This changelog describes all changes made to the modules and plugins included -in this collection. The release date is the date the changelog is created. -For additional details such as required dependencies and availability review -the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__ - -Minor Changes -------------- - -- zos_archive - If destination data set space is not provided then the module computes it based on the src list and/or expanded src list based on pattern provided. (https://github.com/ansible-collections/ibm_zos_core/pull/930). -- zos_archive - When xmit faces a space error in xmit operation because of dest or log data set are filled raises an appropriate error hint. (https://github.com/ansible-collections/ibm_zos_core/pull/930). -- zos_unarchive - When copying to remote fails now a proper error message is displayed. (https://github.com/ansible-collections/ibm_zos_core/pull/930). -- zos_unarchive - When copying to remote if space_primary is not defined, then is defaulted to 5M. (https://github.com/ansible-collections/ibm_zos_core/pull/930). - -Bugfixes --------- - -- zos_archive - Module did not return the proper src state after archiving. Fix now displays the status of the src after the operation. (https://github.com/ansible-collections/ibm_zos_core/pull/930). - -v1.7.0-beta.1 -============= +v1.7.0 +====== Release Summary --------------- -Release Date: '2023-07-26' +Release Date: '2023-10-09' This changelog describes all changes made to the modules and plugins included in this collection. The release date is the date the changelog is created. For additional details such as required dependencies and availability review @@ -51,6 +26,8 @@ Minor Changes ------------- - Add support for Jinja2 templates in zos_copy and zos_job_submit when using local source files. (https://github.com/ansible-collections/ibm_zos_core/pull/667) +- zos_archive - If destination data set space is not provided then the module computes it based on the src list and/or expanded src list based on pattern provided. (https://github.com/ansible-collections/ibm_zos_core/pull/930). +- zos_archive - When xmit faces a space error in xmit operation because of dest or log data set are filled raises an appropriate error hint. (https://github.com/ansible-collections/ibm_zos_core/pull/930). - zos_copy - Adds block_size, record_format, record_length, space_primary, space_secondary, space_type and type in the return output when the destination data set does not exist and has to be created by the module. (https://github.com/ansible-collections/ibm_zos_core/pull/773) - zos_data_set - record format = 'F' has been added to support 'fixed' block records. This allows records that can use the entire block. (https://github.com/ansible-collections/ibm_zos_core/pull/821) - zos_job_output - zoau added 'program_name' to their field output starting with v1.2.4. This enhancement checks for that version and passes the extra column through. (https://github.com/ansible-collections/ibm_zos_core/pull/841) @@ -58,11 +35,14 @@ Minor Changes - zos_job_query - unnecessary calls were made to find a jobs DDs that incurred unnecessary overhead. This change removes those resulting in a performance increase in job related queries. (https://github.com/ansible-collections/ibm_zos_core/pull/911) - zos_job_query - zoau added 'program_name' to their field output starting with v1.2.4. This enhancement checks for that version and passes the extra column through. (https://github.com/ansible-collections/ibm_zos_core/pull/841) - zos_job_submit - zoau added 'program_name' to their field output starting with v1.2.4. This enhancement checks for that version and passes the extra column through. (https://github.com/ansible-collections/ibm_zos_core/pull/841) +- zos_unarchive - When copying to remote fails now a proper error message is displayed. (https://github.com/ansible-collections/ibm_zos_core/pull/930). +- zos_unarchive - When copying to remote if space_primary is not defined, then is defaulted to 5M. (https://github.com/ansible-collections/ibm_zos_core/pull/930). Bugfixes -------- - module_utils - data_set.py - Reported a failure caused when cataloging a VSAM data set. Fix now corrects how VSAM data sets are cataloged. (https://github.com/ansible-collections/ibm_zos_core/pull/791). +- zos_archive - Module did not return the proper src state after archiving. Fix now displays the status of the src after the operation. (https://github.com/ansible-collections/ibm_zos_core/pull/930). - zos_blockinfile - Test case generate a data set that was not correctly removed. Changes delete the correct data set not only member. (https://github.com/ansible-collections/ibm_zos_core/pull/840) - zos_copy - Module returned the dynamic values created with the same dataset type and record format. Fix validate the correct dataset type and record format of target created. (https://github.com/ansible-collections/ibm_zos_core/pull/824) - zos_copy - Reported a false positive such that the response would have `changed=true` when copying from a source (src) or destination (dest) data set that was in use (DISP=SHR). This change now displays an appropriate error message and returns `changed=false`. (https://github.com/ansible-collections/ibm_zos_core/pull/794). diff --git a/README.md b/README.md index 5cbd6fd98..13f45889f 100644 --- a/README.md +++ b/README.md @@ -50,14 +50,14 @@ and ansible-doc to automate tasks on z/OS. Ansible version compatibility ============================= -This collection has been tested against **Ansible** and **Ansible Core** versions >=2.9,<2.16. -The Ansible and Ansible Core versions supported for this collection align to the +This collection has been tested against **Ansible Core** versions >=2.14. +The Ansible Core versions supported for this collection align to the [ansible-core support matrix](https://docs.ansible.com/ansible/latest/reference_appendices/release_and_maintenance.html#ansible-core-support-matrix). Review the [Ansible community changelogs](https://docs.ansible.com/ansible/latest/reference_appendices/release_and_maintenance.html#ansible-community-changelogs) for corresponding **Ansible community packages** and **ansible-core**. For **Ansible Automation Platform** (AAP) users, review the -[Ansible Automation Platform Certified Content](https://access.redhat.com/articles/3642632) +[Ansible Automation Platform Certified Content](https://access.redhat.com/support/articles/ansible-automation-platform-certified-content) and [AAP Life Cycle](https://access.redhat.com/support/policy/updates/ansible-automation-platform) for more more information on supported versions of Ansible. diff --git a/changelogs/.plugin-cache.yaml b/changelogs/.plugin-cache.yaml index c07ea8e62..810d65965 100644 --- a/changelogs/.plugin-cache.yaml +++ b/changelogs/.plugin-cache.yaml @@ -126,4 +126,4 @@ plugins: strategy: {} test: {} vars: {} -version: 1.7.0-beta.2 +version: 1.7.0 diff --git a/changelogs/changelog.yaml b/changelogs/changelog.yaml index 753c8e318..6988760f9 100644 --- a/changelogs/changelog.yaml +++ b/changelogs/changelog.yaml @@ -875,6 +875,20 @@ releases: name: zos_volume_init namespace: '' release_date: '2023-04-26' + 1.7.0: + changes: + release_summary: 'Release Date: ''2023-10-09'' + + This changelog describes all changes made to the modules and plugins included + + in this collection. The release date is the date the changelog is created. + + For additional details such as required dependencies and availability review + + the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__' + fragments: + - v1.7.0_summary.yml + release_date: '2023-10-09' 1.7.0-beta.1: changes: bugfixes: diff --git a/docs/source/modules/zos_gather_facts.rst b/docs/source/modules/zos_gather_facts.rst index 232cc26ba..63bd22701 100644 --- a/docs/source/modules/zos_gather_facts.rst +++ b/docs/source/modules/zos_gather_facts.rst @@ -22,11 +22,6 @@ Synopsis - Note, the module will fail fast if any unsupported options are provided. This is done to raise awareness of a failure in an automation setting. -Requirements ------------- - -- ZOAU 1.2.1 or later. - diff --git a/docs/source/release_notes.rst b/docs/source/release_notes.rst index 9a7bdb059..de1a27013 100644 --- a/docs/source/release_notes.rst +++ b/docs/source/release_notes.rst @@ -6,47 +6,14 @@ Releases ======== -Version 1.7.0-beta.2 -==================== - -Minor Changes -------------- -- ``zos_archive`` - - - When xmit faces a space error in xmit operation because of dest or log data set being filled raises an appropriate error hint. - - If destination data set space is not provided then the module computes it based on the src list and/or expanded src list based on pattern provided. - -- ``zos_unarchive`` - - - When copying to remote fails now a proper error message is displayed. - - When copying to remote if space_primary is not defined, then is defaulted to 5M. - -Bugfixes --------- -- ``zos_archive`` - Module did not return the proper src state after archiving. Fix now displays the status of the src after the operation. - -Availability ------------- - -* `Galaxy`_ -* `GitHub`_ - -Reference ---------- - -* Supported by `z/OS V2R3`_ or later -* Supported by the `z/OS® shell`_ -* Supported by `IBM Open Enterprise SDK for Python`_ `3.9`_ - `3.11`_ -* Supported by IBM `Z Open Automation Utilities 1.2.3`_ (or later) but prior to version 1.3. - -Version 1.7.0-beta.1 -==================== +Version 1.7.0 +============= New Modules ----------- -- ``zos_archive`` - archive files, data sets and extend archives on z/OS. Formats include, *bz2*, *gz*, *tar*, *zip*, *terse*, *xmit* and *pax*. -- ``zos_unarchive`` - unarchive files and data sets in z/OS. Formats include, *bz2*, *gz*, *tar*, *zip*, *terse*, *xmit* and *pax*. +- ``zos_archive`` - archive files, data sets and extend archives on z/OS. Formats include, *bz2*, *gz*, *tar*, *zip*, *terse*, *xmit* and *pax*. +- ``zos_unarchive`` - unarchive files and data sets on z/OS. Formats include, *bz2*, *gz*, *tar*, *zip*, *terse*, *xmit* and *pax*. Major Changes ------------- @@ -60,36 +27,48 @@ Minor Changes - displays the data set attributes when the destination does not exist and was created by the module. - reverts the logic that would automatically create backups in the event of a module failure leaving it up to the user to decide if a backup is needed. - ``zos_data_set`` - supports record format *F* (fixed) where one physical block on disk is one logical record and all the blocks and records are the same size. -- ``zos_job_output`` - displays job information *asid*, *creation date*, *creation time*, *job class*, *priority*, *queue position*, *service class* and conditionally *program name* (when ZOAU is v 1.2.4 or later). +- ``zos_job_output`` - displays job information *asid*, *creation date*, *creation time*, *job class*, *priority*, *queue position*, *service class* and conditionally *program name* (when ZOAU is v1.2.4 or later). - ``zos_job_query`` + - displays job information *asid*, *creation date*, *creation time*, *job class*, *priority*, *queue position*, *service class* and conditionally *program name* (when ZOAU is v 1.2.4 or later). - removes unnecessary queries to find DDs improving the modules performance. -- ``zos_job_submit`` - displays job information *asid*, *creation date*, *creation time*, *job class*, *priority*, *queue position*, *service class* and conditionally *program name* (when ZOAU is v 1.2.4 or later). +- ``zos_job_submit`` - displays job information *asid*, *creation date*, *creation time*, *job class*, *priority*, *queue position*, *service class* and conditionally *program name* (when ZOAU is v1.2.4 or later). +- ``zos_archive`` + + - When XMIT encounters a space error because of the destination (dest) or log data set has reached capacity, the module raises an appropriate error message. + - When the destination (dest) data set space is not provided, then the module computes it using the source (src) given the pattern provided. + +- ``zos_unarchive`` + + - When copying to the z/OS managed node (remote_src) results in a failure, a proper error message is displayed + - When copying to the z/OS managed node (remote_src), if the option *primary_space* is not defined, then it is defaulted to 5M. Bugfixes -------- -- ``zos_data_set`` - fixes occasionally occurring orphaned VSAM cluster components such as INDEX when `present=absent`. -- ``zos_fetch`` - fixes the warning that appeared about the use of _play_context.verbosity. +- ``zos_data_set`` - fixes occasionally occurring orphaned VSAM cluster components such as INDEX when *present=absent*. +- ``zos_fetch`` - fixes the warning that appeared about the use of *_play_context.verbosity*. - ``zos_copy`` - - fixes the warning that appeared about the use of _play_context.verbosity. + - fixes the warning that appeared about the use of *_play_context.verbosity*. - fixes an issue where subdirectories would not be encoded. - fixes an issue where when mode was set, the mode was not applied to existing directories and files. - - displays a error message when copying into a data set that is being accessed by another process and no longer returns with `changed=true`. + - displays a error message when copying into a data set that is being accessed by another process and no longer returns with *changed=true*. -``zos_job_output`` - displays an appropriate error message for a job is not found in the spool. -``zos_operator`` - fixes the false reports that a command failed when keywords such as *error* were seen, the module now acts as a passthrough. +- ``zos_job_output`` - displays an appropriate error message for a job is not found in the spool. +- ``zos_operator`` - fixes the false reports that a command failed when keywords such as *error* were seen, the module now acts as a passthrough. +- ``zos_archive`` - Module did not return the proper src state after archiving. Fix now displays the status of the src after the operation. Availability ------------ +* `Automation Hub`_ * `Galaxy`_ * `GitHub`_ Reference --------- -* Supported by `z/OS V2R3`_ or later +* Supported by `z/OS®`_ V2R4 or later * Supported by the `z/OS® shell`_ * Supported by `IBM Open Enterprise SDK for Python`_ `3.9`_ - `3.11`_ * Supported by IBM `Z Open Automation Utilities 1.2.3`_ (or later) but prior to version 1.3. @@ -152,7 +131,7 @@ Availability Reference --------- -* Supported by `z/OS V2R3`_ or later +* Supported by `z/OS®`_ V2R4 or later * Supported by the `z/OS® shell`_ * Supported by `IBM Open Enterprise SDK for Python`_ `3.9`_ - `3.11`_ * Supported by IBM `Z Open Automation Utilities 1.2.2`_ (or later) but prior to version 1.3. @@ -268,7 +247,7 @@ Availability Reference --------- -* Supported by `z/OS Version`_ V2R4 or later +* Supported by `z/OS®`_ V2R4 or later * Supported by the `z/OS® shell`_ * Supported by `IBM Open Enterprise SDK for Python`_ `3.9`_ - `3.11`_ * Supported by IBM `Z Open Automation Utilities 1.2.2`_ (or later) but prior to version 1.3. @@ -307,7 +286,7 @@ Availability Reference --------- -* Supported by `z/OS V2R3`_ or later +* Supported by `z/OS®`_ V2R4 or later * Supported by the `z/OS® shell`_ * Supported by `IBM Open Enterprise SDK for Python`_ `3.9`_ * Supported by IBM `Z Open Automation Utilities 1.1.0`_ and @@ -457,7 +436,7 @@ Availability Reference --------- -* Supported by `z/OS V2R3`_ or later +* Supported by `z/OS®`_ V2R4 or later * Supported by the `z/OS® shell`_ * Supported by `IBM Open Enterprise SDK for Python`_ `3.8`_` - `3.9`_ * Supported by IBM `Z Open Automation Utilities 1.1.0`_ and @@ -558,7 +537,7 @@ Availability Reference --------- -* Supported by `z/OS V2R3`_ or later +* Supported by `z/OS®`_ V2R4 or later * Supported by the `z/OS® shell`_ * Supported by `IBM Open Enterprise SDK for Python`_ v3.8.2 - `IBM Open Enterprise SDK for Python`_ v3.9.5 @@ -599,7 +578,7 @@ Availability Reference --------- -* Supported by `z/OS V2R3`_ or later +* Supported by `z/OS®`_ V2R4 or later * Supported by the `z/OS® shell`_ * Supported by `IBM Open Enterprise SDK for Python`_ 3.8.2 or later * Supported by IBM `Z Open Automation Utilities 1.1.0`_ and @@ -645,7 +624,7 @@ Availability Reference --------- -* Supported by `z/OS V2R3`_ or later +* Supported by `z/OS®`_ V2R4 or later * Supported by the `z/OS® shell`_ * Supported by `IBM Open Enterprise SDK for Python`_ 3.8.2 or later * Supported by IBM `Z Open Automation Utilities 1.1.0`_ and @@ -677,7 +656,7 @@ Availability Reference --------- -* Supported by `z/OS V2R3`_ or later +* Supported by `z/OS®`_ V2R4 or later * Supported by the `z/OS® shell`_ * Supported by `IBM Open Enterprise SDK for Python`_ 3.8.2 or later * Supported by IBM `Z Open Automation Utilities 1.1.0`_ and @@ -800,7 +779,7 @@ Availability Reference --------- -* Supported by `z/OS V2R3`_ or later +* Supported by `z/OS®`_ V2R4 or later * Supported by the `z/OS® shell`_ * Supported by `IBM Open Enterprise SDK for Python`_ 3.8.2 or later * Supported by IBM `Z Open Automation Utilities 1.1.0`_ and @@ -822,115 +801,6 @@ Known issues "UPB,PRECOMP", "UPB, POSTCOMP" or "UPB,PRECOMP,POSTCOMP". This issue is addressed by APAR PH28089. -Version 1.2.1 -============= - -Notes ------ - -* Update required -* Module changes - - * Noteworthy Python 2.x support - - * encode - removed TemporaryDirectory usage. - * zos_copy - fixed regex support, dictionary merge operation fix - * zos_fetch - fix quote import - -* Collection changes - - * Beginning this release, all sample playbooks previously included with the - collection will be made available on the `samples repository`_. The - `samples repository`_ explains the playbook concepts, - discusses z/OS administration, provides links to the samples support site, - blogs and other community resources. - -* Documentation changes - - * In this release, documentation related to playbook configuration has been - migrated to the `samples repository`_. Each sample contains a README that - explains what configurations must be made to run the sample playbook. - -.. _samples repository: - https://github.com/IBM/z_ansible_collections_samples/blob/main/README.md - -Availability ------------- - -* `Automation Hub`_ -* `Galaxy`_ -* `GitHub`_ - -Reference ---------- - -* Supported by IBM Open Enterprise Python for z/OS: 3.8.2 or later -* Supported by IBM Z Open Automation Utilities 1.0.3 PTF UI70435 -* Supported by z/OS V2R3 or later -* The z/OS® shell - -Version 1.1.0 -============= - -Notes ------ -* Update recommended -* New modules - - * zos_fetch - * zos_encode - * zos_operator_action_query - * zos_operator - * zos_tso_command - * zos_ping - -* New filter -* Improved error handling and messages -* Bug fixes -* Documentation updates -* New samples - -Availability ------------- - -* `Automation Hub`_ -* `Galaxy`_ -* `GitHub`_ - -Reference ---------- - -* Supported by IBM Open Enterprise Python for z/OS: 3.8.2 or later -* Supported by IBM Z Open Automation Utilities: 1.0.3 PTF UI70435 -* Supported by z/OS V2R3 -* The z/OS® shell - - -Version 1.0.0 -============= - -Notes ------ - -* Update recommended -* Security vulnerabilities fixed -* Improved test, security and injection coverage -* Module zos_data_set catalog support added -* Documentation updates - -Availability ------------- - -* `Automation Hub`_ -* `Galaxy`_ -* `GitHub`_ - -Reference ---------- - -* Supported by IBM Z Open Automation Utilities: 1.0.1 PTF UI66957 through - 1.0.3 PTF UI70435 - .. ............................................................................. .. Global Links .. ............................................................................. @@ -960,8 +830,12 @@ Reference https://www.ibm.com/docs/en/zoau/1.2.x .. _z/OS® shell: https://www.ibm.com/support/knowledgecenter/en/SSLTBW_2.4.0/com.ibm.zos.v2r4.bpxa400/part1.htm +.. _z/OS®: + https://www.ibm.com/docs/en/zos .. _z/OS V2R3: https://www.ibm.com/support/knowledgecenter/SSLTBW_2.3.0/com.ibm.zos.v2r3/en/homepage.html +.. _z/OS V2R4: + https://www.ibm.com/docs/en/zos/2.4.0 .. _z/OS Version: https://www.ibm.com/docs/en/zos .. _FAQs: diff --git a/galaxy.yml b/galaxy.yml index b1090564c..f5c0ccf46 100644 --- a/galaxy.yml +++ b/galaxy.yml @@ -6,7 +6,7 @@ namespace: ibm name: ibm_zos_core # The collection version -version: 1.7.0-beta.2 +version: 1.7.0 # Collection README file readme: README.md @@ -91,5 +91,9 @@ build_ignore: - tests/helpers - tests/requirements.txt - tests/unit + - tests/sanity/ignore-2.9.txt - tests/sanity/ignore-2.10.txt + - tests/sanity/ignore-2.11.txt + - tests/sanity/ignore-2.12.txt + - tests/sanity/ignore-2.13.txt - venv* diff --git a/meta/ibm_zos_core_meta.yml b/meta/ibm_zos_core_meta.yml index f659df786..51e4c7392 100644 --- a/meta/ibm_zos_core_meta.yml +++ b/meta/ibm_zos_core_meta.yml @@ -1,5 +1,5 @@ name: ibm_zos_core -version: "1.7.0-beta.2" +version: "1.7.0" managed_requirements: - name: "IBM Open Enterprise SDK for Python" diff --git a/meta/runtime.yml b/meta/runtime.yml index 576832bc7..be99ccf4b 100644 --- a/meta/runtime.yml +++ b/meta/runtime.yml @@ -1,2 +1,2 @@ --- -requires_ansible: '>=2.9.0' +requires_ansible: '>=2.14.0' diff --git a/plugins/action/zos_copy.py b/plugins/action/zos_copy.py index 6b86d24a3..d65c4a468 100644 --- a/plugins/action/zos_copy.py +++ b/plugins/action/zos_copy.py @@ -1,4 +1,4 @@ -# Copyright (c) IBM Corporation 2019, 2020, 2021, 2022 +# Copyright (c) IBM Corporation 2019-2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/plugins/modules/zos_apf.py b/plugins/modules/zos_apf.py index d3a945d1b..d0fec1ff5 100644 --- a/plugins/modules/zos_apf.py +++ b/plugins/modules/zos_apf.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2020, 2022 +# Copyright (c) IBM Corporation 2020, 2022, 2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/plugins/modules/zos_encode.py b/plugins/modules/zos_encode.py index a4a92a985..2628ab174 100644 --- a/plugins/modules/zos_encode.py +++ b/plugins/modules/zos_encode.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019, 2020, 2022 +# Copyright (c) IBM Corporation 2019, 2020, 2022, 2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/plugins/modules/zos_gather_facts.py b/plugins/modules/zos_gather_facts.py index a3475be11..b7aeb7ee4 100644 --- a/plugins/modules/zos_gather_facts.py +++ b/plugins/modules/zos_gather_facts.py @@ -23,8 +23,6 @@ module: zos_gather_facts short_description: Gather z/OS system facts. version_added: '1.5.0' -requirements: - - ZOAU 1.2.1 or later. author: - "Ketan Kelkar (@ketankelkar)" description: From 312de14acb22af9303edfc9299cc5e9a6e9122db Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Mon, 16 Oct 2023 17:26:23 -0600 Subject: [PATCH 209/495] Enabler/validate path join part 2 (#1029) * Added real path fetch to base * Sec changes * Updated changelog and template * Added validation to zos_copy and zos_fetch Signed-off-by: Fernando Flores <fernandofloresdev@gmail.com> * Corrected positional argument * Added validation changes Signed-off-by: Fernando Flores <fernandofloresdev@gmail.com> * Removed f-string * Fixed path join for copy_to_file * Added validation function to template * Added new files * Added changelog fragment --------- Signed-off-by: Fernando Flores <fernandofloresdev@gmail.com> --- .../fragments/1029-validate-path-join.yml | 7 +++++++ plugins/module_utils/template.py | 20 ++++++++++++++----- plugins/modules/zos_archive.py | 14 ++++++++++--- plugins/modules/zos_unarchive.py | 9 +++++---- 4 files changed, 38 insertions(+), 12 deletions(-) create mode 100644 changelogs/fragments/1029-validate-path-join.yml diff --git a/changelogs/fragments/1029-validate-path-join.yml b/changelogs/fragments/1029-validate-path-join.yml new file mode 100644 index 000000000..234cf3148 --- /dev/null +++ b/changelogs/fragments/1029-validate-path-join.yml @@ -0,0 +1,7 @@ +minor_changes: + - zos_archive: Add validation into path joins to detect unauthorized path traversals. + (https://github.com/ansible-collections/ibm_zos_core/pull/1029) + - zos_unarchive: Add validation into path joins to detect unauthorized path traversals. + (https://github.com/ansible-collections/ibm_zos_core/pull/1029) + - module_utils/template: Add validation into path joins to detect unauthorized path traversals. + (https://github.com/ansible-collections/ibm_zos_core/pull/1029) \ No newline at end of file diff --git a/plugins/module_utils/template.py b/plugins/module_utils/template.py index 407a231c6..419b997b2 100644 --- a/plugins/module_utils/template.py +++ b/plugins/module_utils/template.py @@ -238,7 +238,7 @@ def render_file_template(self, file_path, variables): )) try: - template_file_path = path.join(temp_template_dir, file_path) + template_file_path = path.join(validation.validate_safe_path(temp_template_dir), validation.validate_safe_path(file_path)) with open(template_file_path, mode="w", encoding=self.encoding) as template: template.write(rendered_contents) # There could be encoding errors. @@ -300,9 +300,16 @@ def render_dir_template(self, variables): for dirpath, subdirs, files in os.walk(self.template_dir): for template_file in files: - relative_dir = os.path.relpath(dirpath, self.template_dir) - file_path = os.path.normpath(os.path.join(relative_dir, template_file)) - + relative_dir = os.path.relpath( + validation.validate_safe_path(dirpath), + validation.validate_safe_path(self.template_dir) + ) + file_path = os.path.normpath( + os.path.join( + validation.validate_safe_path(relative_dir), + validation.validate_safe_path(template_file) + ) + ) try: template = self.templating_env.get_template(file_path) rendered_contents = template.render(variables) @@ -318,7 +325,10 @@ def render_dir_template(self, variables): )) try: - template_file_path = os.path.join(temp_template_dir, file_path) + template_file_path = os.path.join( + validation.validate_safe_path(temp_template_dir), + validation.validate_safe_path(file_path) + ) os.makedirs(os.path.dirname(template_file_path), exist_ok=True) with open(template_file_path, mode="w", encoding=self.encoding) as temp: temp.write(rendered_contents) diff --git a/plugins/modules/zos_archive.py b/plugins/modules/zos_archive.py index c48fd767e..f5306bb25 100644 --- a/plugins/modules/zos_archive.py +++ b/plugins/modules/zos_archive.py @@ -420,7 +420,9 @@ from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import ( better_arg_parser, data_set, - mvs_cmd) + validation, + mvs_cmd, +) from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import ( MissingZOAUImport, ) @@ -650,11 +652,17 @@ def archive_targets(self): if os.path.isdir(target): for directory_path, directory_names, file_names in os.walk(target, topdown=True): for directory_name in directory_names: - full_path = os.path.join(directory_path, directory_name) + full_path = os.path.join( + validation.validate_safe_path(directory_path), + validation.validate_safe_path(directory_name) + ) self.add(full_path, strip_prefix(self.arcroot, full_path)) for file_name in file_names: - full_path = os.path.join(directory_path, file_name) + full_path = os.path.join( + validation.validate_safe_path(directory_path), + validation.validate_safe_path(file_name) + ) self.add(full_path, strip_prefix(self.arcroot, full_path)) else: self.add(target, strip_prefix(self.arcroot, target)) diff --git a/plugins/modules/zos_unarchive.py b/plugins/modules/zos_unarchive.py index 3f79fc789..be7c93f5c 100644 --- a/plugins/modules/zos_unarchive.py +++ b/plugins/modules/zos_unarchive.py @@ -386,6 +386,7 @@ from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import ( better_arg_parser, data_set, + validation, mvs_cmd) import re import os @@ -447,7 +448,7 @@ def update_permissions(self): Update permissions in unarchived files. """ for target in self.targets: - file_name = os.path.join(self.dest, target) + file_name = os.path.join(validation.validate_safe_path(self.dest), validation.validate_safe_path(target)) file_args = self.module.load_file_common_arguments(self.module.params, path=file_name) self.module.set_fs_attributes_if_different(file_args, self.changed) @@ -906,13 +907,13 @@ def tar_filter(member, dest_path): name = member.path.lstrip('/' + os.sep) if os.path.isabs(name): raise AbsolutePathError - target_path = os.path.realpath(os.path.join(dest_path, name)) + target_path = os.path.realpath(os.path.join(validation.validate_safe_path(dest_path), validation.validate_safe_path(name))) if os.path.commonpath([target_path, dest_path]) != dest_path: raise OutsideDestinationError(member, target_path) if member.islnk() or member.issym(): if os.path.isabs(member.linkname): raise AbsoluteLinkError(member) - target_path = os.path.realpath(os.path.join(dest_path, member.linkname)) + target_path = os.path.realpath(os.path.join(validation.validate_safe_path(dest_path), validation.validate_safe_path(member.linkname))) if os.path.commonpath([target_path, dest_path]) != dest_path: raise LinkOutsideDestinationError(member, target_path) @@ -923,7 +924,7 @@ def zip_filter(member, dest_path): name = name.lstrip('/' + os.sep) if os.path.isabs(name): raise AbsolutePathError - target_path = os.path.realpath(os.path.join(dest_path, name)) + target_path = os.path.realpath(os.path.join(validation.validate_safe_path(dest_path), validation.validate_safe_path(name))) if os.path.commonpath([target_path, dest_path]) != dest_path: raise OutsideDestinationError(member, target_path) From 36c2684847e0fa6fddb7228a66f2c81687a92c75 Mon Sep 17 00:00:00 2001 From: Demetri <dimatos@gmail.com> Date: Thu, 19 Oct 2023 23:49:43 -0700 Subject: [PATCH 210/495] Add known issues doc for utf8 issues (#1035) * Add known issues doc for utf8 issues Signed-off-by: ddimatos <dimatos@gmail.com> * Updated changelog fragement to remove colon usage Signed-off-by: ddimatos <dimatos@gmail.com> * Corrected changelog errors Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> --- .../fragments/1029-validate-path-join.yml | 10 ++++---- .../1034-document-utf8-known-issue.yml | 23 +++++++++++++++++++ ...-operator-response-come-back-truncate.yaml | 6 ++--- .../934-Remove-conditional-unnecessary.yml | 2 +- ...nhance-Add-wait-zos-operator-and-query.yml | 4 ++-- ...or-zos-copy-and-remove-temporary-files.yml | 4 ++-- ...os-job-submit-truncate-final-character.yml | 2 +- .../fragments/963-validate-path-join.yml | 4 ++-- .../fragments/965-enhance-archive-tests.yml | 4 ++-- 9 files changed, 41 insertions(+), 18 deletions(-) create mode 100644 changelogs/fragments/1034-document-utf8-known-issue.yml diff --git a/changelogs/fragments/1029-validate-path-join.yml b/changelogs/fragments/1029-validate-path-join.yml index 234cf3148..785c1a41b 100644 --- a/changelogs/fragments/1029-validate-path-join.yml +++ b/changelogs/fragments/1029-validate-path-join.yml @@ -1,7 +1,7 @@ minor_changes: - - zos_archive: Add validation into path joins to detect unauthorized path traversals. - (https://github.com/ansible-collections/ibm_zos_core/pull/1029) - - zos_unarchive: Add validation into path joins to detect unauthorized path traversals. - (https://github.com/ansible-collections/ibm_zos_core/pull/1029) - - module_utils/template: Add validation into path joins to detect unauthorized path traversals. + - zos_archive - Add validation into path joins to detect unauthorized path traversals. + (https://github.com/ansible-collections/ibm_zos_core/pull/1029) + - zos_unarchive - Add validation into path joins to detect unauthorized path traversals. + (https://github.com/ansible-collections/ibm_zos_core/pull/1029) + - module_utils/template - Add validation into path joins to detect unauthorized path traversals. (https://github.com/ansible-collections/ibm_zos_core/pull/1029) \ No newline at end of file diff --git a/changelogs/fragments/1034-document-utf8-known-issue.yml b/changelogs/fragments/1034-document-utf8-known-issue.yml new file mode 100644 index 000000000..860fd3f6c --- /dev/null +++ b/changelogs/fragments/1034-document-utf8-known-issue.yml @@ -0,0 +1,23 @@ +known_issues: + - Several modules have reported UTF8 decoding errors when interacting with results + that contain non-printable UTF8 characters in the response. This occurs when + a module receives content that does not correspond to a UTF-8 value. + These include modules `zos_job_submit`, `zos_job_output`, + `zos_operator_action_query` but are not limited to this list. + This will be addressed in `ibm_zos_core` version 1.10.0-beta.1. + Each case is unique, some options to work around the error are below. + - Specify that the ASA assembler option be enabled to instruct the assembler + to use ANSI control characters instead of machine code control characters. + - Add `ignore_errors:true` to the playbook task so the task error will not + fail the playbook. + - If the error is resulting from a batch job, add `ignore_errors:true` to the + task and capture the output into a variable and extract the job ID with a + regular expression and then use `zos_job_output` to display the DD without + the non-printable character such as the DD `JESMSGLG`. + (https://github.com/ansible-collections/ibm_zos_core/issues/677) + (https://github.com/ansible-collections/ibm_zos_core/issues/776) + (https://github.com/ansible-collections/ibm_zos_core/issues/972) + - With later versions of `ansible-core` used with `ibm_zos_core` collection a + warning has started to appear "Module "ansible.builtin.command" returned non UTF-8 data in the JSON response" + that is currently being reviewed. There are no recommendations at this point. + (https://github.com/ansible-collections/ibm_zos_core/issues/983) diff --git a/changelogs/fragments/918-zos-operator-response-come-back-truncate.yaml b/changelogs/fragments/918-zos-operator-response-come-back-truncate.yaml index 58900fc01..1e2d3c10f 100644 --- a/changelogs/fragments/918-zos-operator-response-come-back-truncate.yaml +++ b/changelogs/fragments/918-zos-operator-response-come-back-truncate.yaml @@ -1,4 +1,4 @@ bugfixes: -- zos_operator: The last line of the operator was missing in the response of the module. - Fix now ensures the presence of the full output of the operator. - (https://github.com/ansible-collections/ibm_zos_core/pull/918) \ No newline at end of file + - zos_operator - The last line of the operator was missing in the response of + the module. The fix now ensures the presence of the full output of the operator. + https://github.com/ansible-collections/ibm_zos_core/pull/918) \ No newline at end of file diff --git a/changelogs/fragments/934-Remove-conditional-unnecessary.yml b/changelogs/fragments/934-Remove-conditional-unnecessary.yml index bf07c7f32..3ceeffa99 100644 --- a/changelogs/fragments/934-Remove-conditional-unnecessary.yml +++ b/changelogs/fragments/934-Remove-conditional-unnecessary.yml @@ -1,2 +1,2 @@ -- trivial: +trivial: - zos_blockinfile - remove test conditional unnecessary (https://github.com/ansible-collections/ibm_zos_core/pull/934). \ No newline at end of file diff --git a/changelogs/fragments/943-enhance-Add-wait-zos-operator-and-query.yml b/changelogs/fragments/943-enhance-Add-wait-zos-operator-and-query.yml index 5a8202c34..91f920145 100644 --- a/changelogs/fragments/943-enhance-Add-wait-zos-operator-and-query.yml +++ b/changelogs/fragments/943-enhance-Add-wait-zos-operator-and-query.yml @@ -1,8 +1,8 @@ minor_changes: - - zos_operator: Changed system to call 'wait=true' parameter to zoau call. + - zos_operator - Changed system to call 'wait=true' parameter to zoau call. Requires zoau 1.2.5 or later. (https://github.com/ansible-collections/ibm_zos_core/pull/976) - - zos_operator_action_query: Add a max delay of 5 seconds on each part of the operator_action_query. + - zos_operator_action_query - Add a max delay of 5 seconds on each part of the operator_action_query. Requires zoau 1.2.5 or later. (https://github.com/ansible-collections/ibm_zos_core/pull/976) diff --git a/changelogs/fragments/951-Change-copy-for-zos-copy-and-remove-temporary-files.yml b/changelogs/fragments/951-Change-copy-for-zos-copy-and-remove-temporary-files.yml index c90921c9f..99a0599ec 100644 --- a/changelogs/fragments/951-Change-copy-for-zos-copy-and-remove-temporary-files.yml +++ b/changelogs/fragments/951-Change-copy-for-zos-copy-and-remove-temporary-files.yml @@ -1,7 +1,7 @@ bugfixes: - - zos_job_submit: Temporary files were created in tmp directory. + - zos_job_submit - Temporary files were created in tmp directory. Fix now ensures the deletion of files every time the module run. (https://github.com/ansible-collections/ibm_zos_core/pull/951) minor_changes: - - zos_job_submit: Change action plugin call from copy to zos_copy. + - zos_job_submit - Change action plugin call from copy to zos_copy. (https://github.com/ansible-collections/ibm_zos_core/pull/951) \ No newline at end of file diff --git a/changelogs/fragments/952-zos-job-submit-truncate-final-character.yml b/changelogs/fragments/952-zos-job-submit-truncate-final-character.yml index aca865791..7a4ce88cb 100644 --- a/changelogs/fragments/952-zos-job-submit-truncate-final-character.yml +++ b/changelogs/fragments/952-zos-job-submit-truncate-final-character.yml @@ -1,4 +1,4 @@ bugfixes: -- zos_job_submit: The last line of the jcl was missing in the input. +- zos_job_submit - The last line of the jcl was missing in the input. Fix now ensures the presence of the full input in job_submit. (https://github.com/ansible-collections/ibm_zos_core/pull/952) \ No newline at end of file diff --git a/changelogs/fragments/963-validate-path-join.yml b/changelogs/fragments/963-validate-path-join.yml index 017c793cc..129af357e 100644 --- a/changelogs/fragments/963-validate-path-join.yml +++ b/changelogs/fragments/963-validate-path-join.yml @@ -1,5 +1,5 @@ minor_changes: - - zos_fetch: Add validation into path joins to detect unauthorized path traversals. + - zos_fetch - Add validation into path joins to detect unauthorized path traversals. (https://github.com/ansible-collections/ibm_zos_core/pull/962) - - zos_copy: Add validation into path joins to detect unauthorized path traversals. + - zos_copy - Add validation into path joins to detect unauthorized path traversals. (https://github.com/ansible-collections/ibm_zos_core/pull/962) \ No newline at end of file diff --git a/changelogs/fragments/965-enhance-archive-tests.yml b/changelogs/fragments/965-enhance-archive-tests.yml index b86bf22bf..80705e4c1 100644 --- a/changelogs/fragments/965-enhance-archive-tests.yml +++ b/changelogs/fragments/965-enhance-archive-tests.yml @@ -1,5 +1,5 @@ minor_changes: - - zos_archive: Enhanced test cases to use test lines the same length of the record length. + - zos_archive - Enhanced test cases to use test lines the same length of the record length. (https://github.com/ansible-collections/ibm_zos_core/pull/965) - - zos_unarchive: Enhanced test cases to use test lines the same length of the record length. + - zos_unarchive - Enhanced test cases to use test lines the same length of the record length. (https://github.com/ansible-collections/ibm_zos_core/pull/965) \ No newline at end of file From 9d3cbebc71fba42df88393ad2d1517c4671513c8 Mon Sep 17 00:00:00 2001 From: Ivan Moreno <iamorenosoto@gmail.com> Date: Tue, 24 Oct 2023 09:23:15 -0700 Subject: [PATCH 211/495] [Enhancement] [zos_copy] Add support for ASA control chars (#1028) * Added ASA support when copying from datasets with control chars * Added ASA support when copying from USS to sequential data sets * Added ASA support when copying from USS to PDS/E * Re-enabled copy from datasets to USS * Added copy from non-ASA data sets to seq ASA ones * Added copy from non-ASA data sets to partitioned ones * Added allocation of ASA destination data sets * Added first version of ASA compatibility validation * Added validations for asa_text * Added asa_text documentation * Fixed record_length issue when creating ASA data sets * Fixed record_length issue * Added asa_text example * Added first test * Added DBRM doc to zos_copy module (#1025) * Added DBRM doc to zos_copy module Signed-off-by: ddimatos <dimatos@gmail.com> * Removed an extra space from doc Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Added more tests * Added changelog fragment * Updated RST files * Removed unused import * Fixed merge * Added another validation * Updated docs * Changed use of tsocmd for mvscmd * Fixed merge error * Fixed bug with VSAM sources * Fixed bug with VSAM destinations --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: Demetri <dimatos@gmail.com> --- .../fragments/1028-asa-control-chars.yml | 4 + docs/source/modules/zos_copy.rst | 44 ++- docs/source/modules/zos_operator.rst | 19 +- docs/source/modules/zos_script.rst | 12 +- plugins/action/zos_copy.py | 9 + plugins/module_utils/copy.py | 96 ++++- plugins/module_utils/data_set.py | 8 +- plugins/modules/zos_copy.py | 301 ++++++++++++--- .../functional/modules/test_zos_copy_func.py | 344 ++++++++++++++++++ tests/pytest.ini | 3 +- 10 files changed, 769 insertions(+), 71 deletions(-) create mode 100644 changelogs/fragments/1028-asa-control-chars.yml diff --git a/changelogs/fragments/1028-asa-control-chars.yml b/changelogs/fragments/1028-asa-control-chars.yml new file mode 100644 index 000000000..6afc35e50 --- /dev/null +++ b/changelogs/fragments/1028-asa-control-chars.yml @@ -0,0 +1,4 @@ +minor_changes: +- zos_copy: add support in zos_copy for text files and data sets containing ASA + control characters. + (https://github.com/ansible-collections/ibm_zos_core/pull/1028) \ No newline at end of file diff --git a/docs/source/modules/zos_copy.rst b/docs/source/modules/zos_copy.rst index 191570bae..e19332bf4 100644 --- a/docs/source/modules/zos_copy.rst +++ b/docs/source/modules/zos_copy.rst @@ -26,6 +26,23 @@ Parameters ---------- +asa_text + If set to ``true``, indicates that either ``src`` or ``dest`` or both contain ASA control characters. + + When ``src`` is a USS file and ``dest`` is a data set, the copy will preserve ASA control characters in the destination. + + When ``src`` is a data set containing ASA control characters and ``dest`` is a USS file, the copy will put all control characters as plain text in the destination. + + If ``dest`` is a non-existent data set, it will be created with record format Fixed Block with ANSI format (FBA). + + If neither ``src`` or ``dest`` have record format Fixed Block with ANSI format (FBA) or Variable Block with ANSI format (VBA), the module will fail. + + This option is only valid for text files. If ``is_binary`` is ``true`` or ``executable`` is ``true`` as well, the module will fail. + + | **required**: False + | **type**: bool + + backup Specifies whether a backup of the destination should be created before copying data. @@ -140,6 +157,19 @@ force | **type**: bool +force_lock + By default, when c(dest) is a MVS data set and is being used by another process with DISP=SHR or DISP=OLD the module will fail. Use ``force_lock`` to bypass this check and continue with copy. + + If set to ``true`` and destination is a MVS data set opened by another process then zos_copy will try to copy using DISP=SHR. + + Using ``force_lock`` uses operations that are subject to race conditions and can lead to data loss, use with caution. + + If a data set member has aliases, and is not a program object, copying that member to a dataset that is in use will result in the aliases not being preserved in the target dataset. When this scenario occurs the module will fail. + + | **required**: False + | **type**: bool + + ignore_sftp_stderr During data transfer through SFTP, the module fails if the SFTP command directs any content to stderr. The user is able to override this behavior by setting this parameter to ``true``. By doing so, the module would essentially ignore the stderr stream produced by SFTP and continue execution. @@ -150,7 +180,11 @@ ignore_sftp_stderr is_binary - If set to ``true``, indicates that the file or data set to be copied is a binary file/data set. + If set to ``true``, indicates that the file or data set to be copied is a binary file or data set. + + When *is_binary=true*, no encoding conversion is applied to the content, all content transferred retains the original state. + + Use *is_binary=true* when copying a Database Request Module (DBRM) to retain the original state of the serialized SQL statements of a program. | **required**: False | **type**: bool @@ -706,7 +740,7 @@ Examples executable: true aliases: true - - name: Copy a Load Library from a USS directory /home/loadlib to a new PDSE + - name: Copy a Load Library from a USS directory /home/loadlib to a new PDSE zos_copy: src: '/home/loadlib/' dest: HLQ.LOADLIB.NEW @@ -714,6 +748,12 @@ Examples executable: true aliases: true + - name: Copy a file with ASA characters to a new sequential data set. + zos_copy: + src: ./files/print.txt + dest: HLQ.PRINT.NEW + asa_text: true + diff --git a/docs/source/modules/zos_operator.rst b/docs/source/modules/zos_operator.rst index b05b0331a..e0f65414f 100644 --- a/docs/source/modules/zos_operator.rst +++ b/docs/source/modules/zos_operator.rst @@ -58,18 +58,6 @@ wait_time_s | **default**: 1 -wait - Configuring wait used by the `zos_operator <./zos_operator.html>`_ module has been deprecated and will be removed in a future ibm.ibm_zos_core collection. - - Setting this option will yield no change, it is deprecated. - - Review option *wait_time_s* to instruct operator commands to wait. - - | **required**: False - | **type**: bool - | **default**: True - - Examples @@ -91,16 +79,11 @@ Examples zos_operator: cmd: "\\$PJ(*)" - - name: Execute operator command to show jobs, waiting up to 5 seconds for response + - name: Execute operator command to show jobs, always waiting 5 seconds for response zos_operator: cmd: 'd a,all' wait_time_s: 5 - - name: Execute operator command to show jobs, always waiting 7 seconds for response - zos_operator: - cmd: 'd a,all' - wait_time_s: 7 - - name: Display the system symbols and associated substitution texts. zos_operator: cmd: 'D SYMBOLS' diff --git a/docs/source/modules/zos_script.rst b/docs/source/modules/zos_script.rst index 6fc9a0ece..29d9bb2df 100644 --- a/docs/source/modules/zos_script.rst +++ b/docs/source/modules/zos_script.rst @@ -103,12 +103,14 @@ removes tmp_path - Path in the remote machine where local scripts will be temporarily copied to. + Directory path in the remote machine where local scripts will be temporarily copied to. When not specified, the module will copy local scripts to the default temporary path for the user. If ``tmp_path`` does not exist in the remote machine, the module will not create it. + All scripts copied to ``tmp_path`` will be removed from the managed node before the module finishes executing. + | **required**: False | **type**: str @@ -292,7 +294,9 @@ Notes .. note:: When executing local scripts, temporary storage will be used on the remote z/OS system. The size of the temporary storage will correspond to the size of the file being copied. - Execution permissions for the group assigned to the script will be added to remote scripts. The original permissions for the script will be restored by the module before the task ends. + Execution permissions for the group assigned to the script will be added to remote scripts. The original permissions for remote scripts will be restored by the module before the task ends. + + The module will only add execution permissions for the file owner. If executing REXX scripts, make sure to include a newline character on each line of the file. Otherwise, the interpreter may fail and return error ``BPXW0003I``. @@ -302,7 +306,9 @@ Notes `zos_copy <./zos_copy.html>`_ uses SFTP (Secure File Transfer Protocol) for the underlying transfer protocol; Co:Z SFTP is not supported. In the case of Co:z SFTP, you can exempt the Ansible userid on z/OS from using Co:Z thus falling back to using standard SFTP. - This module executes scripts inside z/OS UNIX System Services. For running REXX scripts contained in data sets, consider issuing a TSO command with `zos_tso_command <./zos_tso_command.html>`_. + This module executes scripts inside z/OS UNIX System Services. For running REXX scripts contained in data sets or CLISTs, consider issuing a TSO command with `zos_tso_command <./zos_tso_command.html>`_. + + The community script module does not rely on Python to execute scripts on a managed node, while this module does. Python must be present on the remote machine. diff --git a/plugins/action/zos_copy.py b/plugins/action/zos_copy.py index d65c4a468..d7d00eb64 100644 --- a/plugins/action/zos_copy.py +++ b/plugins/action/zos_copy.py @@ -61,6 +61,7 @@ def run(self, tmp=None, task_vars=None): is_binary = _process_boolean(task_args.get('is_binary'), default=False) force_lock = _process_boolean(task_args.get('force_lock'), default=False) executable = _process_boolean(task_args.get('executable'), default=False) + asa_text = _process_boolean(task_args.get('asa_text'), default=False) ignore_sftp_stderr = _process_boolean(task_args.get("ignore_sftp_stderr"), default=False) backup_name = task_args.get("backup_name", None) encoding = task_args.get("encoding", None) @@ -117,6 +118,14 @@ def run(self, tmp=None, task_vars=None): msg = "Backup file provided but 'backup' parameter is False" return self._exit_action(result, msg, failed=True) + if is_binary and asa_text: + msg = "Both 'is_binary' and 'asa_text' are True. Unable to copy binary data as an ASA text file." + return self._exit_action(result, msg, failed=True) + + if executable and asa_text: + msg = "Both 'executable' and 'asa_text' are True. Unable to copy an executable as an ASA text file." + return self._exit_action(result, msg, failed=True) + use_template = _process_boolean(task_args.get("use_template"), default=False) if remote_src and use_template: msg = "Use of Jinja2 templates is only valid for local files, remote_src cannot be set to true." diff --git a/plugins/module_utils/copy.py b/plugins/module_utils/copy.py index 7edd8a49c..ac9e74758 100644 --- a/plugins/module_utils/copy.py +++ b/plugins/module_utils/copy.py @@ -22,6 +22,9 @@ from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.better_arg_parser import ( BetterArgParser, ) +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.mvs_cmd import ( + ikjeft01 +) if PY3: from shlex import quote @@ -110,7 +113,7 @@ def copy_ps2uss(src, dest, is_binary=False): return rc, out, err -def copy_pds2uss(src, dest, is_binary=False): +def copy_pds2uss(src, dest, is_binary=False, asa_text=False): """Copy the whole PDS(E) to a uss path Arguments: @@ -119,6 +122,8 @@ def copy_pds2uss(src, dest, is_binary=False): Keyword Arguments: is_binary: {bool} -- Whether the file to be copied contains binary data + asa_text: {bool} -- Whether the file to be copied contains ASA control + characters Raises: USSCmdExecError: When any exception is raised during the conversion. @@ -130,12 +135,22 @@ def copy_pds2uss(src, dest, is_binary=False): module = AnsibleModuleHelper(argument_spec={}) src = _validate_data_set_name(src) dest = _validate_path(dest) + cp_pds2uss = "cp -U -F rec \"//'{0}'\" {1}".format(src, quote(dest)) - if is_binary: + + # When dealing with ASA control chars, each record follows a + # different format than what '-F rec' means, so we remove it + # to allow the system to leave the control chars in the + # destination. + if asa_text: + cp_pds2uss = cp_pds2uss.replace("-F rec", "", 1) + elif is_binary: cp_pds2uss = cp_pds2uss.replace("rec", "bin", 1) + rc, out, err = module.run_command(cp_pds2uss) if rc: raise USSCmdExecError(cp_pds2uss, rc, out, err) + return rc, out, err @@ -216,6 +231,83 @@ def copy_vsam_ps(src, dest): return rc, out, err +def copy_asa_uss2mvs(src, dest): + """Copy a file from USS to an ASA sequential data set or PDS/E member. + + Arguments: + src: {str} -- Path of the USS file + dest: {str} -- The MVS destination data set or member + + Returns: + boolean -- The return code after the copy command executed successfully + str -- The stdout after the copy command executed successfully + str -- The stderr after the copy command executed successfully + """ + oget_cmd = "OGET '{0}' '{1}'".format(src, dest) + rc, out, err = ikjeft01(oget_cmd, authorized=True) + + return TSOCmdResponse(rc, out, err) + + +def copy_asa_mvs2uss(src, dest): + """Copy an ASA sequential data set or member to USS. + + Arguments: + src: {str} -- The MVS data set to be copied + dest: {str} -- Destination path in USS + + Returns: + boolean -- The return code after the copy command executed successfully + str -- The stdout after the copy command executed successfully + str -- The stderr after the copy command executed successfully + """ + src = _validate_data_set_name(src) + dest = _validate_path(dest) + + oput_cmd = "OPUT '{0}' '{1}'".format(src, dest) + rc, out, err = ikjeft01(oput_cmd, authorized=True) + + return TSOCmdResponse(rc, out, err) + + +def copy_asa_pds2uss(src, dest): + """Copy all members from an ASA PDS/E to USS. + + Arguments: + src: {str} -- The MVS data set to be copied + dest: {str} -- Destination path in USS (must be a directory) + + Returns: + boolean -- The return code after the copy command executed successfully + str -- The stdout after the copy command executed successfully + str -- The stderr after the copy command executed successfully + """ + from os import path + from zoautil_py import datasets + + src = _validate_data_set_name(src) + dest = _validate_path(dest) + + for member in datasets.list_members(src): + src_member = '{0}({1})'.format(src, member) + dest_path = path.join(dest, member) + + oput_cmd = "OPUT '{0}' '{1}'".format(src_member, dest_path) + rc, out, err = ikjeft01(oput_cmd, authorized=True) + + if rc != 0: + return TSOCmdResponse(rc, out, err) + + return TSOCmdResponse(0, '', '') + + +class TSOCmdResponse(): + def __init__(self, rc, stdout, stderr): + self.rc = rc + self.stdout_response = stdout + self.stderr_response = stderr + + class USSCmdExecError(Exception): def __init__(self, uss_cmd, rc, out, err): self.msg = ( diff --git a/plugins/module_utils/data_set.py b/plugins/module_utils/data_set.py index 69e1190f1..cbeb7eb7d 100644 --- a/plugins/module_utils/data_set.py +++ b/plugins/module_utils/data_set.py @@ -278,7 +278,7 @@ def ensure_uncataloged(name): return False @staticmethod - def allocate_model_data_set(ds_name, model, vol=None): + def allocate_model_data_set(ds_name, model, asa_text=False, vol=None): """Allocates a data set based on the attributes of a 'model' data set. Useful when a data set needs to be created identical to another. Supported model(s) are Physical Sequential (PS), Partitioned Data Sets (PDS/PDSE), @@ -291,6 +291,8 @@ def allocate_model_data_set(ds_name, model, vol=None): must be used. See extract_dsname(ds_name) in data_set.py model {str} -- The name of the data set whose allocation parameters should be used to allocate the new data set 'ds_name' + asa_text {bool} -- Whether the new data set should support ASA control + characters (have record format FBA) vol {str} -- The volume where data set should be allocated Raise: @@ -321,6 +323,10 @@ def allocate_model_data_set(ds_name, model, vol=None): alloc_cmd = """{0} - VOLUME({1})""".format(alloc_cmd, vol.upper()) + if asa_text: + alloc_cmd = """{0} - + RECFM(F,B,A)""".format(alloc_cmd) + rc, out, err = mvs_cmd.ikjeft01(alloc_cmd, authorized=True) if rc != 0: raise MVSCmdExecError(rc, out, err) diff --git a/plugins/modules/zos_copy.py b/plugins/modules/zos_copy.py index c671d87a0..9bafdc471 100644 --- a/plugins/modules/zos_copy.py +++ b/plugins/modules/zos_copy.py @@ -30,6 +30,25 @@ - "Demetrios Dimatos (@ddimatos)" - "Ivan Moreno (@rexemin)" options: + asa_text: + description: + - If set to C(true), indicates that either C(src) or C(dest) or both + contain ASA control characters. + - When C(src) is a USS file and C(dest) is a data set, the copy will + preserve ASA control characters in the destination. + - When C(src) is a data set containing ASA control characters and + C(dest) is a USS file, the copy will put all control characters as + plain text in the destination. + - If C(dest) is a non-existent data set, it will be created with record + format Fixed Block with ANSI format (FBA). + - If neither C(src) or C(dest) have record format Fixed Block with ANSI + format (FBA) or Variable Block with ANSI format (VBA), the module + will fail. + - This option is only valid for text files. If C(is_binary) is C(true) + or C(executable) is C(true) as well, the module will fail. + type: bool + default: false + required: false backup: description: - Specifies whether a backup of the destination should be created before @@ -185,7 +204,11 @@ is_binary: description: - If set to C(true), indicates that the file or data set to be copied is a - binary file/data set. + binary file or data set. + - When I(is_binary=true), no encoding conversion is applied to the content, + all content transferred retains the original state. + - Use I(is_binary=true) when copying a Database Request Module (DBRM) to + retain the original state of the serialized SQL statements of a program. type: bool default: false required: false @@ -616,6 +639,12 @@ remote_src: true executable: true aliases: true + +- name: Copy a file with ASA characters to a new sequential data set. + zos_copy: + src: ./files/print.txt + dest: HLQ.PRINT.NEW + asa_text: true """ RETURN = r""" @@ -815,6 +844,7 @@ def __init__( is_binary=False, executable=False, aliases=False, + asa_text=False, backup_name=None, force_lock=False, ): @@ -838,6 +868,7 @@ def __init__( self.module = module self.is_binary = is_binary self.executable = executable + self.asa_text = asa_text self.aliases = aliases self.backup_name = backup_name self.force_lock = force_lock @@ -851,7 +882,8 @@ def copy_to_seq( src, temp_path, conv_path, - dest + dest, + src_type ): """Copy source to a sequential data set. @@ -864,18 +896,24 @@ def copy_to_seq( transferred data to conv_path {str} -- Path to the converted source file dest {str} -- Name of destination data set + src_type {str} -- Type of the source """ new_src = conv_path or temp_path or src copy_args = dict() copy_args["options"] = "" - if self.is_binary: - copy_args["options"] = "-B" + if src_type == 'USS' and self.asa_text: + response = copy.copy_asa_uss2mvs(new_src, dest) + else: + # While ASA files are just text files, we do a binary copy + # so dcp doesn't introduce any additional blanks or newlines. + if self.is_binary or self.asa_text: + copy_args["options"] = "-B" - if self.force_lock: - copy_args["options"] += " -f" + if self.force_lock: + copy_args["options"] += " -f" - response = datasets._copy(new_src, dest, None, **copy_args) + response = datasets._copy(new_src, dest, None, **copy_args) if response.rc != 0: raise CopyOperationError( msg="Unable to copy source {0} to {1}".format(new_src, dest), @@ -1118,6 +1156,7 @@ def __init__( module, is_binary=False, executable=False, + asa_text=False, aliases=False, common_file_args=None, backup_name=None, @@ -1136,7 +1175,12 @@ def __init__( backup_name {str} -- The USS path or data set name of destination backup """ super().__init__( - module, is_binary=is_binary, executable=executable, aliases=aliases, backup_name=backup_name + module, + is_binary=is_binary, + executable=executable, + asa_text=asa_text, + aliases=aliases, + backup_name=backup_name ) self.common_file_args = common_file_args @@ -1162,11 +1206,13 @@ def copy_to_uss( src_ds_type {str} -- Type of source src_member {bool} -- Whether src is a data set member member_name {str} -- The name of the source data set member - force {bool} -- Wheter to copy files to an already existing directory + force {bool} -- Whether to copy files to an already existing directory Returns: {str} -- Destination where the file was copied to """ + changed_files = None + if src_ds_type in data_set.DataSet.MVS_SEQ.union(data_set.DataSet.MVS_PARTITIONED): self._mvs_copy_to_uss( src, dest, src_ds_type, src_member, member_name=member_name @@ -1434,10 +1480,13 @@ def _mvs_copy_to_uss( try: if src_member or src_ds_type in data_set.DataSet.MVS_SEQ: - if self.executable: + if self.asa_text: + response = copy.copy_asa_mvs2uss(src, dest) + elif self.executable: response = datasets._copy(src, dest, None, **opts) else: response = datasets._copy(src, dest) + if response.rc != 0: raise CopyOperationError( msg="Error while copying source {0} to {1}".format(src, dest), @@ -1448,6 +1497,17 @@ def _mvs_copy_to_uss( else: if self.executable: response = datasets._copy(src, dest, None, **opts) + + if response.rc != 0: + raise CopyOperationError( + msg="Error while copying source {0} to {1}".format(src, dest), + rc=response.rc, + stdout=response.stdout_response, + stderr=response.stderr_response + ) + elif self.asa_text: + response = copy.copy_asa_pds2uss(src, dest) + if response.rc != 0: raise CopyOperationError( msg="Error while copying source {0} to {1}".format(src, dest), @@ -1456,7 +1516,14 @@ def _mvs_copy_to_uss( stderr=response.stderr_response ) else: - copy.copy_pds2uss(src, dest, is_binary=self.is_binary) + copy.copy_pds2uss( + src, + dest, + is_binary=self.is_binary, + asa_text=self.asa_text + ) + except CopyOperationError as err: + raise err except Exception as err: raise CopyOperationError(msg=str(err)) @@ -1468,6 +1535,7 @@ def __init__( is_binary=False, executable=False, aliases=False, + asa_text=False, backup_name=None, force_lock=False, ): @@ -1488,8 +1556,9 @@ def __init__( is_binary=is_binary, executable=executable, aliases=aliases, + asa_text=asa_text, backup_name=backup_name, - force_lock=force_lock, + force_lock=force_lock ) def copy_to_pdse( @@ -1578,7 +1647,11 @@ def copy_to_pdse( else: new_members.append(destination_member) - result = self.copy_to_member(src_member, "{0}({1})".format(dest, destination_member)) + result = self.copy_to_member( + src_member, + "{0}({1})".format(dest, destination_member), + src_ds_type + ) if result["rc"] != 0: msg = "Unable to copy source {0} to data set member {1}({2})".format( @@ -1598,7 +1671,8 @@ def copy_to_pdse( def copy_to_member( self, src, - dest + dest, + src_type ): """Copy source to a PDS/PDSE member. The only valid sources are: - USS files @@ -1608,6 +1682,7 @@ def copy_to_member( Arguments: src {str} -- Path to USS file or data set name. dest {str} -- Name of destination data set + src_type {str} -- Type of the source. Returns: dict -- Dictionary containing the return code, stdout, and stderr from @@ -1618,22 +1693,27 @@ def copy_to_member( opts = dict() opts["options"] = "" - if self.is_binary: - opts["options"] = "-B" + if src_type == 'USS' and self.asa_text: + response = copy.copy_asa_uss2mvs(src, dest) + else: + # While ASA files are just text files, we do a binary copy + # so dcp doesn't introduce any additional blanks or newlines. + if self.is_binary or self.asa_text: + opts["options"] = "-B" - if self.aliases and not self.executable: - # lower case 'i' for text-based copy (dcp) - opts["options"] = "-i" + if self.aliases and not self.executable: + # lower case 'i' for text-based copy (dcp) + opts["options"] = "-i" - if self.executable: - opts["options"] = "-X" - if self.aliases: - opts["options"] = "-IX" + if self.executable: + opts["options"] = "-X" + if self.aliases: + opts["options"] = "-IX" - if self.force_lock: - opts["options"] += " -f" + if self.force_lock: + opts["options"] += " -f" - response = datasets._copy(src, dest, None, **opts) + response = datasets._copy(src, dest, None, **opts) rc, out, err = response.rc, response.stdout_response, response.stderr_response return dict( @@ -1702,6 +1782,7 @@ def get_data_set_attributes( name, size, is_binary, + asa_text=False, record_format=None, record_length=None, type="SEQ", @@ -1725,6 +1806,7 @@ def get_data_set_attributes( name (str) -- Name of the new sequential data set. size (int) -- Number of bytes needed for the new data set. is_binary (bool) -- Whether or not the data set will have binary data. + asa_text (bool) -- Whether the data set will have ASA control characters. record_format (str, optional) -- Type of record format. record_length (int, optional) -- Record length for the data set. type (str, optional) -- Type of the new data set. @@ -1761,6 +1843,10 @@ def get_data_set_attributes( else: block_size = max_block_size + if asa_text: + record_format = "FBA" + block_size = 27920 + parms = dict( name=name, type=type, @@ -1783,6 +1869,8 @@ def create_seq_dataset_from_file( dest, force, is_binary, + asa_text, + record_length=None, volume=None ): """Creates a new sequential dataset with attributes suitable to copy the @@ -1793,21 +1881,37 @@ def create_seq_dataset_from_file( dest (str) -- Name of the data set. force (bool) -- Whether to replace an existing data set. is_binary (bool) -- Whether the file has binary data. + asa_text (bool) -- Whether the file has ASA control characters. volume (str, optional) -- Volume where the data set should be. """ src_size = os.stat(file).st_size - record_format = record_length = None + # record_format = record_length = None + record_format = None + # When dealing with ASA files, if copying from USS, + # the record length will need to be adjusted (we know it + # comes from USS because those flows don't send a + # value for record_length, while flows from source data + # sets do). + adjust_record_format = False # When src is a binary file, the module will use default attributes # for the data set, such as a record format of "VB". if not is_binary: record_format = "FB" - record_length = get_file_record_length(file) + if not record_length: + record_length = get_file_record_length(file) + adjust_record_format = True + + if asa_text and adjust_record_format: + # Adding one byte more to the record length to account for the + # control character at the start of each line. + record_length += 1 dest_params = get_data_set_attributes( name=dest, size=src_size, is_binary=is_binary, + asa_text=asa_text, record_format=record_format, record_length=record_length, volume=volume @@ -1849,7 +1953,10 @@ def is_compatible( src_member, is_src_dir, is_src_inline, - executable + executable, + asa_text, + src_has_asa_chars, + dest_has_asa_chars ): """Determine whether the src and dest are compatible and src can be copied to dest. @@ -1862,6 +1969,9 @@ def is_compatible( is_src_dir {bool} -- Whether the src is a USS directory. is_src_inline {bool} -- Whether the src comes from inline content. executable {bool} -- Whether the src is a executable to be copied. + asa_text {bool} -- Whether the copy operation will handle ASA control characters. + src_has_asa_chars {bool} -- Whether the src contains ASA control characters. + dest_has_asa_chars {bool} -- Whether the dest contains ASA control characters. Returns: {bool} -- Whether src can be copied to dest. @@ -1882,6 +1992,13 @@ def is_compatible( if src_type in data_set.DataSet.MVS_SEQ or dest_type in data_set.DataSet.MVS_SEQ: return False + # ******************************************************************** + # For copy operations involving ASA control characters, at least one + # of the files/data sets has got to have ASA characters. + # ******************************************************************** + if asa_text: + return src_has_asa_chars or dest_has_asa_chars + # ******************************************************************** # If source is a sequential data set, then destination must be # partitioned data set member, other sequential data sets or USS files. @@ -2076,6 +2193,7 @@ def get_attributes_of_any_dataset_created( src, src_name, is_binary, + asa_text, volume=None ): """ @@ -2088,6 +2206,7 @@ def get_attributes_of_any_dataset_created( src (str) -- Name of the source data set, used as a model when appropiate. src_name (str) -- Extraction of the source name without the member pattern. is_binary (bool) -- Whether the data set will contain binary data. + asa_text (bool) -- Whether the data set will contain ASA control characters. volume (str, optional) -- Volume where the data set should be allocated into. Returns: @@ -2098,14 +2217,32 @@ def get_attributes_of_any_dataset_created( if src_ds_type == "USS": if os.path.isfile(src): size = os.stat(src).st_size - params = get_data_set_attributes(dest, size=size, is_binary=is_binary, volume=volume) + params = get_data_set_attributes( + dest, + size=size, + is_binary=is_binary, + asa_text=asa_text, + volume=volume + ) else: size = os.path.getsize(src) - params = get_data_set_attributes(dest, size=size, is_binary=is_binary, volume=volume) + params = get_data_set_attributes( + dest, + size=size, + is_binary=is_binary, + asa_text=asa_text, + volume=volume + ) else: src_attributes = datasets.listing(src_name)[0] size = int(src_attributes.total_space) - params = get_data_set_attributes(dest, size=size, is_binary=is_binary, volume=volume) + params = get_data_set_attributes( + dest, + size=size, + is_binary=is_binary, + asa_text=asa_text, + volume=volume + ) return params @@ -2118,6 +2255,7 @@ def allocate_destination_data_set( force, is_binary, executable, + asa_text, dest_data_set=None, volume=None ): @@ -2134,6 +2272,7 @@ def allocate_destination_data_set( force (bool) -- Whether to replace an existent data set. is_binary (bool) -- Whether the data set will contain binary data. executable (bool) -- Whether the data to copy is an executable dataset or file. + asa_text (bool) -- Whether the data to copy has ASA control characters. dest_data_set (dict, optional) -- Parameters containing a full definition of the new data set; they will take precedence over any other allocation logic. volume (str, optional) -- Volume where the data set should be allocated into. @@ -2169,16 +2308,26 @@ def allocate_destination_data_set( if src_ds_type == "USS": # Taking the temp file when a local file was copied with sftp. - create_seq_dataset_from_file(src, dest, force, is_binary, volume=volume) + create_seq_dataset_from_file(src, dest, force, is_binary, asa_text, volume=volume) elif src_ds_type in data_set.DataSet.MVS_SEQ: - data_set.DataSet.allocate_model_data_set(ds_name=dest, model=src_name, vol=volume) + data_set.DataSet.allocate_model_data_set(ds_name=dest, model=src_name, asa_text=asa_text, vol=volume) else: temp_dump = None try: # Dumping the member into a file in USS to compute the record length and # size for the new data set. + src_attributes = datasets.listing(src_name)[0] + record_length = int(src_attributes.lrecl) temp_dump = dump_data_set_member_to_file(src, is_binary) - create_seq_dataset_from_file(temp_dump, dest, force, is_binary, volume=volume) + create_seq_dataset_from_file( + temp_dump, + dest, + force, + is_binary, + asa_text, + record_length=record_length, + volume=volume + ) finally: if temp_dump: os.remove(temp_dump) @@ -2195,6 +2344,7 @@ def allocate_destination_data_set( dest, size, is_binary, + asa_text, record_format=record_format, record_length=record_length, type="LIBRARY", @@ -2202,15 +2352,23 @@ def allocate_destination_data_set( ) data_set.DataSet.ensure_present(replace=force, **dest_params) else: - data_set.DataSet.allocate_model_data_set(ds_name=dest, model=src_name, vol=volume) + data_set.DataSet.allocate_model_data_set(ds_name=dest, model=src_name, asa_text=asa_text, vol=volume) elif src_ds_type in data_set.DataSet.MVS_SEQ: src_attributes = datasets.listing(src_name)[0] # The size returned by listing is in bytes. size = int(src_attributes.total_space) record_format = src_attributes.recfm record_length = int(src_attributes.lrecl) - dest_params = get_data_set_attributes(dest, size, is_binary, record_format=record_format, record_length=record_length, type="PDSE", - volume=volume) + dest_params = get_data_set_attributes( + dest, + size, + is_binary, + asa_text, + record_format=record_format, + record_length=record_length, + type="PDSE", + volume=volume + ) data_set.DataSet.ensure_present(replace=force, **dest_params) elif src_ds_type == "USS": if os.path.isfile(src): @@ -2226,6 +2384,11 @@ def allocate_destination_data_set( record_format = "FB" record_length = get_file_record_length(src) + # Adding 1 byte to the record length to accommodate + # ASA control chars. + if asa_text: + record_length += 1 + if executable: record_format = "U" record_length = 0 @@ -2235,6 +2398,7 @@ def allocate_destination_data_set( dest, size, is_binary, + asa_text, record_format=record_format, record_length=record_length, type=type_ds, @@ -2254,7 +2418,14 @@ def allocate_destination_data_set( volume=volume ) else: - dest_params = get_data_set_attributes(dest, size, is_binary, type="PDSE", volume=volume) + dest_params = get_data_set_attributes( + dest, + size, + is_binary, + asa_text, + type="PDSE", + volume=volume + ) data_set.DataSet.ensure_present(replace=force, **dest_params) elif dest_ds_type in data_set.DataSet.MVS_VSAM: @@ -2264,7 +2435,15 @@ def allocate_destination_data_set( data_set.DataSet.ensure_absent(dest, volumes=volumes) data_set.DataSet.allocate_model_data_set(ds_name=dest, model=src_name, vol=volume) if dest_ds_type not in data_set.DataSet.MVS_VSAM: - dest_params = get_attributes_of_any_dataset_created(dest, src_ds_type, src, src_name, is_binary, volume) + dest_params = get_attributes_of_any_dataset_created( + dest, + src_ds_type, + src, + src_name, + is_binary, + asa_text, + volume + ) dest_attributes = datasets.listing(dest)[0] record_format = dest_attributes.recfm dest_params["type"] = dest_ds_type @@ -2371,6 +2550,7 @@ def run_module(module, arg_def): remote_src = module.params.get('remote_src') is_binary = module.params.get('is_binary') executable = module.params.get('executable') + asa_text = module.params.get('asa_text') aliases = module.params.get('aliases') backup = module.params.get('backup') backup_name = module.params.get('backup_name') @@ -2436,6 +2616,10 @@ def run_module(module, arg_def): # ******************************************************************** dest_member_exists = False converted_src = None + # By default, we'll assume that src and dest don't have ASA control + # characters. We'll only update these variables when they are + # data sets with record format 'FBA' or 'VBA'. + src_has_asa_chars = dest_has_asa_chars = False try: # If temp_path, the plugin has copied a file from the controller to USS. if temp_path or "/" in src: @@ -2481,6 +2665,10 @@ def run_module(module, arg_def): raise NonExistentSourceError(src) src_ds_type = data_set.DataSet.data_set_type(src_name) + if src_ds_type not in data_set.DataSet.MVS_VSAM: + src_attributes = datasets.listing(src_name)[0] + if src_attributes.recfm == 'FBA' or src_attributes.recfm == 'VBA': + src_has_asa_chars = True else: raise NonExistentSourceError(src) @@ -2522,6 +2710,15 @@ def run_module(module, arg_def): if dest_data_set and dest_data_set.get("type"): dest_ds_type = dest_data_set.get("type") + if dest_data_set and (dest_data_set.get('record_format', '') == 'FBA' or dest_data_set.get('record_format', '') == 'VBA'): + dest_has_asa_chars = True + elif not dest_exists and asa_text: + dest_has_asa_chars = True + elif dest_exists and dest_ds_type not in data_set.DataSet.MVS_VSAM: + dest_attributes = datasets.listing(dest_name)[0] + if dest_attributes.recfm == 'FBA' or dest_attributes.recfm == 'VBA': + dest_has_asa_chars = True + if dest_ds_type in data_set.DataSet.MVS_PARTITIONED: # Checking if the members that would be created from the directory files # are already present on the system. @@ -2554,12 +2751,20 @@ def run_module(module, arg_def): src_member, is_src_dir, (src_ds_type == "USS" and src is None), - executable + executable, + asa_text, + src_has_asa_chars, + dest_has_asa_chars ): + error_msg = "Incompatible target type '{0}' for source '{1}'".format( + dest_ds_type, src_ds_type + ) + + if asa_text: + error_msg = "{0}. Neither the source or the destination are ASA text files.".format(error_msg) + module.fail_json( - msg="Incompatible target type '{0}' for source '{1}'".format( - dest_ds_type, src_ds_type - ) + msg=error_msg ) # ******************************************************************** @@ -2683,6 +2888,7 @@ def run_module(module, arg_def): force, is_binary, executable, + asa_text, dest_data_set=dest_data_set, volume=volume ) @@ -2711,6 +2917,7 @@ def run_module(module, arg_def): module, is_binary=is_binary, executable=executable, + asa_text=asa_text, backup_name=backup_name, force_lock=force_lock, ) @@ -2731,6 +2938,7 @@ def run_module(module, arg_def): module, is_binary=is_binary, executable=executable, + asa_text=asa_text, aliases=aliases, common_file_args=dict(mode=mode, group=group, owner=owner), backup_name=backup_name, @@ -2774,6 +2982,7 @@ def run_module(module, arg_def): # Copy to sequential data set (PS / SEQ) # --------------------------------------------------------------------- elif dest_ds_type in data_set.DataSet.MVS_SEQ: + # TODO: check how ASA behaves with this if src_ds_type == "USS" and not is_binary: new_src = conv_path or temp_path or src conv_path = normalize_line_endings(new_src, encoding) @@ -2783,6 +2992,7 @@ def run_module(module, arg_def): temp_path, conv_path, dest, + src_ds_type ) res_args["changed"] = True dest = dest.upper() @@ -2798,6 +3008,7 @@ def run_module(module, arg_def): module, is_binary=is_binary, executable=executable, + asa_text=asa_text, aliases=aliases, backup_name=backup_name, force_lock=force_lock, @@ -2846,6 +3057,7 @@ def main(): dest=dict(required=True, type='str'), is_binary=dict(type='bool', default=False), executable=dict(type='bool', default=False), + asa_text=dict(type='bool', default=False), aliases=dict(type='bool', default=False, required=False), encoding=dict( type='dict', @@ -2949,6 +3161,7 @@ def main(): dest=dict(arg_type='data_set_or_path', required=True), is_binary=dict(arg_type='bool', required=False, default=False), executable=dict(arg_type='bool', required=False, default=False), + asa_text=dict(arg_type='bool', required=False, default=False), aliases=dict(arg_type='bool', required=False, default=False), content=dict(arg_type='str', required=False), backup=dict(arg_type='bool', default=False, required=False), diff --git a/tests/functional/modules/test_zos_copy_func.py b/tests/functional/modules/test_zos_copy_func.py index 1fa6397e2..b42dd9500 100644 --- a/tests/functional/modules/test_zos_copy_func.py +++ b/tests/functional/modules/test_zos_copy_func.py @@ -87,6 +87,29 @@ {% endfor %} """ +# Text that will be used for the ASA control chars tests. +# It contains at least one instance of each control char. +ASA_SAMPLE_CONTENT = """ Space, do not advance. +0Newline before printing this line. + This line is not going to be seen. ++This line will overwrite the previous one. + This line will be partially seen because it will be longer than the next line. ++This line will partially overwrite the previous line. +-Three newlines before this one. +1This is a new page. +""" + +ASA_SAMPLE_RETURN = "\nSpace, do not advance.\n\nNewline before printing this line.\nThis line is not going to be seen.\rThis line will overwrite the previous one.\nThis line will be partially seen because it will be longer than the next line.\rThis line will partially overwrite the previous line.\n\n\nThree newlines before this one.\fThis is a new page." + +ASA_COPY_CONTENT = """ Space, do not advance. + 0Newline before printing this line. + This line is not going to be seen. + +This line will overwrite the previous one. + This line will be partially seen because it will be longer than the next line. + +This line will partially overwrite the previous line. + -Three newlines before this one. + 1This is a new page.""" + # SHELL_EXECUTABLE = "/usr/lpp/rsusr/ported/bin/bash" SHELL_EXECUTABLE = "/bin/sh" TEST_PS = "IMSTESTL.IMS01.DDCHKPT" @@ -1580,6 +1603,327 @@ def test_copy_template_file_to_dataset(ansible_zos_module): shutil.rmtree(temp_dir) +@pytest.mark.uss +@pytest.mark.seq +@pytest.mark.asa +def test_copy_asa_file_to_asa_sequential(ansible_zos_module): + hosts = ansible_zos_module + + try: + dest = "USER.ASA.SEQ" + hosts.all.zos_data_set(name=dest, state="absent") + + copy_result = hosts.all.zos_copy( + content=ASA_SAMPLE_CONTENT, + dest=dest, + remote_src=False, + asa_text=True + ) + + verify_copy = hosts.all.shell( + cmd="cat \"//'{0}'\"".format(dest), + executable=SHELL_EXECUTABLE, + ) + + for cp_res in copy_result.contacted.values(): + assert cp_res.get("msg") is None + assert cp_res.get("changed") is True + assert cp_res.get("dest") == dest + assert cp_res.get("dest_created") is True + for v_cp in verify_copy.contacted.values(): + assert v_cp.get("rc") == 0 + assert v_cp.get("stdout") == ASA_SAMPLE_RETURN + finally: + hosts.all.zos_data_set(name=dest, state="absent") + + +@pytest.mark.uss +@pytest.mark.pdse +@pytest.mark.asa +def test_copy_asa_file_to_asa_partitioned(ansible_zos_module): + hosts = ansible_zos_module + + try: + dest = "USER.ASA.PDSE" + hosts.all.zos_data_set(name=dest, state="absent") + full_dest = "{0}(TEST)".format(dest) + + copy_result = hosts.all.zos_copy( + content=ASA_SAMPLE_CONTENT, + dest=full_dest, + remote_src=False, + asa_text=True + ) + + verify_copy = hosts.all.shell( + cmd="cat \"//'{0}'\"".format(full_dest), + executable=SHELL_EXECUTABLE, + ) + + for cp_res in copy_result.contacted.values(): + assert cp_res.get("msg") is None + assert cp_res.get("changed") is True + assert cp_res.get("dest") == full_dest + assert cp_res.get("dest_created") is True + for v_cp in verify_copy.contacted.values(): + assert v_cp.get("rc") == 0 + assert v_cp.get("stdout") == ASA_SAMPLE_RETURN + finally: + hosts.all.zos_data_set(name=dest, state="absent") + + +@pytest.mark.seq +@pytest.mark.asa +def test_copy_seq_data_set_to_seq_asa(ansible_zos_module): + hosts = ansible_zos_module + + try: + src = "USER.SRC.SEQ" + hosts.all.zos_data_set( + name=src, + state="present", + type="seq", + replace=True + ) + + dest = "USER.ASA.SEQ" + hosts.all.zos_data_set(name=dest, state="absent") + + hosts.all.zos_copy( + content=ASA_SAMPLE_CONTENT, + dest=src, + remote_src=False + ) + + copy_result = hosts.all.zos_copy( + src=src, + dest=dest, + remote_src=True, + asa_text=True + ) + + verify_copy = hosts.all.shell( + cmd="cat \"//'{0}'\"".format(dest), + executable=SHELL_EXECUTABLE, + ) + + for cp_res in copy_result.contacted.values(): + assert cp_res.get("msg") is None + assert cp_res.get("changed") is True + assert cp_res.get("dest") == dest + assert cp_res.get("dest_created") is True + for v_cp in verify_copy.contacted.values(): + assert v_cp.get("rc") == 0 + assert v_cp.get("stdout") == ASA_SAMPLE_RETURN + finally: + hosts.all.zos_data_set(name=src, state="absent") + hosts.all.zos_data_set(name=dest, state="absent") + + +@pytest.mark.seq +@pytest.mark.pdse +@pytest.mark.asa +def test_copy_seq_data_set_to_partitioned_asa(ansible_zos_module): + hosts = ansible_zos_module + + try: + src = "USER.SRC.SEQ" + hosts.all.zos_data_set( + name=src, + state="present", + type="seq", + replace=True + ) + + dest = "USER.ASA.PDSE" + full_dest = "{0}(MEMBER)".format(dest) + hosts.all.zos_data_set(name=dest, state="absent") + + hosts.all.zos_copy( + content=ASA_SAMPLE_CONTENT, + dest=src, + remote_src=False + ) + + copy_result = hosts.all.zos_copy( + src=src, + dest=full_dest, + remote_src=True, + asa_text=True + ) + + verify_copy = hosts.all.shell( + cmd="cat \"//'{0}'\"".format(full_dest), + executable=SHELL_EXECUTABLE, + ) + + for cp_res in copy_result.contacted.values(): + assert cp_res.get("msg") is None + assert cp_res.get("changed") is True + assert cp_res.get("dest") == full_dest + assert cp_res.get("dest_created") is True + for v_cp in verify_copy.contacted.values(): + assert v_cp.get("rc") == 0 + assert v_cp.get("stdout") == ASA_SAMPLE_RETURN + finally: + hosts.all.zos_data_set(name=src, state="absent") + hosts.all.zos_data_set(name=dest, state="absent") + + +@pytest.mark.seq +@pytest.mark.pdse +@pytest.mark.asa +def test_copy_partitioned_data_set_to_seq_asa(ansible_zos_module): + hosts = ansible_zos_module + + try: + src = "USER.SRC.PDSE" + full_src = "{0}(MEMBER)".format(src) + hosts.all.zos_data_set( + name=src, + state="present", + type="pdse", + replace=True + ) + + dest = "USER.ASA.SEQ" + hosts.all.zos_data_set(name=dest, state="absent") + + hosts.all.zos_copy( + content=ASA_SAMPLE_CONTENT, + dest=full_src, + remote_src=False + ) + + copy_result = hosts.all.zos_copy( + src=full_src, + dest=dest, + remote_src=True, + asa_text=True + ) + + verify_copy = hosts.all.shell( + cmd="cat \"//'{0}'\"".format(dest), + executable=SHELL_EXECUTABLE, + ) + + for cp_res in copy_result.contacted.values(): + assert cp_res.get("msg") is None + assert cp_res.get("changed") is True + assert cp_res.get("dest") == dest + assert cp_res.get("dest_created") is True + for v_cp in verify_copy.contacted.values(): + assert v_cp.get("rc") == 0 + assert v_cp.get("stdout") == ASA_SAMPLE_RETURN + finally: + hosts.all.zos_data_set(name=src, state="absent") + hosts.all.zos_data_set(name=dest, state="absent") + + +@pytest.mark.seq +@pytest.mark.pdse +@pytest.mark.asa +def test_copy_partitioned_data_set_to_partitioned_asa(ansible_zos_module): + hosts = ansible_zos_module + + try: + src = "USER.SRC.PDSE" + full_src = "{0}(MEMBER)".format(src) + hosts.all.zos_data_set( + name=src, + state="present", + type="pdse", + replace=True + ) + + dest = "USER.ASA.PDSE" + full_dest = "{0}(MEMBER)".format(dest) + hosts.all.zos_data_set(name=dest, state="absent") + + hosts.all.zos_copy( + content=ASA_SAMPLE_CONTENT, + dest=full_src, + remote_src=False + ) + + copy_result = hosts.all.zos_copy( + src=full_src, + dest=full_dest, + remote_src=True, + asa_text=True + ) + + verify_copy = hosts.all.shell( + cmd="cat \"//'{0}'\"".format(full_dest), + executable=SHELL_EXECUTABLE, + ) + + for cp_res in copy_result.contacted.values(): + assert cp_res.get("msg") is None + assert cp_res.get("changed") is True + assert cp_res.get("dest") == full_dest + assert cp_res.get("dest_created") is True + for v_cp in verify_copy.contacted.values(): + assert v_cp.get("rc") == 0 + assert v_cp.get("stdout") == ASA_SAMPLE_RETURN + finally: + hosts.all.zos_data_set(name=src, state="absent") + hosts.all.zos_data_set(name=dest, state="absent") + + +@pytest.mark.uss +@pytest.mark.seq +@pytest.mark.asa +def test_copy_asa_data_set_to_text_file(ansible_zos_module): + hosts = ansible_zos_module + + try: + src = "USER.ASA.SRC" + hosts.all.zos_data_set( + name=src, + state="present", + type="seq", + record_format="FBA", + record_length=80, + block_size=27920, + replace=True + ) + hosts.all.zos_copy( + content=ASA_SAMPLE_CONTENT, + dest=src, + remote_src=False + ) + + dest = "/tmp/zos_copy_asa_test.txt" + + copy_result = hosts.all.zos_copy( + src=src, + dest=dest, + remote_src=True, + asa_text=True + ) + + verify_copy = hosts.all.shell( + cmd="cat {0}".format(dest), + executable=SHELL_EXECUTABLE, + ) + + for cp_res in copy_result.contacted.values(): + assert cp_res.get("msg") is None + assert cp_res.get("changed") is True + assert cp_res.get("dest") == dest + for v_cp in verify_copy.contacted.values(): + assert v_cp.get("rc") == 0 + # Since OPUT preserves all blank spaces associated + # with a record, we strip them before comparing to + # what we expect. + for cp_line, content_line in zip(v_cp.get("stdout_lines"), ASA_COPY_CONTENT.splitlines()): + assert cp_line.rstrip() == content_line + finally: + hosts.all.zos_data_set(name=src, state="absent") + hosts.all.file(path=dest, state="absent") + + @pytest.mark.parametrize("src", [ dict(src="/etc/profile", is_remote=False), dict(src="/etc/profile", is_remote=True),]) diff --git a/tests/pytest.ini b/tests/pytest.ini index 4226de838..cd4b8b3f6 100644 --- a/tests/pytest.ini +++ b/tests/pytest.ini @@ -10,4 +10,5 @@ markers = vsam: VSAM data sets test cases. template: Jinja2 templating test cases. aliases: aliases option test cases. - loadlib: executable copy test cases. \ No newline at end of file + loadlib: executable copy test cases. + asa: ASA text files test cases. \ No newline at end of file From b30f892a8b6031369985df5ff348879cf6c2a1ed Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Tue, 31 Oct 2023 13:02:56 -0600 Subject: [PATCH 212/495] Merge staging-v1.8.0-beta.1 back to dev (#1040) * Resolved merge conflicts from cherrypick * Delete changelogs --- CHANGELOG.rst | 50 +++++++++ README.md | 3 +- changelogs/changelog.yaml | 104 ++++++++++++++++++ ...4-zos-copy-add-data-set-member-aliases.yml | 5 - ...18-internal-consolidate-version-checks.yml | 9 -- .../fragments/1028-asa-control-chars.yml | 4 - .../fragments/1029-validate-path-join.yml | 7 -- .../1034-document-utf8-known-issue.yml | 23 ---- ...load_module_and_program_object_support.yml | 6 - ...n_zos_blockinfile_and_set_json_as_true.yml | 2 - ...s-lineinfile-does-not-behave-community.yml | 4 - ...-operator-response-come-back-truncate.yaml | 4 - .../920-zos-copy-add-library-choice.yml | 4 - .../934-Remove-conditional-unnecessary.yml | 2 - ...nhance-Add-wait-zos-operator-and-query.yml | 8 -- ...or-zos-copy-and-remove-temporary-files.yml | 7 -- ...os-job-submit-truncate-final-character.yml | 4 - .../fragments/959-ac-tool-update-mounts.yml | 3 - .../fragments/963-validate-path-join.yml | 5 - ...odify-get_data_set_attributes-function.yml | 3 - .../fragments/965-enhance-archive-tests.yml | 5 - .../966-ac-tool-add-python-311-3.yml | 3 - .../969-Simplify_loadlib_test_cases.yml | 3 - .../fragments/980-zos-copy-disp-shr.yml | 5 - docs/source/modules/zos_copy.rst | 6 +- docs/source/release_notes.rst | 62 +++++++++++ galaxy.yml | 2 +- meta/ibm_zos_core_meta.yml | 2 +- plugins/action/zos_job_submit.py | 2 +- plugins/module_utils/mvs_cmd.py | 2 +- plugins/modules/zos_copy.py | 6 +- plugins/modules/zos_operator_action_query.py | 2 +- .../functional/modules/test_zos_find_func.py | 23 ++-- .../modules/test_zos_job_output_func.py | 2 +- .../test_zos_operator_action_query_func.py | 2 +- .../modules/test_zos_operator_func.py | 2 +- 36 files changed, 247 insertions(+), 139 deletions(-) delete mode 100644 changelogs/fragments/1014-zos-copy-add-data-set-member-aliases.yml delete mode 100644 changelogs/fragments/1018-internal-consolidate-version-checks.yml delete mode 100644 changelogs/fragments/1028-asa-control-chars.yml delete mode 100644 changelogs/fragments/1029-validate-path-join.yml delete mode 100644 changelogs/fragments/1034-document-utf8-known-issue.yml delete mode 100644 changelogs/fragments/804-improved_load_module_and_program_object_support.yml delete mode 100644 changelogs/fragments/904-Deprecate_debug_as_true_in_zos_blockinfile_and_set_json_as_true.yml delete mode 100644 changelogs/fragments/916-zos-lineinfile-does-not-behave-community.yml delete mode 100644 changelogs/fragments/918-zos-operator-response-come-back-truncate.yaml delete mode 100644 changelogs/fragments/920-zos-copy-add-library-choice.yml delete mode 100644 changelogs/fragments/934-Remove-conditional-unnecessary.yml delete mode 100644 changelogs/fragments/943-enhance-Add-wait-zos-operator-and-query.yml delete mode 100644 changelogs/fragments/951-Change-copy-for-zos-copy-and-remove-temporary-files.yml delete mode 100644 changelogs/fragments/952-zos-job-submit-truncate-final-character.yml delete mode 100644 changelogs/fragments/959-ac-tool-update-mounts.yml delete mode 100644 changelogs/fragments/963-validate-path-join.yml delete mode 100644 changelogs/fragments/964-modify-get_data_set_attributes-function.yml delete mode 100644 changelogs/fragments/965-enhance-archive-tests.yml delete mode 100644 changelogs/fragments/966-ac-tool-add-python-311-3.yml delete mode 100644 changelogs/fragments/969-Simplify_loadlib_test_cases.yml delete mode 100644 changelogs/fragments/980-zos-copy-disp-shr.yml diff --git a/CHANGELOG.rst b/CHANGELOG.rst index a7c787d05..2c2815de4 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -5,6 +5,56 @@ ibm.ibm_zos_core Release Notes .. contents:: Topics +v1.8.0-beta.1 +============= + +Release Summary +--------------- + +Release Date: '2023-10-24' +This changelog describes all changes made to the modules and plugins included +in this collection. The release date is the date the changelog is created. +For additional details such as required dependencies and availability review +the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__ + +Minor Changes +------------- + +- module_utils/template - Add validation into path joins to detect unauthorized path traversals. (https://github.com/ansible-collections/ibm_zos_core/pull/1029) +- zos_archive - Add validation into path joins to detect unauthorized path traversals. (https://github.com/ansible-collections/ibm_zos_core/pull/1029) +- zos_archive - Enhanced test cases to use test lines the same length of the record length. (https://github.com/ansible-collections/ibm_zos_core/pull/965) +- zos_copy - Add validation into path joins to detect unauthorized path traversals. (https://github.com/ansible-collections/ibm_zos_core/pull/962) +- zos_copy - Add new option `force_lock` that can copy into data sets that are already in use by other processes (DISP=SHR). User needs to use with caution because this is subject to race conditions and can lead to data loss. (https://github.com/ansible-collections/ibm_zos_core/pull/980). +- zos_copy - includes a new option `executable` that enables copying of executables such as load modules or program objects to both USS and partitioned data sets. When the `dest` option contains a non-existent data set, `zos_copy` will create a data set with the appropriate attributes for an executable. (https://github.com/ansible-collections/ibm_zos_core/pull/804) +- zos_copy - introduces a new option 'aliases' to enable preservation of member aliases when copying data to partitioned data sets (PDS) destinations from USS or other PDS sources. Copying aliases of text based members to/from USS is not supported. (https://github.com/ansible-collections/ibm_zos_core/pull/1014) +- zos_fetch - Add validation into path joins to detect unauthorized path traversals. (https://github.com/ansible-collections/ibm_zos_core/pull/962) +- zos_job_submit - Change action plugin call from copy to zos_copy. (https://github.com/ansible-collections/ibm_zos_core/pull/951) +- zos_operator - Changed system to call 'wait=true' parameter to zoau call. Requires zoau 1.2.5 or later. (https://github.com/ansible-collections/ibm_zos_core/pull/976) +- zos_operator_action_query - Add a max delay of 5 seconds on each part of the operator_action_query. Requires zoau 1.2.5 or later. (https://github.com/ansible-collections/ibm_zos_core/pull/976) +- zos_unarchive - Add validation into path joins to detect unauthorized path traversals. (https://github.com/ansible-collections/ibm_zos_core/pull/1029) +- zos_unarchive - Enhanced test cases to use test lines the same length of the record length. (https://github.com/ansible-collections/ibm_zos_core/pull/965) +- zos_copy - add support in zos_copy for text files and data sets containing ASA control characters. (https://github.com/ansible-collections/ibm_zos_core/pull/1028) + +Bugfixes +-------- + +- zos_copy - Update option limit to include LIBRARY as dest_dataset/suboption value. Documentation updated to reflect this change. (https://github.com/ansible-collections/ibm_zos_core/pull/968). +- zos_job_submit - Temporary files were created in tmp directory. Fix now ensures the deletion of files every time the module run. (https://github.com/ansible-collections/ibm_zos_core/pull/951) +- zos_job_submit - The last line of the jcl was missing in the input. Fix now ensures the presence of the full input in job_submit. (https://github.com/ansible-collections/ibm_zos_core/pull/952) +- zos_lineinfile - A duplicate entry was made even if line was already present in the target file. Fix now prevents a duplicate entry if the line already exists in the target file. (https://github.com/ansible-collections/ibm_zos_core/pull/916) +- zos_operator - The last line of the operator was missing in the response of the module. The fix now ensures the presence of the full output of the operator. https://github.com/ansible-collections/ibm_zos_core/pull/918) + +Known Issues +------------ + +- Several modules have reported UTF8 decoding errors when interacting with results that contain non-printable UTF8 characters in the response. This occurs when a module receives content that does not correspond to a UTF-8 value. These include modules `zos_job_submit`, `zos_job_output`, `zos_operator_action_query` but are not limited to this list. This will be addressed in `ibm_zos_core` version 1.10.0-beta.1. Each case is unique, some options to work around the error are below. - Specify that the ASA assembler option be enabled to instruct the assembler to use ANSI control characters instead of machine code control characters. - Add `ignore_errors:true` to the playbook task so the task error will not fail the playbook. - If the error is resulting from a batch job, add `ignore_errors:true` to the task and capture the output into a variable and extract the job ID with a regular expression and then use `zos_job_output` to display the DD without the non-printable character such as the DD `JESMSGLG`. (https://github.com/ansible-collections/ibm_zos_core/issues/677) (https://github.com/ansible-collections/ibm_zos_core/issues/776) (https://github.com/ansible-collections/ibm_zos_core/issues/972) +- With later versions of `ansible-core` used with `ibm_zos_core` collection a warning has started to appear "Module "ansible.builtin.command" returned non UTF-8 data in the JSON response" that is currently being reviewed. There are no recommendations at this point. (https://github.com/ansible-collections/ibm_zos_core/issues/983) + +New Modules +----------- + +- ibm.ibm_zos_core.zos_script - Run scripts in z/OS + v1.7.0 ====== diff --git a/README.md b/README.md index 13f45889f..947740ad5 100644 --- a/README.md +++ b/README.md @@ -13,7 +13,8 @@ querying operator actions, APF authorizing libraries, editing textual data in data sets or Unix System Services files, finding data sets, backing up and restoring data sets and volumes, mounting file systems, running z/OS programs without JCL, -initializing volumes, archiving, unarchiving and templating with Jinja. +running local and remote scripts on z/OS, initializing volumes, +archiving, unarchiving and templating with Jinja. Red Hat Ansible Certified Content for IBM Z diff --git a/changelogs/changelog.yaml b/changelogs/changelog.yaml index 6988760f9..2e50559d7 100644 --- a/changelogs/changelog.yaml +++ b/changelogs/changelog.yaml @@ -1023,3 +1023,107 @@ releases: - 930-archive-post-beta.yml - v1.7.0-beta.2_summary.yml release_date: '2023-08-21' + 1.8.0-beta.1: + changes: + bugfixes: + - zos_copy - Update option limit to include LIBRARY as dest_dataset/suboption + value. Documentation updated to reflect this change. (https://github.com/ansible-collections/ibm_zos_core/pull/968). + - zos_job_submit - Temporary files were created in tmp directory. Fix now ensures + the deletion of files every time the module run. (https://github.com/ansible-collections/ibm_zos_core/pull/951) + - zos_job_submit - The last line of the jcl was missing in the input. Fix now + ensures the presence of the full input in job_submit. (https://github.com/ansible-collections/ibm_zos_core/pull/952) + - zos_lineinfile - A duplicate entry was made even if line was already present + in the target file. Fix now prevents a duplicate entry if the line already + exists in the target file. (https://github.com/ansible-collections/ibm_zos_core/pull/916) + - zos_operator - The last line of the operator was missing in the response of + the module. The fix now ensures the presence of the full output of the operator. + https://github.com/ansible-collections/ibm_zos_core/pull/918) + deprecated_features: + - zos_blockinfile debug - is deprecated in favor of 'as_json' (https://github.com/ansible-collections/ibm_zos_core/pull/904). + known_issues: + - Several modules have reported UTF8 decoding errors when interacting with results + that contain non-printable UTF8 characters in the response. This occurs when + a module receives content that does not correspond to a UTF-8 value. These + include modules `zos_job_submit`, `zos_job_output`, `zos_operator_action_query` + but are not limited to this list. This will be addressed in `ibm_zos_core` + version 1.10.0-beta.1. Each case is unique, some options to work around the + error are below. - Specify that the ASA assembler option be enabled to instruct + the assembler to use ANSI control characters instead of machine code control + characters. - Add `ignore_errors:true` to the playbook task so the task error + will not fail the playbook. - If the error is resulting from a batch job, + add `ignore_errors:true` to the task and capture the output into a variable + and extract the job ID with a regular expression and then use `zos_job_output` + to display the DD without the non-printable character such as the DD `JESMSGLG`. + (https://github.com/ansible-collections/ibm_zos_core/issues/677) (https://github.com/ansible-collections/ibm_zos_core/issues/776) + (https://github.com/ansible-collections/ibm_zos_core/issues/972) + - With later versions of `ansible-core` used with `ibm_zos_core` collection + a warning has started to appear "Module "ansible.builtin.command" returned + non UTF-8 data in the JSON response" that is currently being reviewed. There + are no recommendations at this point. (https://github.com/ansible-collections/ibm_zos_core/issues/983) + minor_changes: + - module_utils/template - Add validation into path joins to detect unauthorized + path traversals. (https://github.com/ansible-collections/ibm_zos_core/pull/1029) + - zos_archive - Add validation into path joins to detect unauthorized path traversals. + (https://github.com/ansible-collections/ibm_zos_core/pull/1029) + - zos_archive - Enhanced test cases to use test lines the same length of the + record length. (https://github.com/ansible-collections/ibm_zos_core/pull/965) + - zos_copy - Add validation into path joins to detect unauthorized path traversals. + (https://github.com/ansible-collections/ibm_zos_core/pull/962) + - zos_copy - Add new option `force_lock` that can copy into data sets that are + already in use by other processes (DISP=SHR). User needs to use with caution + because this is subject to race conditions and can lead to data loss. (https://github.com/ansible-collections/ibm_zos_core/pull/980). + - zos_copy - includes a new option `executable` that enables copying of executables + such as load modules or program objects to both USS and partitioned data sets. + When the `dest` option contains a non-existent data set, `zos_copy` will create + a data set with the appropriate attributes for an executable. (https://github.com/ansible-collections/ibm_zos_core/pull/804) + - zos_copy - introduces a new option 'aliases' to enable preservation of member + aliases when copying data to partitioned data sets (PDS) destinations from + USS or other PDS sources. Copying aliases of text based members to/from USS + is not supported. (https://github.com/ansible-collections/ibm_zos_core/pull/1014) + - zos_fetch - Add validation into path joins to detect unauthorized path traversals. + (https://github.com/ansible-collections/ibm_zos_core/pull/962) + - zos_job_submit - Change action plugin call from copy to zos_copy. (https://github.com/ansible-collections/ibm_zos_core/pull/951) + - zos_operator - Changed system to call 'wait=true' parameter to zoau call. + Requires zoau 1.2.5 or later. (https://github.com/ansible-collections/ibm_zos_core/pull/976) + - zos_operator_action_query - Add a max delay of 5 seconds on each part of the + operator_action_query. Requires zoau 1.2.5 or later. (https://github.com/ansible-collections/ibm_zos_core/pull/976) + - zos_unarchive - Add validation into path joins to detect unauthorized path + traversals. (https://github.com/ansible-collections/ibm_zos_core/pull/1029) + - zos_unarchive - Enhanced test cases to use test lines the same length of the + record length. (https://github.com/ansible-collections/ibm_zos_core/pull/965) + release_summary: 'Release Date: ''2023-10-24'' + + This changelog describes all changes made to the modules and plugins included + + in this collection. The release date is the date the changelog is created. + + For additional details such as required dependencies and availability review + + the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__' + fragments: + - 1014-zos-copy-add-data-set-member-aliases.yml + - 1018-internal-consolidate-version-checks.yml + - 1029-validate-path-join.yml + - 1034-document-utf8-known-issue.yml + - 804-improved_load_module_and_program_object_support.yml + - 904-Deprecate_debug_as_true_in_zos_blockinfile_and_set_json_as_true.yml + - 916-zos-lineinfile-does-not-behave-community.yml + - 918-zos-operator-response-come-back-truncate.yaml + - 920-zos-copy-add-library-choice.yml + - 934-Remove-conditional-unnecessary.yml + - 943-enhance-Add-wait-zos-operator-and-query.yml + - 951-Change-copy-for-zos-copy-and-remove-temporary-files.yml + - 952-zos-job-submit-truncate-final-character.yml + - 959-ac-tool-update-mounts.yml + - 963-validate-path-join.yml + - 964-modify-get_data_set_attributes-function.yml + - 965-enhance-archive-tests.yml + - 966-ac-tool-add-python-311-3.yml + - 969-Simplify_loadlib_test_cases.yml + - 980-zos-copy-disp-shr.yml + - v1.8.0-beta.1.yml + modules: + - description: Run scripts in z/OS + name: zos_script + namespace: '' + release_date: '2023-10-24' diff --git a/changelogs/fragments/1014-zos-copy-add-data-set-member-aliases.yml b/changelogs/fragments/1014-zos-copy-add-data-set-member-aliases.yml deleted file mode 100644 index 4122ea878..000000000 --- a/changelogs/fragments/1014-zos-copy-add-data-set-member-aliases.yml +++ /dev/null @@ -1,5 +0,0 @@ -minor_changes: -- zos_copy - introduces a new option 'aliases' to enable preservation of member aliases - when copying data to partitioned data sets (PDS) destinations from USS or other PDS sources. - Copying aliases of text based members to/from USS is not supported. - (https://github.com/ansible-collections/ibm_zos_core/pull/1014) \ No newline at end of file diff --git a/changelogs/fragments/1018-internal-consolidate-version-checks.yml b/changelogs/fragments/1018-internal-consolidate-version-checks.yml deleted file mode 100644 index 3698ed510..000000000 --- a/changelogs/fragments/1018-internal-consolidate-version-checks.yml +++ /dev/null @@ -1,9 +0,0 @@ -trivial: -- zoau_version_check - Change shell call to include call, for higher responsivity. - (https://github.com/ansible-collections/ibm_zos_core/pull/1027) -- zos_operator - Use new version check - (https://github.com/ansible-collections/ibm_zos_core/pull/1027) -- zos_operator_action_query - Use new version check - (https://github.com/ansible-collections/ibm_zos_core/pull/1027) -- utils/job.py - Use new version check - (https://github.com/ansible-collections/ibm_zos_core/pull/1027) diff --git a/changelogs/fragments/1028-asa-control-chars.yml b/changelogs/fragments/1028-asa-control-chars.yml deleted file mode 100644 index 6afc35e50..000000000 --- a/changelogs/fragments/1028-asa-control-chars.yml +++ /dev/null @@ -1,4 +0,0 @@ -minor_changes: -- zos_copy: add support in zos_copy for text files and data sets containing ASA - control characters. - (https://github.com/ansible-collections/ibm_zos_core/pull/1028) \ No newline at end of file diff --git a/changelogs/fragments/1029-validate-path-join.yml b/changelogs/fragments/1029-validate-path-join.yml deleted file mode 100644 index 785c1a41b..000000000 --- a/changelogs/fragments/1029-validate-path-join.yml +++ /dev/null @@ -1,7 +0,0 @@ -minor_changes: - - zos_archive - Add validation into path joins to detect unauthorized path traversals. - (https://github.com/ansible-collections/ibm_zos_core/pull/1029) - - zos_unarchive - Add validation into path joins to detect unauthorized path traversals. - (https://github.com/ansible-collections/ibm_zos_core/pull/1029) - - module_utils/template - Add validation into path joins to detect unauthorized path traversals. - (https://github.com/ansible-collections/ibm_zos_core/pull/1029) \ No newline at end of file diff --git a/changelogs/fragments/1034-document-utf8-known-issue.yml b/changelogs/fragments/1034-document-utf8-known-issue.yml deleted file mode 100644 index 860fd3f6c..000000000 --- a/changelogs/fragments/1034-document-utf8-known-issue.yml +++ /dev/null @@ -1,23 +0,0 @@ -known_issues: - - Several modules have reported UTF8 decoding errors when interacting with results - that contain non-printable UTF8 characters in the response. This occurs when - a module receives content that does not correspond to a UTF-8 value. - These include modules `zos_job_submit`, `zos_job_output`, - `zos_operator_action_query` but are not limited to this list. - This will be addressed in `ibm_zos_core` version 1.10.0-beta.1. - Each case is unique, some options to work around the error are below. - - Specify that the ASA assembler option be enabled to instruct the assembler - to use ANSI control characters instead of machine code control characters. - - Add `ignore_errors:true` to the playbook task so the task error will not - fail the playbook. - - If the error is resulting from a batch job, add `ignore_errors:true` to the - task and capture the output into a variable and extract the job ID with a - regular expression and then use `zos_job_output` to display the DD without - the non-printable character such as the DD `JESMSGLG`. - (https://github.com/ansible-collections/ibm_zos_core/issues/677) - (https://github.com/ansible-collections/ibm_zos_core/issues/776) - (https://github.com/ansible-collections/ibm_zos_core/issues/972) - - With later versions of `ansible-core` used with `ibm_zos_core` collection a - warning has started to appear "Module "ansible.builtin.command" returned non UTF-8 data in the JSON response" - that is currently being reviewed. There are no recommendations at this point. - (https://github.com/ansible-collections/ibm_zos_core/issues/983) diff --git a/changelogs/fragments/804-improved_load_module_and_program_object_support.yml b/changelogs/fragments/804-improved_load_module_and_program_object_support.yml deleted file mode 100644 index 07379c1e3..000000000 --- a/changelogs/fragments/804-improved_load_module_and_program_object_support.yml +++ /dev/null @@ -1,6 +0,0 @@ -minor_changes: -- zos_copy - includes a new option `executable` that enables copying of executables such - as load modules or program objects to both USS and partitioned data sets. When - the `dest` option contains a non-existent data set, `zos_copy` will create a data set with - the appropriate attributes for an executable. - (https://github.com/ansible-collections/ibm_zos_core/pull/804) \ No newline at end of file diff --git a/changelogs/fragments/904-Deprecate_debug_as_true_in_zos_blockinfile_and_set_json_as_true.yml b/changelogs/fragments/904-Deprecate_debug_as_true_in_zos_blockinfile_and_set_json_as_true.yml deleted file mode 100644 index 9218a0ed3..000000000 --- a/changelogs/fragments/904-Deprecate_debug_as_true_in_zos_blockinfile_and_set_json_as_true.yml +++ /dev/null @@ -1,2 +0,0 @@ -deprecated_features: - - zos_blockinfile debug - is deprecated in favor of 'as_json' (https://github.com/ansible-collections/ibm_zos_core/pull/904). \ No newline at end of file diff --git a/changelogs/fragments/916-zos-lineinfile-does-not-behave-community.yml b/changelogs/fragments/916-zos-lineinfile-does-not-behave-community.yml deleted file mode 100644 index 9b13df055..000000000 --- a/changelogs/fragments/916-zos-lineinfile-does-not-behave-community.yml +++ /dev/null @@ -1,4 +0,0 @@ -bugfixes: -- zos_lineinfile - A duplicate entry was made even if line was already present in the target file. - Fix now prevents a duplicate entry if the line already exists in the target file. - (https://github.com/ansible-collections/ibm_zos_core/pull/916) \ No newline at end of file diff --git a/changelogs/fragments/918-zos-operator-response-come-back-truncate.yaml b/changelogs/fragments/918-zos-operator-response-come-back-truncate.yaml deleted file mode 100644 index 1e2d3c10f..000000000 --- a/changelogs/fragments/918-zos-operator-response-come-back-truncate.yaml +++ /dev/null @@ -1,4 +0,0 @@ -bugfixes: - - zos_operator - The last line of the operator was missing in the response of - the module. The fix now ensures the presence of the full output of the operator. - https://github.com/ansible-collections/ibm_zos_core/pull/918) \ No newline at end of file diff --git a/changelogs/fragments/920-zos-copy-add-library-choice.yml b/changelogs/fragments/920-zos-copy-add-library-choice.yml deleted file mode 100644 index 2d339227b..000000000 --- a/changelogs/fragments/920-zos-copy-add-library-choice.yml +++ /dev/null @@ -1,4 +0,0 @@ -bugfixes: -- zos_copy - Update option limit to include LIBRARY as dest_dataset/suboption value. - Documentation updated to reflect this change. - (https://github.com/ansible-collections/ibm_zos_core/pull/968). \ No newline at end of file diff --git a/changelogs/fragments/934-Remove-conditional-unnecessary.yml b/changelogs/fragments/934-Remove-conditional-unnecessary.yml deleted file mode 100644 index 3ceeffa99..000000000 --- a/changelogs/fragments/934-Remove-conditional-unnecessary.yml +++ /dev/null @@ -1,2 +0,0 @@ -trivial: - - zos_blockinfile - remove test conditional unnecessary (https://github.com/ansible-collections/ibm_zos_core/pull/934). \ No newline at end of file diff --git a/changelogs/fragments/943-enhance-Add-wait-zos-operator-and-query.yml b/changelogs/fragments/943-enhance-Add-wait-zos-operator-and-query.yml deleted file mode 100644 index 91f920145..000000000 --- a/changelogs/fragments/943-enhance-Add-wait-zos-operator-and-query.yml +++ /dev/null @@ -1,8 +0,0 @@ -minor_changes: - - zos_operator - Changed system to call 'wait=true' parameter to zoau call. - Requires zoau 1.2.5 or later. - (https://github.com/ansible-collections/ibm_zos_core/pull/976) - - zos_operator_action_query - Add a max delay of 5 seconds on each part of the operator_action_query. - Requires zoau 1.2.5 or later. - (https://github.com/ansible-collections/ibm_zos_core/pull/976) - diff --git a/changelogs/fragments/951-Change-copy-for-zos-copy-and-remove-temporary-files.yml b/changelogs/fragments/951-Change-copy-for-zos-copy-and-remove-temporary-files.yml deleted file mode 100644 index 99a0599ec..000000000 --- a/changelogs/fragments/951-Change-copy-for-zos-copy-and-remove-temporary-files.yml +++ /dev/null @@ -1,7 +0,0 @@ -bugfixes: - - zos_job_submit - Temporary files were created in tmp directory. - Fix now ensures the deletion of files every time the module run. - (https://github.com/ansible-collections/ibm_zos_core/pull/951) -minor_changes: - - zos_job_submit - Change action plugin call from copy to zos_copy. - (https://github.com/ansible-collections/ibm_zos_core/pull/951) \ No newline at end of file diff --git a/changelogs/fragments/952-zos-job-submit-truncate-final-character.yml b/changelogs/fragments/952-zos-job-submit-truncate-final-character.yml deleted file mode 100644 index 7a4ce88cb..000000000 --- a/changelogs/fragments/952-zos-job-submit-truncate-final-character.yml +++ /dev/null @@ -1,4 +0,0 @@ -bugfixes: -- zos_job_submit - The last line of the jcl was missing in the input. - Fix now ensures the presence of the full input in job_submit. - (https://github.com/ansible-collections/ibm_zos_core/pull/952) \ No newline at end of file diff --git a/changelogs/fragments/959-ac-tool-update-mounts.yml b/changelogs/fragments/959-ac-tool-update-mounts.yml deleted file mode 100644 index 4eb90122d..000000000 --- a/changelogs/fragments/959-ac-tool-update-mounts.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: -- ac - Add ZOAU 1.2.4 and 1.2.5 mounts. - (https://github.com/ansible-collections/ibm_zos_core/pull/959) \ No newline at end of file diff --git a/changelogs/fragments/963-validate-path-join.yml b/changelogs/fragments/963-validate-path-join.yml deleted file mode 100644 index 129af357e..000000000 --- a/changelogs/fragments/963-validate-path-join.yml +++ /dev/null @@ -1,5 +0,0 @@ -minor_changes: - - zos_fetch - Add validation into path joins to detect unauthorized path traversals. - (https://github.com/ansible-collections/ibm_zos_core/pull/962) - - zos_copy - Add validation into path joins to detect unauthorized path traversals. - (https://github.com/ansible-collections/ibm_zos_core/pull/962) \ No newline at end of file diff --git a/changelogs/fragments/964-modify-get_data_set_attributes-function.yml b/changelogs/fragments/964-modify-get_data_set_attributes-function.yml deleted file mode 100644 index da384c77b..000000000 --- a/changelogs/fragments/964-modify-get_data_set_attributes-function.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: -- zos_copy - modify get_data_set_attributes helper function to no longer overwrite caller-defined attributes. - (https://github.com/ansible-collections/ibm_zos_core/pull/964) \ No newline at end of file diff --git a/changelogs/fragments/965-enhance-archive-tests.yml b/changelogs/fragments/965-enhance-archive-tests.yml deleted file mode 100644 index 80705e4c1..000000000 --- a/changelogs/fragments/965-enhance-archive-tests.yml +++ /dev/null @@ -1,5 +0,0 @@ -minor_changes: - - zos_archive - Enhanced test cases to use test lines the same length of the record length. - (https://github.com/ansible-collections/ibm_zos_core/pull/965) - - zos_unarchive - Enhanced test cases to use test lines the same length of the record length. - (https://github.com/ansible-collections/ibm_zos_core/pull/965) \ No newline at end of file diff --git a/changelogs/fragments/966-ac-tool-add-python-311-3.yml b/changelogs/fragments/966-ac-tool-add-python-311-3.yml deleted file mode 100644 index 231d3e2be..000000000 --- a/changelogs/fragments/966-ac-tool-add-python-311-3.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: -- ac - Add python 3.11-3 mount table. - (https://github.com/ansible-collections/ibm_zos_core/pull/966) \ No newline at end of file diff --git a/changelogs/fragments/969-Simplify_loadlib_test_cases.yml b/changelogs/fragments/969-Simplify_loadlib_test_cases.yml deleted file mode 100644 index ce2060ed8..000000000 --- a/changelogs/fragments/969-Simplify_loadlib_test_cases.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: -- zos_copy - Divide large test case for loadlibs and simplify functions. - (https://github.com/ansible-collections/ibm_zos_core/pull/969) \ No newline at end of file diff --git a/changelogs/fragments/980-zos-copy-disp-shr.yml b/changelogs/fragments/980-zos-copy-disp-shr.yml deleted file mode 100644 index 541e611c1..000000000 --- a/changelogs/fragments/980-zos-copy-disp-shr.yml +++ /dev/null @@ -1,5 +0,0 @@ -minor_changes: -- zos_copy - Add new option `force_lock` that can copy into data sets that are - already in use by other processes (DISP=SHR). User needs to use with caution - because this is subject to race conditions and can lead to data loss. - (https://github.com/ansible-collections/ibm_zos_core/pull/980). diff --git a/docs/source/modules/zos_copy.rst b/docs/source/modules/zos_copy.rst index e19332bf4..004671ebc 100644 --- a/docs/source/modules/zos_copy.rst +++ b/docs/source/modules/zos_copy.rst @@ -158,7 +158,7 @@ force force_lock - By default, when c(dest) is a MVS data set and is being used by another process with DISP=SHR or DISP=OLD the module will fail. Use ``force_lock`` to bypass this check and continue with copy. + By default, when ``dest`` is a MVS data set and is being used by another process with DISP=SHR or DISP=OLD the module will fail. Use ``force_lock`` to bypass this check and continue with copy. If set to ``true`` and destination is a MVS data set opened by another process then zos_copy will try to copy using DISP=SHR. @@ -882,7 +882,7 @@ destination_attributes checksum SHA256 checksum of the file after running zos_copy. - | **returned**: C(validate) is C(true) and if dest is USS + | **returned**: When ``validate=true`` and if ``dest`` is USS | **type**: str | **sample**: 8d320d5f68b048fc97559d771ede68b37a71e8374d1d678d96dcfa2b2da7a64e @@ -945,7 +945,7 @@ state note A note to the user after module terminates. - | **returned**: C(force) is C(false) and dest exists + | **returned**: When ``force=true`` and ``dest`` exists | **type**: str | **sample**: No data was copied diff --git a/docs/source/release_notes.rst b/docs/source/release_notes.rst index de1a27013..10150952d 100644 --- a/docs/source/release_notes.rst +++ b/docs/source/release_notes.rst @@ -6,6 +6,66 @@ Releases ======== +Version 1.8.0-beta.1 +==================== + +New Modules +----------- + +- ``zos_script`` - Run scripts in z/OS + +Minor Changes +------------- +- ``zos_archive`` + + - Add validation into path joins to detect unauthorized path traversals. + - Enhanced test cases to use test lines the same length of the record length. +- ``zos_copy`` + + - Add validation into path joins to detect unauthorized path traversals. + - Add new option `force_lock` that can copy into data sets that are already in use by other processes (DISP=SHR). User needs to use with caution because this is subject to race conditions and can lead to data loss. + - includes a new option `executable` that enables copying of executables such as load modules or program objects to both USS and partitioned data sets. When the `dest` option contains a non-existent data set, `zos_copy` will create a data set with the appropriate attributes for an executable. + - introduces a new option 'aliases' to enable preservation of member aliases when copying data to partitioned data sets (PDS) destinations from USS or other PDS sources. Copying aliases of text based members to/from USS is not supported. + - add support in zos_copy for text files and data sets containing ASA control characters. +- ``zos_fetch`` - Add validation into path joins to detect unauthorized path traversals. +- ``zos_job_submit`` - Change action plugin call from copy to zos_copy. +- ``zos_operator`` - Changed system to call 'wait=true' parameter to zoau call. Requires zoau 1.2.5 or later. +- ``zos_operator_action_query`` - Add a max delay of 5 seconds on each part of the operator_action_query. Requires zoau 1.2.5 or later. +- ``zos_unarchive`` + + - Add validation into path joins to detect unauthorized path traversals. + - Enhanced test cases to use test lines the same length of the record length. +- ``module_utils/template`` - Add validation into path joins to detect unauthorized path traversals. + +Bugfixes +-------- + +- ``zos_copy`` - Update option limit to include LIBRARY as dest_dataset/suboption value. Documentation updated to reflect this change. +- ``zos_job_submit`` - Temporary files were created in tmp directory. Fix now ensures the deletion of files every time the module run. +- ``zos_job_submit`` - The last line of the jcl was missing in the input. Fix now ensures the presence of the full input in job_submit. +- ``zos_lineinfile`` - A duplicate entry was made even if line was already present in the target file. Fix now prevents a duplicate entry if the line already exists in the target file. +- ``zos_operator`` - The last line of the operator was missing in the response of the module. The fix now ensures the presence of the full output of the operator. + +Known Issues +------------ + +- Several modules have reported UTF8 decoding errors when interacting with results that contain non-printable UTF8 characters in the response. This occurs when a module receives content that does not correspond to a UTF-8 value. These include modules `zos_job_submit`, `zos_job_output`, `zos_operator_action_query` but are not limited to this list. This will be addressed in `ibm_zos_core` version 1.10.0-beta.1. Each case is unique, some options to work around the error are below. - Specify that the ASA assembler option be enabled to instruct the assembler to use ANSI control characters instead of machine code control characters. - Add `ignore_errors:true` to the playbook task so the task error will not fail the playbook. - If the error is resulting from a batch job, add `ignore_errors:true` to the task and capture the output into a variable and extract the job ID with a regular expression and then use `zos_job_output` to display the DD without the non-printable character such as the DD `JESMSGLG`. +- With later versions of `ansible-core` used with `ibm_zos_core` collection a warning has started to appear "Module "ansible.builtin.command" returned non UTF-8 data in the JSON response" that is currently being reviewed. There are no recommendations at this point. + +Availability +------------ + +* `Galaxy`_ +* `GitHub`_ + +Reference +--------- + +* Supported by `z/OS®`_ V2R4 or later +* Supported by the `z/OS® shell`_ +* Supported by `IBM Open Enterprise SDK for Python`_ `3.9`_ - `3.11`_ +* Supported by IBM `Z Open Automation Utilities 1.2.4`_ (or later) but prior to version 1.3. + Version 1.7.0 ============= @@ -828,6 +888,8 @@ Known issues https://www.ibm.com/docs/en/zoau/1.2.x .. _Z Open Automation Utilities 1.2.3: https://www.ibm.com/docs/en/zoau/1.2.x +.. _Z Open Automation Utilities 1.2.4: + https://www.ibm.com/docs/en/zoau/1.2.x .. _z/OS® shell: https://www.ibm.com/support/knowledgecenter/en/SSLTBW_2.4.0/com.ibm.zos.v2r4.bpxa400/part1.htm .. _z/OS®: diff --git a/galaxy.yml b/galaxy.yml index f5c0ccf46..b83b1014a 100644 --- a/galaxy.yml +++ b/galaxy.yml @@ -6,7 +6,7 @@ namespace: ibm name: ibm_zos_core # The collection version -version: 1.7.0 +version: 1.8.0-beta.1 # Collection README file readme: README.md diff --git a/meta/ibm_zos_core_meta.yml b/meta/ibm_zos_core_meta.yml index 51e4c7392..7a68a05bb 100644 --- a/meta/ibm_zos_core_meta.yml +++ b/meta/ibm_zos_core_meta.yml @@ -1,5 +1,5 @@ name: ibm_zos_core -version: "1.7.0" +version: "1.8.0-beta.1" managed_requirements: - name: "IBM Open Enterprise SDK for Python" diff --git a/plugins/action/zos_job_submit.py b/plugins/action/zos_job_submit.py index db3fb1fd7..630ce7969 100644 --- a/plugins/action/zos_job_submit.py +++ b/plugins/action/zos_job_submit.py @@ -90,7 +90,7 @@ def run(self, tmp=None, task_vars=None): source_full = None try: source_full = self._loader.get_real_file(source) - source_rel = os.path.basename(source) + # source_rel = os.path.basename(source) except AnsibleFileNotFound as e: result["failed"] = True result["msg"] = "could not find src=%s, %s" % (source_full, e) diff --git a/plugins/module_utils/mvs_cmd.py b/plugins/module_utils/mvs_cmd.py index 21d2b5a7e..ec4955ac6 100644 --- a/plugins/module_utils/mvs_cmd.py +++ b/plugins/module_utils/mvs_cmd.py @@ -1,4 +1,4 @@ -# Copyright (c) IBM Corporation 2020 +# Copyright (c) IBM Corporation 2020, 2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/plugins/modules/zos_copy.py b/plugins/modules/zos_copy.py index 9bafdc471..ec48910e0 100644 --- a/plugins/modules/zos_copy.py +++ b/plugins/modules/zos_copy.py @@ -172,7 +172,7 @@ required: false force_lock: description: - - By default, when c(dest) is a MVS data set and is being used by another + - By default, when C(dest) is a MVS data set and is being used by another process with DISP=SHR or DISP=OLD the module will fail. Use C(force_lock) to bypass this check and continue with copy. - If set to C(true) and destination is a MVS data set opened by another @@ -715,7 +715,7 @@ } checksum: description: SHA256 checksum of the file after running zos_copy. - returned: C(validate) is C(true) and if dest is USS + returned: When ``validate=true`` and if ``dest`` is USS type: str sample: 8d320d5f68b048fc97559d771ede68b37a71e8374d1d678d96dcfa2b2da7a64e backup_name: @@ -760,7 +760,7 @@ sample: file note: description: A note to the user after module terminates. - returned: C(force) is C(false) and dest exists + returned: When ``force=true`` and ``dest`` exists type: str sample: No data was copied msg: diff --git a/plugins/modules/zos_operator_action_query.py b/plugins/modules/zos_operator_action_query.py index ddf895eb9..a06535763 100644 --- a/plugins/modules/zos_operator_action_query.py +++ b/plugins/modules/zos_operator_action_query.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019, 2020 +# Copyright (c) IBM Corporation 2019, 2020, 2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/tests/functional/modules/test_zos_find_func.py b/tests/functional/modules/test_zos_find_func.py index fb1a47179..345927fe5 100644 --- a/tests/functional/modules/test_zos_find_func.py +++ b/tests/functional/modules/test_zos_find_func.py @@ -233,15 +233,22 @@ def test_find_data_sets_smaller_than_size(ansible_zos_module): def test_find_data_sets_in_volume(ansible_zos_module): - hosts = ansible_zos_module + try: + hosts = ansible_zos_module + data_set_name = "TEST.FIND.SEQ" + volume = "000000" + # Create temp data set + hosts.all.zos_data_set(name=data_set_name, type="seq", state="present", volumes=[volume]) + find_res = hosts.all.zos_find( + patterns=[data_set_name], volumes=[volume] + ) + print(vars(find_res)) + for val in find_res.contacted.values(): + assert len(val.get('data_sets')) >= 1 + assert val.get('matched') >= 1 + finally: + hosts.all.zos_data_set(name=data_set_name, state="absent") - find_res = hosts.all.zos_find( - patterns=['USER.*'], volumes=['IMSSUN'] - ) - print(vars(find_res)) - for val in find_res.contacted.values(): - assert len(val.get('data_sets')) >= 1 - assert val.get('matched') >= 1 def test_find_vsam_pattern(ansible_zos_module): diff --git a/tests/functional/modules/test_zos_job_output_func.py b/tests/functional/modules/test_zos_job_output_func.py index 4b3990ab5..11b7cd90d 100644 --- a/tests/functional/modules/test_zos_job_output_func.py +++ b/tests/functional/modules/test_zos_job_output_func.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019, 2020, 2022 +# Copyright (c) IBM Corporation 2019, 2020, 2022, 2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/tests/functional/modules/test_zos_operator_action_query_func.py b/tests/functional/modules/test_zos_operator_action_query_func.py index 30f5175e4..c7afab2f9 100644 --- a/tests/functional/modules/test_zos_operator_action_query_func.py +++ b/tests/functional/modules/test_zos_operator_action_query_func.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019, 2020 +# Copyright (c) IBM Corporation 2019, 2020, 2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/tests/functional/modules/test_zos_operator_func.py b/tests/functional/modules/test_zos_operator_func.py index 4ad07d882..5aebe2a9c 100644 --- a/tests/functional/modules/test_zos_operator_func.py +++ b/tests/functional/modules/test_zos_operator_func.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019, 2020 +# Copyright (c) IBM Corporation 2019, 2020, 2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at From dc5cdf686d605f5ed7c4b64c6e7f43811fd05595 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Mon, 6 Nov 2023 11:38:56 -0600 Subject: [PATCH 213/495] Fix sending a local archive into remote fails (#1045) * Added test * Added test for local archive and then remote unarchive * Added changelog * Removed format fixture * Ensure tempfile cleanup --- .../fragments/1045-local-uss-unarchive.yml | 5 ++ plugins/action/zos_unarchive.py | 4 +- .../modules/test_zos_unarchive_func.py | 46 ++++++++++++++++++- 3 files changed, 52 insertions(+), 3 deletions(-) create mode 100644 changelogs/fragments/1045-local-uss-unarchive.yml diff --git a/changelogs/fragments/1045-local-uss-unarchive.yml b/changelogs/fragments/1045-local-uss-unarchive.yml new file mode 100644 index 000000000..84bc5508c --- /dev/null +++ b/changelogs/fragments/1045-local-uss-unarchive.yml @@ -0,0 +1,5 @@ +bugfixes: + - zos_unarchive - Using a local file with a USS format option failed when sending to + remote because dest_data_set option had an empty dictionary. Fix now leaves dest_data_set + as None when using a USS format option. + (https://github.com/ansible-collections/ibm_zos_core/pull/1045). \ No newline at end of file diff --git a/plugins/action/zos_unarchive.py b/plugins/action/zos_unarchive.py index 19cbf5ead..d808647ef 100644 --- a/plugins/action/zos_unarchive.py +++ b/plugins/action/zos_unarchive.py @@ -61,8 +61,6 @@ def run(self, tmp=None, task_vars=None): format_name = format.get("name") copy_module_args = dict() dest_data_set = format.get("dest_data_set") - if dest_data_set is None: - dest_data_set = dict() dest = "" if source.startswith('~'): source = os.path.expanduser(source) @@ -73,6 +71,8 @@ def run(self, tmp=None, task_vars=None): module_name="tempfile", module_args={}, task_vars=task_vars, ).get("path") elif format_name in MVS_SUPPORTED_FORMATS: + if dest_data_set is None: + dest_data_set = dict() tmp_hlq = module_args.get("tmp_hlq") if module_args.get("tmp_hlq") is not None else "" cmd_res = self._execute_module( module_name="command", diff --git a/tests/functional/modules/test_zos_unarchive_func.py b/tests/functional/modules/test_zos_unarchive_func.py index 46a1e8534..2faba0023 100644 --- a/tests/functional/modules/test_zos_unarchive_func.py +++ b/tests/functional/modules/test_zos_unarchive_func.py @@ -87,6 +87,7 @@ def create_multiple_members(ansible_zos_module, pds_name, member_base_name, n): - test_uss_unarchive_include - test_uss_unarchive_exclude - test_uss_unarchive_list +- test_uss_unarchive_copy_to_remote """ @@ -248,7 +249,7 @@ def test_uss_unarchive_list(ansible_zos_module, format): @pytest.mark.uss @pytest.mark.parametrize("format", USS_FORMATS) -def test_uss_single_archive_with_mode(ansible_zos_module, format): +def test_uss_single_unarchive_with_mode(ansible_zos_module, format): try: hosts = ansible_zos_module hosts.all.file(path=f"{USS_TEMP_DIR}", state="absent") @@ -283,6 +284,49 @@ def test_uss_single_archive_with_mode(ansible_zos_module, format): finally: hosts.all.file(path=f"{USS_TEMP_DIR}", state="absent") +@pytest.mark.uss +def test_uss_unarchive_copy_to_remote(ansible_zos_module): + try: + import os + import tarfile + hosts = ansible_zos_module + hosts.all.file(path=f"{USS_TEMP_DIR}", state="absent") + hosts.all.file(path=USS_TEMP_DIR, state="directory") + set_uss_test_env(hosts, USS_TEST_FILES) + # create local tmp dir + tmp_dir = tempfile.TemporaryDirectory() + tmp_file = tempfile.NamedTemporaryFile(delete=False) + tar_file = tmp_dir.name + "/tmpfile.tar" + # create local file + with open(tmp_file.name, 'w') as f: + f.write("This is a sample text for the file") + # archive using different formats + with tarfile.open(tar_file, 'w') as tar: + tar.add(tmp_file.name) + + # remove files + for file in USS_TEST_FILES.keys(): + hosts.all.file(path=file, state="absent") + unarchive_result = hosts.all.zos_unarchive( + src=tar_file, + dest=USS_TEMP_DIR, + format=dict( + name="tar" + ), + force=True, + ) + + for result in unarchive_result.contacted.values(): + assert result.get("failed", False) is False + assert result.get("changed") is True + # Command to assert the file is in place + cmd_result = hosts.all.shell(cmd="ls {0}/{1}".format(USS_TEMP_DIR, tmp_file.name)) + for c_result in cmd_result.contacted.values(): + for file in USS_TEST_FILES.keys(): + assert tmp_file.name in c_result.get("stdout") + finally: + hosts.all.file(path=f"{USS_TEMP_DIR}", state="absent") + os.remove(tmp_file.name) ###################################################################### # From b9ace9028bff04a8ebfac48f3d342da6d43308f6 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Wed, 8 Nov 2023 11:55:31 -0600 Subject: [PATCH 214/495] Clean temporary data sets created during XMIT unarchive operation (#1049) * Added a temp cleanup * Added changelog * Modified changelog * Added removal of src if remote_src is False Signed-off-by: Fernando Flores <fernandofloresdev@gmail.com> * Modified changelog fragments --------- Signed-off-by: Fernando Flores <fernandofloresdev@gmail.com> --- changelogs/fragments/1049-xmit-temporary-data-sets.yml | 4 ++++ plugins/modules/zos_unarchive.py | 5 ++++- 2 files changed, 8 insertions(+), 1 deletion(-) create mode 100644 changelogs/fragments/1049-xmit-temporary-data-sets.yml diff --git a/changelogs/fragments/1049-xmit-temporary-data-sets.yml b/changelogs/fragments/1049-xmit-temporary-data-sets.yml new file mode 100644 index 000000000..5ef0f2078 --- /dev/null +++ b/changelogs/fragments/1049-xmit-temporary-data-sets.yml @@ -0,0 +1,4 @@ +bugfixes: + - zos_unarchive - When zos_unarchive fails during unpack either with xmit or terse it does not clean the + temporary data sets created. Fix now removes the temporary data sets. + (https://github.com/ansible-collections/ibm_zos_core/pull/1049). \ No newline at end of file diff --git a/plugins/modules/zos_unarchive.py b/plugins/modules/zos_unarchive.py index be7c93f5c..81737ed29 100644 --- a/plugins/modules/zos_unarchive.py +++ b/plugins/modules/zos_unarchive.py @@ -853,7 +853,8 @@ def unpack(self, src, dest): dds = {'args': 'UNPACK', 'sysut1': src, 'sysut2': dest} rc, out, err = mvs_cmd.amaterse(cmd="", dds=dds) if rc != 0: - self.clean_environment(data_sets=[dest], uss_files=[], remove_targets=True) + ds_remove_list = [dest, src] if not self.remote_src else [dest] + self.clean_environment(data_sets=ds_remove_list, uss_files=[], remove_targets=True) self.module.fail_json( msg="Failed executing AMATERSE to restore {0} into {1}".format(src, dest), stdout=out, @@ -881,6 +882,8 @@ def unpack(self, src, dest): """.format(src, dest) rc, out, err = mvs_cmd.ikjeft01(cmd=unpack_cmd, authorized=True) if rc != 0: + ds_remove_list = [dest, src] if not self.remote_src else [dest] + self.clean_environment(data_sets=ds_remove_list, uss_files=[], remove_targets=True) self.module.fail_json( msg="Failed executing RECEIVE to restore {0} into {1}".format(src, dest), stdout=out, From 437c0dadff44a7b90b1fe0c808a590c9e88c720a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Thu, 9 Nov 2023 10:56:16 -0600 Subject: [PATCH 215/495] Enabler/1002/test_collections_on_ansible core 2_16 (#1053) * Add sanity ignore * Move dependencyfinder * Revert "Move dependencyfinder" This reverts commit 2bbbc5adffe94b32dd6d1af12f7c7cace93cca94. * Modify dependecyfinder * Modify dependecyfinder * Modify dependecyfinder * Add fragment * Modify fragment --- ...053-Enabler_1002_test_collections_on_ansible_core_2_16.yml | 4 ++++ tests/sanity/ignore-2.16.txt | 2 ++ 2 files changed, 6 insertions(+) create mode 100644 changelogs/fragments/1053-Enabler_1002_test_collections_on_ansible_core_2_16.yml diff --git a/changelogs/fragments/1053-Enabler_1002_test_collections_on_ansible_core_2_16.yml b/changelogs/fragments/1053-Enabler_1002_test_collections_on_ansible_core_2_16.yml new file mode 100644 index 000000000..ac3c24bb5 --- /dev/null +++ b/changelogs/fragments/1053-Enabler_1002_test_collections_on_ansible_core_2_16.yml @@ -0,0 +1,4 @@ +trivial: + - zos_archive - add missing-gplv3-license ignore to ignore 2.16. + - zos_unarchive - add missing-gplv3-license ignore to ignore 2.16. + (https://github.com/ansible-collections/ibm_zos_core/pull/1053). diff --git a/tests/sanity/ignore-2.16.txt b/tests/sanity/ignore-2.16.txt index a4835475f..70d4764e1 100644 --- a/tests/sanity/ignore-2.16.txt +++ b/tests/sanity/ignore-2.16.txt @@ -34,3 +34,5 @@ plugins/modules/zos_script.py validate-modules:missing-gplv3-license # Licensed plugins/modules/zos_tso_command.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_gather_facts.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_volume_init.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_archive.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_unarchive.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 From ed65e8edd467797c051e9e11bd2268a7f78c6af4 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Thu, 9 Nov 2023 14:03:47 -0600 Subject: [PATCH 216/495] [Enabler] [zos_mvs_raw] Remove Try, Except, Pass from code (#1051) * Added action inside exception to avoid pass * Added action inside exception to avoid pass * Added changelog --- changelogs/fragments/1051-try-except-pass-zos_mvs_raw.yml | 4 ++++ plugins/modules/zos_mvs_raw.py | 4 +++- 2 files changed, 7 insertions(+), 1 deletion(-) create mode 100644 changelogs/fragments/1051-try-except-pass-zos_mvs_raw.yml diff --git a/changelogs/fragments/1051-try-except-pass-zos_mvs_raw.yml b/changelogs/fragments/1051-try-except-pass-zos_mvs_raw.yml new file mode 100644 index 000000000..59b33d02c --- /dev/null +++ b/changelogs/fragments/1051-try-except-pass-zos_mvs_raw.yml @@ -0,0 +1,4 @@ +trivial: + - zos_mvs_raw - Removed Try, Except, Pass from the code, try block is in place to ignore any errors, + pass statement was changed to a variable assignment. This does not change any behavior. + (https://github.com/ansible-collections/ibm_zos_core/pull/1051). \ No newline at end of file diff --git a/plugins/modules/zos_mvs_raw.py b/plugins/modules/zos_mvs_raw.py index fa6f71908..55937ea63 100644 --- a/plugins/modules/zos_mvs_raw.py +++ b/plugins/modules/zos_mvs_raw.py @@ -2766,7 +2766,9 @@ def data_set_exists(name, volumes=None): present, changed = DataSet.attempt_catalog_if_necessary(name, volumes) exists = present except Exception: - pass + # Failure locating or cataloging the data set. Go ahead assumming it does not exist. + # exists = False to avoid using pass clause which results in bandit warning. + exists = False return exists From c2ec92282d2af52f175df9efc157850df999eeb8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Thu, 9 Nov 2023 14:04:30 -0600 Subject: [PATCH 217/495] Update sanity test ignore (#1048) * Fixed Sanity ignore #6 and reduce use of Sanity issues 8-9 * Remove unused import * Remove ignore * Add fragment * Modify fragment * Change fragment * Update 1048-Update_sanity_tests_ignore.yml * Change ignore 2_dot_14 --------- Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> --- .../fragments/1048-Update_sanity_tests_ignore.yml | 8 ++++++++ plugins/action/zos_copy.py | 5 +---- plugins/modules/zos_copy.py | 14 +++++++++----- tests/sanity/ignore-2.14.txt | 2 -- tests/sanity/ignore-2.15.txt | 2 -- 5 files changed, 18 insertions(+), 13 deletions(-) create mode 100644 changelogs/fragments/1048-Update_sanity_tests_ignore.yml diff --git a/changelogs/fragments/1048-Update_sanity_tests_ignore.yml b/changelogs/fragments/1048-Update_sanity_tests_ignore.yml new file mode 100644 index 000000000..5d2960d28 --- /dev/null +++ b/changelogs/fragments/1048-Update_sanity_tests_ignore.yml @@ -0,0 +1,8 @@ +trivial: + - zos_copy - change data type of parameter src from path to str inside AnsibleModule util. + - zos_copy - deprecate add_file_common_args argument. + - zos_copy - add owner and group to parameters inside AnsibleModule util. + - zos_copy - remove copy_member of AnsibleModule util as parameter and add to code logic. + - zos_copy - remove doc-default-does-not-match-spec ignore to ignore 2.14. + - zos_copy - remove doc-type-does-not-match-spec ignore to ignore 2.14. + (https://github.com/ansible-collections/ibm_zos_core/pull/1048). diff --git a/plugins/action/zos_copy.py b/plugins/action/zos_copy.py index d7d00eb64..592126b00 100644 --- a/plugins/action/zos_copy.py +++ b/plugins/action/zos_copy.py @@ -70,7 +70,7 @@ def run(self, tmp=None, task_vars=None): group = task_args.get("group", None) is_pds = is_src_dir = False - temp_path = is_uss = is_mvs_dest = copy_member = src_member = None + temp_path = is_uss = is_mvs_dest = src_member = None if dest: if not isinstance(dest, string_types): @@ -104,8 +104,6 @@ def run(self, tmp=None, task_vars=None): is_src_dir = os.path.isdir(src) is_pds = is_src_dir and is_mvs_dest - copy_member = is_member(dest) - if not src and not content: msg = "'src' or 'content' is required" return self._exit_action(result, msg, failed=True) @@ -249,7 +247,6 @@ def run(self, tmp=None, task_vars=None): is_uss=is_uss, is_pds=is_pds, is_src_dir=is_src_dir, - copy_member=copy_member, src_member=src_member, temp_path=temp_path, is_mvs_dest=is_mvs_dest, diff --git a/plugins/modules/zos_copy.py b/plugins/modules/zos_copy.py index ec48910e0..d6559e793 100644 --- a/plugins/modules/zos_copy.py +++ b/plugins/modules/zos_copy.py @@ -114,7 +114,7 @@ be deleted and recreated following the process outlined in the C(volume) option. - When the C(dest) is an existing VSAM (RRDS), then the source must be an RRDS. The VSAM (RRDS) will be deleted and recreated following the process outlined - in the C(volume) option. + in the C(volume) option. - When C(dest) is and existing VSAM (LDS), then source must be an LDS. The VSAM (LDS) will be deleted and recreated following the process outlined in the C(volume) option. @@ -813,6 +813,9 @@ from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.ansible_module import ( AnsibleModuleHelper, ) +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.data_set import ( + is_member +) from ansible.module_utils._text import to_bytes, to_native from ansible.module_utils.basic import AnsibleModule from ansible.module_utils.six import PY3 @@ -2566,7 +2569,6 @@ def run_module(module, arg_def): is_mvs_dest = module.params.get('is_mvs_dest') temp_path = module.params.get('temp_path') src_member = module.params.get('src_member') - copy_member = module.params.get('copy_member') tmphlq = module.params.get('tmp_hlq') force = module.params.get('force') force_lock = module.params.get('force_lock') @@ -2576,6 +2578,8 @@ def run_module(module, arg_def): if volume: dest_data_set["volumes"] = [volume] + copy_member = is_member(dest) + # ******************************************************************** # When copying to and from a data set member, 'dest' or 'src' will be # in the form DATA.SET.NAME(MEMBER). When this is the case, extract the @@ -3053,7 +3057,7 @@ def run_module(module, arg_def): def main(): module = AnsibleModule( argument_spec=dict( - src=dict(type='path'), + src=dict(type='str'), dest=dict(required=True, type='str'), is_binary=dict(type='bool', default=False), executable=dict(type='bool', default=False), @@ -3145,15 +3149,15 @@ def main(): is_mvs_dest=dict(type='bool'), size=dict(type='int'), temp_path=dict(type='str'), - copy_member=dict(type='bool'), src_member=dict(type='bool'), local_charset=dict(type='str'), force=dict(type='bool', default=False), force_lock=dict(type='bool', default=False), mode=dict(type='str', required=False), + owner=dict(type='str', required=False), + group=dict(type='str', required=False), tmp_hlq=dict(type='str', required=False, default=None), ), - add_file_common_args=True, ) arg_def = dict( diff --git a/tests/sanity/ignore-2.14.txt b/tests/sanity/ignore-2.14.txt index 70d4764e1..415196660 100644 --- a/tests/sanity/ignore-2.14.txt +++ b/tests/sanity/ignore-2.14.txt @@ -2,8 +2,6 @@ plugins/modules/zos_apf.py validate-modules:missing-gplv3-license # Licensed und plugins/modules/zos_backup_restore.py validate-modules:doc-choices-do-not-match-spec # We use our own argument parser for advanced conditional and dependent arguments. plugins/modules/zos_backup_restore.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_blockinfile.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 -plugins/modules/zos_copy.py validate-modules:doc-default-does-not-match-spec # We use our own argument parser for advanced conditional and dependent arguments. -plugins/modules/zos_copy.py validate-modules:doc-type-does-not-match-spec # doc type should be str, while spec type is path to allow user path expansion plugins/modules/zos_copy.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_copy.py validate-modules:parameter-type-not-in-doc # Passing args from action plugin plugins/modules/zos_copy.py validate-modules:undocumented-parameter # Passing args from action plugin diff --git a/tests/sanity/ignore-2.15.txt b/tests/sanity/ignore-2.15.txt index 70d4764e1..415196660 100644 --- a/tests/sanity/ignore-2.15.txt +++ b/tests/sanity/ignore-2.15.txt @@ -2,8 +2,6 @@ plugins/modules/zos_apf.py validate-modules:missing-gplv3-license # Licensed und plugins/modules/zos_backup_restore.py validate-modules:doc-choices-do-not-match-spec # We use our own argument parser for advanced conditional and dependent arguments. plugins/modules/zos_backup_restore.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_blockinfile.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 -plugins/modules/zos_copy.py validate-modules:doc-default-does-not-match-spec # We use our own argument parser for advanced conditional and dependent arguments. -plugins/modules/zos_copy.py validate-modules:doc-type-does-not-match-spec # doc type should be str, while spec type is path to allow user path expansion plugins/modules/zos_copy.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_copy.py validate-modules:parameter-type-not-in-doc # Passing args from action plugin plugins/modules/zos_copy.py validate-modules:undocumented-parameter # Passing args from action plugin From 8148d54b7c15d1e0fe2c7a565ba61ac7c2217e48 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Thu, 9 Nov 2023 14:53:55 -0600 Subject: [PATCH 218/495] Update_sanity_ignore 2_16 (#1056) * Update sanity ignore 2_16 * Add fragment * Change fragment --- changelogs/fragments/1056-Update_sanity_ignore_2_16.yml | 4 ++++ tests/sanity/ignore-2.16.txt | 2 -- 2 files changed, 4 insertions(+), 2 deletions(-) create mode 100644 changelogs/fragments/1056-Update_sanity_ignore_2_16.yml diff --git a/changelogs/fragments/1056-Update_sanity_ignore_2_16.yml b/changelogs/fragments/1056-Update_sanity_ignore_2_16.yml new file mode 100644 index 000000000..a5b192519 --- /dev/null +++ b/changelogs/fragments/1056-Update_sanity_ignore_2_16.yml @@ -0,0 +1,4 @@ +trivial: + - zos_copy - remove doc-default-does-not-match-spec 2.16 ignore file. + - zos_copy - remove doc-type-does-not-match-spec 2.16 ignore file. + (https://github.com/ansible-collections/ibm_zos_core/pull/1056). diff --git a/tests/sanity/ignore-2.16.txt b/tests/sanity/ignore-2.16.txt index 70d4764e1..415196660 100644 --- a/tests/sanity/ignore-2.16.txt +++ b/tests/sanity/ignore-2.16.txt @@ -2,8 +2,6 @@ plugins/modules/zos_apf.py validate-modules:missing-gplv3-license # Licensed und plugins/modules/zos_backup_restore.py validate-modules:doc-choices-do-not-match-spec # We use our own argument parser for advanced conditional and dependent arguments. plugins/modules/zos_backup_restore.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_blockinfile.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 -plugins/modules/zos_copy.py validate-modules:doc-default-does-not-match-spec # We use our own argument parser for advanced conditional and dependent arguments. -plugins/modules/zos_copy.py validate-modules:doc-type-does-not-match-spec # doc type should be str, while spec type is path to allow user path expansion plugins/modules/zos_copy.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_copy.py validate-modules:parameter-type-not-in-doc # Passing args from action plugin plugins/modules/zos_copy.py validate-modules:undocumented-parameter # Passing args from action plugin From 2aed55c21599e41cba69f6dba88d7a3d99b8194a Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Thu, 9 Nov 2023 18:40:45 -0600 Subject: [PATCH 219/495] [Enabler] [module_utils/dd_statement.py] Changed try except pass to except specific DatasetDeleteError exception (#1052) * Changed try except pass to except specific class * Added changelog * Update 1052-try-except-pass-dd-statement.yml * Update dd_statement.py updated copyright year --- changelogs/fragments/1052-try-except-pass-dd-statement.yml | 4 ++++ plugins/module_utils/dd_statement.py | 4 ++-- 2 files changed, 6 insertions(+), 2 deletions(-) create mode 100644 changelogs/fragments/1052-try-except-pass-dd-statement.yml diff --git a/changelogs/fragments/1052-try-except-pass-dd-statement.yml b/changelogs/fragments/1052-try-except-pass-dd-statement.yml new file mode 100644 index 000000000..42315337c --- /dev/null +++ b/changelogs/fragments/1052-try-except-pass-dd-statement.yml @@ -0,0 +1,4 @@ +trivial: + - zos_mvs_raw - Removed Try, Except, Pass from the code, instead catching DatasetDeleteError + and pass only in that case, any other exception will be raised. + (https://github.com/ansible-collections/ibm_zos_core/pull/1052). diff --git a/plugins/module_utils/dd_statement.py b/plugins/module_utils/dd_statement.py index ded94dbec..d35f9e44e 100644 --- a/plugins/module_utils/dd_statement.py +++ b/plugins/module_utils/dd_statement.py @@ -1,4 +1,4 @@ -# Copyright (c) IBM Corporation 2020 +# Copyright (c) IBM Corporation 2020, 2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -661,7 +661,7 @@ def __del__(self): """ try: DataSet.delete(self.name) - except Exception: + except DataSet.DatasetDeleteError: pass def _build_arg_string(self): From 5feec01d9514303e4bd1ac02b712eabe22e53ad2 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Tue, 14 Nov 2023 10:14:18 -0600 Subject: [PATCH 220/495] Removed run command use of subprocess from encode defaults infavor of using AnsibleModule run command (#1055) * Removed run command use of subprocess from encode defaults infavor of using ansible module * Added changelog --- .../1055-remove-subprocess-encode.yml | 4 + plugins/module_utils/encode.py | 3 +- plugins/module_utils/system.py | 102 +++++++++--------- 3 files changed, 57 insertions(+), 52 deletions(-) create mode 100644 changelogs/fragments/1055-remove-subprocess-encode.yml diff --git a/changelogs/fragments/1055-remove-subprocess-encode.yml b/changelogs/fragments/1055-remove-subprocess-encode.yml new file mode 100644 index 000000000..7e458dc09 --- /dev/null +++ b/changelogs/fragments/1055-remove-subprocess-encode.yml @@ -0,0 +1,4 @@ +trivial: + - encode_utils - Removed use of subprocess from system utils, since the only + use of it could be replaced for AnsibleModule runcommand method. + (https://github.com/ansible-collections/ibm_zos_core/pull/1055). \ No newline at end of file diff --git a/plugins/module_utils/encode.py b/plugins/module_utils/encode.py index 047aa654c..c36d0b272 100644 --- a/plugins/module_utils/encode.py +++ b/plugins/module_utils/encode.py @@ -62,7 +62,8 @@ def get_default_system_charset(): """ system_charset = locale.getdefaultlocale()[1] if system_charset is None: - rc, out, err = system.run_command("locale -c charmap") + module = AnsibleModuleHelper(argument_spec={}) + rc, out, err = module.run_command("locale -c charmap") if rc != 0 or not out or err: if system.is_zos(): system_charset = Defaults.DEFAULT_EBCDIC_USS_CHARSET diff --git a/plugins/module_utils/system.py b/plugins/module_utils/system.py index 5be6d1944..5a452a48a 100644 --- a/plugins/module_utils/system.py +++ b/plugins/module_utils/system.py @@ -15,7 +15,7 @@ from platform import platform from os import name as OS_NAME from sys import platform as SYS_PLATFORM -from subprocess import Popen, PIPE +# from subprocess import Popen, PIPE from ansible.module_utils.six import binary_type, text_type, PY2, PY3 # from ansible.module_utils._text import to_text, to_bytes from ansible.module_utils.common.text.converters import to_bytes, to_text @@ -76,53 +76,53 @@ def is_zos(): return is_zos_unix and SYS_PLATFORM == "zos" -def run_command(args, stdin=None, **kwargs): - """ Execute a shell command on the current system. This function should only - be used when AnsibleModule.run_command() is not available. This function - essentially serves as a wrapper for Python subprocess.Popen and supports all - of the arguments supported by Popen. - - Required arguments: - args: args should be a sequence of program arguments or else a single - string or path-like object. By default, the program to execute is the - first item in args if args is a sequence. It is recommended to pass - args as a sequence. - - Refer to the following link for a more detailed description of this - parameter and other parameters. - https://docs.python.org/3/library/subprocess.html#subprocess.Popen - - Returns: - tuple[int, str, str]: The return code, stdout and stderr produced after - executing the command. - """ - rc = out = err = None - if not isinstance(args, (list, binary_type, text_type)): - rc = -1 - err = "'args' must be list or string" - return rc, out, err - - if isinstance(args, (text_type, str)): - if PY2: - args = to_bytes(args, errors='surrogate_or_strict') - elif PY3: - args = to_text(args, errors='surrogateescape') - args = split(args) - - kwargs.update( - dict( - stdin=PIPE if stdin else None, - stderr=PIPE, - stdout=PIPE - ) - ) - try: - cmd = Popen(args, **kwargs) - except TypeError as proc_err: - rc = -1 - err = str(proc_err) - return rc, out, err - - out, err = tuple(map(to_text, cmd.communicate())) - rc = cmd.returncode - return rc, out, err +# def run_command(args, stdin=None, **kwargs): +# """ Execute a shell command on the current system. This function should only +# be used when AnsibleModule.run_command() is not available. This function +# essentially serves as a wrapper for Python subprocess.Popen and supports all +# of the arguments supported by Popen. + +# Required arguments: +# args: args should be a sequence of program arguments or else a single +# string or path-like object. By default, the program to execute is the +# first item in args if args is a sequence. It is recommended to pass +# args as a sequence. + +# Refer to the following link for a more detailed description of this +# parameter and other parameters. +# https://docs.python.org/3/library/subprocess.html#subprocess.Popen + +# Returns: +# tuple[int, str, str]: The return code, stdout and stderr produced after +# executing the command. +# """ +# rc = out = err = None +# if not isinstance(args, (list, binary_type, text_type)): +# rc = -1 +# err = "'args' must be list or string" +# return rc, out, err + +# if isinstance(args, (text_type, str)): +# if PY2: +# args = to_bytes(args, errors='surrogate_or_strict') +# elif PY3: +# args = to_text(args, errors='surrogateescape') +# args = split(args) + +# kwargs.update( +# dict( +# stdin=PIPE if stdin else None, +# stderr=PIPE, +# stdout=PIPE +# ) +# ) +# try: +# cmd = Popen(args, **kwargs) +# except TypeError as proc_err: +# rc = -1 +# err = str(proc_err) +# return rc, out, err + +# out, err = tuple(map(to_text, cmd.communicate())) +# rc = cmd.returncode +# return rc, out, err From 6cd4f7c12533d70df3eb2741ee829dab97bfb391 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Tue, 14 Nov 2023 10:16:10 -0600 Subject: [PATCH 221/495] [1.9.0] zos_apf remove try expect pass to better exception handling (#1036) * Removed except pass * Added empty strings * Added changelog * Corrected changelog * Modified if statement to honor current behavior * Update 1036-apf-try-except.yml * Update 1036-apf-try-except.yml --- changelogs/fragments/1036-apf-try-except.yml | 4 ++ plugins/modules/zos_apf.py | 40 ++++++++++---------- 2 files changed, 23 insertions(+), 21 deletions(-) create mode 100644 changelogs/fragments/1036-apf-try-except.yml diff --git a/changelogs/fragments/1036-apf-try-except.yml b/changelogs/fragments/1036-apf-try-except.yml new file mode 100644 index 000000000..16e8ab6c7 --- /dev/null +++ b/changelogs/fragments/1036-apf-try-except.yml @@ -0,0 +1,4 @@ +minor_changes: + - zos_apf - Improves exception handling if there is a failure + parsing the command response when operation selected is list. + (https://github.com/ansible-collections/ibm_zos_core/pull/1036). diff --git a/plugins/modules/zos_apf.py b/plugins/modules/zos_apf.py index d0fec1ff5..dee6094fc 100644 --- a/plugins/modules/zos_apf.py +++ b/plugins/modules/zos_apf.py @@ -520,29 +520,27 @@ def main(): operRc = ret.rc result['stderr'] = operErr result['rc'] = operRc + result['stdout'] = operOut if operation == 'list': - try: - dsRx = "" - volRx = "" - if library: - dsRx = re.compile(library) - if volume: - volRx = re.compile(volume) - if sms: - sms = "*SMS*" - if dsRx or volRx or sms: + if not library: + library = "" + if not volume: + volume = "" + if sms: + sms = "*SMS*" + if library or volume or sms: + try: data = json.loads(operOut) - operOut = "" - for d in data[2:]: - ds = d.get('ds') - vol = d.get('vol') - if (dsRx and dsRx.match(ds)) or (volRx and volRx.match(vol)) or (sms and sms == vol): - operOut = operOut + "{0} {1}\n".format(vol, ds) - except Exception: - pass - - result['stdout'] = operOut - + except json.JSONDecodeError: + module.exit_json(**result) + for d in data[2:]: + ds = d.get('ds') + vol = d.get('vol') + try: + if (library and re.match(library, ds)) or (volume and re.match(volume, vol)) or (sms and sms == vol): + result['stdout'] = "{0} {1}\n".format(vol, ds) + except re.error: + module.exit_json(**result) module.exit_json(**result) From 5e744c29e4b71f960e5ffc5c99115635f1996ad2 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Tue, 14 Nov 2023 10:34:09 -0600 Subject: [PATCH 222/495] [v1.9.0] Replace randint to fix "Standard pseudo-random generators ... " bandit warning (#1016) * Changed random member name generation to choices * Modified choices in module_utils/data_set and blockinfile test * Added changelog fragment * Remove randint import --- changelogs/fragments/1016-remove-randint.yml | 5 +++++ plugins/module_utils/data_set.py | 9 +++++---- tests/functional/modules/test_zos_blockinfile_func.py | 4 ++-- 3 files changed, 12 insertions(+), 6 deletions(-) create mode 100644 changelogs/fragments/1016-remove-randint.yml diff --git a/changelogs/fragments/1016-remove-randint.yml b/changelogs/fragments/1016-remove-randint.yml new file mode 100644 index 000000000..baac7fff9 --- /dev/null +++ b/changelogs/fragments/1016-remove-randint.yml @@ -0,0 +1,5 @@ +trivial: + - module_utils/data_set - Replace the use of random.randint to random.sample + to generate random member names, random.randint raised a warning while + scanning with bandit. + (https://github.com/ansible-collections/ibm_zos_core/pull/1016) \ No newline at end of file diff --git a/plugins/module_utils/data_set.py b/plugins/module_utils/data_set.py index cbeb7eb7d..ab7a3c3c8 100644 --- a/plugins/module_utils/data_set.py +++ b/plugins/module_utils/data_set.py @@ -17,7 +17,7 @@ import tempfile from os import path, walk from string import ascii_uppercase, digits -from random import randint +from random import sample # from ansible.module_utils._text import to_bytes from ansible.module_utils.common.text.converters import to_bytes from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.ansible_module import ( @@ -1745,9 +1745,10 @@ def temp_member_name(): """Generate a temp member name""" first_char_set = ascii_uppercase + "#@$" rest_char_set = ascii_uppercase + digits + "#@$" - temp_name = first_char_set[randint(0, len(first_char_set) - 1)] - for i in range(7): - temp_name += rest_char_set[randint(0, len(rest_char_set) - 1)] + # using sample as k=1 and k=7 to avoid using random.choice just for oneline import + temp_name = sample(first_char_set, k=1) + temp_name += sample(rest_char_set, k=7) + temp_name = "".join(temp_name) return temp_name diff --git a/tests/functional/modules/test_zos_blockinfile_func.py b/tests/functional/modules/test_zos_blockinfile_func.py index 226f34477..d768ad59d 100644 --- a/tests/functional/modules/test_zos_blockinfile_func.py +++ b/tests/functional/modules/test_zos_blockinfile_func.py @@ -1205,6 +1205,7 @@ def test_ds_block_insert_with_indentation_level_specified(ansible_zos_module, ds def test_ds_block_insertafter_eof_with_backup(ansible_zos_module, dstype, backup_name): hosts = ansible_zos_module ds_type = dstype + backup_ds_name = "" params = dict(block="export ZOAU_ROOT\nexport ZOAU_HOME\nexport ZOAU_DIR", state="present", backup=True) if backup_name: params["backup_name"] = backup_name @@ -1227,8 +1228,7 @@ def test_ds_block_insertafter_eof_with_backup(ansible_zos_module, dstype, backup remove_ds_environment(ansible_zos_module, ds_name) if backup_name: ansible_zos_module.all.zos_data_set(name="BLOCKIF.TEST.BACKUP", state="absent") - ansible_zos_module.all.zos_data_set(name=backup_ds_name, state="absent") - else: + if backup_ds_name != "": ansible_zos_module.all.zos_data_set(name=backup_ds_name, state="absent") From 04439922134c6065e8e8e9430860995e41fee800 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Tue, 14 Nov 2023 10:45:00 -0600 Subject: [PATCH 223/495] Removed unused imports --- plugins/module_utils/system.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/plugins/module_utils/system.py b/plugins/module_utils/system.py index 5a452a48a..54ec90dca 100644 --- a/plugins/module_utils/system.py +++ b/plugins/module_utils/system.py @@ -16,10 +16,10 @@ from os import name as OS_NAME from sys import platform as SYS_PLATFORM # from subprocess import Popen, PIPE -from ansible.module_utils.six import binary_type, text_type, PY2, PY3 +# from ansible.module_utils.six import binary_type, text_type, PY2, PY3 # from ansible.module_utils._text import to_text, to_bytes -from ansible.module_utils.common.text.converters import to_bytes, to_text -from shlex import split +# from ansible.module_utils.common.text.converters import to_bytes, to_text +# from shlex import split NIX_PLATFORMS = frozenset({ From b100cd928e89556e7585f5e4e25e50373c9d9258 Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Wed, 15 Nov 2023 12:59:55 -0500 Subject: [PATCH 224/495] 1043 bug title zos operator is passing wrong value to zoauopercmd (#1044) * corrected kwarg index value from 'wait_arg' to 'wait' Also corrected true/false issue in zoaq * Added and updated changelog. * update PR number in changelog fragment * changed test from \$ to \\$ to eliminate warning * added blocking test to maks sure minimum wait is reached in zoau>1.2.4.5 * removed the else condition from the blocking test, since it is not needed. * corrected tense grammer in changelog fragment * corrected capitalization of ZOAU in changelog fragment. --- ...-is-passing-wrong-value-to-zoauopercmd.yml | 8 +++++ plugins/modules/zos_operator.py | 2 +- plugins/modules/zos_operator_action_query.py | 2 +- .../modules/test_zos_operator_func.py | 31 +++++++++++++++---- 4 files changed, 35 insertions(+), 8 deletions(-) create mode 100644 changelogs/fragments/1043-bug-title-zos_operator-is-passing-wrong-value-to-zoauopercmd.yml diff --git a/changelogs/fragments/1043-bug-title-zos_operator-is-passing-wrong-value-to-zoauopercmd.yml b/changelogs/fragments/1043-bug-title-zos_operator-is-passing-wrong-value-to-zoauopercmd.yml new file mode 100644 index 000000000..06f9a264a --- /dev/null +++ b/changelogs/fragments/1043-bug-title-zos_operator-is-passing-wrong-value-to-zoauopercmd.yml @@ -0,0 +1,8 @@ +bugfixes: + - zos_operator - The module was ignoring the wait time argument. + The module now passes the wait time argument to ZOAU. + (https://github.com/ansible-collections/ibm_zos_core/pull/1044). + + - zos_operator_action_query - The module was ignoring the wait time argument. + The module now passes the wait time argument to ZOAU. + (https://github.com/ansible-collections/ibm_zos_core/pull/1044). \ No newline at end of file diff --git a/plugins/modules/zos_operator.py b/plugins/modules/zos_operator.py index 2d1fb807f..969890ba5 100644 --- a/plugins/modules/zos_operator.py +++ b/plugins/modules/zos_operator.py @@ -283,7 +283,7 @@ def run_operator_command(params): use_wait_arg = True if use_wait_arg: - kwargs.update({"wait_arg": True}) + kwargs.update({"wait": True}) args = [] rc, stdout, stderr, elapsed = execute_command(cmdtxt, timeout=wait_s, *args, **kwargs) diff --git a/plugins/modules/zos_operator_action_query.py b/plugins/modules/zos_operator_action_query.py index a06535763..ccf565626 100644 --- a/plugins/modules/zos_operator_action_query.py +++ b/plugins/modules/zos_operator_action_query.py @@ -272,7 +272,7 @@ def run_module(): use_wait_arg = True if use_wait_arg: - kwargs.update({"wait_arg": False}) + kwargs.update({"wait": True}) args = [] diff --git a/tests/functional/modules/test_zos_operator_func.py b/tests/functional/modules/test_zos_operator_func.py index 5aebe2a9c..6891cffa8 100644 --- a/tests/functional/modules/test_zos_operator_func.py +++ b/tests/functional/modules/test_zos_operator_func.py @@ -23,6 +23,11 @@ import pytest from pprint import pprint +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import ( + zoau_version_checker +) + + __metaclass__ = type @@ -103,13 +108,9 @@ def test_zos_operator_positive_verbose_with_full_delay(ansible_zos_module): def test_zos_operator_positive_verbose_with_quick_delay(ansible_zos_module): hosts = ansible_zos_module wait_time_s=10 - #startmod = time.time() results = hosts.all.zos_operator( cmd="d u,all", verbose=True, wait_time_s=wait_time_s ) - # endmod = time.time() - # timediff = endmod - startmod - # assert timediff < 15 for result in results.contacted.values(): assert result["rc"] == 0 @@ -119,13 +120,31 @@ def test_zos_operator_positive_verbose_with_quick_delay(ansible_zos_module): assert result.get('elapsed') <= (2 * wait_time_s) +def test_zos_operator_positive_verbose_blocking(ansible_zos_module): + if zoau_version_checker.is_zoau_version_higher_than("1.2.4.5"): + hosts = ansible_zos_module + wait_time_s=5 + results = hosts.all.zos_operator( + cmd="d u,all", verbose=True, wait_time_s=wait_time_s + ) + + for result in results.contacted.values(): + assert result["rc"] == 0 + assert result.get("changed") is True + assert result.get("content") is not None + # Account for slower network + assert result.get('elapsed') >= wait_time_s + + + def test_response_come_back_complete(ansible_zos_module): hosts = ansible_zos_module - results = hosts.all.zos_operator(cmd="\$dspl") + results = hosts.all.zos_operator(cmd="\\$dspl") res = dict() res["stdout"] = [] for result in results.contacted.values(): stdout = result.get('content') # HASP646 Only appears in the last line that before did not appears last_line = len(stdout) - assert "HASP646" in stdout[last_line - 1] \ No newline at end of file + assert "HASP646" in stdout[last_line - 1] + From 3c1f0a4c5bfa7a57bc08874fde0a2557af27d092 Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Wed, 15 Nov 2023 13:00:56 -0500 Subject: [PATCH 225/495] Bug 1041 zos submit job honor return output literally (#1058) * initial commit to pass return_output to job_output. * corrected fragment name to match branch * tweaked data set test to show result values if positive test fails * removed trace in zos_data_set, and added trace output to job_submit * removed extra text from functional testing. * put in correct PR number in changelog fragment. * changed trivial to minor_changes, added documentation to dd_scan in job:job_output. --- ...41-bug-zos-submit-job-honor-return-output-literally.yml | 4 ++++ plugins/module_utils/job.py | 7 ++++--- plugins/modules/zos_job_submit.py | 2 +- 3 files changed, 9 insertions(+), 4 deletions(-) create mode 100644 changelogs/fragments/1041-bug-zos-submit-job-honor-return-output-literally.yml diff --git a/changelogs/fragments/1041-bug-zos-submit-job-honor-return-output-literally.yml b/changelogs/fragments/1041-bug-zos-submit-job-honor-return-output-literally.yml new file mode 100644 index 000000000..726397d2d --- /dev/null +++ b/changelogs/fragments/1041-bug-zos-submit-job-honor-return-output-literally.yml @@ -0,0 +1,4 @@ +minor_changes: + - zos_submit_job - Previous code did not return output, but still requested job data from the target system. + This changes to honor return_output=false by not querying the job dd segments at all. + (https://github.com/ansible-collections/ibm_zos_core/pull/1058). \ No newline at end of file diff --git a/plugins/module_utils/job.py b/plugins/module_utils/job.py index cfe8c4a67..bf23bf5bc 100644 --- a/plugins/module_utils/job.py +++ b/plugins/module_utils/job.py @@ -36,7 +36,7 @@ ) -def job_output(job_id=None, owner=None, job_name=None, dd_name=None, duration=0, timeout=0, start_time=timer()): +def job_output(job_id=None, owner=None, job_name=None, dd_name=None, dd_scan=True, duration=0, timeout=0, start_time=timer()): """Get the output from a z/OS job based on various search criteria. Keyword Arguments: @@ -44,6 +44,7 @@ def job_output(job_id=None, owner=None, job_name=None, dd_name=None, duration=0, owner (str) -- The owner of the job (default: {None}) job_name (str) -- The job name search for (default: {None}) dd_name (str) -- The data definition to retrieve (default: {None}) + dd_scan (bool) - Whether or not to pull information from the dd's for this job {default: {True}} duration (int) -- The time the submitted job ran for timeout (int) - how long to wait in seconds for a job to complete start_time (int) - time the JCL started its submission @@ -70,7 +71,7 @@ def job_output(job_id=None, owner=None, job_name=None, dd_name=None, duration=0, dd_name = parsed_args.get("dd_name") or "" job_detail = _get_job_status(job_id=job_id, owner=owner, job_name=job_name, - dd_name=dd_name, duration=duration, timeout=timeout, start_time=start_time) + dd_name=dd_name, duration=duration, dd_scan=dd_scan, timeout=timeout, start_time=start_time) # while ((job_detail is None or len(job_detail) == 0) and duration <= timeout): # current_time = timer() @@ -83,7 +84,7 @@ def job_output(job_id=None, owner=None, job_name=None, dd_name=None, duration=0, owner = "" if owner == "*" else owner job_name = "" if job_name == "*" else job_name job_detail = _get_job_status(job_id=job_id, owner=owner, job_name=job_name, - dd_name=dd_name, duration=duration, timeout=timeout, start_time=start_time) + dd_name=dd_name, dd_scan=dd_scan, duration=duration, timeout=timeout, start_time=start_time) return job_detail diff --git a/plugins/modules/zos_job_submit.py b/plugins/modules/zos_job_submit.py index efdbd07d6..11f0f3ccb 100644 --- a/plugins/modules/zos_job_submit.py +++ b/plugins/modules/zos_job_submit.py @@ -959,7 +959,7 @@ def run_module(): job_output_txt = job_output( job_id=job_submitted_id, owner=None, job_name=None, dd_name=None, - duration=duration, timeout=wait_time_s, start_time=start_time) + dd_scan=return_output, duration=duration, timeout=wait_time_s, start_time=start_time) result["duration"] = duration From ec737c3beaa102ee3cccf2852c6812e74121d460 Mon Sep 17 00:00:00 2001 From: Ivan Moreno <iamorenosoto@gmail.com> Date: Thu, 23 Nov 2023 09:36:57 -0700 Subject: [PATCH 226/495] Enabler/1024/remote_tmp for zos_script (#1060) * Changed tmp_path for Ansible's remote_tmp * Remove tmp_path from module's options * Update module documentation * Remove tmp_path test case * Update zos_script's RST file * Add changelog fragment * Updated module examples --- .../fragments/1060-remote_tmp_zos_script.yml | 5 +++ docs/source/modules/zos_script.rst | 20 +++-------- plugins/action/zos_script.py | 4 ++- plugins/modules/zos_script.py | 23 ++++-------- .../modules/test_zos_script_func.py | 35 ------------------- 5 files changed, 20 insertions(+), 67 deletions(-) create mode 100644 changelogs/fragments/1060-remote_tmp_zos_script.yml diff --git a/changelogs/fragments/1060-remote_tmp_zos_script.yml b/changelogs/fragments/1060-remote_tmp_zos_script.yml new file mode 100644 index 000000000..1185f3a1b --- /dev/null +++ b/changelogs/fragments/1060-remote_tmp_zos_script.yml @@ -0,0 +1,5 @@ +minor_changes: + - zos_script - add support for remote_tmp from the Ansible + configuration to setup where temporary files will be created, + replacing the module option tmp_path. + (https://github.com/ansible-collections/ibm_zos_core/pull/1060). \ No newline at end of file diff --git a/docs/source/modules/zos_script.rst b/docs/source/modules/zos_script.rst index 29d9bb2df..bc8dff3c0 100644 --- a/docs/source/modules/zos_script.rst +++ b/docs/source/modules/zos_script.rst @@ -102,19 +102,6 @@ removes | **type**: str -tmp_path - Directory path in the remote machine where local scripts will be temporarily copied to. - - When not specified, the module will copy local scripts to the default temporary path for the user. - - If ``tmp_path`` does not exist in the remote machine, the module will not create it. - - All scripts copied to ``tmp_path`` will be removed from the managed node before the module finishes executing. - - | **required**: False - | **type**: str - - use_template Whether the module should treat ``src`` as a Jinja2 template and render it before continuing with the rest of the module. @@ -264,11 +251,10 @@ Examples remote_src: true chdir: /u/user/output_dir - - name: Run a local Python script that uses a custom tmp_path. + - name: Run a local Python script in the temporary directory specified in the Ansible environment variable 'remote_tmp'. zos_script: cmd: ./scripts/program.py executable: /usr/bin/python3 - tmp_path: /usr/tmp/ibm_zos_core - name: Run a local script made from a template. zos_script: @@ -294,6 +280,10 @@ Notes .. note:: When executing local scripts, temporary storage will be used on the remote z/OS system. The size of the temporary storage will correspond to the size of the file being copied. + The location in the z/OS system where local scripts will be copied to can be configured through Ansible's ``remote_tmp`` option. Refer to `Ansible's documentation <https://docs.ansible.com/ansible/latest/collections/ansible/builtin/sh_shell.html#parameter-remote_tmp>`_ for more information. + + All local scripts copied to a remote z/OS system will be removed from the managed node before the module finishes executing. + Execution permissions for the group assigned to the script will be added to remote scripts. The original permissions for remote scripts will be restored by the module before the task ends. The module will only add execution permissions for the file owner. diff --git a/plugins/action/zos_script.py b/plugins/action/zos_script.py index a17934ac4..36345810b 100644 --- a/plugins/action/zos_script.py +++ b/plugins/action/zos_script.py @@ -56,7 +56,9 @@ def run(self, tmp=None, task_vars=None): if not remote_src: script_path = path.abspath(path.normpath(script_path)) script_name = path.basename(script_path) - tmp_path = module_args.get('tmp_path') + # Accessing the globally-defined temporary directory + # that Ansible expects to be used. + tmp_path = self._connection._shell._options.get("remote_tmp") # Getting a temporary path for the script. tempfile_args = dict( diff --git a/plugins/modules/zos_script.py b/plugins/modules/zos_script.py index 15699c4a1..b69d70b2d 100644 --- a/plugins/modules/zos_script.py +++ b/plugins/modules/zos_script.py @@ -92,18 +92,6 @@ script will not be executed. type: str required: false - tmp_path: - description: - - Directory path in the remote machine where local scripts will be - temporarily copied to. - - When not specified, the module will copy local scripts to - the default temporary path for the user. - - If C(tmp_path) does not exist in the remote machine, the - module will not create it. - - All scripts copied to C(tmp_path) will be removed from the managed - node before the module finishes executing. - type: str - required: false extends_documentation_fragment: - ibm.ibm_zos_core.template @@ -112,6 +100,12 @@ - When executing local scripts, temporary storage will be used on the remote z/OS system. The size of the temporary storage will correspond to the size of the file being copied. + - The location in the z/OS system where local scripts will be copied to can be + configured through Ansible's C(remote_tmp) option. Refer to + L(Ansible's documentation,https://docs.ansible.com/ansible/latest/collections/ansible/builtin/sh_shell.html#parameter-remote_tmp) + for more information. + - All local scripts copied to a remote z/OS system will be removed from the + managed node before the module finishes executing. - Execution permissions for the group assigned to the script will be added to remote scripts. The original permissions for remote scripts will be restored by the module before the task ends. @@ -154,11 +148,10 @@ remote_src: true chdir: /u/user/output_dir -- name: Run a local Python script that uses a custom tmp_path. +- name: Run a local Python script in the temporary directory specified in the Ansible environment variable 'remote_tmp'. zos_script: cmd: ./scripts/program.py executable: /usr/bin/python3 - tmp_path: /usr/tmp/ibm_zos_core - name: Run a local script made from a template. zos_script: @@ -251,7 +244,6 @@ def run_module(): executable=dict(type='str', required=False), remote_src=dict(type='bool', required=False), removes=dict(type='str', required=False), - tmp_path=dict(type='str', required=False), use_template=dict(type='bool', default=False), template_parameters=dict( type='dict', @@ -287,7 +279,6 @@ def run_module(): executable=dict(arg_type='path', required=False), remote_src=dict(arg_type='bool', required=False), removes=dict(arg_type='path', required=False), - tmp_path=dict(arg_type='path', required=False), use_template=dict(arg_type='bool', required=False), template_parameters=dict( arg_type='dict', diff --git a/tests/functional/modules/test_zos_script_func.py b/tests/functional/modules/test_zos_script_func.py index 2bdae2a66..8bc310fe5 100644 --- a/tests/functional/modules/test_zos_script_func.py +++ b/tests/functional/modules/test_zos_script_func.py @@ -237,41 +237,6 @@ def test_rexx_script_chdir(ansible_zos_module): hosts.all.file(path=tmp_remote_dir, state='absent') -def test_rexx_script_tmp_path(ansible_zos_module): - import os - - hosts = ansible_zos_module - - try: - rexx_script = create_script_content('tmp_path test', 'rexx') - script_path = create_local_file(rexx_script, 'rexx') - - tmp_remote_dir = '/tmp/zos_script_tests' - file_result = hosts.all.file( - path=tmp_remote_dir, - state='directory' - ) - - for result in file_result.contacted.values(): - assert result.get('changed') is True - - zos_script_result = hosts.all.zos_script( - cmd=script_path, - tmp_path=tmp_remote_dir - ) - - for result in zos_script_result.contacted.values(): - assert result.get('changed') is True - assert result.get('failed', False) is False - assert result.get('rc') == 0 - assert result.get('stderr', '') == '' - assert tmp_remote_dir in result.get('remote_cmd', '') - finally: - if os.path.exists(script_path): - os.remove(script_path) - hosts.all.file(path=tmp_remote_dir, state='absent') - - def test_python_script(ansible_zos_module): import os From b17dad3ffed08f5653ea8cc1eabbdedd911460a3 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Thu, 23 Nov 2023 11:13:19 -0600 Subject: [PATCH 227/495] [zos_copy] Files corrupted after second copy (#1064) * Initial change to replace shutil.copy * Added fix for corrupted directory copies * Added changelog fragment * Modified docstring and fixed copy_tree * Added punctiation * Added copystat * Added set mode for dirs * Update 1064-corruped-second-copy.yml --- .../fragments/1064-corruped-second-copy.yml | 5 ++ plugins/modules/zos_copy.py | 70 ++++++++++++++++++- 2 files changed, 73 insertions(+), 2 deletions(-) create mode 100644 changelogs/fragments/1064-corruped-second-copy.yml diff --git a/changelogs/fragments/1064-corruped-second-copy.yml b/changelogs/fragments/1064-corruped-second-copy.yml new file mode 100644 index 000000000..82a04426e --- /dev/null +++ b/changelogs/fragments/1064-corruped-second-copy.yml @@ -0,0 +1,5 @@ +bugfixes: + - zos_copy - When performing a copy operation to an existing file, the copied + file resulted in having corrupted contents. Fix now implements a workaround + to not use the specific copy routine that corrupts the file contents. + (https://github.com/ansible-collections/ibm_zos_core/pull/1064). diff --git a/plugins/modules/zos_copy.py b/plugins/modules/zos_copy.py index d6559e793..dbed382f2 100644 --- a/plugins/modules/zos_copy.py +++ b/plugins/modules/zos_copy.py @@ -953,6 +953,64 @@ def copy_to_vsam(self, src, dest): cmd=repro_cmd, ) + def _copy_tree(self, entries, src, dest, dirs_exist_ok=False): + """Recursively copy USS directory to another USS directory. + This function was created to circumvent using shutil.copytree + as it presented the issue of corrupting file contents after second copy + because the use of shutil.copy2. This issue is only present in + Python 3.11 and 3.12. + + Arguments: + entries {list} -- List of files under src directory. + src_dir {str} -- USS source directory. + dest_dir {str} -- USS dest directory. + dirs_exist_ok {bool} -- Whether to copy files to an already existing directory. + + Raises: + Exception -- When copying into the directory fails. + + Returns: + {str } -- Destination directory that was copied. + """ + os.makedirs(dest, exist_ok=dirs_exist_ok) + for src_entry in entries: + src_name = os.path.join(validation.validate_safe_path(src), validation.validate_safe_path(src_entry.name)) + dest_name = os.path.join(validation.validate_safe_path(dest), validation.validate_safe_path(src_entry.name)) + try: + if src_entry.is_symlink(): + link_to = os.readlink(src_name) + os.symlink(link_to, dest_name) + shutil.copystat(src_name, dest_name, follow_symlinks=True) + elif src_entry.is_dir(): + self.copy_tree(src_name, dest_name, dirs_exist_ok=dirs_exist_ok) + else: + opts = dict() + opts["options"] = "" + response = datasets._copy(src_name, dest_name, None, **opts) + if response.rc > 0: + raise Exception(response.stderr_response) + shutil.copystat(src_name, dest_name, follow_symlinks=True) + except Exception as err: + raise err + + return dest + + def copy_tree(self, src_dir, dest_dir, dirs_exist_ok=False): + """ + Copies a USS directory into another USS directory. + + Arguments: + src_dir {str} -- USS source directory. + dest_dir {str} -- USS dest directory. + dirs_exist_ok {bool} -- Whether to copy files to an already existing directory. + + Returns: + {str} -- Destination directory that was copied. + """ + with os.scandir(src_dir) as itr: + entries = list(itr) + return self._copy_tree(entries, src_dir, dest_dir, dirs_exist_ok=dirs_exist_ok) + def convert_encoding(self, src, temp_path, encoding): """Convert encoding for given src @@ -1258,6 +1316,7 @@ def copy_to_uss( if not os.path.isdir(dest): self.module.set_mode_if_different(dest, mode, False) if changed_files: + self.module.set_mode_if_different(dest, mode, False) for filepath in changed_files: self.module.set_mode_if_different( os.path.join(validation.validate_safe_path(dest), validation.validate_safe_path(filepath)), mode, False @@ -1293,7 +1352,13 @@ def _copy_to_file(self, src, dest, conv_path, temp_path): if self.is_binary: copy.copy_uss2uss_binary(new_src, dest) else: - shutil.copy(new_src, dest) + opts = dict() + opts["options"] = "" + response = datasets._copy(new_src, dest, None, **opts) + if response.rc > 0: + raise Exception(response.stderr_response) + shutil.copystat(new_src, dest, follow_symlinks=True) + # shutil.copy(new_src, dest) if self.executable: status = os.stat(dest) os.chmod(dest, status.st_mode | stat.S_IEXEC) @@ -1353,7 +1418,8 @@ def _copy_to_dir( try: if copy_directory: dest = os.path.join(validation.validate_safe_path(dest_dir), validation.validate_safe_path(os.path.basename(os.path.normpath(src_dir)))) - dest = shutil.copytree(new_src_dir, dest, dirs_exist_ok=force) + # dest = shutil.copytree(new_src_dir, dest, dirs_exist_ok=force) + dest = self.copy_tree(new_src_dir, dest, dirs_exist_ok=force) # Restoring permissions for preexisting files and subdirectories. for filepath, permissions in original_permissions: From 7800b6ac96426d6d875252c43999a005098f792a Mon Sep 17 00:00:00 2001 From: Ivan Moreno <iamorenosoto@gmail.com> Date: Mon, 27 Nov 2023 11:51:02 -0700 Subject: [PATCH 228/495] [Documentation] [zos_tso_command] Add REXX exec example (#1065) * Add REXX exec example * Add fragment * Update module documentation * Fix PR link * Reword example task name * Updated REXX example --- changelogs/fragments/1065-rexx-exec-tso_command.yml | 4 ++++ docs/source/modules/zos_tso_command.rst | 4 ++++ plugins/modules/zos_tso_command.py | 4 ++++ 3 files changed, 12 insertions(+) create mode 100644 changelogs/fragments/1065-rexx-exec-tso_command.yml diff --git a/changelogs/fragments/1065-rexx-exec-tso_command.yml b/changelogs/fragments/1065-rexx-exec-tso_command.yml new file mode 100644 index 000000000..5d20ccfd6 --- /dev/null +++ b/changelogs/fragments/1065-rexx-exec-tso_command.yml @@ -0,0 +1,4 @@ +minor_changes: + - zos_tso_command - add example for executing explicitly a REXX script from + a data set. + (https://github.com/ansible-collections/ibm_zos_core/pull/1065). diff --git a/docs/source/modules/zos_tso_command.rst b/docs/source/modules/zos_tso_command.rst index d11cc8a98..846cb93d8 100644 --- a/docs/source/modules/zos_tso_command.rst +++ b/docs/source/modules/zos_tso_command.rst @@ -71,6 +71,10 @@ Examples - LISTDSD DATASET('HLQ.DATA.SET') ALL GENERIC max_rc: 4 + - name: Execute TSO command to run explicitly a REXX script from a data set. + zos_tso_command: + commands: + - EXEC HLQ.DATASET.REXX exec diff --git a/plugins/modules/zos_tso_command.py b/plugins/modules/zos_tso_command.py index e3c4c6f12..87b157318 100644 --- a/plugins/modules/zos_tso_command.py +++ b/plugins/modules/zos_tso_command.py @@ -111,6 +111,10 @@ - LISTDSD DATASET('HLQ.DATA.SET') ALL GENERIC max_rc: 4 +- name: Execute TSO command to run a REXX script explicitly from a data set. + zos_tso_command: + commands: + - EXEC HLQ.DATASET.REXX exec """ from ansible.module_utils.basic import AnsibleModule From 44754ab9a527c77524f56bf18e11880cde8b6c1c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Mon, 27 Nov 2023 16:38:05 -0600 Subject: [PATCH 229/495] Mvs to non existent mvs copy destination attrs match up (#1066) * Fixed error * Fix identation * Add fragment * Modify fragment * Modify fragment * Modify fragment --- ...nt_mvs_copy_destination_attrs_match_up.yml | 5 +++++ plugins/module_utils/data_set.py | 8 +++++++- plugins/modules/zos_copy.py | 20 +------------------ 3 files changed, 13 insertions(+), 20 deletions(-) create mode 100644 changelogs/fragments/1066-Mvs_to_non_existent_mvs_copy_destination_attrs_match_up.yml diff --git a/changelogs/fragments/1066-Mvs_to_non_existent_mvs_copy_destination_attrs_match_up.yml b/changelogs/fragments/1066-Mvs_to_non_existent_mvs_copy_destination_attrs_match_up.yml new file mode 100644 index 000000000..05e1c9ce4 --- /dev/null +++ b/changelogs/fragments/1066-Mvs_to_non_existent_mvs_copy_destination_attrs_match_up.yml @@ -0,0 +1,5 @@ +bugfixes: + - zos_copy - When copying an executable data set with aliases and destination did not exist, + destination data set was created with wrong attributes. Fix now creates destination data set + with the same attributes as the source. + (https://github.com/ansible-collections/ibm_zos_core/pull/1066). \ No newline at end of file diff --git a/plugins/module_utils/data_set.py b/plugins/module_utils/data_set.py index ab7a3c3c8..cae505804 100644 --- a/plugins/module_utils/data_set.py +++ b/plugins/module_utils/data_set.py @@ -278,7 +278,7 @@ def ensure_uncataloged(name): return False @staticmethod - def allocate_model_data_set(ds_name, model, asa_text=False, vol=None): + def allocate_model_data_set(ds_name, model, executable=False, asa_text=False, vol=None): """Allocates a data set based on the attributes of a 'model' data set. Useful when a data set needs to be created identical to another. Supported model(s) are Physical Sequential (PS), Partitioned Data Sets (PDS/PDSE), @@ -291,6 +291,7 @@ def allocate_model_data_set(ds_name, model, asa_text=False, vol=None): must be used. See extract_dsname(ds_name) in data_set.py model {str} -- The name of the data set whose allocation parameters should be used to allocate the new data set 'ds_name' + executable {bool} -- Whether the new data set should support executables asa_text {bool} -- Whether the new data set should support ASA control characters (have record format FBA) vol {str} -- The volume where data set should be allocated @@ -327,6 +328,11 @@ def allocate_model_data_set(ds_name, model, asa_text=False, vol=None): alloc_cmd = """{0} - RECFM(F,B,A)""".format(alloc_cmd) + if executable: + alloc_cmd = """{0} - + RECFM(U) - + DSNTYPE(LIBRARY)""".format(alloc_cmd) + rc, out, err = mvs_cmd.ikjeft01(alloc_cmd, authorized=True) if rc != 0: raise MVSCmdExecError(rc, out, err) diff --git a/plugins/modules/zos_copy.py b/plugins/modules/zos_copy.py index dbed382f2..a2e545d8b 100644 --- a/plugins/modules/zos_copy.py +++ b/plugins/modules/zos_copy.py @@ -2403,25 +2403,7 @@ def allocate_destination_data_set( elif dest_ds_type in data_set.DataSet.MVS_PARTITIONED and not dest_exists: # Taking the src as model if it's also a PDSE. if src_ds_type in data_set.DataSet.MVS_PARTITIONED: - if executable: - src_attributes = datasets.listing(src_name)[0] - size = int(src_attributes.total_space) - record_format = "U" - record_length = 0 - - dest_params = get_data_set_attributes( - dest, - size, - is_binary, - asa_text, - record_format=record_format, - record_length=record_length, - type="LIBRARY", - volume=volume - ) - data_set.DataSet.ensure_present(replace=force, **dest_params) - else: - data_set.DataSet.allocate_model_data_set(ds_name=dest, model=src_name, asa_text=asa_text, vol=volume) + data_set.DataSet.allocate_model_data_set(ds_name=dest, model=src_name, executable=executable, asa_text=asa_text, vol=volume) elif src_ds_type in data_set.DataSet.MVS_SEQ: src_attributes = datasets.listing(src_name)[0] # The size returned by listing is in bytes. From 5c520cbee7cf99646a006fef41709b8844c0beb4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Sat, 2 Dec 2023 16:55:39 -0600 Subject: [PATCH 230/495] Bugfix/374/module zos mvs raw errors with long multi line quoted string in content field (#1057) * Add function of write content * Push easy soultion for two cases * Fix identation and more issues * Fix identation and more issues * Solve error of null * Add validation comments and separete the code * Add fragment * Modify logics * Return overthink * Add explanation for the user and change logic * Add explanation for the user and change logic * Change documentation * Change fragment * Better error message, better documentation and fragment * Get better mesages * Change the logic * Change documentation * Change logic * Add scape to # * Check failing * Check failing * Add valid scapes * Update zos_mvs_raw fragment and module doc Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: ddimatos <dimatos@gmail.com> --- ...ti_line_quoted_string_in_content_field.yml | 12 ++ docs/source/modules/zos_mvs_raw.rst | 43 ++++++- plugins/modules/zos_mvs_raw.py | 115 +++++++++++++++++- 3 files changed, 162 insertions(+), 8 deletions(-) create mode 100644 changelogs/fragments/1057-module_zos_mvs_raw_errors_with_long_multi_line_quoted_string_in_content_field.yml diff --git a/changelogs/fragments/1057-module_zos_mvs_raw_errors_with_long_multi_line_quoted_string_in_content_field.yml b/changelogs/fragments/1057-module_zos_mvs_raw_errors_with_long_multi_line_quoted_string_in_content_field.yml new file mode 100644 index 000000000..49a3a3516 --- /dev/null +++ b/changelogs/fragments/1057-module_zos_mvs_raw_errors_with_long_multi_line_quoted_string_in_content_field.yml @@ -0,0 +1,12 @@ +minor_changes: + - zos_mvs_raw - when using the dd_input content option for instream-data, if + the content was not properly indented according to the program which is + generally a blank in columns 1 & 2, those columns would be truncated. Now, + when setting instream-data, the module will ensure that all lines contain + a blank in columns 1 and 2 and add blanks when not present while retaining + a maximum length of 80 columns for any line. This is true for all content + types; string, list of strings and when using a YAML block indicator. + (https://github.com/ansible-collections/ibm_zos_core/pull/1057). + - zos_mvs_raw - no examples were included with the module that demonstrated + using a YAML block indicator, this now includes examples using a YAML + block indicator. diff --git a/docs/source/modules/zos_mvs_raw.rst b/docs/source/modules/zos_mvs_raw.rst index 33247a3e9..fb6a1a726 100644 --- a/docs/source/modules/zos_mvs_raw.rst +++ b/docs/source/modules/zos_mvs_raw.rst @@ -655,10 +655,12 @@ dds Multi-line input can be provided as a multi-line string or a list of strings with 1 line per list item. - If a multi-line string is provided make sure to use the proper literal block style indicator "|". - If a list of strings is provided, newlines will be added to each of the lines when used as input. + If a multi-line string is provided, use the proper block scalar style. YAML supports both `literal <https://yaml.org/spec/1.2.2/#literal-style>`_ and `folded <https://yaml.org/spec/1.2.2/#line-folding>`_ scalars. It is recommended to use the literal style indicator "|" with a block indentation indicator, for example; *content: | 2* is a literal block style indicator with a 2 space indentation, the entire block will be indented and newlines preserved. The block indentation range is 1 - 9. While generally unnecessary, YAML does support block `chomping <https://yaml.org/spec/1.2.2/#8112-block-chomping-indicator>`_ indicators "+" and "-" as well. + + When using the *content* option for instream-data, the module will ensure that all lines contain a blank in columns 1 and 2 and add blanks when not present while retaining a maximum length of 80 columns for any line. This is true for all *content* types; string, list of strings and when using a YAML block indicator. + | **required**: True | **type**: raw @@ -1371,10 +1373,12 @@ dds Multi-line input can be provided as a multi-line string or a list of strings with 1 line per list item. - If a multi-line string is provided make sure to use the proper literal block style indicator "|". - If a list of strings is provided, newlines will be added to each of the lines when used as input. + If a multi-line string is provided, use the proper block scalar style. YAML supports both `literal <https://yaml.org/spec/1.2.2/#literal-style>`_ and `folded <https://yaml.org/spec/1.2.2/#line-folding>`_ scalars. It is recommended to use the literal style indicator "|" with a block indentation indicator, for example; *content: | 2* is a literal block style indicator with a 2 space indentation, the entire block will be indented and newlines preserved. The block indentation range is 1 - 9. While generally unnecessary, YAML does support block `chomping <https://yaml.org/spec/1.2.2/#8112-block-chomping-indicator>`_ indicators "+" and "-" as well. + + When using the *content* option for instream-data, the module will ensure that all lines contain a blank in columns 1 and 2 and add blanks when not present while retaining a maximum length of 80 columns for any line. This is true for all *content* types; string, list of strings and when using a YAML block indicator. + | **required**: True | **type**: raw @@ -1708,6 +1712,35 @@ Examples return_content: type: text + - name: Define a cluster using a literal block style indicator + with a 2 space indentation. + zos_mvs_raw: + program_name: idcams + auth: yes + dds: + - dd_output: + dd_name: sysprint + return_content: + type: text + - dd_input: + dd_name: sysin + content: |2 + DEFINE CLUSTER - + (NAME(ANSIBLE.TEST.VSAM) - + CYL(10 10) - + FREESPACE(20 20) - + INDEXED - + KEYS(32 0) - + NOERASE - + NONSPANNED - + NOREUSE - + SHAREOPTIONS(3 3) - + SPEED - + UNORDERED - + RECORDSIZE(4086 32600) - + VOLUMES(222222) - + UNIQUE) + @@ -1721,6 +1754,8 @@ Notes 2. `zos_mvs_raw <./zos_mvs_raw.html>`_ module execution fails when invoking DFSRRC00 with parm "UPB,PRECOMP", "UPB, POSTCOMP" or "UPB,PRECOMP,POSTCOMP". This issue is addressed by APAR PH28089. + 3. When executing a program, refer to the programs documentation as each programs requirments can vary fom DDs, instream-data indentation and continuation characters. + See Also diff --git a/plugins/modules/zos_mvs_raw.py b/plugins/modules/zos_mvs_raw.py index 55937ea63..4eab2b023 100644 --- a/plugins/modules/zos_mvs_raw.py +++ b/plugins/modules/zos_mvs_raw.py @@ -556,10 +556,25 @@ - I(dd_input) supports single or multiple lines of input. - Multi-line input can be provided as a multi-line string or a list of strings with 1 line per list item. - - If a multi-line string is provided make sure to use the - proper literal block style indicator "|". - If a list of strings is provided, newlines will be added to each of the lines when used as input. + - 'If a multi-line string is provided, use the proper block scalar + style. YAML supports both + L(literal,https://yaml.org/spec/1.2.2/#literal-style) and + L(folded,https://yaml.org/spec/1.2.2/#line-folding) scalars. + It is recommended to use the literal style indicator + "|" with a block indentation indicator, for example; + I(content: | 2) is a literal block style indicator with a 2 space + indentation, the entire block will be indented and newlines + preserved. The block indentation range is 1 - 9. While generally + unnecessary, YAML does support block + L(chomping,https://yaml.org/spec/1.2.2/#8112-block-chomping-indicator) + indicators "+" and "-" as well.' + - When using the I(content) option for instream-data, the module + will ensure that all lines contain a blank in columns 1 and 2 + and add blanks when not present while retaining a maximum length + of 80 columns for any line. This is true for all I(content) types; + string, list of strings and when using a YAML block indicator. required: true type: raw return_content: @@ -1155,10 +1170,25 @@ - I(dd_input) supports single or multiple lines of input. - Multi-line input can be provided as a multi-line string or a list of strings with 1 line per list item. - - If a multi-line string is provided make sure to use the - proper literal block style indicator "|". - If a list of strings is provided, newlines will be added to each of the lines when used as input. + - 'If a multi-line string is provided, use the proper block scalar + style. YAML supports both + L(literal,https://yaml.org/spec/1.2.2/#literal-style) and + L(folded,https://yaml.org/spec/1.2.2/#line-folding) scalars. + It is recommended to use the literal style indicator + "|" with a block indentation indicator, for example; + I(content: | 2) is a literal block style indicator with a 2 space + indentation, the entire block will be indented and newlines + preserved. The block indentation range is 1 - 9. While generally + unnecessary, YAML does support block + L(chomping,https://yaml.org/spec/1.2.2/#8112-block-chomping-indicator) + indicators "+" and "-" as well.' + - When using the I(content) option for instream-data, the module + will ensure that all lines contain a blank in columns 1 and 2 + and add blanks when not present while retaining a maximum length + of 80 columns for any line. This is true for all I(content) types; + string, list of strings and when using a YAML block indicator. required: true type: raw return_content: @@ -1208,6 +1238,8 @@ - 2. L(zos_mvs_raw,./zos_mvs_raw.html) module execution fails when invoking DFSRRC00 with parm "UPB,PRECOMP", "UPB, POSTCOMP" or "UPB,PRECOMP,POSTCOMP". This issue is addressed by APAR PH28089. + - 3. When executing a program, refer to the programs documentation as each programs requirments + can vary fom DDs, instream-data indentation and continuation characters. seealso: - module: zos_data_set """ @@ -1522,6 +1554,35 @@ dd_name: sysprint return_content: type: text + + - name: Define a cluster using a literal block style indicator + with a 2 space indentation. + zos_mvs_raw: + program_name: idcams + auth: yes + dds: + - dd_output: + dd_name: sysprint + return_content: + type: text + - dd_input: + dd_name: sysin + content: |2 + DEFINE CLUSTER - + (NAME(ANSIBLE.TEST.VSAM) - + CYL(10 10) - + FREESPACE(20 20) - + INDEXED - + KEYS(32 0) - + NOERASE - + NONSPANNED - + NOREUSE - + SHAREOPTIONS(3 3) - + SPEED - + UNORDERED - + RECORDSIZE(4086 32600) - + VOLUMES(222222) - + UNIQUE) """ from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.better_arg_parser import ( @@ -2166,6 +2227,11 @@ def dd_content(contents, dependencies): """ if contents is None: return None + if contents is not None: + # Empty string can be passed for content but not modify to ensure proper entry + if len(contents) > 0: + contents = modify_contents(contents) + return contents if isinstance(contents, list): return "\n".join(contents) return contents @@ -3090,6 +3156,47 @@ def get_content(formatted_name, binary=False, from_encoding=None, to_encoding=No return stdout +def modify_contents(contents): + """Return the content of dd_input to a valid form for a JCL program. + + Args: + contents (str or list): The string or list with the program. + + Returns: + contents: The content in a proper multi line str. + """ + if not isinstance(contents, list): + contents = list(contents.split("\n")) + contents = prepend_spaces(contents) + contents = "\n".join(contents) + return contents + + +def prepend_spaces(lines): + """Return the array with two spaces at the beggining. + + Args: + lines (list): The list with a line of a program. + + Returns: + new_lines: The list in a proper two spaces and the code. + """ + module = AnsibleModuleHelper(argument_spec={}) + for index, line in enumerate(lines): + if len(line) > 0: + if len(line) > 80: + module.fail_json(msg="""Length of line {0} is over 80 characters. The maximum length allowed is 80 characters, including 2 spaces at the beginning. + If the two spaces are not present, the module will add them to ensure columns 1 and 2 are blank. """.format(line)) + else: + if len(line) > 1 and line[0] != " " and line[1] != " ": + if len(line) > 78: + module.fail_json(msg="""Length of line {0} is over 80 characters. The maximum length allowed is 80 characters, including 2 spaces at the beginning. + If the two spaces are not present, the module will add them to ensure columns 1 and 2 are blank. """.format(line)) + else: + lines[index] = " {0}".format(line) + return lines + + class ZOSRawError(Exception): def __init__(self, program="", error=""): self.msg = "An error occurred during execution of z/OS program {0}. {1}".format( From d0cb7e87c31d348679194635b0ff6aba6e292f95 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Mon, 4 Dec 2023 14:27:57 -0600 Subject: [PATCH 231/495] Remove tarfile.exctractall and zipfile.extractall in favor of individual member extraction for sanity (#1077) * Implemented extract_all function * Added changelog * Removed commented lines --- changelogs/fragments/1077-modify-uss-extraction.yml | 3 +++ plugins/modules/zos_unarchive.py | 8 ++++++-- 2 files changed, 9 insertions(+), 2 deletions(-) create mode 100644 changelogs/fragments/1077-modify-uss-extraction.yml diff --git a/changelogs/fragments/1077-modify-uss-extraction.yml b/changelogs/fragments/1077-modify-uss-extraction.yml new file mode 100644 index 000000000..0886dfab1 --- /dev/null +++ b/changelogs/fragments/1077-modify-uss-extraction.yml @@ -0,0 +1,3 @@ +trivial: + - zos_unarchive - Change the USS file extraction method from extractall to a custom function to extract filtered members. + (https://github.com/ansible-collections/ibm_zos_core/pull/1077). \ No newline at end of file diff --git a/plugins/modules/zos_unarchive.py b/plugins/modules/zos_unarchive.py index 81737ed29..9ab1409ca 100644 --- a/plugins/modules/zos_unarchive.py +++ b/plugins/modules/zos_unarchive.py @@ -462,6 +462,10 @@ def result(self): 'missing': self.missing, } + def extract_all(self, members): + for member in members: + self.file.extract(member) + class TarUnarchive(Unarchive): def __init__(self, module): @@ -527,7 +531,7 @@ def extract_src(self): self.file.extract(path) self.targets.append(path) else: - self.file.extractall(members=sanitize_members(self.file.getmembers(), self.dest, self.format)) + self.extract_all(members=sanitize_members(self.file.getmembers(), self.dest, self.format)) self.targets = files_in_archive self.file.close() # Returning the current working directory to what it was before to not @@ -598,7 +602,7 @@ def extract_src(self): self.file.extract(path) self.targets.append(path) else: - self.file.extractall(members=sanitize_members(self.file.infolist(), self.dest, self.format)) + self.extract_all(members=sanitize_members(self.file.infolist(), self.dest, self.format)) self.targets = files_in_archive self.file.close() # Returning the current working directory to what it was before to not From e6bda1b32d1b5dbcda96355bbb64694873aaebcf Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Mon, 4 Dec 2023 13:12:13 -0800 Subject: [PATCH 232/495] Update ac tool version of pyyaml to avoid wheel issue Signed-off-by: ddimatos <dimatos@gmail.com> --- scripts/requirements-common.env | 4 ++-- scripts/venv.sh | 4 +++- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/scripts/requirements-common.env b/scripts/requirements-common.env index 365b8aa4f..5f76436bf 100644 --- a/scripts/requirements-common.env +++ b/scripts/requirements-common.env @@ -67,7 +67,7 @@ requirements=( "oyaml" "Parsley" "PyNaCl" -"PyYAML" +"PyYAML:6.0.1" "Pygments" "packaging" "paramiko" @@ -130,4 +130,4 @@ requirements=( # "oyaml" # "mock" # "pytest-ansible" -# ) \ No newline at end of file +# ) diff --git a/scripts/venv.sh b/scripts/venv.sh index 5ec946c49..315e7a854 100755 --- a/scripts/venv.sh +++ b/scripts/venv.sh @@ -280,6 +280,8 @@ create_venv_and_pip_install_req(){ find_in_path() { result="" + OTHER_PYTHON_PATHS="/Library/Frameworks/Python.framework/Versions/Current/bin:/opt/homebrew/bin:" + PATH="${OTHER_PYTHON_PATHS}${PATH}" IFS=: for x in $PATH; do if [ -x "$x/$1" ]; then @@ -295,7 +297,7 @@ find_in_path() { discover_python(){ # Don't use which, it only will find first in path within script # for python_found in `which python3 | cut -d" " -f3`; do - pys=("python3.8" "python3.9" "python3.10" "python3.11" "python3.12" "python3.13" "python3.14") + pys=("python3" "python3.8" "python3.9" "python3.10" "python3.11" "python3.12" "python3.13" "python3.14") #pys=("python3.8" "python3.9") for py in "${pys[@]}"; do for python_found in `find_in_path $py`; do From e1101237c4e8dd670c0a4765da684fa5f25709fb Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Mon, 4 Dec 2023 13:25:22 -0800 Subject: [PATCH 233/495] Changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> --- .../fragments/1048-update-ac-tool-pyyaml-version.yml | 8 ++++++++ 1 file changed, 8 insertions(+) create mode 100644 changelogs/fragments/1048-update-ac-tool-pyyaml-version.yml diff --git a/changelogs/fragments/1048-update-ac-tool-pyyaml-version.yml b/changelogs/fragments/1048-update-ac-tool-pyyaml-version.yml new file mode 100644 index 000000000..309862cfb --- /dev/null +++ b/changelogs/fragments/1048-update-ac-tool-pyyaml-version.yml @@ -0,0 +1,8 @@ +trivial: + - ac - PyYaml version 5.4.1 was being installed and not having a wheel to go + with the python versions 11 and 12. This fixes the issue by freezing the + the version to 6.0.1. + - ac - would not discover python installations not in PATH. This change + extends the search path to include common python installation locations + not in path. + (https://github.com/ansible-collections/ibm_zos_core/pull/1083). From 1232079e6ee5cbc4d3ece06f484dc90711a9dba1 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Mon, 4 Dec 2023 16:09:16 -0600 Subject: [PATCH 234/495] Fixed sanity issues with zos_mvs_raw (#1084) --- plugins/modules/zos_mvs_raw.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/plugins/modules/zos_mvs_raw.py b/plugins/modules/zos_mvs_raw.py index 4eab2b023..502d2ead7 100644 --- a/plugins/modules/zos_mvs_raw.py +++ b/plugins/modules/zos_mvs_raw.py @@ -1239,7 +1239,7 @@ "UPB,PRECOMP", "UPB, POSTCOMP" or "UPB,PRECOMP,POSTCOMP". This issue is addressed by APAR PH28089. - 3. When executing a program, refer to the programs documentation as each programs requirments - can vary fom DDs, instream-data indentation and continuation characters. + can vary fom DDs, instream-data indentation and continuation characters. seealso: - module: zos_data_set """ @@ -3185,13 +3185,15 @@ def prepend_spaces(lines): for index, line in enumerate(lines): if len(line) > 0: if len(line) > 80: - module.fail_json(msg="""Length of line {0} is over 80 characters. The maximum length allowed is 80 characters, including 2 spaces at the beginning. - If the two spaces are not present, the module will add them to ensure columns 1 and 2 are blank. """.format(line)) + msg = """Length of line {0} is over 80 characters. The maximum length allowed is 80 characters, including 2 spaces at the beginning. + If the two spaces are not present, the module will add them to ensure columns 1 and 2 are blank. """ + module.fail_json(msg=msg.format(line)) else: if len(line) > 1 and line[0] != " " and line[1] != " ": if len(line) > 78: - module.fail_json(msg="""Length of line {0} is over 80 characters. The maximum length allowed is 80 characters, including 2 spaces at the beginning. - If the two spaces are not present, the module will add them to ensure columns 1 and 2 are blank. """.format(line)) + msg = """Length of line {0} is over 80 characters. The maximum length allowed is 80 characters, including 2 spaces at the beginning. + If the two spaces are not present, the module will add them to ensure columns 1 and 2 are blank. """ + module.fail_json(msg=msg.format(line)) else: lines[index] = " {0}".format(line) return lines From 5abdfd5077de5aacc7efc3f818e3a8eb96d9cbcf Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Tue, 5 Dec 2023 23:07:04 -0600 Subject: [PATCH 235/495] [bugfix][v1.9.0][zos_unarchive]action plugin does not clean up remote temporary files after completion (#1073) * Push solution * Add fragment * Modify logic for fails and ensure works * Change bug * Chamge fragment * Chamge fragment * Add comma --------- Co-authored-by: Demetri <dimatos@gmail.com> --- ...up_remote_temporary_files_after_completion.yml | 4 ++++ plugins/action/zos_unarchive.py | 15 ++++++++++++++- 2 files changed, 18 insertions(+), 1 deletion(-) create mode 100644 changelogs/fragments/1073-action_plugin_does_not_clean_up_remote_temporary_files_after_completion.yml diff --git a/changelogs/fragments/1073-action_plugin_does_not_clean_up_remote_temporary_files_after_completion.yml b/changelogs/fragments/1073-action_plugin_does_not_clean_up_remote_temporary_files_after_completion.yml new file mode 100644 index 000000000..6532e60ae --- /dev/null +++ b/changelogs/fragments/1073-action_plugin_does_not_clean_up_remote_temporary_files_after_completion.yml @@ -0,0 +1,4 @@ +bugfixes: + - zos_unarchive - When unarchiving USS files, the module left temporary files on the remote. + Change now removes temporary files. + (https://github.com/ansible-collections/ibm_zos_core/pull/1073). \ No newline at end of file diff --git a/plugins/action/zos_unarchive.py b/plugins/action/zos_unarchive.py index d808647ef..6e679d62d 100644 --- a/plugins/action/zos_unarchive.py +++ b/plugins/action/zos_unarchive.py @@ -46,6 +46,9 @@ def run(self, tmp=None, task_vars=None): module_args = self._task.args.copy() + tmp_files = "" + uss_format = None + if module_args.get("remote_src", False): result.update( self._execute_module( @@ -67,9 +70,10 @@ def run(self, tmp=None, task_vars=None): source = os.path.realpath(source) if format_name in USS_SUPPORTED_FORMATS: - dest = self._execute_module( + tmp_files = dest = self._execute_module( module_name="tempfile", module_args={}, task_vars=task_vars, ).get("path") + uss_format = format_name elif format_name in MVS_SUPPORTED_FORMATS: if dest_data_set is None: dest_data_set = dict() @@ -120,4 +124,13 @@ def run(self, tmp=None, task_vars=None): ) else: result.update(dict(failed=True)) + + if not module_args.get("remote_src", False) and uss_format: + self._remote_cleanup(tmp_files) + return result + + def _remote_cleanup(self, tempfile_path): + """Removes the temporary file in a managed node created for a local + script.""" + self._connection.exec_command("rm -f {0}".format(tempfile_path)) From 0b8a919472768b531fdf44cf1baa07c0ac00f4b8 Mon Sep 17 00:00:00 2001 From: Ivan Moreno <iamorenosoto@gmail.com> Date: Tue, 5 Dec 2023 23:11:02 -0600 Subject: [PATCH 236/495] [v1.9.0] Bugfix/837/missing ZOAU imports (#1042) * Added new missing import handler to zos_operator * Added new import handler to job util and zos_job_query * Added ZOAU check to zos_operator * Fixed sanity test issue * Fixed Python 2.7 sanity issue * Add changelog fragment --------- Co-authored-by: Demetri <dimatos@gmail.com> --- .../fragments/1042-missing-zoau-imports.yml | 10 +++++ plugins/module_utils/import_handler.py | 42 +++++++++++++++++++ plugins/module_utils/job.py | 31 +++++++++----- plugins/modules/zos_job_query.py | 3 +- plugins/modules/zos_operator.py | 15 +++++-- 5 files changed, 85 insertions(+), 16 deletions(-) create mode 100644 changelogs/fragments/1042-missing-zoau-imports.yml diff --git a/changelogs/fragments/1042-missing-zoau-imports.yml b/changelogs/fragments/1042-missing-zoau-imports.yml new file mode 100644 index 000000000..a91f6de48 --- /dev/null +++ b/changelogs/fragments/1042-missing-zoau-imports.yml @@ -0,0 +1,10 @@ +bugfixes: + - zos_job_query - The module handling ZOAU import errors obscured the + original traceback when an import error ocurred. Fix now passes correctly + the context to the user. + (https://github.com/ansible-collections/ibm_zos_core/pull/1042). + + - zos_operator - The module handling ZOAU import errors obscured the + original traceback when an import error ocurred. Fix now passes correctly + the context to the user. + (https://github.com/ansible-collections/ibm_zos_core/pull/1042). \ No newline at end of file diff --git a/plugins/module_utils/import_handler.py b/plugins/module_utils/import_handler.py index 3e774f53a..a7b41a619 100644 --- a/plugins/module_utils/import_handler.py +++ b/plugins/module_utils/import_handler.py @@ -27,6 +27,48 @@ def method(*args, **kwargs): return method +class ZOAUImportError(object): + """This class serves as a wrapper for any kind of error when importing + ZOAU. Since ZOAU is used by both modules and module_utils, we need a way + to alert the user when they're trying to use a function that couldn't be + imported properly. If we only had to deal with this in modules, we could + just validate that imports worked at the start of their main functions, + but on utils, we don't have an entry point where we can validate this. + Just raising an exception when trying the import would be better, but that + introduces a failure on Ansible sanity tests, so we can't do it. + + Instead, we'll replace what would've been a ZOAU library with this class, + and the moment ANY method gets called, we finally raise an exception. + """ + + def __init__(self, exception_traceback): + """When creating a new instance of this class, we save the traceback + from the original exception so that users have more context when their + task/code fails. The expected traceback is a string representation of + it, not an actual traceback object. By importing `traceback` from the + standard library and calling `traceback.format_exc()` we can + get this string. + """ + self.traceback = exception_traceback + + def __getattr__(self, name): + """This code is virtually the same from `MissingZOAUImport`. What we + do here is hijack all calls to any method from a missing ZOAU library + and instead return a method that will alert the user that there was + an error while importing ZOAU. + """ + def method(*args, **kwargs): + raise ImportError( + ( + "ZOAU is not properly configured for Ansible. Unable to import zoautil_py. " + "Ensure environment variables are properly configured in Ansible for use with ZOAU. " + "Complete traceback: {0}".format(self.traceback) + ) + ) + + return method + + class MissingImport(object): def __init__(self, import_name=""): self.import_name = import_name diff --git a/plugins/module_utils/job.py b/plugins/module_utils/job.py index bf23bf5bc..94a65d8c3 100644 --- a/plugins/module_utils/job.py +++ b/plugins/module_utils/job.py @@ -15,21 +15,30 @@ import fnmatch import re +import traceback from time import sleep from timeit import default_timer as timer from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.better_arg_parser import ( BetterArgParser, ) from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import ( - MissingZOAUImport, + # MissingZOAUImport, + ZOAUImportError ) try: - from zoautil_py.jobs import read_output, list_dds, listing + # For files that import individual functions from a ZOAU module, + # we'll replace the imports to instead get the module. + # This way, we'll always make a call to the module, allowing us + # to properly get the exception we need and avoid the issue + # described in #837. + # from zoautil_py.jobs import read_output, list_dds, listing + from zoautil_py import jobs except Exception: - read_output = MissingZOAUImport() - list_dds = MissingZOAUImport() - listing = MissingZOAUImport() + # read_output = MissingZOAUImport() + # list_dds = MissingZOAUImport() + # listing = MissingZOAUImport() + jobs = ZOAUImportError(traceback.format_exc()) from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import ( zoau_version_checker @@ -204,7 +213,7 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=T # jls output: owner=job[0], name=job[1], id=job[2], status=job[3], rc=job[4] # e.g.: OMVSADM HELLO JOB00126 JCLERR ? - # listing(job_id, owner) in 1.2.0 has owner param, 1.1 does not + # jobs.listing(job_id, owner) in 1.2.0 has owner param, 1.1 does not # jls output has expanded in zoau 1.2.3 and later: jls -l -v shows headers # jobclass=job[5] serviceclass=job[6] priority=job[7] asid=job[8] # creationdatetime=job[9] queueposition=job[10] @@ -217,13 +226,13 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=T kwargs = { "job_id": job_id_temp, } - entries = listing(**kwargs) + entries = jobs.listing(**kwargs) while ((entries is None or len(entries) == 0) and duration <= timeout): current_time = timer() duration = round(current_time - start_time) sleep(1) - entries = listing(**kwargs) + entries = jobs.listing(**kwargs) if entries: for entry in entries: @@ -275,12 +284,12 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=T job["duration"] = duration if dd_scan: - list_of_dds = list_dds(entry.id) + list_of_dds = jobs.list_dds(entry.id) while ((list_of_dds is None or len(list_of_dds) == 0) and duration <= timeout): current_time = timer() duration = round(current_time - start_time) sleep(1) - list_of_dds = list_dds(entry.id) + list_of_dds = jobs.list_dds(entry.id) job["duration"] = duration @@ -325,7 +334,7 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=T tmpcont = None if "stepname" in single_dd: if "dataset" in single_dd: - tmpcont = read_output( + tmpcont = jobs.read_output( entry.id, single_dd["stepname"], single_dd["dataset"]) dd["content"] = tmpcont.split("\n") diff --git a/plugins/modules/zos_job_query.py b/plugins/modules/zos_job_query.py index 283467766..cf94fa684 100644 --- a/plugins/modules/zos_job_query.py +++ b/plugins/modules/zos_job_query.py @@ -260,6 +260,7 @@ ) from ansible.module_utils.basic import AnsibleModule +from ansible.module_utils._text import to_text import re @@ -287,7 +288,7 @@ def run_module(): jobs = None except Exception as e: - module.fail_json(msg=e, **result) + module.fail_json(msg=to_text(e), **result) result["jobs"] = jobs module.exit_json(**result) diff --git a/plugins/modules/zos_operator.py b/plugins/modules/zos_operator.py index 969890ba5..273b0a867 100644 --- a/plugins/modules/zos_operator.py +++ b/plugins/modules/zos_operator.py @@ -144,14 +144,17 @@ sample: true """ +import traceback from timeit import default_timer as timer from ansible.module_utils.basic import AnsibleModule +from ansible.module_utils._text import to_text from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.ansible_module import ( AnsibleModuleHelper, ) from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import ( - MissingZOAUImport, + # MissingZOAUImport, + ZOAUImportError ) from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.better_arg_parser import ( @@ -161,7 +164,7 @@ try: from zoautil_py import opercmd except Exception: - opercmd = MissingZOAUImport() + opercmd = ZOAUImportError(traceback.format_exc()) try: from zoautil_py import ZOAU_API_VERSION @@ -190,6 +193,10 @@ def run_module(): result = dict(changed=False) module = AnsibleModule(argument_spec=module_args, supports_check_mode=False) + # Checking that we can actually use ZOAU. + if isinstance(opercmd, ZOAUImportError): + module.fail_json(msg="An error ocurred while importing ZOAU: {0}".format(opercmd.traceback)) + try: new_params = parse_params(module.params) rc_message = run_operator_command(new_params) @@ -241,10 +248,10 @@ def run_module(): stderr_lines=str(error).splitlines() if error is not None else result["content"], changed=result["changed"],) except Error as e: - module.fail_json(msg=repr(e), **result) + module.fail_json(msg=to_text(e), **result) except Exception as e: module.fail_json( - msg="An unexpected error occurred: {0}".format(repr(e)), **result + msg="An unexpected error occurred: {0}".format(to_text(e)), **result ) module.exit_json(**result) From 5b4a3bbba15199666cf12992e2cb1ad87f0c99e6 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Tue, 5 Dec 2023 23:19:22 -0600 Subject: [PATCH 237/495] [Enhancement][zos_job_submit] Modify error messages to be clearer (#1074) * Improved error messages * Updated failure logic --------- Co-authored-by: Demetri <dimatos@gmail.com> --- .../1074-improve-job-submit-error-msgs.yml | 3 ++ plugins/action/zos_job_submit.py | 34 ++++++------------- 2 files changed, 14 insertions(+), 23 deletions(-) create mode 100644 changelogs/fragments/1074-improve-job-submit-error-msgs.yml diff --git a/changelogs/fragments/1074-improve-job-submit-error-msgs.yml b/changelogs/fragments/1074-improve-job-submit-error-msgs.yml new file mode 100644 index 000000000..769131a2b --- /dev/null +++ b/changelogs/fragments/1074-improve-job-submit-error-msgs.yml @@ -0,0 +1,3 @@ +minor_changes: + - zos_job_submit - Improve error messages in zos_job_submit to be clearer. + (https://github.com/ansible-collections/ibm_zos_core/pull/1074). diff --git a/plugins/action/zos_job_submit.py b/plugins/action/zos_job_submit.py index 630ce7969..c28fcec76 100644 --- a/plugins/action/zos_job_submit.py +++ b/plugins/action/zos_job_submit.py @@ -57,15 +57,18 @@ def run(self, tmp=None, task_vars=None): source = self._task.args.get("src", None) # Get a temporary file on the managed node - dest_path = self._execute_module( - module_name="tempfile", module_args={}, task_vars=task_vars, - ).get("path") + tempfile = self._execute_module( + module_name="tempfile", module_args=dict(state="file"), task_vars=task_vars, + ) + dest_path = tempfile.get("path") result["failed"] = True - if source is None or dest_path is None: - result["msg"] = "src and dest are required" - elif source is not None and source.endswith("/"): - result["msg"] = "src must be a file" + if source is None: + result["msg"] = "Source is required." + elif dest_path is None: + result["msg"] = "Failed copying to remote, destination file was not created. {0}".format(tempfile.get("msg")) + elif source is not None and os.path.isdir(to_bytes(source, errors="surrogate_or_strict")): + result["msg"] = "Source must be a file." else: del result["failed"] @@ -79,11 +82,6 @@ def run(self, tmp=None, task_vars=None): result["msg"] = to_text(e) return result - if os.path.isdir(to_bytes(source, errors="surrogate_or_strict")): - result["failed"] = True - result["msg"] = to_text("NOT SUPPORTING THE DIRECTORY.") - return result - if tmp is None or "-tmp-" not in tmp: tmp = self._make_tmp_path() @@ -93,7 +91,7 @@ def run(self, tmp=None, task_vars=None): # source_rel = os.path.basename(source) except AnsibleFileNotFound as e: result["failed"] = True - result["msg"] = "could not find src=%s, %s" % (source_full, e) + result["msg"] = "Source {0} not found. {1}".format(source_full, e) self._remove_tmp_path(tmp) return result @@ -102,16 +100,6 @@ def run(self, tmp=None, task_vars=None): # else: dest_file = self._connection._shell.join_path(dest_path) - dest_status = self._execute_remote_stat( - dest_file, all_vars=task_vars, follow=False - ) - - if dest_status["exists"] and dest_status["isdir"]: - self._remove_tmp_path(tmp) - result["failed"] = True - result["msg"] = "can not use content with a dir as dest" - return result - tmp_src = self._connection._shell.join_path(tmp, "source") rendered_file = None From 87f1523c8fbb3ba707ce2aca71a0cd0b5093986d Mon Sep 17 00:00:00 2001 From: ketankelkar <ktnklkr@gmail.com> Date: Thu, 7 Dec 2023 08:37:22 -0800 Subject: [PATCH 238/495] update link in managed_node doc (#1089) * update link in managed_node doc Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * Updated docs regarding managed node Signed-off-by: ddimatos <dimatos@gmail.com> * Doc fragment Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: ddimatos <dimatos@gmail.com> --- .../1089-update-managed_node_doc.yml | 3 + docs/source/requirements-single.rst | 150 +++++++----------- docs/source/requirements_managed.rst | 147 +++++++---------- 3 files changed, 110 insertions(+), 190 deletions(-) create mode 100644 changelogs/fragments/1089-update-managed_node_doc.yml diff --git a/changelogs/fragments/1089-update-managed_node_doc.yml b/changelogs/fragments/1089-update-managed_node_doc.yml new file mode 100644 index 000000000..e0c7ff18b --- /dev/null +++ b/changelogs/fragments/1089-update-managed_node_doc.yml @@ -0,0 +1,3 @@ +trivial: + - managed node doc - updated the managed node documentation links and content. + (https://github.com/ansible-collections/ibm_zos_core/pull/1089). diff --git a/docs/source/requirements-single.rst b/docs/source/requirements-single.rst index ca745f178..3f0b2b8e0 100644 --- a/docs/source/requirements-single.rst +++ b/docs/source/requirements-single.rst @@ -20,7 +20,7 @@ The controller is where the Ansible engine that runs the playbook is installed. Refer to RedHat Ansible Certified Content documentation for more on the `controllers dependencies`_. .. _controllers dependencies: - https://ibm.github.io/z_ansible_collections_doc/requirements/requirements_controller.html + https://ibm.github.io/z_ansible_collections_doc/requirements/requirements.html#control-node .. ........................................................................... .. © Copyright IBM Corporation 2020 . .. ........................................................................... @@ -29,130 +29,86 @@ Managed node ============ The managed z/OS node is the host that is managed by Ansible, as identified in -the Ansible inventory. -The managed node has dependencies that are specific to each release of the -**IBM z/OS core collection**. Review the details of the dependencies before you -proceed to install the IBM z/OS core collection. +the Ansible inventory. For the **IBM z/OS core collection** to manage the z/OS node, +some dependencies are required to be installed on z/OS such as: -* z/OS `V2R3`_ or `later`_ +* `z/OS`_ * `z/OS OpenSSH`_ -* Supported by `IBM Open Enterprise SDK for Python`_ - (previously `IBM Open Enterprise Python for z/OS`_) 3.8.2 or later -* `IBM Z Open Automation Utilities`_ (ZOAU) +* `z/OS® shell`_ +* `IBM Open Enterprise SDK for Python`_ +* `IBM Z Open Automation Utilities`_ .. note:: - IBM z/OS core collection is dependent on specific versions of - Z Open Automation Utilities (ZOAU). For information about the required - version of ZOAU, review the `release notes`_. For detailed instructions on - installation and configuration of ZOAU, - `Installing and Configuring ZOA Utilities`_. + Each release of the IBM z/OS core collection depends on specific dependency + versions. For information on the dependencies or the versions, review the + `release notes`_ reference section. -* The `z/OS® shell`_ +z/OS shell +---------- - .. note:: - Currently, only ``z/OS® shell`` is supported. Using - ``ansible_shell_executable`` to change the default shell is discouraged. - For more information, see `Ansible documentation`_. Shells such as ``bash`` - are not supported because they handle the reading and writing of untagged - files differently. Please review the README.ZOS guide included with the - ported ``bash`` shell for further configurations. - -.. _Installing and Configuring ZOA Utilities: - https://www.ibm.com/support/knowledgecenter/en/SSKFYE_1.1.0/install.html - -.. _Ansible documentation: - https://docs.ansible.com/ansible/2.7/user_guide/intro_inventory.html - -.. _Python on z/OS: - requirements-single.html#id1 - -.. _V2R3: - https://www.ibm.com/support/knowledgecenter/SSLTBW_2.3.0/com.ibm.zos.v2r3/en/homepage.html - -.. _later: - https://www.ibm.com/support/knowledgecenter/SSLTBW - -.. _IBM Z Open Automation Utilities: - requirements-single.html#id1 - -.. _z/OS OpenSSH: - https://www.ibm.com/support/knowledgecenter/SSLTBW_2.2.0/com.ibm.zos.v2r2.e0za100/ch1openssh.htm +Currently, only the `z/OS® shell`_ is supported. Using ``ansible_shell_executable`` +to change the default shell is discouraged. Shells such as ``bash`` are not supported +because it handles the reading and writing of untagged files differently. -.. _release notes: - release_notes.html - -.. _playbook configuration: - https://github.com/IBM/z_ansible_collections_samples/blob/main/docs/share/configuration_guide.md - -.. _FAQs: - https://ibm.github.io/z_ansible_collections_doc/faqs/faqs.html - -.. _z/OS® shell: - https://www.ibm.com/support/knowledgecenter/en/SSLTBW_2.4.0/com.ibm.zos.v2r4.bpxa400/part1.htm - -.. _Z Open Automation Utilities 1.1.0: - https://www.ibm.com/support/knowledgecenter/SSKFYE_1.1.0/install.html - -.. _configured IBM Open Enterprise Python on z/OS: - https://www.ibm.com/support/knowledgecenter/SSCH7P_3.8.0/install.html +Open Enterprise SDK for Python +------------------------------ -Python on z/OS --------------- - -If the Ansible target is z/OS, you must install -**IBM Open Enterprise Python for z/OS** which is ported for the z/OS platform -and required by **IBM z/OS core collection**. +The **IBM z/OS core collection** requires that the **IBM Open Enterprise SDK for Python** +be installed on z/OS. **Installation** -* Visit the `IBM Open Enterprise Python for z/OS`_ product page for FMID, +* Visit the `IBM Open Enterprise SDK for Python`_ product page for the FMID, program directory, fix list, latest PTF, installation and configuration instructions. * For reference, the Program IDs are: * 5655-PYT for the base product * 5655-PYS for service and support -* Optionally, download **IBM Open Enterprise Python for z/OS**, `here`_ -* For the supported Python version, refer to the `release notes`_. +* Optionally, `download the IBM Open Enterprise SDK for Python`_ no cost + addition for installation. -.. _IBM Open Enterprise Python for z/OS: - http://www.ibm.com/products/open-enterprise-python-zos +IBM Z Open Automation Utilities +------------------------------- -.. _IBM Open Enterprise SDK for Python: - https://www.ibm.com/products/open-enterprise-python-zos +IBM Z Open Automation Utilities provide support for executing automation tasks +on z/OS. It can run z/OS programs such as IEBCOPY, IDCAMS and IKJEFT01, perform +data set operations and much more in the scripting language of your choice. -.. _here: - https://www-01.ibm.com/marketing/iwm/platform/mrs/assets?source=swg-ibmoep +**Installation** -.. note:: +* Visit the `IBM Z Open Automation Utilities`_ product page for the FMID, + program directory, fix list, latest PTF, installation, and configuration + instructions. +* For reference, the Program IDs are: - Currently, IBM Open Enterprise Python for z/OS is the supported and - recommended Python distribution for use with Ansible and ZOAU. If - Rocket Python is the only available Python on the target, review the - `recommended environment variables`_ for Rocket Python. + * 5698-PA1 for the base product + * 5698-PAS for service and support +* Optionally, `download the IBM Z Open Automation Utilities`_ no cost + addition for installation. -.. _recommended environment variables: - https://github.com/IBM/z_ansible_collections_samples/blob/main/docs/share/configuration_guide.md#variables -ZOAU ----- +.. _z/OS: + https://www.ibm.com/docs/en/zos -IBM Z Open Automation Utilities provide support for executing automation tasks -on z/OS. With ZOAU, you can run traditional MVS commands such as IEBCOPY, -IDCAMS, and IKJEFT01, as well as perform a number of data set operations -in the scripting language of your choice. +.. _z/OS OpenSSH: + https://www.ibm.com/docs/en/zos/latest?topic=zbed-zos-openssh -**Installation** +.. _z/OS® shell: + https://www.ibm.com/docs/en/zos/latest?topic=guide-zos-shells -* Visit the `ZOAU`_ product page for the FMID, program directory, fix list, - latest PTF, installation, and configuration instructions. -* For reference, the Program IDs are: +.. _IBM Open Enterprise SDK for Python: + https://www.ibm.com/products/open-enterprise-python-zos - * 5698-PA1 for the base product - * 5698-PAS for service and support -* For ZOAU supported version, refer to the `release notes`_. +.. _IBM Z Open Automation Utilities: + https://www.ibm.com/docs/en/zoau + +.. _release notes: + release_notes.html -.. _ZOAU: - https://www.ibm.com/support/knowledgecenter/en/SSKFYE +.. _download the IBM Open Enterprise SDK for Python: + https://www.ibm.com/account/reg/us-en/signup?formid=urx-49465 +.. _download the IBM Z Open Automation Utilities: + https://ibm.github.io/mainframe-downloads/downloads.html#devops \ No newline at end of file diff --git a/docs/source/requirements_managed.rst b/docs/source/requirements_managed.rst index 8be719819..24cb80f45 100644 --- a/docs/source/requirements_managed.rst +++ b/docs/source/requirements_managed.rst @@ -6,126 +6,87 @@ Managed node ============ The managed z/OS node is the host that is managed by Ansible, as identified in -the Ansible inventory. -The managed node has dependencies that are specific to each release of the -**IBM z/OS core collection**. Review the details of the dependencies before you -proceed to install the IBM z/OS core collection. +the Ansible inventory. For the **IBM z/OS core collection** to manage the z/OS node, +some dependencies are required to be installed on z/OS such as: -* z/OS `V2R3`_ or `later`_ +* `z/OS`_ * `z/OS OpenSSH`_ -* Supported by `IBM Open Enterprise SDK for Python`_ - (previously `IBM Open Enterprise Python for z/OS`_) 3.8.2 or later -* `IBM Z Open Automation Utilities`_ (ZOAU) +* `z/OS® shell`_ +* `IBM Open Enterprise SDK for Python`_ +* `IBM Z Open Automation Utilities`_ - .. note:: - IBM z/OS core collection is dependent on specific versions of - Z Open Automation Utilities (ZOAU). For information about the required - version of ZOAU, review the `release notes`_. For detailed instructions on - installation and configuration of ZOAU, - `Installing and Configuring ZOA Utilities`_. - -* The `z/OS® shell`_ - - .. note:: - Currently, only ``z/OS® shell`` is supported. Using - ``ansible_shell_executable`` to change the default shell is discouraged. - For more information, see `Ansible documentation`_. Shells such as ``bash`` - are not supported because they handle the reading and writing of untagged - files differently. Please review the README.ZOS guide included with the - ported ``bash`` shell for further configurations. - -.. _Ansible documentation: - https://docs.ansible.com/ansible/2.7/user_guide/intro_inventory.html - -.. _Python on z/OS: - requirements_managed.html#id1 - -.. _Installing and Configuring ZOA Utilities: - https://www.ibm.com/support/knowledgecenter/en/SSKFYE_1.1.0/install.html - -.. _V2R3: - https://www.ibm.com/support/knowledgecenter/SSLTBW_2.3.0/com.ibm.zos.v2r3/en/homepage.html - -.. _later: - https://www.ibm.com/support/knowledgecenter/SSLTBW - -.. _IBM Z Open Automation Utilities: - requirements_managed.html#zoau - -.. _z/OS OpenSSH: - https://www.ibm.com/support/knowledgecenter/SSLTBW_2.2.0/com.ibm.zos.v2r2.e0za100/ch1openssh.htm - -.. _release notes: - release_notes.html - -.. _playbook configuration: - https://github.com/IBM/z_ansible_collections_samples/blob/main/docs/share/configuration_guide.md +.. note:: -.. _z/OS® shell: - https://www.ibm.com/support/knowledgecenter/en/SSLTBW_2.4.0/com.ibm.zos.v2r4.bpxa400/part1.htm + Each release of the IBM z/OS core collection depends on specific dependency + versions. For information on the dependencies or the versions, review the + `release notes`_ reference section. -.. _Z Open Automation Utilities 1.1.0: - https://www.ibm.com/support/knowledgecenter/SSKFYE_1.1.0/install.html +z/OS shell +---------- -.. _configured IBM Open Enterprise Python on z/OS: - https://www.ibm.com/support/knowledgecenter/SSCH7P_3.8.0/install.html +Currently, only the `z/OS® shell`_ is supported. Using ``ansible_shell_executable`` +to change the default shell is discouraged. Shells such as ``bash`` are not supported +because it handles the reading and writing of untagged files differently. -Python on z/OS --------------- +Open Enterprise SDK for Python +------------------------------ -If the Ansible target is z/OS, you must install -**IBM Open Enterprise Python for z/OS** which is ported for the z/OS platform -and required by **IBM z/OS core collection**. +The **IBM z/OS core collection** requires that the **IBM Open Enterprise SDK for Python** +be installed on z/OS. **Installation** -* Visit the `IBM Open Enterprise Python for z/OS`_ product page for FMID, +* Visit the `IBM Open Enterprise SDK for Python`_ product page for the FMID, program directory, fix list, latest PTF, installation and configuration instructions. * For reference, the Program IDs are: * 5655-PYT for the base product * 5655-PYS for service and support -* Optionally, download **IBM Open Enterprise Python for z/OS**, `here`_ -* For the supported Python version, refer to the `release notes`_. - -.. _IBM Open Enterprise Python for z/OS: - http://www.ibm.com/products/open-enterprise-python-zos -.. _IBM Open Enterprise SDK for Python: - https://www.ibm.com/products/open-enterprise-python-zos +* Optionally, `download the IBM Open Enterprise SDK for Python`_ no cost + addition for installation. -.. _here: - https://www-01.ibm.com/marketing/iwm/platform/mrs/assets?source=swg-ibmoep - -.. note:: - - Currently, IBM Open Enterprise Python for z/OS is the supported and - recommended Python distribution for use with Ansible and ZOAU. If - Rocket Python is the only available Python on the target, review the - `recommended environment variables`_ for Rocket Python. - -.. _recommended environment variables: - https://github.com/IBM/z_ansible_collections_samples/blob/main/docs/share/configuration_guide.md#variables - -ZOAU ----- +IBM Z Open Automation Utilities +------------------------------- IBM Z Open Automation Utilities provide support for executing automation tasks -on z/OS. With ZOAU, you can run traditional MVS commands such as IEBCOPY, -IDCAMS, and IKJEFT01, as well as perform a number of data set operations -in the scripting language of your choice. +on z/OS. It can run z/OS programs such as IEBCOPY, IDCAMS and IKJEFT01, perform +data set operations and much more in the scripting language of your choice. **Installation** -* Visit the `ZOAU`_ product page for the FMID, program directory, fix list, - latest PTF, installation, and configuration instructions. +* Visit the `IBM Z Open Automation Utilities`_ product page for the FMID, + program directory, fix list, latest PTF, installation, and configuration + instructions. * For reference, the Program IDs are: * 5698-PA1 for the base product * 5698-PAS for service and support -* For ZOAU supported version, refer to the `release notes`_. +* Optionally, `download the IBM Z Open Automation Utilities`_ no cost + addition for installation. + + +.. _z/OS: + https://www.ibm.com/docs/en/zos + +.. _z/OS OpenSSH: + https://www.ibm.com/docs/en/zos/latest?topic=zbed-zos-openssh + +.. _z/OS® shell: + https://www.ibm.com/docs/en/zos/latest?topic=guide-zos-shells + +.. _IBM Open Enterprise SDK for Python: + https://www.ibm.com/products/open-enterprise-python-zos + +.. _IBM Z Open Automation Utilities: + https://www.ibm.com/docs/en/zoau + +.. _release notes: + release_notes.html -.. _ZOAU: - https://www.ibm.com/support/knowledgecenter/en/SSKFYE +.. _download the IBM Open Enterprise SDK for Python: + https://www.ibm.com/account/reg/us-en/signup?formid=urx-49465 +.. _download the IBM Z Open Automation Utilities: + https://ibm.github.io/mainframe-downloads/downloads.html#devops \ No newline at end of file From 6868de2134d88f8012d2c751bedd541c6217a014 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <amgutierrezbenitez@hotmail.com> Date: Thu, 7 Dec 2023 15:56:15 -0600 Subject: [PATCH 239/495] First check of solution --- plugins/action/zos_job_submit.py | 1 - plugins/modules/zos_job_submit.py | 21 +++++++++------------ tests/sanity/ignore-2.15.txt | 3 --- 3 files changed, 9 insertions(+), 16 deletions(-) diff --git a/plugins/action/zos_job_submit.py b/plugins/action/zos_job_submit.py index c28fcec76..e7d4128ed 100644 --- a/plugins/action/zos_job_submit.py +++ b/plugins/action/zos_job_submit.py @@ -135,7 +135,6 @@ def run(self, tmp=None, task_vars=None): result = {} copy_module_args = {} module_args = self._task.args.copy() - module_args["temp_file"] = dest_path copy_module_args.update( dict( diff --git a/plugins/modules/zos_job_submit.py b/plugins/modules/zos_job_submit.py index 11f0f3ccb..397598e42 100644 --- a/plugins/modules/zos_job_submit.py +++ b/plugins/modules/zos_job_submit.py @@ -611,7 +611,7 @@ job_output, ) from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import ( - MissingZOAUImport, + ZOAUImportError, ) from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import ( data_set, @@ -624,19 +624,19 @@ from timeit import default_timer as timer from tempfile import NamedTemporaryFile from os import remove +import traceback from time import sleep import re try: - from zoautil_py.exceptions import ZOAUException, JobSubmitException + from zoautil_py import exceptions except ImportError: - ZOAUException = MissingZOAUImport() - JobSubmitException = MissingZOAUImport() + exceptions = ZOAUImportError(traceback.format_exc()) try: from zoautil_py import jobs except Exception: - jobs = MissingZOAUImport() + jobs = ZOAUImportError(traceback.format_exc()) if PY3: from shlex import quote @@ -731,7 +731,7 @@ def submit_src_jcl(module, src, src_name=None, timeout=0, hfs=True, volume=None, # ZOAU throws a ZOAUException when the job sumbission fails thus there is no # JCL RC to share with the user, if there is a RC, that will be processed # in the job_output parser. - except ZOAUException as err: + except exceptions.ZOAUException as err: result["changed"] = False result["failed"] = True result["stderr"] = str(err) @@ -746,7 +746,7 @@ def submit_src_jcl(module, src, src_name=None, timeout=0, hfs=True, volume=None, # ZOAU throws a JobSubmitException when timeout has execeeded in that no job_id # has been returned within the allocated time. - except JobSubmitException as err: + except exceptions.JobSubmitException as err: result["changed"] = False result["failed"] = False result["stderr"] = str(err) @@ -815,7 +815,6 @@ def run_module(): return_output=dict(type="bool", required=False, default=True), wait_time_s=dict(type="int", default=10), max_rc=dict(type="int", required=False), - temp_file=dict(type="path", required=False), use_template=dict(type='bool', default=False), template_parameters=dict( type='dict', @@ -877,7 +876,6 @@ def run_module(): return_output=dict(arg_type="bool", default=True), wait_time_s=dict(arg_type="int", required=False, default=10), max_rc=dict(arg_type="int", required=False), - temp_file=dict(arg_type="path", required=False), ) # ******************************************************************** @@ -902,7 +900,7 @@ def run_module(): from_encoding = parsed_args.get("from_encoding") to_encoding = parsed_args.get("to_encoding") # temporary file names for copied files when user sets location to LOCAL - temp_file = parsed_args.get("temp_file") + temp_file = parsed_args.get("src") temp_file_encoded = None # Default 'changed' is False in case the module is not able to execute @@ -920,7 +918,6 @@ def run_module(): job_submitted_id = None duration = 0 start_time = timer() - if location == "DATA_SET": job_submitted_id, duration = submit_src_jcl( module, src, src_name=src, timeout=wait_time_s, hfs=False, volume=volume, start_time=start_time) @@ -1039,7 +1036,7 @@ def run_module(): module.exit_json(**result) finally: - if temp_file: + if location != "DATA_SET" and location != "USS": remove(temp_file) # If max_rc is set, we don't want to default to changed=True, rely on 'is_changed' diff --git a/tests/sanity/ignore-2.15.txt b/tests/sanity/ignore-2.15.txt index 415196660..8099f00e0 100644 --- a/tests/sanity/ignore-2.15.txt +++ b/tests/sanity/ignore-2.15.txt @@ -17,9 +17,6 @@ plugins/modules/zos_find.py validate-modules:missing-gplv3-license # Licensed un plugins/modules/zos_job_output.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_job_query.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_job_submit.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 -plugins/modules/zos_job_submit.py validate-modules:parameter-type-not-in-doc # Passing args from action plugin -plugins/modules/zos_job_submit.py validate-modules:undocumented-parameter # Passing args from action plugin -plugins/modules/zos_job_submit.py pylint:catching-non-exception # False positive, Exception is inherited plugins/modules/zos_lineinfile.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_mount.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_mvs_raw.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 From f53998050612663991457aee49a35d5b9a2eed4e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <amgutierrezbenitez@hotmail.com> Date: Mon, 11 Dec 2023 12:31:57 -0600 Subject: [PATCH 240/495] Add clear solution --- plugins/action/zos_job_submit.py | 1 + plugins/modules/zos_job_submit.py | 36 ++++--------------------------- 2 files changed, 5 insertions(+), 32 deletions(-) diff --git a/plugins/action/zos_job_submit.py b/plugins/action/zos_job_submit.py index e7d4128ed..c98f1d451 100644 --- a/plugins/action/zos_job_submit.py +++ b/plugins/action/zos_job_submit.py @@ -142,6 +142,7 @@ def run(self, tmp=None, task_vars=None): dest=dest_path, mode="0600", force=True, + encoding=module_args.get('encoding'), remote_src=True, ) ) diff --git a/plugins/modules/zos_job_submit.py b/plugins/modules/zos_job_submit.py index 397598e42..ff975dbc1 100644 --- a/plugins/modules/zos_job_submit.py +++ b/plugins/modules/zos_job_submit.py @@ -620,7 +620,6 @@ DataSet, ) from ansible.module_utils.basic import AnsibleModule -from ansible.module_utils.six import PY3 from timeit import default_timer as timer from tempfile import NamedTemporaryFile from os import remove @@ -638,12 +637,6 @@ except Exception: jobs = ZOAUImportError(traceback.format_exc()) -if PY3: - from shlex import quote -else: - from pipes import quote - - JOB_COMPLETION_MESSAGES = frozenset(["CC", "ABEND", "SEC ERROR", "JCL ERROR", "JCLERR"]) JOB_ERROR_MESSAGES = frozenset(["ABEND", "SEC ERROR", "SEC", "JCL ERROR", "JCLERR"]) MAX_WAIT_TIME_S = 86400 @@ -922,32 +915,11 @@ def run_module(): job_submitted_id, duration = submit_src_jcl( module, src, src_name=src, timeout=wait_time_s, hfs=False, volume=volume, start_time=start_time) elif location == "USS": - job_submitted_id, duration = submit_src_jcl(module, src, src_name=src, timeout=wait_time_s, hfs=True) + job_submitted_id, duration = submit_src_jcl( + module, src, src_name=src, timeout=wait_time_s, hfs=True) else: - # added -c to iconv to prevent '\r' from erroring as invalid chars to EBCDIC - conv_str = "iconv -c -f {0} -t {1} {2} > {3}".format( - from_encoding, - to_encoding, - quote(temp_file), - quote(temp_file_encoded.name), - ) - - conv_rc, stdout, stderr = module.run_command( - conv_str, - use_unsafe_shell=True, - ) - - if conv_rc == 0: - job_submitted_id, duration = submit_src_jcl( - module, temp_file_encoded.name, src_name=src, timeout=wait_time_s, hfs=True) - else: - result["failed"] = True - result["stdout"] = stdout - result["stderr"] = stderr - result["msg"] = ("Failed to convert the src {0} from encoding {1} to " - "encoding {2}, unable to submit job." - .format(src, from_encoding, to_encoding)) - module.fail_json(**result) + job_submitted_id, duration = submit_src_jcl( + module, temp_file_encoded.name, src_name=src, timeout=wait_time_s, hfs=True) try: # Explictly pass None for the unused args else a default of '*' will be From 567f3846bcd1934410fe46d7152b4eb616e90154 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <amgutierrezbenitez@hotmail.com> Date: Tue, 12 Dec 2023 11:47:14 -0600 Subject: [PATCH 241/495] Return local --- plugins/action/zos_job_submit.py | 1 - plugins/modules/zos_job_submit.py | 32 +++++++++++++++++++++++++++++-- 2 files changed, 30 insertions(+), 3 deletions(-) diff --git a/plugins/action/zos_job_submit.py b/plugins/action/zos_job_submit.py index c98f1d451..e7d4128ed 100644 --- a/plugins/action/zos_job_submit.py +++ b/plugins/action/zos_job_submit.py @@ -142,7 +142,6 @@ def run(self, tmp=None, task_vars=None): dest=dest_path, mode="0600", force=True, - encoding=module_args.get('encoding'), remote_src=True, ) ) diff --git a/plugins/modules/zos_job_submit.py b/plugins/modules/zos_job_submit.py index ff975dbc1..4177d9e94 100644 --- a/plugins/modules/zos_job_submit.py +++ b/plugins/modules/zos_job_submit.py @@ -620,6 +620,7 @@ DataSet, ) from ansible.module_utils.basic import AnsibleModule +from ansible.module_utils.six import PY3 from timeit import default_timer as timer from tempfile import NamedTemporaryFile from os import remove @@ -637,6 +638,11 @@ except Exception: jobs = ZOAUImportError(traceback.format_exc()) +if PY3: + from shlex import quote +else: + from pipes import quote + JOB_COMPLETION_MESSAGES = frozenset(["CC", "ABEND", "SEC ERROR", "JCL ERROR", "JCLERR"]) JOB_ERROR_MESSAGES = frozenset(["ABEND", "SEC ERROR", "SEC", "JCL ERROR", "JCLERR"]) MAX_WAIT_TIME_S = 86400 @@ -918,8 +924,30 @@ def run_module(): job_submitted_id, duration = submit_src_jcl( module, src, src_name=src, timeout=wait_time_s, hfs=True) else: - job_submitted_id, duration = submit_src_jcl( - module, temp_file_encoded.name, src_name=src, timeout=wait_time_s, hfs=True) + # added -c to iconv to prevent '\r' from erroring as invalid chars to EBCDIC + conv_str = "iconv -c -f {0} -t {1} {2} > {3}".format( + from_encoding, + to_encoding, + quote(temp_file), + quote(temp_file_encoded.name), + ) + + conv_rc, stdout, stderr = module.run_command( + conv_str, + use_unsafe_shell=True, + ) + + if conv_rc == 0: + job_submitted_id, duration = submit_src_jcl( + module, temp_file_encoded.name, src_name=src, timeout=wait_time_s, hfs=True) + else: + result["failed"] = True + result["stdout"] = stdout + result["stderr"] = stderr + result["msg"] = ("Failed to convert the src {0} from encoding {1} to " + "encoding {2}, unable to submit job." + .format(src, from_encoding, to_encoding)) + module.fail_json(**result) try: # Explictly pass None for the unused args else a default of '*' will be From f726333503f4510cfeae408bd784eb59ec52af6f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <amgutierrezbenitez@hotmail.com> Date: Tue, 12 Dec 2023 11:49:18 -0600 Subject: [PATCH 242/495] Add test ignores --- tests/sanity/ignore-2.14.txt | 3 --- 1 file changed, 3 deletions(-) diff --git a/tests/sanity/ignore-2.14.txt b/tests/sanity/ignore-2.14.txt index 415196660..8099f00e0 100644 --- a/tests/sanity/ignore-2.14.txt +++ b/tests/sanity/ignore-2.14.txt @@ -17,9 +17,6 @@ plugins/modules/zos_find.py validate-modules:missing-gplv3-license # Licensed un plugins/modules/zos_job_output.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_job_query.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_job_submit.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 -plugins/modules/zos_job_submit.py validate-modules:parameter-type-not-in-doc # Passing args from action plugin -plugins/modules/zos_job_submit.py validate-modules:undocumented-parameter # Passing args from action plugin -plugins/modules/zos_job_submit.py pylint:catching-non-exception # False positive, Exception is inherited plugins/modules/zos_lineinfile.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_mount.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_mvs_raw.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 From 3d5d7e86de002532c4d71a50b79a976e3dcd0ebb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <amgutierrezbenitez@hotmail.com> Date: Tue, 12 Dec 2023 14:46:28 -0600 Subject: [PATCH 243/495] Add ignore to 2.16 --- tests/sanity/ignore-2.16.txt | 3 --- 1 file changed, 3 deletions(-) diff --git a/tests/sanity/ignore-2.16.txt b/tests/sanity/ignore-2.16.txt index 415196660..8099f00e0 100644 --- a/tests/sanity/ignore-2.16.txt +++ b/tests/sanity/ignore-2.16.txt @@ -17,9 +17,6 @@ plugins/modules/zos_find.py validate-modules:missing-gplv3-license # Licensed un plugins/modules/zos_job_output.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_job_query.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_job_submit.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 -plugins/modules/zos_job_submit.py validate-modules:parameter-type-not-in-doc # Passing args from action plugin -plugins/modules/zos_job_submit.py validate-modules:undocumented-parameter # Passing args from action plugin -plugins/modules/zos_job_submit.py pylint:catching-non-exception # False positive, Exception is inherited plugins/modules/zos_lineinfile.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_mount.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_mvs_raw.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 From 8af3270af80d0db6799eb7e4e954fc54450ee9cd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <amgutierrezbenitez@hotmail.com> Date: Tue, 12 Dec 2023 17:37:57 -0600 Subject: [PATCH 244/495] Optimize the encoding --- ...cumented_argument_and_import_exception.yml | 10 +++++ plugins/action/zos_job_submit.py | 1 + plugins/modules/zos_job_submit.py | 43 +++---------------- 3 files changed, 16 insertions(+), 38 deletions(-) create mode 100644 changelogs/fragments/1091-Update_undocumented_argument_and_import_exception.yml diff --git a/changelogs/fragments/1091-Update_undocumented_argument_and_import_exception.yml b/changelogs/fragments/1091-Update_undocumented_argument_and_import_exception.yml new file mode 100644 index 000000000..5d1cf4d60 --- /dev/null +++ b/changelogs/fragments/1091-Update_undocumented_argument_and_import_exception.yml @@ -0,0 +1,10 @@ +trivial: + - zos_job_submit - The module handling ZOAU import errors obscured the + original traceback when an import error ocurred. Fix now passes correctly + the context to the user. + (https://github.com/ansible-collections/ibm_zos_core/pull/1091). + + - zos_job_submit - The module had undocumented parameter and uses as temporary file + when the location of the file is LOCAL. Change now uses the same name as the src + for the temporary file removing the addition of tmp_file to the arguments. + (https://github.com/ansible-collections/ibm_zos_core/pull/1091). \ No newline at end of file diff --git a/plugins/action/zos_job_submit.py b/plugins/action/zos_job_submit.py index e7d4128ed..c98f1d451 100644 --- a/plugins/action/zos_job_submit.py +++ b/plugins/action/zos_job_submit.py @@ -142,6 +142,7 @@ def run(self, tmp=None, task_vars=None): dest=dest_path, mode="0600", force=True, + encoding=module_args.get('encoding'), remote_src=True, ) ) diff --git a/plugins/modules/zos_job_submit.py b/plugins/modules/zos_job_submit.py index 4177d9e94..36dc1357d 100644 --- a/plugins/modules/zos_job_submit.py +++ b/plugins/modules/zos_job_submit.py @@ -620,7 +620,6 @@ DataSet, ) from ansible.module_utils.basic import AnsibleModule -from ansible.module_utils.six import PY3 from timeit import default_timer as timer from tempfile import NamedTemporaryFile from os import remove @@ -638,10 +637,6 @@ except Exception: jobs = ZOAUImportError(traceback.format_exc()) -if PY3: - from shlex import quote -else: - from pipes import quote JOB_COMPLETION_MESSAGES = frozenset(["CC", "ABEND", "SEC ERROR", "JCL ERROR", "JCLERR"]) JOB_ERROR_MESSAGES = frozenset(["ABEND", "SEC ERROR", "SEC", "JCL ERROR", "JCLERR"]) @@ -896,11 +891,8 @@ def run_module(): return_output = parsed_args.get("return_output") wait_time_s = parsed_args.get("wait_time_s") max_rc = parsed_args.get("max_rc") - from_encoding = parsed_args.get("from_encoding") - to_encoding = parsed_args.get("to_encoding") - # temporary file names for copied files when user sets location to LOCAL - temp_file = parsed_args.get("src") - temp_file_encoded = None + if location == "LOCAL": + temp_file = parsed_args.get("src") # Default 'changed' is False in case the module is not able to execute result = dict(changed=False) @@ -911,9 +903,6 @@ def run_module(): "be greater than 0 and less than {0}.".format(str(MAX_WAIT_TIME_S))) module.fail_json(**result) - if temp_file: - temp_file_encoded = NamedTemporaryFile(delete=True) - job_submitted_id = None duration = 0 start_time = timer() @@ -924,30 +913,8 @@ def run_module(): job_submitted_id, duration = submit_src_jcl( module, src, src_name=src, timeout=wait_time_s, hfs=True) else: - # added -c to iconv to prevent '\r' from erroring as invalid chars to EBCDIC - conv_str = "iconv -c -f {0} -t {1} {2} > {3}".format( - from_encoding, - to_encoding, - quote(temp_file), - quote(temp_file_encoded.name), - ) - - conv_rc, stdout, stderr = module.run_command( - conv_str, - use_unsafe_shell=True, - ) - - if conv_rc == 0: - job_submitted_id, duration = submit_src_jcl( - module, temp_file_encoded.name, src_name=src, timeout=wait_time_s, hfs=True) - else: - result["failed"] = True - result["stdout"] = stdout - result["stderr"] = stderr - result["msg"] = ("Failed to convert the src {0} from encoding {1} to " - "encoding {2}, unable to submit job." - .format(src, from_encoding, to_encoding)) - module.fail_json(**result) + job_submitted_id, duration = submit_src_jcl( + module, temp_file, src_name=src, timeout=wait_time_s, hfs=True) try: # Explictly pass None for the unused args else a default of '*' will be @@ -1036,7 +1003,7 @@ def run_module(): module.exit_json(**result) finally: - if location != "DATA_SET" and location != "USS": + if location == "LOCAL": remove(temp_file) # If max_rc is set, we don't want to default to changed=True, rely on 'is_changed' From e03622bfd87d64de17ec3db918e8984524b49ccb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <amgutierrezbenitez@hotmail.com> Date: Tue, 12 Dec 2023 17:55:24 -0600 Subject: [PATCH 245/495] Remove unused import --- plugins/modules/zos_job_submit.py | 1 - 1 file changed, 1 deletion(-) diff --git a/plugins/modules/zos_job_submit.py b/plugins/modules/zos_job_submit.py index 36dc1357d..4b15cb424 100644 --- a/plugins/modules/zos_job_submit.py +++ b/plugins/modules/zos_job_submit.py @@ -621,7 +621,6 @@ ) from ansible.module_utils.basic import AnsibleModule from timeit import default_timer as timer -from tempfile import NamedTemporaryFile from os import remove import traceback from time import sleep From 92e2f899476e78b919bbb2467b158455ff3918ef Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <amgutierrezbenitez@hotmail.com> Date: Wed, 13 Dec 2023 11:05:52 -0600 Subject: [PATCH 246/495] Return encoded --- plugins/modules/zos_job_submit.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/plugins/modules/zos_job_submit.py b/plugins/modules/zos_job_submit.py index 4b15cb424..d4a5db1ea 100644 --- a/plugins/modules/zos_job_submit.py +++ b/plugins/modules/zos_job_submit.py @@ -890,6 +890,8 @@ def run_module(): return_output = parsed_args.get("return_output") wait_time_s = parsed_args.get("wait_time_s") max_rc = parsed_args.get("max_rc") + from_encoding = parsed_args.get("from_encoding") + to_encoding = parsed_args.get("to_encoding") if location == "LOCAL": temp_file = parsed_args.get("src") From 51e51f1710861c273e6ac4b42dd7eca1b77c4cbc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <amgutierrezbenitez@hotmail.com> Date: Wed, 13 Dec 2023 11:20:54 -0600 Subject: [PATCH 247/495] Add encoding --- tests/functional/modules/test_zos_job_submit_func.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/functional/modules/test_zos_job_submit_func.py b/tests/functional/modules/test_zos_job_submit_func.py index 44dfdbf01..9e2d6e400 100644 --- a/tests/functional/modules/test_zos_job_submit_func.py +++ b/tests/functional/modules/test_zos_job_submit_func.py @@ -343,6 +343,7 @@ def test_job_submit_LOCAL(ansible_zos_module): results = hosts.all.zos_job_submit(src=tmp_file.name, location="LOCAL", wait=True) for result in results.contacted.values(): + print(result) assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" assert result.get("jobs")[0].get("ret_code").get("code") == 0 assert result.get("changed") is True From bad1fc9122843a454196396d93c773a7ea4820f5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <amgutierrezbenitez@hotmail.com> Date: Wed, 13 Dec 2023 11:21:46 -0600 Subject: [PATCH 248/495] Remove unused encoded --- plugins/modules/zos_job_submit.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/plugins/modules/zos_job_submit.py b/plugins/modules/zos_job_submit.py index d4a5db1ea..4b15cb424 100644 --- a/plugins/modules/zos_job_submit.py +++ b/plugins/modules/zos_job_submit.py @@ -890,8 +890,6 @@ def run_module(): return_output = parsed_args.get("return_output") wait_time_s = parsed_args.get("wait_time_s") max_rc = parsed_args.get("max_rc") - from_encoding = parsed_args.get("from_encoding") - to_encoding = parsed_args.get("to_encoding") if location == "LOCAL": temp_file = parsed_args.get("src") From 6ebbbf5c922347021781375d82abb4a10a8470d4 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Fri, 15 Dec 2023 11:05:17 -0600 Subject: [PATCH 249/495] Modified logic to remove tmp files if they exist and not only if location is local --- plugins/modules/zos_job_submit.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/plugins/modules/zos_job_submit.py b/plugins/modules/zos_job_submit.py index 4b15cb424..a0af50054 100644 --- a/plugins/modules/zos_job_submit.py +++ b/plugins/modules/zos_job_submit.py @@ -911,9 +911,9 @@ def run_module(): elif location == "USS": job_submitted_id, duration = submit_src_jcl( module, src, src_name=src, timeout=wait_time_s, hfs=True) - else: + elif location == "LOCAL": job_submitted_id, duration = submit_src_jcl( - module, temp_file, src_name=src, timeout=wait_time_s, hfs=True) + module, src, src_name=src, timeout=wait_time_s, hfs=True) try: # Explictly pass None for the unused args else a default of '*' will be @@ -1002,7 +1002,7 @@ def run_module(): module.exit_json(**result) finally: - if location == "LOCAL": + if temp_file is not None: remove(temp_file) # If max_rc is set, we don't want to default to changed=True, rely on 'is_changed' From bd2505b96bd444371e1840c3d0a446059789a6a1 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Fri, 15 Dec 2023 11:15:54 -0600 Subject: [PATCH 250/495] Added temp_file null definition --- plugins/modules/zos_job_submit.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/plugins/modules/zos_job_submit.py b/plugins/modules/zos_job_submit.py index a0af50054..4e2a1d52e 100644 --- a/plugins/modules/zos_job_submit.py +++ b/plugins/modules/zos_job_submit.py @@ -890,8 +890,7 @@ def run_module(): return_output = parsed_args.get("return_output") wait_time_s = parsed_args.get("wait_time_s") max_rc = parsed_args.get("max_rc") - if location == "LOCAL": - temp_file = parsed_args.get("src") + temp_file = parsed_args.get("src") if location == "LOCAL" else None # Default 'changed' is False in case the module is not able to execute result = dict(changed=False) From fdb9b76becd81f42206398ac847ba0f6d71e07a4 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Fri, 15 Dec 2023 12:20:15 -0600 Subject: [PATCH 251/495] Update 1091-Update_undocumented_argument_and_import_exception.yml --- ...1091-Update_undocumented_argument_and_import_exception.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/changelogs/fragments/1091-Update_undocumented_argument_and_import_exception.yml b/changelogs/fragments/1091-Update_undocumented_argument_and_import_exception.yml index 5d1cf4d60..d1d1560f8 100644 --- a/changelogs/fragments/1091-Update_undocumented_argument_and_import_exception.yml +++ b/changelogs/fragments/1091-Update_undocumented_argument_and_import_exception.yml @@ -1,4 +1,4 @@ -trivial: +minor_changes: - zos_job_submit - The module handling ZOAU import errors obscured the original traceback when an import error ocurred. Fix now passes correctly the context to the user. @@ -7,4 +7,4 @@ trivial: - zos_job_submit - The module had undocumented parameter and uses as temporary file when the location of the file is LOCAL. Change now uses the same name as the src for the temporary file removing the addition of tmp_file to the arguments. - (https://github.com/ansible-collections/ibm_zos_core/pull/1091). \ No newline at end of file + (https://github.com/ansible-collections/ibm_zos_core/pull/1091). From cd3638f30de5f55b08132f8355b2f18bba28eaa4 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Fri, 15 Dec 2023 12:58:25 -0600 Subject: [PATCH 252/495] Merge release v1.8.0 into dev branch (#1095) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Staging v1.7.0 beta.1 (#915) * Added tagging of converted files * Updated tests for zos_encode * Added restoration of PDS/PDSE members when module fails zos_copy can now track which members got overwritten or newly added to a partitioned data set, and restore them accordingly. This commit includes a refactorization of the copy_to_pdse method to simplify the restoration work. * Fixed cleanup of dest when module fails * Removed exception used for debugging * Added pytest markers * solved pep8 issue * Added more information to error when restoration fails * Update make file with new features to manage the nodes (#563) * Update make file with new features to manage the nodes Signed-off-by: ddimatos <dimatos@gmail.com> * Correct typo Signed-off-by: ddimatos <dimatos@gmail.com> * updated makefile to allow for no password in the clear and added a new host Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Fix a bug that failed to copy the cert to the managed node (#577) Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Added apropiate error message in zos_lineinfile when src is not found * Better error message * Added fragment for changelog * Update git issue slection field (#593) * Update git issue slection field * Added some additional future versions * Added rule to ignore python 2.7 compile not supporting f strings * Corrected rule in ignore file * Update 584-zos_lineinfile-error-message.yml * Added missing fragments for issues 309 and 408 * update make file and related artifacts with recent zVM changes (#598) Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Expand what is ingored for venv to all venv's (#613) Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Ansible-core versions with updated ignore file pass (#615) * Validated that ansible-core versions with updated ignore file pass locally Signed-off-by: ddimatos <dimatos@gmail.com> * remove redundant entry on last line Signed-off-by: ddimatos <dimatos@gmail.com> * remove redundant entry on last line Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Update make file to read exported var VENV else default to 'venv' (#614) * Update make file to read exported var VENV else default to 'venv' Signed-off-by: ddimatos <dimatos@gmail.com> * Correct the default from defaultValue to 'venv' Signed-off-by: ddimatos <dimatos@gmail.com> * Bugfix/558/zos copy backup dev (#609) * Create emergency backup only if force is set to False and dest exists * Added Changelog fragment * Updated imports ansible.module_utils._text to ansible.module_utils.common.text.converters (#602) * Updated import in zos_fetch * Updated import in zos_job_submit * Updated import in module_utils/data_set * Updated import in module_utils/System * Updated import in zos_copy * Updated import in zos_fetch * Added changelog fragment * Update enhancement-518-text-converter-import.yml * Updates the makefile and related scripts for the latest mounts (#628) * Update makefile coments and use --ignore for pytest Signed-off-by: ddimatos <dimatos@gmail.com> * lexicographical order targets Signed-off-by: ddimatos <dimatos@gmail.com> * Update makefile and artifacts to support new mounts Signed-off-by: ddimatos <dimatos@gmail.com> * Removing encrypted versions of the scripts, no longer senstive content Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * update profile created for mount points Signed-off-by: ddimatos <dimatos@gmail.com> * Correct f-string usage to remain 2.7 compatible (#659) * Correct f-string usage to remain 2.7 compatible Signed-off-by: ddimatos <dimatos@gmail.com> * Update changelog fragments Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Updated shell scripts for development tooling Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Update collab template Signed-off-by: ddimatos <dimatos@gmail.com> * Update collab template Signed-off-by: ddimatos <dimatos@gmail.com> * Update collab template Signed-off-by: ddimatos <dimatos@gmail.com> * Template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Template updates Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * changed zos_job_query to allow multi/embedded wildcard job_values. job.py/_get_job_status now uses fnmatch to test for wildcarded job_names. added test looking for a wildcard job name. added basic changelog text as 'enhancement'. * expanded wildcard support to job_id as well as job_name corrected error-handling issue in query routine playbook tests complete * cleaned up pep8 issues * Bug fix to correct job log returining and other edge cases (#683) * Update zos_job_submit to handle some edge cases and return the job log nearly always Signed-off-by: ddimatos <dimatos@gmail.com> * Linting correction Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragement for pr 683 Signed-off-by: ddimatos <dimatos@gmail.com> * Clean up comments in code Signed-off-by: ddimatos <dimatos@gmail.com> * Update restructured text for modules and changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Add support to check for security exception to job submit module Signed-off-by: ddimatos <dimatos@gmail.com> * Add additonal logic for jobs that use typerun=scan Signed-off-by: ddimatos <dimatos@gmail.com> * Update chnagelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update zos_job_submit.py Grammar change and correction. --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Added uss_tag_encoding function * Fixing linter issues * removed extraneous comment on query, eliminated unused variable * responding to reviewer comments * Expanded query tests to run a hello jcl file to make sure we are searching for a job id that exists. * Expanded documentation in code to highlight asterisk availability. Added examples showing multi-asterisk search. * Corrected 2 documentation errors * Change to documentation text (indent on multi line string?) * Still trying to get documentation to pass * Looks like '---' was killing documentation block. * Update zos_blockinfile with complex examples (#727) * Update zos_blockinfile with complex examples Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragement Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Added additional D SYMBOLS example to zos_operator documentation (#730) * Update zos_operator with an additional example Signed-off-by: ddimatos <dimatos@gmail.com> * Added changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * zos_gather_facts - add sample output to RETURN docstring (#722) * zos_gather_facts - add sample output to RETURN docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add changelog fragment Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * zos_gather_facts RETURN docstring - add sample sysplex_name and lpar_name values Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * Update generated module doc Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: ddimatos <dimatos@gmail.com> * 574find (#668) * change to allow find loop to continue when one particular data set element is not found This has been validated with a zos-check-find.yaml playbook * added functional test that includes non-existant pds entry, which should be skipped over * added changelog fragment. * zos_blockinfile , can quotes in content can be supported (#680) * Verify coomand of ZOAU support the doble quotes and get better output of message * Verify coomand of ZOAU support the doble quotes and get better output of message * Restore to the one tyme function solving TypeError * Test about cases with quotes supported * Solve comments * Comments in the changelog * Adjust test for working accord the text * A needed space * All in structured * Comments solved * Better order --------- Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> Co-authored-by: Demetri <dimatos@gmail.com> * zos_copy mode is applied to the destination directory, a deviation from the communtiy module behavior. (#723) * Verify coomand of ZOAU support the doble quotes and get better output of message * Verify coomand of ZOAU support the doble quotes and get better output of message * Restore to the one tyme function solving TypeError * Test about cases with quotes supported * Solve comments * Comments in the changelog * Adjust test for working accord the text * Solve the dest functional mode set for the applied to destination directory * Identation and spaces * To work well * To work well * To work well * To work well * Changelogs added * Solved the fragment test and separte the cases --------- Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> * corrected job test case that wanted to extract job id. * changed call to zos_job_query in the functional test. * zos_data_set - add force parameter to enable member delete via disp shr (#718) * add force parameter to enable member delete via disp shr Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update link to PR instead of issue Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * fix minor mistake in func test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update DOCstring for missed items highlighted in PR review Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update copyright years in data set module util Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * Update changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update to doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> * Update to doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> * Update to doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> * update doc, rst Signed-off-by: ddimatos <dimatos@gmail.com> * Update doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> * Added handler for job not found edge cases (None not iterable errors) * corrected pep8 issue (bad indent) * removed tracking text from error/not found messages. * Update zos_job_query.py * Add and automate a load lib test case for module zos_copy (#640) * Modified if statement * Added changelog fragment * Corrected statements Corrected PR in changelog fix lint issue * Adding test case on the work * Added assertion to loadlib test Changed cobol src Added newlines * Correcting cobol src * Removed unnecessary comments * Removed deprint statements for debug * Update 601-copy-loadlib-member.yml * Reduce space primary * Make sure cobol is executable * corrected name * Corrected names * Adding debug statements * modified name in link step * Changing copy name * Removed print statements for debug * Removed previous changelog * Removed unused fragment * Removed test case * Add max_rc support for module zos_tso_command (#666) * rebuilding 565 * fixing pep8 issue * corrected not in code, added functional test to use maxrc * changing maxrc return codes as indicated * updated copy_rexx to incorporate max_rc * pep8 issue on an if statement * changed test to dump intermediate values temporarily * added max_rc to arg params to get it to pass through * removed extra output on test * Added changelog fragment. * Update 565-zos_tsocommand_maxrc.yml * PARTIAL correction based on reviewer's feedback. Will complete later today. * Partial push: change origrc to orig_rc and added documentation. Added experimental loop for multiline results. Changed error case to continue through the commands even after a failure (?!) * Changes based on review feedback: - rc is actual returned value - origrc/orig_rc removed - in a list of commands, first error (considering max_rc) stops the chain - while processing commands, a list of success/failures are kept for error state info - changed listds to a longer, more complete string in example * updated test case for max_rc to use longer, more complete listds command * Minor pep8 and yamllint changes * Updated string builder to use append and join, for efficiency. * Update changelog fragment to correspond to pull request number Signed-off-by: ddimatos <dimatos@gmail.com> * removed old fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Changed output of command-by-command to use max_rc instead of Max RC, for consistency. * clarifying failure message when user hasn't set a max_rc. * corrected date on copyright notice to include current year. * corrected copyright date on test case --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: Demetri <dimatos@gmail.com> * added changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * zos_copy forward port a test case for symbols on a volume #739 (#740) * added test case for symbols on a volume Signed-off-by: ddimatos <dimatos@gmail.com> * Changelog fragment added Signed-off-by: ddimatos <dimatos@gmail.com> * Update test case with added comments Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Volume Initialization Module (#654) * Adding ZOS_INIT plugins The module and action for ZOS_Init command, under zos_ickdsf_command. Designed for version 1.3.1, and example given references removed zos_ssh.py. * Create zos_ickdsf_command.rst Tentative documentation. Does not quite list every return, and program may change to work with overall zos core collection. * Update zos_ickdsf_command.py fixed to pass pep8 sanity test. * Update zos_ickdsf_command.py Edited zos_ickdsf_command.py in action folder to pass pep8 style check. * bugfix - undo hard-coded storagegroup param from command passed to mvs_raw Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update encoding and licensing info Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * edit descriptions, leaving in old to verify messaging with dfsms team Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove references to buildix Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * cleanup examples docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add return docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add some unit tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update dict names, some clean up Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update interface and docs with cleaner descriptions and define default values and required true/false where applicable Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove init param and bring out sub-options one level Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename module from zos_ickdsf_command to zos_ickdsf_init Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove files referring to zos_ickdsf_command; i forgot to untrack these in the previous commit Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove params from intitial definition of results dict which are not always present, remove unused return params, rename message to msg to match ibm_zos_core and ansible style Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add ignore missing gplv3 license warning from sanity testings to existing ignore files Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * style edits to pass sanity tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add in-depth task name to first example docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add minimal params functional test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * convert action plugin to module_util, remove IckdsfCommand and CommandInit class definitions and custom error classes, move convert method to module_util Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename rc param, update func test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add note about disabled verify_offline check Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * removed volume_address checks which can be handled by ansiblemodule parsing validation or ickdsf itself; changed function signature of convert method to include results dicts so it can pass it to fail_json method in the case of module failure Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add invalid volume_address func tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * change param names, organize mvscmd return output Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * consolidate negative func tests, add negative tests for vtoc_tracks param, start list for postive tests to implement Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add func tests for "verify_existing_volid" and "verify_no_data_sets_exist" Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add volid positive and negative func tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add mutual exclusion to no index/sms_managedw with func test, minor edits to comments * add func tests for index param Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add func test for vtoc_tracks param Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove 'addr_range', 'volid_prefix' and 'output_html' params; add example with ansible loop to init multiple serial volumes Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * convert 'content' return param to list for prettier playbook output Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add specific examples to docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add guard rail test with volser check set to ensure target volume is as expected Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * switch target test vol to user02, remove existing data sets in initial test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * start adding author names to docstring, edits of yaml for sanity test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename module to zos_volume_init Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove unit test suite which was not kept up to date with code restructure Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove old/outdated .rst file Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * bring up to speed with v1.5.0-beta.1 which is currently more recent than dev Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add sanity test 2.15 file Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * clean up trailing empty lines/space chars Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * more pep8 style Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * pull in more changes from latest dev branch Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * tweak tests/sanity/ignore-2.10.txt file Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename volume_address to address, rename verify_no_data_sets_exist to verify_volume_empty, rename verify_existing_volid to verify_volid, rename vtoc_tracks to vtoc_size, rename return param command to cmd, make suggested doc changes, switch off check_mode Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * resolve pep8 line too long issue Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add year to copyright, add gh user name to author, restyle true/false with code rather than italics, remove backslashes in docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add changelog fragment Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update zos_fetch test case to create data set instead of using one that gets wiped away by zos_volume_init test cases Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * address PR comments including pep8 styling, docstring changes including add seealso section, add details around authorized usage of ickdsf, some light wordsmithing, and remove cmd from return structure Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add backticks to other special chars in docs for consistency Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * on more formatting tweak to national chars in docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add new param to support tmp_hlq Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * extend tmphlq binding to sysprint(*), disable verbose mode for mvscmd call Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove references to betterargparser - it cannot be used in its current state as it is too restrictive on volume serial and qualifier args Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove soft language ie beware, please; reduce line lengths, fix a spelling error Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * reorganize test case code for more logical grouping and un-nesting from a for loop, also update copyright year that was missed Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update display command check to only check particular lines instead of entire output in test_volid_address_assigned_correctly which would always be positive when tested against the entire output Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: trevor-glassey <Trevor.Glassey@ibm.com> * Ported bugfixes for #664 and #678 from 1.5.0 (#743) * Ported bugfixes for #664 and #678 * Updated file normalization for sequential datasets * Fixed encoding when a remote source is used * Added encoding normalization for record length (#664) This is a port and tweak from the normalization found in the L2 support branch. Now the normalization applies for both local and remote source files. There is still work left to validate how this behaves when copying to partitioned datasets, and see if the normalization needs to handle directories as well. * Refactored normalization for USS files * Updated line endings normalization in PDSEs * Updated create_temp_with_lf_endings * Added tests for line endings normalization * Added changelog fragment * Updated changelog fragment * Updated copyright information * Added comments to record length tests * Added more context to the record length normalization * Add force (disp=shr) option to zos_lineinfile and update zos_blockinfile with the same locking test case. (#731) * Add DISP=SHR by function force to the user * Add helper function for force cases only for EOF inserts * Test cases added * Just to check in the change case * All test cases for force in zos_lineinfile working properly * Test cases for force blockinfile * Fixes for style of code * One identation error * One identation error * One identation error * Documentation error * All test cases for line infile and block infile * Comments * Change the stdout to assert with expected of ds * Error of test case * Fragment added * Solve typo errors * Update to remove uncessary comment Signed-off-by: ddimatos <dimatos@gmail.com> * Update changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update module with force comments Signed-off-by: ddimatos <dimatos@gmail.com> * Solve some test problems * Copyright year * Copyright year * Add cases for other ecodes * Add test for all encodings * Cover all encodes test cases with force * Blockinfile force added for other encodes and DS types --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@mbp-de-andre-2.gdl.mex.ibm.com> Co-authored-by: ddimatos <dimatos@gmail.com> * remove duplicate function (#753) * Update branch production branch Main with release v1.5.0 content (#756) (#758) * Bug fix to correct job log returining and other edge cases (#688) * Update zos_job_submit to handle some edge cases and return the job log nearly always * Linting correction * Add changelog fragement for pr 683 * Clean up comments in code * Update restructured text for modules and changelog fragment * Add support to check for security exception to job submit module * Add additonal logic for jobs that use typerun=scan * Update chnagelog fragment * Correct run on sentence --------- * Added additional `D SYMBOLS` example to zos_operator documentation (#729) * Update zos_operator with an additional example * Added change fragment --------- * Ported bugfixes for #664 and #678 * Updated file normalization for sequential datasets * Fixed encoding when a remote source is used * Added encoding normalization for record length (#664) This is a port and tweak from the normalization found in the L2 support branch. Now the normalization applies for both local and remote source files. There is still work left to validate how this behaves when copying to partitioned datasets, and see if the normalization needs to handle directories as well. * Refactored normalization for USS files * Updated line endings normalization in PDSEs * Updated create_temp_with_lf_endings * Added tests for line endings normalization * Added changelog fragment * Add copy lib member test case * Corrected changelog fragment * Add conflict resolution * Update 641-copy-loadlib-member.yml * zos_copy forward port a test case for symbols on a volume (#739) * Add a test case for symbols using an existing volume on our test system * Add changelog fragment * Update 739-zos_copy-volume-symbol-test.yml Update description to use symbols overs special chars. * Update test case with added comments --------- * bring in updates to zos_gather_facts RETURN docstring (#744) * Solve problem 619 about set mode on dest file (#746) * Solve problem 619 about set mode on dest file * Add fragment of 746 PR --------- * Merge release tasks for v1.5.0 into release branch (#752) * Updated metadata * Updated copyright information * Updated copyright in README * Generated changelog * Added previous versions to changelog * Changed a minor_change fragment to bugfix * Updated release notes * Update dependency finder to allow for all tests minus skip tests * Update release notes and changelog with full date formats --------- * remove changelog fragments no longer needed (#754) --------- Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: Ivan Moreno <iamorenosoto@gmail.com> Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> Co-authored-by: Rich Parker <richp405@gmail.com> Co-authored-by: ketankelkar <ktnklkr@gmail.com> Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> * Merge master to dev for 1.6.0 beta.1 (#763) * Update branch production branch Main with release v1.5.0 content (#756) * Bug fix to correct job log returining and other edge cases (#688) * Update zos_job_submit to handle some edge cases and return the job log nearly always Signed-off-by: ddimatos <dimatos@gmail.com> * Linting correction Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragement for pr 683 Signed-off-by: ddimatos <dimatos@gmail.com> * Clean up comments in code Signed-off-by: ddimatos <dimatos@gmail.com> * Update restructured text for modules and changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Add support to check for security exception to job submit module Signed-off-by: ddimatos <dimatos@gmail.com> * Add additonal logic for jobs that use typerun=scan Signed-off-by: ddimatos <dimatos@gmail.com> * Update chnagelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Correct run on sentence Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Added additional `D SYMBOLS` example to zos_operator documentation (#729) * Update zos_operator with an additional example Signed-off-by: ddimatos <dimatos@gmail.com> * Added change fragment Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Ported bugfixes for #664 and #678 * Updated file normalization for sequential datasets * Fixed encoding when a remote source is used * Added encoding normalization for record length (#664) This is a port and tweak from the normalization found in the L2 support branch. Now the normalization applies for both local and remote source files. There is still work left to validate how this behaves when copying to partitioned datasets, and see if the normalization needs to handle directories as well. * Refactored normalization for USS files * Updated line endings normalization in PDSEs * Updated create_temp_with_lf_endings * Added tests for line endings normalization * Added changelog fragment * Add copy lib member test case * Corrected changelog fragment * Add conflict resolution * Update 641-copy-loadlib-member.yml * zos_copy forward port a test case for symbols on a volume (#739) * Add a test case for symbols using an existing volume on our test system Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update 739-zos_copy-volume-symbol-test.yml Update description to use symbols overs special chars. * Update test case with added comments Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * bring in updates to zos_gather_facts RETURN docstring (#744) Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * Solve problem 619 about set mode on dest file (#746) * Solve problem 619 about set mode on dest file * Add fragment of 746 PR --------- Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> * Merge release tasks for v1.5.0 into release branch (#752) * Updated metadata * Updated copyright information * Updated copyright in README * Generated changelog * Added previous versions to changelog * Changed a minor_change fragment to bugfix * Updated release notes * Update dependency finder to allow for all tests minus skip tests Signed-off-by: ddimatos <dimatos@gmail.com> * Update release notes and changelog with full date formats Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: ddimatos <dimatos@gmail.com> * remove changelog fragments no longer needed (#754) Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: Ivan Moreno <iamorenosoto@gmail.com> Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> Co-authored-by: Rich Parker <richp405@gmail.com> Co-authored-by: ketankelkar <ktnklkr@gmail.com> Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> * v1.6.0-beta.1 release (#759) * Added tagging of converted files * Updated tests for zos_encode * Added restoration of PDS/PDSE members when module fails zos_copy can now track which members got overwritten or newly added to a partitioned data set, and restore them accordingly. This commit includes a refactorization of the copy_to_pdse method to simplify the restoration work. * Fixed cleanup of dest when module fails * Removed exception used for debugging * Added pytest markers * solved pep8 issue * Added more information to error when restoration fails * Update make file with new features to manage the nodes (#563) * Update make file with new features to manage the nodes Signed-off-by: ddimatos <dimatos@gmail.com> * Correct typo Signed-off-by: ddimatos <dimatos@gmail.com> * updated makefile to allow for no password in the clear and added a new host Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Fix a bug that failed to copy the cert to the managed node (#577) Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Added apropiate error message in zos_lineinfile when src is not found * Better error message * Added fragment for changelog * Update git issue slection field (#593) * Update git issue slection field * Added some additional future versions * Added rule to ignore python 2.7 compile not supporting f strings * Corrected rule in ignore file * Update 584-zos_lineinfile-error-message.yml * Added missing fragments for issues 309 and 408 * update make file and related artifacts with recent zVM changes (#598) Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Expand what is ingored for venv to all venv's (#613) Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Ansible-core versions with updated ignore file pass (#615) * Validated that ansible-core versions with updated ignore file pass locally Signed-off-by: ddimatos <dimatos@gmail.com> * remove redundant entry on last line Signed-off-by: ddimatos <dimatos@gmail.com> * remove redundant entry on last line Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Update make file to read exported var VENV else default to 'venv' (#614) * Update make file to read exported var VENV else default to 'venv' Signed-off-by: ddimatos <dimatos@gmail.com> * Correct the default from defaultValue to 'venv' Signed-off-by: ddimatos <dimatos@gmail.com> * Bugfix/558/zos copy backup dev (#609) * Create emergency backup only if force is set to False and dest exists * Added Changelog fragment * Updated imports ansible.module_utils._text to ansible.module_utils.common.text.converters (#602) * Updated import in zos_fetch * Updated import in zos_job_submit * Updated import in module_utils/data_set * Updated import in module_utils/System * Updated import in zos_copy * Updated import in zos_fetch * Added changelog fragment * Update enhancement-518-text-converter-import.yml * Updates the makefile and related scripts for the latest mounts (#628) * Update makefile coments and use --ignore for pytest Signed-off-by: ddimatos <dimatos@gmail.com> * lexicographical order targets Signed-off-by: ddimatos <dimatos@gmail.com> * Update makefile and artifacts to support new mounts Signed-off-by: ddimatos <dimatos@gmail.com> * Removing encrypted versions of the scripts, no longer senstive content Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * update profile created for mount points Signed-off-by: ddimatos <dimatos@gmail.com> * Correct f-string usage to remain 2.7 compatible (#659) * Correct f-string usage to remain 2.7 compatible Signed-off-by: ddimatos <dimatos@gmail.com> * Update changelog fragments Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Updated shell scripts for development tooling Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Update collab template Signed-off-by: ddimatos <dimatos@gmail.com> * Update collab template Signed-off-by: ddimatos <dimatos@gmail.com> * Update collab template Signed-off-by: ddimatos <dimatos@gmail.com> * Template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Template updates Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * changed zos_job_query to allow multi/embedded wildcard job_values. job.py/_get_job_status now uses fnmatch to test for wildcarded job_names. added test looking for a wildcard job name. added basic changelog text as 'enhancement'. * expanded wildcard support to job_id as well as job_name corrected error-handling issue in query routine playbook tests complete * cleaned up pep8 issues * Bug fix to correct job log returining and other edge cases (#683) * Update zos_job_submit to handle some edge cases and return the job log nearly always Signed-off-by: ddimatos <dimatos@gmail.com> * Linting correction Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragement for pr 683 Signed-off-by: ddimatos <dimatos@gmail.com> * Clean up comments in code Signed-off-by: ddimatos <dimatos@gmail.com> * Update restructured text for modules and changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Add support to check for security exception to job submit module Signed-off-by: ddimatos <dimatos@gmail.com> * Add additonal logic for jobs that use typerun=scan Signed-off-by: ddimatos <dimatos@gmail.com> * Update chnagelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update zos_job_submit.py Grammar change and correction. --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Added uss_tag_encoding function * Fixing linter issues * removed extraneous comment on query, eliminated unused variable * responding to reviewer comments * Expanded query tests to run a hello jcl file to make sure we are searching for a job id that exists. * Expanded documentation in code to highlight asterisk availability. Added examples showing multi-asterisk search. * Corrected 2 documentation errors * Change to documentation text (indent on multi line string?) * Still trying to get documentation to pass * Looks like '---' was killing documentation block. * Update zos_blockinfile with complex examples (#727) * Update zos_blockinfile with complex examples Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragement Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Added additional D SYMBOLS example to zos_operator documentation (#730) * Update zos_operator with an additional example Signed-off-by: ddimatos <dimatos@gmail.com> * Added changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * zos_gather_facts - add sample output to RETURN docstring (#722) * zos_gather_facts - add sample output to RETURN docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add changelog fragment Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * zos_gather_facts RETURN docstring - add sample sysplex_name and lpar_name values Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * Update generated module doc Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: ddimatos <dimatos@gmail.com> * 574find (#668) * change to allow find loop to continue when one particular data set element is not found This has been validated with a zos-check-find.yaml playbook * added functional test that includes non-existant pds entry, which should be skipped over * added changelog fragment. * zos_blockinfile , can quotes in content can be supported (#680) * Verify coomand of ZOAU support the doble quotes and get better output of message * Verify coomand of ZOAU support the doble quotes and get better output of message * Restore to the one tyme function solving TypeError * Test about cases with quotes supported * Solve comments * Comments in the changelog * Adjust test for working accord the text * A needed space * All in structured * Comments solved * Better order --------- Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> Co-authored-by: Demetri <dimatos@gmail.com> * zos_copy mode is applied to the destination directory, a deviation from the communtiy module behavior. (#723) * Verify coomand of ZOAU support the doble quotes and get better output of message * Verify coomand of ZOAU support the doble quotes and get better output of message * Restore to the one tyme function solving TypeError * Test about cases with quotes supported * Solve comments * Comments in the changelog * Adjust test for working accord the text * Solve the dest functional mode set for the applied to destination directory * Identation and spaces * To work well * To work well * To work well * To work well * Changelogs added * Solved the fragment test and separte the cases --------- Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> * corrected job test case that wanted to extract job id. * changed call to zos_job_query in the functional test. * zos_data_set - add force parameter to enable member delete via disp shr (#718) * add force parameter to enable member delete via disp shr Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update link to PR instead of issue Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * fix minor mistake in func test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update DOCstring for missed items highlighted in PR review Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update copyright years in data set module util Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * Update changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update to doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> * Update to doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> * Update to doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> * update doc, rst Signed-off-by: ddimatos <dimatos@gmail.com> * Update doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> * Added handler for job not found edge cases (None not iterable errors) * corrected pep8 issue (bad indent) * removed tracking text from error/not found messages. * Update zos_job_query.py * Add and automate a load lib test case for module zos_copy (#640) * Modified if statement * Added changelog fragment * Corrected statements Corrected PR in changelog fix lint issue * Adding test case on the work * Added assertion to loadlib test Changed cobol src Added newlines * Correcting cobol src * Removed unnecessary comments * Removed deprint statements for debug * Update 601-copy-loadlib-member.yml * Reduce space primary * Make sure cobol is executable * corrected name * Corrected names * Adding debug statements * modified name in link step * Changing copy name * Removed print statements for debug * Removed previous changelog * Removed unused fragment * Removed test case * Add max_rc support for module zos_tso_command (#666) * rebuilding 565 * fixing pep8 issue * corrected not in code, added functional test to use maxrc * changing maxrc return codes as indicated * updated copy_rexx to incorporate max_rc * pep8 issue on an if statement * changed test to dump intermediate values temporarily * added max_rc to arg params to get it to pass through * removed extra output on test * Added changelog fragment. * Update 565-zos_tsocommand_maxrc.yml * PARTIAL correction based on reviewer's feedback. Will complete later today. * Partial push: change origrc to orig_rc and added documentation. Added experimental loop for multiline results. Changed error case to continue through the commands even after a failure (?!) * Changes based on review feedback: - rc is actual returned value - origrc/orig_rc removed - in a list of commands, first error (considering max_rc) stops the chain - while processing commands, a list of success/failures are kept for error state info - changed listds to a longer, more complete string in example * updated test case for max_rc to use longer, more complete listds command * Minor pep8 and yamllint changes * Updated string builder to use append and join, for efficiency. * Update changelog fragment to correspond to pull request number Signed-off-by: ddimatos <dimatos@gmail.com> * removed old fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Changed output of command-by-command to use max_rc instead of Max RC, for consistency. * clarifying failure message when user hasn't set a max_rc. * corrected date on copyright notice to include current year. * corrected copyright date on test case --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: Demetri <dimatos@gmail.com> * added changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * zos_copy forward port a test case for symbols on a volume #739 (#740) * added test case for symbols on a volume Signed-off-by: ddimatos <dimatos@gmail.com> * Changelog fragment added Signed-off-by: ddimatos <dimatos@gmail.com> * Update test case with added comments Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Volume Initialization Module (#654) * Adding ZOS_INIT plugins The module and action for ZOS_Init command, under zos_ickdsf_command. Designed for version 1.3.1, and example given references removed zos_ssh.py. * Create zos_ickdsf_command.rst Tentative documentation. Does not quite list every return, and program may change to work with overall zos core collection. * Update zos_ickdsf_command.py fixed to pass pep8 sanity test. * Update zos_ickdsf_command.py Edited zos_ickdsf_command.py in action folder to pass pep8 style check. * bugfix - undo hard-coded storagegroup param from command passed to mvs_raw Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update encoding and licensing info Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * edit descriptions, leaving in old to verify messaging with dfsms team Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove references to buildix Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * cleanup examples docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add return docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add some unit tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update dict names, some clean up Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update interface and docs with cleaner descriptions and define default values and required true/false where applicable Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove init param and bring out sub-options one level Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename module from zos_ickdsf_command to zos_ickdsf_init Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove files referring to zos_ickdsf_command; i forgot to untrack these in the previous commit Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove params from intitial definition of results dict which are not always present, remove unused return params, rename message to msg to match ibm_zos_core and ansible style Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add ignore missing gplv3 license warning from sanity testings to existing ignore files Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * style edits to pass sanity tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add in-depth task name to first example docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add minimal params functional test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * convert action plugin to module_util, remove IckdsfCommand and CommandInit class definitions and custom error classes, move convert method to module_util Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename rc param, update func test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add note about disabled verify_offline check Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * removed volume_address checks which can be handled by ansiblemodule parsing validation or ickdsf itself; changed function signature of convert method to include results dicts so it can pass it to fail_json method in the case of module failure Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add invalid volume_address func tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * change param names, organize mvscmd return output Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * consolidate negative func tests, add negative tests for vtoc_tracks param, start list for postive tests to implement Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add func tests for "verify_existing_volid" and "verify_no_data_sets_exist" Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add volid positive and negative func tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add mutual exclusion to no index/sms_managedw with func test, minor edits to comments * add func tests for index param Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add func test for vtoc_tracks param Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove 'addr_range', 'volid_prefix' and 'output_html' params; add example with ansible loop to init multiple serial volumes Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * convert 'content' return param to list for prettier playbook output Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add specific examples to docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add guard rail test with volser check set to ensure target volume is as expected Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * switch target test vol to user02, remove existing data sets in initial test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * start adding author names to docstring, edits of yaml for sanity test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename module to zos_volume_init Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove unit test suite which was not kept up to date with code restructure Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove old/outdated .rst file Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * bring up to speed with v1.5.0-beta.1 which is currently more recent than dev Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add sanity test 2.15 file Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * clean up trailing empty lines/space chars Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * more pep8 style Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * pull in more changes from latest dev branch Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * tweak tests/sanity/ignore-2.10.txt file Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename volume_address to address, rename verify_no_data_sets_exist to verify_volume_empty, rename verify_existing_volid to verify_volid, rename vtoc_tracks to vtoc_size, rename return param command to cmd, make suggested doc changes, switch off check_mode Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * resolve pep8 line too long issue Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add year to copyright, add gh user name to author, restyle true/false with code rather than italics, remove backslashes in docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add changelog fragment Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update zos_fetch test case to create data set instead of using one that gets wiped away by zos_volume_init test cases Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * address PR comments including pep8 styling, docstring changes including add seealso section, add details around authorized usage of ickdsf, some light wordsmithing, and remove cmd from return structure Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add backticks to other special chars in docs for consistency Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * on more formatting tweak to national chars in docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add new param to support tmp_hlq Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * extend tmphlq binding to sysprint(*), disable verbose mode for mvscmd call Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove references to betterargparser - it cannot be used in its current state as it is too restrictive on volume serial and qualifier args Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove soft language ie beware, please; reduce line lengths, fix a spelling error Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * reorganize test case code for more logical grouping and un-nesting from a for loop, also update copyright year that was missed Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update display command check to only check particular lines instead of entire output in test_volid_address_assigned_correctly which would always be positive when tested against the entire output Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: trevor-glassey <Trevor.Glassey@ibm.com> * Ported bugfixes for #664 and #678 from 1.5.0 (#743) * Ported bugfixes for #664 and #678 * Updated file normalization for sequential datasets * Fixed encoding when a remote source is used * Added encoding normalization for record length (#664) This is a port and tweak from the normalization found in the L2 support branch. Now the normalization applies for both local and remote source files. There is still work left to validate how this behaves when copying to partitioned datasets, and see if the normalization needs to handle directories as well. * Refactored normalization for USS files * Updated line endings normalization in PDSEs * Updated create_temp_with_lf_endings * Added tests for line endings normalization * Added changelog fragment * Updated changelog fragment * Updated copyright information * Added comments to record length tests * Added more context to the record length normalization * Add force (disp=shr) option to zos_lineinfile and update zos_blockinfile with the same locking test case. (#731) * Add DISP=SHR by function force to the user * Add helper function for force cases only for EOF inserts * Test cases added * Just to check in the change case * All test cases for force in zos_lineinfile working properly * Test cases for force blockinfile * Fixes for style of code * One identation error * One identation error * One identation error * Documentation error * All test cases for line infile and block infile * Comments * Change the stdout to assert with expected of ds * Error of test case * Fragment added * Solve typo errors * Update to remove uncessary comment Signed-off-by: ddimatos <dimatos@gmail.com> * Update changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update module with force comments Signed-off-by: ddimatos <dimatos@gmail.com> * Solve some test problems * Copyright year * Copyright year * Add cases for other ecodes * Add test for all encodings * Cover all encodes test cases with force * Blockinfile force added for other encodes and DS types --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@mbp-de-andre-2.gdl.mex.ibm.com> Co-authored-by: ddimatos <dimatos@gmail.com> * remove duplicate function (#753) * Update branch production branch Main with release v1.5.0 content (#756) (#758) * Bug fix to correct job log returining and other edge cases (#688) * Update zos_job_submit to handle some edge cases and return the job log nearly always * Linting correction * Add changelog fragement for pr 683 * Clean up comments in code * Update restructured text for modules and changelog fragment * Add support to check for security exception to job submit module * Add additonal logic for jobs that use typerun=scan * Update chnagelog fragment * Correct run on sentence --------- * Added additional `D SYMBOLS` example to zos_operator documentation (#729) * Update zos_operator with an additional example * Added change fragment --------- * Ported bugfixes for #664 and #678 * Updated file normalization for sequential datasets * Fixed encoding when a remote source is used * Added encoding normalization for record length (#664) This is a port and tweak from the normalization found in the L2 support branch. Now the normalization applies for both local and remote source files. There is still work left to validate how this behaves when copying to partitioned datasets, and see if the normalization needs to handle directories as well. * Refactored normalization for USS files * Updated line endings normalization in PDSEs * Updated create_temp_with_lf_endings * Added tests for line endings normalization * Added changelog fragment * Add copy lib member test case * Corrected changelog fragment * Add conflict resolution * Update 641-copy-loadlib-member.yml * zos_copy forward port a test case for symbols on a volume (#739) * Add a test case for symbols using an existing volume on our test system * Add changelog fragment * Update 739-zos_copy-volume-symbol-test.yml Update description to use symbols overs special chars. * Update test case with added comments --------- * bring in updates to zos_gather_facts RETURN docstring (#744) * Solve problem 619 about set mode on dest file (#746) * Solve problem 619 about set mode on dest file * Add fragment of 746 PR --------- * Merge release tasks for v1.5.0 into release branch (#752) * Updated metadata * Updated copyright information * Updated copyright in README * Generated changelog * Added previous versions to changelog * Changed a minor_change fragment to bugfix * Updated release notes * Update dependency finder to allow for all tests minus skip tests * Update release notes and changelog with full date formats --------- * remove changelog fragments no longer needed (#754) --------- Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: Ivan Moreno <iamorenosoto@gmail.com> Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> Co-authored-by: Rich Parker <richp405@gmail.com> Co-authored-by: ketankelkar <ktnklkr@gmail.com> Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> * update galaxy and meta/ files Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update copyright years Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update rst doc files Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add 1.6.0-beta.1 release summary fragment Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * generate changelog Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add release notes Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add additional stylizing Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * Delete fragements once changelog is generated Signed-off-by: ddimatos <dimatos@gmail.com> * update stylizing around *none* in zos_volume_init docs Signed-off-by: Ketan… * Staging v1.7.0 beta.2 (#939) * Enhancement/866 archive (#930) * Added action plugin zos_unarchive * Added zos_archive changes * Added zos_unarchive changes * Added zos_archive tests changes * Added test zos_unarchive changes * Added zos_archive changes * fixed pep8 issues * Changed source to src in docs * Added correct copyright year * Updated docs * Added changelog fragments * Updated docs * Updated galaxy.yml * Updated meta * Updated docs * Added zos_gather_facts rst * Added changelog * Added release notes * Changed variable name to avoid shadowing import * Delete 930-archive-post-beta.yml * Delete v1.7.0-beta.2_summary.yml * Staging v1.7.0 merge to main (#1019) * Galaxy 1.7 updates Signed-off-by: ddimatos <dimatos@gmail.com> * Update meta runtime to support ansible-core 2.14 or later Signed-off-by: ddimatos <dimatos@gmail.com> * Update ibm_zos_core_meta.yml with updated version Signed-off-by: ddimatos <dimatos@gmail.com> * Update readme to align to supported ansible versions and new urls Signed-off-by: ddimatos <dimatos@gmail.com> * Added additional sanity ignore files to the exclude list Signed-off-by: ddimatos <dimatos@gmail.com> * Added additional sanity ignore files to the exclude list for ansible-lint. Signed-off-by: ddimatos <dimatos@gmail.com> * Update copyright yrs for source files that were overlooked Signed-off-by: ddimatos <dimatos@gmail.com> * Remove requirements from module doc, rely on offerings minimum requirements, also zoau 1.2.1 never was supported Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog summary for 1.7 Signed-off-by: ddimatos <dimatos@gmail.com> * Adding generated antsibull-changelog release changelog and artifacts Signed-off-by: ddimatos <dimatos@gmail.com> * Remove v1.7.0_summary, its no longer needed Signed-off-by: ddimatos <dimatos@gmail.com> * Update release notes for ac 1.7.0 Signed-off-by: ddimatos <dimatos@gmail.com> * Remove unsupported collection versions requiring a version of zoau that is EOS Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Staging v1.8.0 beta.1 (#1037) * Corrected rule in ignore file * Update 584-zos_lineinfile-error-message.yml * Added missing fragments for issues 309 and 408 * update make file and related artifacts with recent zVM changes (#598) Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Expand what is ingored for venv to all venv's (#613) Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Ansible-core versions with updated ignore file pass (#615) * Validated that ansible-core versions with updated ignore file pass locally Signed-off-by: ddimatos <dimatos@gmail.com> * remove redundant entry on last line Signed-off-by: ddimatos <dimatos@gmail.com> * remove redundant entry on last line Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Update make file to read exported var VENV else default to 'venv' (#614) * Update make file to read exported var VENV else default to 'venv' Signed-off-by: ddimatos <dimatos@gmail.com> * Correct the default from defaultValue to 'venv' Signed-off-by: ddimatos <dimatos@gmail.com> * Bugfix/558/zos copy backup dev (#609) * Create emergency backup only if force is set to False and dest exists * Added Changelog fragment * Updated imports ansible.module_utils._text to ansible.module_utils.common.text.converters (#602) * Updated import in zos_fetch * Updated import in zos_job_submit * Updated import in module_utils/data_set * Updated import in module_utils/System * Updated import in zos_copy * Updated import in zos_fetch * Added changelog fragment * Update enhancement-518-text-converter-import.yml * Updates the makefile and related scripts for the latest mounts (#628) * Update makefile coments and use --ignore for pytest Signed-off-by: ddimatos <dimatos@gmail.com> * lexicographical order targets Signed-off-by: ddimatos <dimatos@gmail.com> * Update makefile and artifacts to support new mounts Signed-off-by: ddimatos <dimatos@gmail.com> * Removing encrypted versions of the scripts, no longer senstive content Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * update profile created for mount points Signed-off-by: ddimatos <dimatos@gmail.com> * Correct f-string usage to remain 2.7 compatible (#659) * Correct f-string usage to remain 2.7 compatible Signed-off-by: ddimatos <dimatos@gmail.com> * Update changelog fragments Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Updated shell scripts for development tooling Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Update collab template Signed-off-by: ddimatos <dimatos@gmail.com> * Update collab template Signed-off-by: ddimatos <dimatos@gmail.com> * Update collab template Signed-off-by: ddimatos <dimatos@gmail.com> * Template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Template updates Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * changed zos_job_query to allow multi/embedded wildcard job_values. job.py/_get_job_status now uses fnmatch to test for wildcarded job_names. added test looking for a wildcard job name. added basic changelog text as 'enhancement'. * expanded wildcard support to job_id as well as job_name corrected error-handling issue in query routine playbook tests complete * cleaned up pep8 issues * Bug fix to correct job log returining and other edge cases (#683) * Update zos_job_submit to handle some edge cases and return the job log nearly always Signed-off-by: ddimatos <dimatos@gmail.com> * Linting correction Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragement for pr 683 Signed-off-by: ddimatos <dimatos@gmail.com> * Clean up comments in code Signed-off-by: ddimatos <dimatos@gmail.com> * Update restructured text for modules and changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Add support to check for security exception to job submit module Signed-off-by: ddimatos <dimatos@gmail.com> * Add additonal logic for jobs that use typerun=scan Signed-off-by: ddimatos <dimatos@gmail.com> * Update chnagelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update zos_job_submit.py Grammar change and correction. --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Added uss_tag_encoding function * Fixing linter issues * removed extraneous comment on query, eliminated unused variable * responding to reviewer comments * Expanded query tests to run a hello jcl file to make sure we are searching for a job id that exists. * Expanded documentation in code to highlight asterisk availability. Added examples showing multi-asterisk search. * Corrected 2 documentation errors * Change to documentation text (indent on multi line string?) * Still trying to get documentation to pass * Looks like '---' was killing documentation block. * Update zos_blockinfile with complex examples (#727) * Update zos_blockinfile with complex examples Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragement Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Added additional D SYMBOLS example to zos_operator documentation (#730) * Update zos_operator with an additional example Signed-off-by: ddimatos <dimatos@gmail.com> * Added changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * zos_gather_facts - add sample output to RETURN docstring (#722) * zos_gather_facts - add sample output to RETURN docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add changelog fragment Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * zos_gather_facts RETURN docstring - add sample sysplex_name and lpar_name values Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * Update generated module doc Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: ddimatos <dimatos@gmail.com> * 574find (#668) * change to allow find loop to continue when one particular data set element is not found This has been validated with a zos-check-find.yaml playbook * added functional test that includes non-existant pds entry, which should be skipped over * added changelog fragment. * zos_blockinfile , can quotes in content can be supported (#680) * Verify coomand of ZOAU support the doble quotes and get better output of message * Verify coomand of ZOAU support the doble quotes and get better output of message * Restore to the one tyme function solving TypeError * Test about cases with quotes supported * Solve comments * Comments in the changelog * Adjust test for working accord the text * A needed space * All in structured * Comments solved * Better order --------- Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> Co-authored-by: Demetri <dimatos@gmail.com> * zos_copy mode is applied to the destination directory, a deviation from the communtiy module behavior. (#723) * Verify coomand of ZOAU support the doble quotes and get better output of message * Verify coomand of ZOAU support the doble quotes and get better output of message * Restore to the one tyme function solving TypeError * Test about cases with quotes supported * Solve comments * Comments in the changelog * Adjust test for working accord the text * Solve the dest functional mode set for the applied to destination directory * Identation and spaces * To work well * To work well * To work well * To work well * Changelogs added * Solved the fragment test and separte the cases --------- Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> * corrected job test case that wanted to extract job id. * changed call to zos_job_query in the functional test. * zos_data_set - add force parameter to enable member delete via disp shr (#718) * add force parameter to enable member delete via disp shr Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update link to PR instead of issue Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * fix minor mistake in func test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update DOCstring for missed items highlighted in PR review Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update copyright years in data set module util Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * Update changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update to doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> * Update to doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> * Update to doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> * update doc, rst Signed-off-by: ddimatos <dimatos@gmail.com> * Update doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> * Added handler for job not found edge cases (None not iterable errors) * corrected pep8 issue (bad indent) * removed tracking text from error/not found messages. * Update zos_job_query.py * Add and automate a load lib test case for module zos_copy (#640) * Modified if statement * Added changelog fragment * Corrected statements Corrected PR in changelog fix lint issue * Adding test case on the work * Added assertion to loadlib test Changed cobol src Added newlines * Correcting cobol src * Removed unnecessary comments * Removed deprint statements for debug * Update 601-copy-loadlib-member.yml * Reduce space primary * Make sure cobol is executable * corrected name * Corrected names * Adding debug statements * modified name in link step * Changing copy name * Removed print statements for debug * Removed previous changelog * Removed unused fragment * Removed test case * Add max_rc support for module zos_tso_command (#666) * rebuilding 565 * fixing pep8 issue * corrected not in code, added functional test to use maxrc * changing maxrc return codes as indicated * updated copy_rexx to incorporate max_rc * pep8 issue on an if statement * changed test to dump intermediate values temporarily * added max_rc to arg params to get it to pass through * removed extra output on test * Added changelog fragment. * Update 565-zos_tsocommand_maxrc.yml * PARTIAL correction based on reviewer's feedback. Will complete later today. * Partial push: change origrc to orig_rc and added documentation. Added experimental loop for multiline results. Changed error case to continue through the commands even after a failure (?!) * Changes based on review feedback: - rc is actual returned value - origrc/orig_rc removed - in a list of commands, first error (considering max_rc) stops the chain - while processing commands, a list of success/failures are kept for error state info - changed listds to a longer, more complete string in example * updated test case for max_rc to use longer, more complete listds command * Minor pep8 and yamllint changes * Updated string builder to use append and join, for efficiency. * Update changelog fragment to correspond to pull request number Signed-off-by: ddimatos <dimatos@gmail.com> * removed old fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Changed output of command-by-command to use max_rc instead of Max RC, for consistency. * clarifying failure message when user hasn't set a max_rc. * corrected date on copyright notice to include current year. * corrected copyright date on test case --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: Demetri <dimatos@gmail.com> * added changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * zos_copy forward port a test case for symbols on a volume #739 (#740) * added test case for symbols on a volume Signed-off-by: ddimatos <dimatos@gmail.com> * Changelog fragment added Signed-off-by: ddimatos <dimatos@gmail.com> * Update test case with added comments Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Volume Initialization Module (#654) * Adding ZOS_INIT plugins The module and action for ZOS_Init command, under zos_ickdsf_command. Designed for version 1.3.1, and example given references removed zos_ssh.py. * Create zos_ickdsf_command.rst Tentative documentation. Does not quite list every return, and program may change to work with overall zos core collection. * Update zos_ickdsf_command.py fixed to pass pep8 sanity test. * Update zos_ickdsf_command.py Edited zos_ickdsf_command.py in action folder to pass pep8 style check. * bugfix - undo hard-coded storagegroup param from command passed to mvs_raw Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update encoding and licensing info Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * edit descriptions, leaving in old to verify messaging with dfsms team Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove references to buildix Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * cleanup examples docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add return docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add some unit tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update dict names, some clean up Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update interface and docs with cleaner descriptions and define default values and required true/false where applicable Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove init param and bring out sub-options one level Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename module from zos_ickdsf_command to zos_ickdsf_init Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove files referring to zos_ickdsf_command; i forgot to untrack these in the previous commit Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove params from intitial definition of results dict which are not always present, remove unused return params, rename message to msg to match ibm_zos_core and ansible style Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add ignore missing gplv3 license warning from sanity testings to existing ignore files Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * style edits to pass sanity tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add in-depth task name to first example docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add minimal params functional test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * convert action plugin to module_util, remove IckdsfCommand and CommandInit class definitions and custom error classes, move convert method to module_util Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename rc param, update func test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add note about disabled verify_offline check Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * removed volume_address checks which can be handled by ansiblemodule parsing validation or ickdsf itself; changed function signature of convert method to include results dicts so it can pass it to fail_json method in the case of module failure Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add invalid volume_address func tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * change param names, organize mvscmd return output Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * consolidate negative func tests, add negative tests for vtoc_tracks param, start list for postive tests to implement Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add func tests for "verify_existing_volid" and "verify_no_data_sets_exist" Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add volid positive and negative func tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add mutual exclusion to no index/sms_managedw with func test, minor edits to comments * add func tests for index param Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add func test for vtoc_tracks param Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove 'addr_range', 'volid_prefix' and 'output_html' params; add example with ansible loop to init multiple serial volumes Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * convert 'content' return param to list for prettier playbook output Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add specific examples to docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add guard rail test with volser check set to ensure target volume is as expected Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * switch target test vol to user02, remove existing data sets in initial test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * start adding author names to docstring, edits of yaml for sanity test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename module to zos_volume_init Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove unit test suite which was not kept up to date with code restructure Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove old/outdated .rst file Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * bring up to speed with v1.5.0-beta.1 which is currently more recent than dev Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add sanity test 2.15 file Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * clean up trailing empty lines/space chars Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * more pep8 style Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * pull in more changes from latest dev branch Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * tweak tests/sanity/ignore-2.10.txt file Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename volume_address to address, rename verify_no_data_sets_exist to verify_volume_empty, rename verify_existing_volid to verify_volid, rename vtoc_tracks to vtoc_size, rename return param command to cmd, make suggested doc changes, switch off check_mode Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * resolve pep8 line too long issue Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add year to copyright, add gh user name to author, restyle true/false with code rather than italics, remove backslashes in docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add changelog fragment Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update zos_fetch test case to create data set instead of using one that gets wiped away by zos_volume_init test cases Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * address PR comments including pep8 styling, docstring changes including add seealso section, add details around authorized usage of ickdsf, some light wordsmithing, and remove cmd from return structure Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add backticks to other special chars in docs for consistency Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * on more formatting tweak to national chars in docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add new param to support tmp_hlq Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * extend tmphlq binding to sysprint(*), disable verbose mode for mvscmd call Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove references to betterargparser - it cannot be used in its current state as it is too restrictive on volume serial and qualifier args Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove soft language ie beware, please; reduce line lengths, fix a spelling error Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * reorganize test case code for more logical grouping and un-nesting from a for loop, also update copyright year that was missed Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update display command check to only check particular lines instead of entire output in test_volid_address_assigned_correctly which would always be positive when tested against the entire output Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: trevor-glassey <Trevor.Glassey@ibm.com> * Ported bugfixes for #664 and #678 from 1.5.0 (#743) * Ported bugfixes for #664 and #678 * Updated file normalization for sequential datasets * Fixed encoding when a remote source is used * Added encoding normalization for record length (#664) This is a port and tweak from the normalization found in the L2 support branch. Now the normalization applies for both local and remote source files. There is still work left to validate how this behaves when copying to partitioned datasets, and see if the normalization needs to handle directories as well. * Refactored normalization for USS files * Updated line endings normalization in PDSEs * Updated create_temp_with_lf_endings * Added tests for line endings normalization * Added changelog fragment * Updated changelog fragment * Updated copyright information * Added comments to record length tests * Added more context to the record length normalization * Add force (disp=shr) option to zos_lineinfile and update zos_blockinfile with the same locking test case. (#731) * Add DISP=SHR by function force to the user * Add helper function for force cases only for EOF inserts * Test cases added * Just to check in the change case * All test cases for force in zos_lineinfile working properly * Test cases for force blockinfile * Fixes for style of code * One identation error * One identation error * One identation error * Documentation error * All test cases for line infile and block infile * Comments * Change the stdout to assert with expected of ds * Error of test case * Fragment added * Solve typo errors * Update to remove uncessary comment Signed-off-by: ddimatos <dimatos@gmail.com> * Update changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update module with force comments Signed-off-by: ddimatos <dimatos@gmail.com> * Solve some test problems * Copyright year * Copyright year * Add cases for other ecodes * Add test for all encodings * Cover all encodes test cases with force * Blockinfile force added for other encodes and DS types --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@mbp-de-andre-2.gdl.mex.ibm.com> Co-authored-by: ddimatos <dimatos@gmail.com> * remove duplicate function (#753) * Update branch production branch Main with release v1.5.0 content (#756) (#758) * Bug fix to correct job log returining and other edge cases (#688) * Update zos_job_submit to handle some edge cases and return the job log nearly always * Linting correction * Add changelog fragement for pr 683 * Clean up comments in code * Update restructured text for modules and changelog fragment * Add support to check for security exception to job submit module * Add additonal logic for jobs that use typerun=scan * Update chnagelog fragment * Correct run on sentence --------- * Added additional `D SYMBOLS` example to zos_operator documentation (#729) * Update zos_operator with an additional example * Added change fragment --------- * Ported bugfixes for #664 and #678 * Updated file normalization for sequential datasets * Fixed encoding when a remote source is used * Added encoding normalization for record length (#664) This is a port and tweak from the normalization found in the L2 support branch. Now the normalization applies for both local and remote source files. There is still work left to validate how this behaves when copying to partitioned datasets, and see if the normalization needs to handle directories as well. * Refactored normalization for USS files * Updated line endings normalization in PDSEs * Updated create_temp_with_lf_endings * Added tests for line endings normalization * Added changelog fragment * Add copy lib member test case * Corrected changelog fragment * Add conflict resolution * Update 641-copy-loadlib-member.yml * zos_copy forward port a test case for symbols on a volume (#739) * Add a test case for symbols using an existing volume on our test system * Add changelog fragment * Update 739-zos_copy-volume-symbol-test.yml Update description to use symbols overs special chars. * Update test case with added comments --------- * bring in updates to zos_gather_facts RETURN docstring (#744) * Solve problem 619 about set mode on dest file (#746) * Solve problem 619 about set mode on dest file * Add fragment of 746 PR --------- * Merge release tasks for v1.5.0 into release branch (#752) * Updated metadata * Updated copyright information * Updated copyright in README * Generated changelog * Added previous versions to changelog * Changed a minor_change fragment to bugfix * Updated release notes * Update dependency finder to allow for all tests minus skip tests * Update release notes and changelog with full date formats --------- * remove changelog fragments no longer needed (#754) --------- Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: Ivan Moreno <iamorenosoto@gmail.com> Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> Co-authored-by: Rich Parker <richp405@gmail.com> Co-authored-by: ketankelkar <ktnklkr@gmail.com> Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> * Merge master to dev for 1.6.0 beta.1 (#763) * Update branch production branch Main with release v1.5.0 content (#756) * Bug fix to correct job log returining and other edge cases (#688) * Update zos_job_submit to handle some edge cases and return the job log nearly always Signed-off-by: ddimatos <dimatos@gmail.com> * Linting correction Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragement for pr 683 Signed-off-by: ddimatos <dimatos@gmail.com> * Clean up comments in code Signed-off-by: ddimatos <dimatos@gmail.com> * Update restructured text for modules and changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Add support to check for security exception to job submit module Signed-off-by: ddimatos <dimatos@gmail.com> * Add additonal logic for jobs that use typerun=scan Signed-off-by: ddimatos <dimatos@gmail.com> * Update chnagelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Correct run on sentence Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Added additional `D SYMBOLS` example to zos_operator documentation (#729) * Update zos_operator with an additional example Signed-off-by: ddimatos <dimatos@gmail.com> * Added change fragment Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Ported bugfixes for #664 and #678 * Updated file normalization for sequential datasets * Fixed encoding when a remote source is used * Added encoding normalization for record length (#664) This is a port and tweak from the normalization found in the L2 support branch. Now the normalization applies for both local and remote source files. There is still work left to validate how this behaves when copying to partitioned datasets, and see if the normalization needs to handle directories as well. * Refactored normalization for USS files * Updated line endings normalization in PDSEs * Updated create_temp_with_lf_endings * Added tests for line endings normalization * Added changelog fragment * Add copy lib member test case * Corrected changelog fragment * Add conflict resolution * Update 641-copy-loadlib-member.yml * zos_copy forward port a test case for symbols on a volume (#739) * Add a test case for symbols using an existing volume on our test system Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update 739-zos_copy-volume-symbol-test.yml Update description to use symbols overs special chars. * Update test case with added comments Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * bring in updates to zos_gather_facts RETURN docstring (#744) Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * Solve problem 619 about set mode on dest file (#746) * Solve problem 619 about set mode on dest file * Add fragment of 746 PR --------- Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> * Merge release tasks for v1.5.0 into release branch (#752) * Updated metadata * Updated copyright information * Updated copyright in README * Generated changelog * Added previous versions to changelog * Changed a minor_change fragment to bugfix * Updated release notes * Update dependency finder to allow for all tests minus skip tests Signed-off-by: ddimatos <dimatos@gmail.com> * Update release notes and changelog with full date formats Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: ddimatos <dimatos@gmail.com> * remove changelog fragments no longer needed (#754) Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: Ivan Moreno <iamorenosoto@gmail.com> Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> Co-authored-by: Rich Parker <richp405@gmail.com> Co-authored-by: ketankelkar <ktnklkr@gmail.com> Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> * v1.6.0-beta.1 release (#759) * Added tagging of converted files * Updated tests for zos_encode * Added restoration of PDS/PDSE members when module fails zos_copy can now track which members got overwritten or newly added to a partitioned data set, and restore them accordingly. This commit includes a refactorization of the copy_to_pdse method to simplify the restoration work. * Fixed cleanup of dest when module fails * Removed exception used for debugging * Added pytest markers * solved pep8 issue * Added more information to error when restoration fails * Update make file with new features to manage the nodes (#563) * Update make file with new features to manage the nodes Signed-off-by: ddimatos <dimatos@gmail.com> * Correct typo Signed-off-by: ddimatos <dimatos@gmail.com> * updated makefile to allow for no password in the clear and added a new host Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Fix a bug that failed to copy the cert to the managed node (#577) Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Added apropiate error message in zos_lineinfile when src is not found * Better error message * Added fragment for changelog * Update git issue slection field (#593) * Update git issue slection field * Added some additional future versions * Added rule to ignore python 2.7 compile not supporting f strings * Corrected rule in ignore file * Update 584-zos_lineinfile-error-message.yml * Added missing fragments for issues 309 and 408 * update make file and related artifacts with recent zVM changes (#598) Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Expand what is ingored for venv to all venv's (#613) Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Ansible-core versions with updated ignore file pass (#615) * Validated that ansible-core versions with updated ignore file pass locally Signed-off-by: ddimatos <dimatos@gmail.com> * remove redundant entry on last line Signed-off-by: ddimatos <dimatos@gmail.com> * remove redundant entry on last line Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Update make file to read exported var VENV else default to 'venv' (#614) * Update make file to read exported var VENV else default to 'venv' Signed-off-by: ddimatos <dimatos@gmail.com> * Correct the default from defaultValue to 'venv' Signed-off-by: ddimatos <dimatos@gmail.com> * Bugfix/558/zos copy backup dev (#609) * Create emergency backup only if force is set to False and dest exists * Added Changelog fragment * Updated imports ansible.module_utils._text to ansible.module_utils.common.text.converters (#602) * Updated import in zos_fetch * Updated import in zos_job_submit * Updated import in module_utils/data_set * Updated import in module_utils/System * Updated import in zos_copy * Updated import in zos_fetch * Added changelog fragment * Update enhancement-518-text-converter-import.yml * Updates the makefile and related scripts for the latest mounts (#628) * Update makefile coments and use --ignore for pytest Signed-off-by: ddimatos <dimatos@gmail.com> * lexicographical order targets Signed-off-by: ddimatos <dimatos@gmail.com> * Update makefile and artifacts to support new mounts Signed-off-by: ddimatos <dimatos@gmail.com> * Removing encrypted versions of the scripts, no longer senstive content Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * update profile created for mount points Signed-off-by: ddimatos <dimatos@gmail.com> * Correct f-string usage to remain 2.7 compatible (#659) * Correct f-string usage to remain 2.7 compatible Signed-off-by: ddimatos <dimatos@gmail.com> * Update changelog fragments Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Updated shell scripts for development tooling Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Update collab template Signed-off-by: ddimatos <dimatos@gmail.com> * Update collab template Signed-off-by: ddimatos <dimatos@gmail.com> * Update collab template Signed-off-by: ddimatos <dimatos@gmail.com> * Template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Template updates Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * changed zos_job_query to allow multi/embedded wildcard job_values. job.py/_get_job_status now uses fnmatch to test for wildcarded job_names. added test looking for a wildcard job name. added basic changelog text as 'enhancement'. * expanded wildcard support to job_id as well as job_name corrected error-handling issue in query routine playbook tests complete * cleaned up pep8 issues * Bug fix to correct job log returining and other edge cases (#683) * Update zos_job_submit to handle some edge cases and return the job log nearly always Signed-off-by: ddimatos <dimatos@gmail.com> * Linting correction Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragement for pr 683 Signed-off-by: ddimatos <dimatos@gmail.com> * Clean up comments in code Signed-off-by: ddimatos <dimatos@gmail.com> * Update restructured text for modules and changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Add support to check for security exception to job submit module Signed-off-by: ddimatos <dimatos@gmail.com> * Add additonal logic for jobs that use typerun=scan Signed-off-by: ddimatos <dimatos@gmail.com> * Update chnagelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update zos_job_submit.py Grammar change and correction. --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Added uss_tag_encoding function * Fixing linter issues * removed extraneous comment on query, eliminated unused variable * responding to reviewer comments * Expanded query tests to run a hello jcl file to make sure we are searching for a job id that exists. * Expanded documentation in code to highlight asterisk availability. Added examples showing multi-asterisk search. * Corrected 2 documentation errors * Change to documentation text (indent on multi line string?) * Still trying to get documentation to pass * Looks like '---' was killing documentation block. * Update zos_blockinfile with complex examples (#727) * Update zos_blockinfile with complex examples Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragement Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Added additional D SYMBOLS example to zos_operator documentation (#730) * Update zos_operator with an additional example Signed-off-by: ddimatos <dimatos@gmail.com> * Added changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * zos_gather_facts - add sample output to RETURN docstring (#722) * zos_gather_facts - add sample output to RETURN docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add changelog fragment Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * zos_gather_facts RETURN docstring - add sample sysplex_name and lpar_name values Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * Update generated module doc Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: ddimatos <dimatos@gmail.com> * 574find (#668) * change to allow find loop to continue when one particular data set element is not found This has been validated with a zos-check-find.yaml playbook * added functional test that includes non-existant pds entry, which should be skipped over * added changelog fragment. * zos_blockinfile , can quotes in content can be supported (#680) * Verify coomand of ZOAU support the doble quotes and get better output of message * Verify coomand of ZOAU support the doble quotes and get better output of message * Restore to the one tyme function solving TypeError * Test about cases with quotes supported * Solve comments * Comments in the changelog * Adjust test for working accord the text * A needed space * All in structured * Comments solved * Better order --------- Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> Co-authored-by: Demetri <dimatos@gmail.com> * zos_copy mode is applied to the destination directory, a deviation from the communtiy module behavior. (#723) * Verify coomand of ZOAU support the doble quotes and get better output of message * Verify coomand of ZOAU support the doble quotes and get better output of message * Restore to the one tyme function solving TypeError * Test about cases with quotes supported * Solve comments * Comments in the changelog * Adjust test for working accord the text * Solve the dest functional mode set for the applied to destination directory * Identation and spaces * To work well * To work well * To work well * To work well * Changelogs added * Solved the fragment test and separte the cases --------- Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> * corrected job test case that wanted to extract job id. * changed call to zos_job_query in the functional test. * zos_data_set - add force parameter to enable member delete via disp shr (#718) * add force parameter to enable member delete via disp shr Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update link to PR instead of issue Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * fix minor mistake in func test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update DOCstring for missed items highlighted in PR review Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update copyright years in data set module util Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * Update changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update to doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> * Update to doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> * Update to doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> * update doc, rst Signed-off-by: ddimatos <dimatos@gmail.com> * Update doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> * Added handler for job not found edge cases (None not iterable errors) * corrected pep8 issue (bad indent) * removed tracking text from error/not found messages. * Update zos_job_query.py * Add and automate a load lib test case for module zos_copy (#640) * Modified if statement * Added changelog fragment * Corrected statements Corrected PR in changelog fix lint issue * Adding test case on the work * Added assertion to loadlib test Changed cobol src Added newlines * Correcting cobol src * Removed unnecessary comments * Removed deprint statements for debug * Update 601-copy-loadlib-member.yml * Reduce space primary * Make sure cobol is executable * corrected name * Corrected names * Adding debug statements * modified name in link step * Changing copy name * Removed print statements for debug * Removed previous changelog * Removed unused fragment * Removed test case * Add max_rc support for module zos_tso_command (#666) * rebuilding 565 * fixing pep8 issue * corrected not in code, added functional test to use maxrc * changing maxrc return codes as indicated * updated copy_rexx to incorporate max_rc * pep8 issue on an if statement * changed test to dump intermediate values temporarily * added max_rc to arg params to get it to pass through * removed extra output on test * Added changelog fragment. * Update 565-zos_tsocommand_maxrc.yml * PARTIAL correction based on reviewer's feedback. Will complete later today. * Partial push: change origrc to orig_rc and added documentation. Added experimental loop for multiline results. Changed error case to continue through the commands even after a failure (?!) * Changes based on review feedback: - rc is actual returned value - origrc/orig_rc removed - in a list of commands, first error (considering max_rc) stops the chain - while processing commands, a list of success/failures are kept for error state info - changed listds to a longer, more complete string in example * updated test case for max_rc to use longer, more complete listds command * Minor pep8 and yamllint changes * Updated string builder to use append and join, for efficiency. * Update changelog fragment to correspond to pull request number Signed-off-by: ddimatos <dimatos@gmail.com> * removed old fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Changed output of command-by-command to use max_rc instead of Max RC, for consistency. * clarifying failure message when user hasn't set a max_rc. * corrected date on copyright notice to include current year. * corrected copyright date on test case --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: Demetri <dimatos@gmail.com> * added changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * zos_copy forward port a test case for symbols on a volume #739 (#740) * added test case for symbols on a volume Signed-off-by: ddimatos <dimatos@gmail.com> * Changelog fragment added Signed-off-by: ddimatos <dimatos@gmail.com> * Update test case with added comments Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Volume Initialization Module (#654) * Adding ZOS_INIT plugins The module and action for ZOS_Init command, under zos_ickdsf_command. Designed for version 1.3.1, and example given references removed zos_ssh.py. * Create zos_ickdsf_command.rst Tentative documentation. Does not quite list every return, and program may change to work with overall zos core collection. * Update zos_ickdsf_command.py fixed to pass pep8 sanity test. * Update zos_ickdsf_command.py Edited zos_ickdsf_command.py in action folder to pass pep8 style check. * bugfix - undo hard-coded storagegroup param from command passed to mvs_raw Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update encoding and licensing info Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * edit descriptions, leaving in old to verify messaging with dfsms team Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove references to buildix Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * cleanup examples docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add return docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add some unit tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update dict names, some clean up Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update interface and docs with cleaner descriptions and define default values and required true/false where applicable Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove init param and bring out sub-options one level Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename module from zos_ickdsf_command to zos_ickdsf_init Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove files referring to zos_ickdsf_command; i forgot to untrack these in the previous commit Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove params from intitial definition of results dict which are not always present, remove unused return params, rename message to msg to match ibm_zos_core and ansible style Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add ignore missing gplv3 license warning from sanity testings to existing ignore files Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * style edits to pass sanity tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add in-depth task name to first example docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add minimal params functional test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * convert action plugin to module_util, remove IckdsfCommand and CommandInit class definitions and custom error classes, move convert method to module_util Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename rc param, update func test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add note about disabled verify_offline check Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * removed volume_address checks which can be handled by ansiblemodule parsing validation or ickdsf itself; changed function signature of convert method to include results dicts so it can pass it to fail_json method in the case of module failure Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add invalid volume_address func tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * change param names, organize mvscmd return output Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * consolidate negative func tests, add negative tests for vtoc_tracks param, start list for postive tests to implement Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add func tests for "verify_existing_volid" and "verify_no_data_sets_exist" Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add volid positive and negative func tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add mutual exclusion to no index/sms_managedw with func test, minor edits to comments * add func tests for index param Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add func test for vtoc_tracks param Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove 'addr_range', 'volid_prefix' and 'output_html' params; add example with ansible loop to init multiple serial volumes Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * convert 'content' return param to list for prettier playbook output Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add specific examples to docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add guard rail test with volser check set to ensure target volume is as expected Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * switch target test vol to user02, remove existing data sets in initial test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * start adding author names to docstring, edits of yaml for sanity test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename module to zos_volume_init Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove unit test suite which was not kept up to date with code restructure Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove old/outdated .rst file Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * bring up to speed with v1.5.0-beta.1 which is currently more recent than dev Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add sanity test 2.15 file Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * clean up trailing empty lines/space chars Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * more pep8 style Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * pull in more changes from latest dev branch Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * tweak tests/sanity/ignore-2.10.txt file Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename volume_address to address, rename verify_no_data_sets_exist to verify_volume_empty, rename verify_existing_volid to verify_volid, rename vtoc_tracks to vtoc_size, rename return param command to cmd, make suggested doc changes, switch off check_mode Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * resolve pep8 line too long issue Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add year to copyright, add gh user name to author, restyle true/false with code rather than italics, remove backslashes in docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add changelog fragment Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update zos_fetch test case to create data set instead of using one that gets wiped away by zos_volume_init test cases Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * address PR comments including pep8 styling, docstring changes including add seealso section, add details around authorized usage of ickdsf, some light wordsmithing, and remove cmd from return structure Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add backticks to other special chars in docs for consistency Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * on more formatting tweak to national chars in docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add new param to support tmp_hlq Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * extend tmphlq binding to sysprint(*), disable verbose mode for mvscmd call Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove references to betterargparser - it cannot be used in its current state as it is too restrictive on volume serial and qualifier args Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove soft language ie beware, please; reduce line lengths, fix a spelling error Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * reorganize test case code for more logical grouping and un-nesting from a for loop, also update copyright year that was missed Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update display command check to only check particular lines instead of entire output in test_volid_address_assigned_correctly which would always be positive when tested against the entire output Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: trevor-glassey <Trevor.Glassey@ibm.com> * Ported bugfixes for #664 and #678 from 1.5.0 (#743) * Ported bugfixes for #664 and #678 * Updated file normalization for sequential datasets * Fixed encoding when a remote source is used * Added encoding normalization for record length (#664) This is a port and tweak from the normalization found in the L2 support branch. Now the normalization applies for both local and remote source files. There is still work left to validate how this behaves when copying to partitioned datasets, and see if the normalization needs to handle directories as well. * Refactored normalization for USS files * Updated line endings normalization in PDSEs * Updated create_temp_with_lf_endings * Added tests for line endings normalization * Added changelog fragment * Updated changelog fragment * Updated copyright information * Added comments to record length tests * Added more context to the record length normalization * Add force (disp=shr) option to zos_lineinfile and update zos_blockinfile with the same locking test case. (#731) * Add DISP=SHR by function force to the user * Add helper function for force cases only for EOF inserts * Test cases added * Just to check in the change case * All test cases for force in zos_lineinfile working properly * Test cases for force blockinfile * Fixes for style of code * One identation error * One identation error * One identation error * Documentation error * All test cases for line infile and block infile * Comments * Change the stdout to assert with expected of ds * Error of test case * Fragment added * Solve typo errors * Update to remove uncessary comment Signed-off-by: ddimatos <dimatos@gmail.com> * Update changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update module with force comments Signed-off-by: ddimatos <dimatos@gmail.com> * Solve some test problems * Copyright year * Copyright year * Add cases for other ecodes * Add test for all encodings * Cover all encodes test cases with force * Blockinfile force added for other encodes and DS types --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@mbp-de-andre-2.gdl.mex.ibm.com> Co-authored-by: ddimatos <dimatos@gmail.com> * remove duplicate function (#753) * Update branch production branch Main with release v1.5.0 content (#756) (#758) * Bug fix to correct job log returining and other edge cases (#688) * Update zos_job_submit to handle some edge cases and return the job log nearly always * Linting correction * Add changelog fragement for pr 683 * Clean up comments in code * Update restructured text for modules and changelog fragment * Add support to check for security exception to job submit module * Add additonal logic for jobs that use typerun=scan * Update chnagelog fragment * Correct run on sentence --------- * Added additional `D SYMBOLS` example to zos_operator documentation (#729) * Update zos_operator with an additional example * Added change fragment --------- * Ported bugfixes for #664 and #678 * Updated file normalization for sequential datasets * Fixed encoding when a remote source is used * Added encoding normalization for record length (#664) This is a port and tweak from the normalization found in the L2 support branch. Now the normalization applies for both local and remote source files. There is still work left to validate how this behaves when copying to partitioned datasets, and see if the normalization needs to handle directories as well. * Refactored normalization for USS files * Updated line endings normalization in PDSEs * Updated create_temp_with_lf_endings * Added tests for line endings normalization * Added changelog fragment * Add copy lib member test case * Corrected changelog fragment * Add conflict resolution * Update 641-copy-loadlib-member.yml * zos_copy forward port a test case for symbols on a volume (#739) * Add a test case for symbols using an existing volume on our test system * Add changelog fragment * Update 739-zos_copy-volume-symbol-test.yml Update description to use symbols overs special chars. * Update test case with added comments --------- * bring in updates to zos_gather_facts RETURN docstring (#744) * Solve problem 619 about set mode on dest file (#746) * Solve problem 619 about set mode on dest file * Add fragment of 746 PR --------- * Merge release tasks for v1.5.0 into release branch (#752) * Updated metadata * Updated copyright information * Updated copyright in README * Generated changelog * Added previous versions to changelog * Changed a minor_change fragment to bugfix * Updated release notes * Update dependency finder to allow for all tests minus skip tests * Update release notes and changelog with full date formats --------- * remove changelog fragments no longer needed (#754) --------- Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: Ivan Moreno <iamorenosoto@gmail.com> Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> Co-authored-by: Rich Parker <richp405@gmail.com> Co-authored-by: ketankelkar <ktnklkr@gmail.com> Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> * update galaxy and meta/ files Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update copyright years Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update rst doc files Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add 1.6.0-beta.1 release summary fragment Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * generate changelog Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add release notes Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add additional stylizing Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * Delete fragements once changelog is generated Signed-off-by: ddimatos <dimatos@gmail.com> * update stylizing around *none* in zos_volume_init docs Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: Ivan Moreno <iamorenosoto@gmail.com> Co-authored-by: Oscar Fernando Flores Garcia <fernandofloresdev@gmail.com> Co-authored-by: Demetri <dimatos@gmail.com> Co-authored-by: Rich Parker <richp405@gmail.com> Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> Co-authored-by: trevor-glassey <Trevor.Glassey@ibm.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@mbp-de-andre-2.gdl.mex.ibm.com> * Update ibm_zos_core_meta.yml --------- Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: Ivan Moreno <iamorenosoto@gmail.com> Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> Co-authored-by: Rich Parker <richp405@gmail.com> Co-authored-by: ketankelkar <ktnklkr@gmail.com> Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> Co-authored-by: trevor-glassey <Trevor.Glassey@ibm.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@mbp-de-andre-2.gdl.mex.ibm.com> * Bugfix/619/mode set for files applied test case (#757) * Add test case for copy d… * [v1.8.0][Backport] Clean temporary data sets created during XMIT unarchive operation (#1054) * Clean temporary data sets created during XMIT unarchive operation (#1049) * Added a temp cleanup * Added changelog * Modified changelog * Added removal of src if remote_src is False Signed-off-by: Fernando Flores <fernandofloresdev@gmail.com> * Modified changelog fragments --------- Signed-off-by: Fernando Flores <fernandofloresdev@gmail.com> * Update 1049-xmit-temporary-data-sets.yml modified PR number --------- Signed-off-by: Fernando Flores <fernandofloresdev@gmail.com> * Cherry picked v1.8.0 (#1063) * Bug 1041 zos submit job honor return output literally (#1058) * initial commit to pass return_output to job_output. * corrected fragment name to match branch * tweaked data set test to show result values if positive test fails * removed trace in zos_data_set, and added trace output to job_submit * removed extra text from functional testing. * put in correct PR number in changelog fragment. * changed trivial to minor_changes, added documentation to dd_scan in job:job_output. * 1043 bug title zos operator is passing wrong value to zoauopercmd (#1044) * corrected kwarg index value from 'wait_arg' to 'wait' Also corrected true/false issue in zoaq * Added and updated changelog. * update PR number in changelog fragment * changed test from \$ to \\$ to eliminate warning * added blocking test to maks sure minimum wait is reached in zoau>1.2.4.5 * removed the else condition from the blocking test, since it is not needed. * corrected tense grammer in changelog fragment * corrected capitalization of ZOAU in changelog fragment. * updated changelog to point to the backport PR * [v1.8.0] [Backport] [zos_script] remote_tmp for zos_script (#1068) * Enabler/1024/remote_tmp for zos_script (#1060) * Changed tmp_path for Ansible's remote_tmp * Remove tmp_path from module's options * Update module documentation * Remove tmp_path test case * Update zos_script's RST file * Add changelog fragment * Updated module examples * Update changelog fragment * [v1.8.0][zos_job_submit] Removes tmp files left behind by zos_job_submit (#1070) * Ensure proper cleanup for ansiballz * Added proper removal of AnsiballZ * [v1.8.0][zos_copy][backport] File wrongly modified after second copy (#1069) * [zos_copy] Files corrupted after second copy (#1064) * Initial change to replace shutil.copy * Added fix for corrupted directory copies * Added changelog fragment * Modified docstring and fixed copy_tree * Added punctiation * Added copystat * Added set mode for dirs * Update 1064-corruped-second-copy.yml * Updated changelog * [v1.8.0] [backport] [Documentation] [zos_tso_command] Add REXX exec example (#1072) * [Documentation] [zos_tso_command] Add REXX exec example (#1065) * Add REXX exec example * Add fragment * Update module documentation * Fix PR link * Reword example task name * Updated REXX example * Update changelog fragment * Update RST file --------- Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> * [v1.8.0] [Backport] [zos_copy] mvs to non existent mvs copy verify destination attrs match (#1067) * Add changes * Add fragment * Modify fragment * Modify fragment * [zos_copy] fix for executables copied from local fail with iconv error (#1079) * Added fix for executables copied from local and test * Added changelog * update link in managed_node doc (#1089) * update link in managed_node doc Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * Updated docs regarding managed node Signed-off-by: ddimatos <dimatos@gmail.com> * Doc fragment Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: ddimatos <dimatos@gmail.com> * Merge staging-v1.8.0-tasks into staging-v1.8.0 (#1090) * Modified galaxy version * mofidied meta * Update copyright year * Generated module docs * Created changelog * Removed changelog fragments * Updated changelog and release notes * Fixed newline sequences * Update CHANGELOG.rst * Update CHANGELOG.rst * Corrected release notes * Removed duplicated import zoau --------- Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Signed-off-by: Fernando Flores <fernandofloresdev@gmail.com> Co-authored-by: Demetri <dimatos@gmail.com> Co-authored-by: Ivan Moreno <iamorenosoto@gmail.com> Co-authored-by: Rich Parker <richp405@gmail.com> Co-authored-by: ketankelkar <ktnklkr@gmail.com> Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> Co-authored-by: trevor-glassey <Trevor.Glassey@ibm.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@mbp-de-andre-2.gdl.mex.ibm.com> --- CHANGELOG.rst | 21 ++- changelogs/changelog.yaml | 49 +++++++ docs/source/release_notes.rst | 44 ++++-- docs/source/requirements-single.rst | 12 +- galaxy.yml | 2 +- meta/ibm_zos_core_meta.yml | 4 +- plugins/action/zos_job_submit.py | 20 ++- plugins/module_utils/data_set.py | 2 +- plugins/module_utils/encode.py | 2 +- plugins/module_utils/job.py | 2 +- plugins/module_utils/mvs_cmd.py | 2 +- plugins/module_utils/zoau_version_checker.py | 2 +- plugins/modules/zos_blockinfile.py | 2 +- plugins/modules/zos_copy.py | 20 ++- plugins/modules/zos_job_submit.py | 2 +- plugins/modules/zos_operator.py | 2 +- plugins/modules/zos_operator_action_query.py | 2 +- plugins/modules/zos_tso_command.py | 2 +- .../functional/modules/test_zos_copy_func.py | 131 ++++++++++++++++++ 19 files changed, 274 insertions(+), 49 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 2c2815de4..a5883246e 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -5,13 +5,13 @@ ibm.ibm_zos_core Release Notes .. contents:: Topics -v1.8.0-beta.1 -============= +v1.8.0 +====== Release Summary --------------- -Release Date: '2023-10-24' +Release Date: '2023-12-08' This changelog describes all changes made to the modules and plugins included in this collection. The release date is the date the changelog is created. For additional details such as required dependencies and availability review @@ -31,18 +31,31 @@ Minor Changes - zos_job_submit - Change action plugin call from copy to zos_copy. (https://github.com/ansible-collections/ibm_zos_core/pull/951) - zos_operator - Changed system to call 'wait=true' parameter to zoau call. Requires zoau 1.2.5 or later. (https://github.com/ansible-collections/ibm_zos_core/pull/976) - zos_operator_action_query - Add a max delay of 5 seconds on each part of the operator_action_query. Requires zoau 1.2.5 or later. (https://github.com/ansible-collections/ibm_zos_core/pull/976) +- zos_script - add support for remote_tmp from the Ansible configuration to setup where temporary files will be created, replacing the module option tmp_path. (https://github.com/ansible-collections/ibm_zos_core/pull/1068). +- zos_submit_job - Previous code did not return output, but still requested job data from the target system. This changes to honor return_output=false by not querying the job dd segments at all. (https://github.com/ansible-collections/ibm_zos_core/pull/1063). +- zos_tso_command - add example for executing explicitly a REXX script from a data set. (https://github.com/ansible-collections/ibm_zos_core/pull/1072). - zos_unarchive - Add validation into path joins to detect unauthorized path traversals. (https://github.com/ansible-collections/ibm_zos_core/pull/1029) - zos_unarchive - Enhanced test cases to use test lines the same length of the record length. (https://github.com/ansible-collections/ibm_zos_core/pull/965) -- zos_copy - add support in zos_copy for text files and data sets containing ASA control characters. (https://github.com/ansible-collections/ibm_zos_core/pull/1028) + +Deprecated Features +------------------- + +- zos_blockinfile - debug is deprecated in favor of 'as_json' (https://github.com/ansible-collections/ibm_zos_core/pull/904). Bugfixes -------- - zos_copy - Update option limit to include LIBRARY as dest_dataset/suboption value. Documentation updated to reflect this change. (https://github.com/ansible-collections/ibm_zos_core/pull/968). +- zos_copy - When copying an executable data set from controller to managed node, copy operation failed with an encoding error. Fix now avoids encoding when executable option is selected. (https://github.com/ansible-collections/ibm_zos_core/pull/1079). +- zos_copy - When copying an executable data set with aliases and destination did not exist, destination data set was created with wrong attributes. Fix now creates destination data set with the same attributes as the source. (https://github.com/ansible-collections/ibm_zos_core/pull/1067). +- zos_copy - When performing a copy operation to an existing file, the copied file resulted in having corrupted contents. Fix now implements a workaround to not use the specific copy routine that corrupts the file contents. (https://github.com/ansible-collections/ibm_zos_core/pull/1069). - zos_job_submit - Temporary files were created in tmp directory. Fix now ensures the deletion of files every time the module run. (https://github.com/ansible-collections/ibm_zos_core/pull/951) - zos_job_submit - The last line of the jcl was missing in the input. Fix now ensures the presence of the full input in job_submit. (https://github.com/ansible-collections/ibm_zos_core/pull/952) - zos_lineinfile - A duplicate entry was made even if line was already present in the target file. Fix now prevents a duplicate entry if the line already exists in the target file. (https://github.com/ansible-collections/ibm_zos_core/pull/916) - zos_operator - The last line of the operator was missing in the response of the module. The fix now ensures the presence of the full output of the operator. https://github.com/ansible-collections/ibm_zos_core/pull/918) +- zos_operator - The module was ignoring the wait time argument. The module now passes the wait time argument to ZOAU. (https://github.com/ansible-collections/ibm_zos_core/pull/1063). +- zos_operator_action_query - The module was ignoring the wait time argument. The module now passes the wait time argument to ZOAU. (https://github.com/ansible-collections/ibm_zos_core/pull/1063). +- zos_unarchive - When zos_unarchive fails during unpack either with xmit or terse it does not clean the temporary data sets created. Fix now removes the temporary data sets. (https://github.com/ansible-collections/ibm_zos_core/pull/1054). Known Issues ------------ diff --git a/changelogs/changelog.yaml b/changelogs/changelog.yaml index 2e50559d7..35eeaebb0 100644 --- a/changelogs/changelog.yaml +++ b/changelogs/changelog.yaml @@ -1023,6 +1023,55 @@ releases: - 930-archive-post-beta.yml - v1.7.0-beta.2_summary.yml release_date: '2023-08-21' + 1.8.0: + changes: + bugfixes: + - zos_copy - When copying an executable data set from controller to managed + node, copy operation failed with an encoding error. Fix now avoids encoding + when executable option is selected. (https://github.com/ansible-collections/ibm_zos_core/pull/1079). + - zos_copy - When copying an executable data set with aliases and destination + did not exist, destination data set was created with wrong attributes. Fix + now creates destination data set with the same attributes as the source. (https://github.com/ansible-collections/ibm_zos_core/pull/1067). + - zos_copy - When performing a copy operation to an existing file, the copied + file resulted in having corrupted contents. Fix now implements a workaround + to not use the specific copy routine that corrupts the file contents. (https://github.com/ansible-collections/ibm_zos_core/pull/1069). + - zos_operator - The module was ignoring the wait time argument. The module + now passes the wait time argument to ZOAU. (https://github.com/ansible-collections/ibm_zos_core/pull/1063). + - zos_operator_action_query - The module was ignoring the wait time argument. + The module now passes the wait time argument to ZOAU. (https://github.com/ansible-collections/ibm_zos_core/pull/1063). + - zos_unarchive - When zos_unarchive fails during unpack either with xmit or + terse it does not clean the temporary data sets created. Fix now removes the + temporary data sets. (https://github.com/ansible-collections/ibm_zos_core/pull/1054). + minor_changes: + - zos_script - Add support for remote_tmp from the Ansible configuration to + setup where temporary files will be created, replacing the module option tmp_path. + (https://github.com/ansible-collections/ibm_zos_core/pull/1068). + - zos_job_submit - Previous code did not return output, but still requested + job data from the target system. This changes to honor return_output=false + by not querying the job dd segments at all. (https://github.com/ansible-collections/ibm_zos_core/pull/1063). + - zos_tso_command - Add example for executing explicitly a REXX script from + a data set. (https://github.com/ansible-collections/ibm_zos_core/pull/1072). + release_summary: 'Release Date: ''2023-12-08'' + + This changelog describes all changes made to the modules and plugins included + + in this collection. The release date is the date the changelog is created. + + For additional details such as required dependencies and availability review + + the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__' + fragments: + - 1041-bug-zos-submit-job-honor-return-output-literally.yml + - 1043-bug-title-zos_operator-is-passing-wrong-value-to-zoauopercmd.yml + - 1049-xmit-temporary-data-sets.yml + - 1060-remote_tmp_zos_script.yml + - 1067-mvs_to_non_existent_mvs_copy_verify_destination_attrs_match.yml + - 1069-corrupted-second-copy.yml + - 1072-rexx-exec-tso_command.yml + - 1079-zos-copy-local-executable.yml + - 1089-update-managed_node_doc.yml + - v1.8.0-summary.yml + release_date: '2023-12-08' 1.8.0-beta.1: changes: bugfixes: diff --git a/docs/source/release_notes.rst b/docs/source/release_notes.rst index 10150952d..b198d74de 100644 --- a/docs/source/release_notes.rst +++ b/docs/source/release_notes.rst @@ -6,8 +6,8 @@ Releases ======== -Version 1.8.0-beta.1 -==================== +Version 1.8.0 +============= New Modules ----------- @@ -22,29 +22,46 @@ Minor Changes - Enhanced test cases to use test lines the same length of the record length. - ``zos_copy`` - - Add validation into path joins to detect unauthorized path traversals. + - Add validation into path joins to detect unauthorized path traversals. - Add new option `force_lock` that can copy into data sets that are already in use by other processes (DISP=SHR). User needs to use with caution because this is subject to race conditions and can lead to data loss. - - includes a new option `executable` that enables copying of executables such as load modules or program objects to both USS and partitioned data sets. When the `dest` option contains a non-existent data set, `zos_copy` will create a data set with the appropriate attributes for an executable. - - introduces a new option 'aliases' to enable preservation of member aliases when copying data to partitioned data sets (PDS) destinations from USS or other PDS sources. Copying aliases of text based members to/from USS is not supported. - - add support in zos_copy for text files and data sets containing ASA control characters. + - Includes a new option `executable` that enables copying of executables such as load modules or program objects to both USS and partitioned data sets. When the `dest` option contains a non-existent data set, `zos_copy` will create a data set with the appropriate attributes for an executable. + - Introduces a new option 'aliases' to enable preservation of member aliases when copying data to partitioned data sets (PDS) destinations from USS or other PDS sources. Copying aliases of text based members to/from USS is not supported. + - Add support in zos_copy for text files and data sets containing ASA control characters. - ``zos_fetch`` - Add validation into path joins to detect unauthorized path traversals. -- ``zos_job_submit`` - Change action plugin call from copy to zos_copy. -- ``zos_operator`` - Changed system to call 'wait=true' parameter to zoau call. Requires zoau 1.2.5 or later. +- ``zos_job_submit`` + + - Change action plugin call from copy to zos_copy. + - Previous code did not return output, but still requested job data from the target system. This changes to honor `return_output=false` by not querying the job dd segments at all. +- ``zos_operator`` - Changed system to call `wait=true` parameter to zoau call. Requires zoau 1.2.5 or later. - ``zos_operator_action_query`` - Add a max delay of 5 seconds on each part of the operator_action_query. Requires zoau 1.2.5 or later. - ``zos_unarchive`` - - Add validation into path joins to detect unauthorized path traversals. + - Add validation into path joins to detect unauthorized path traversals. - Enhanced test cases to use test lines the same length of the record length. - ``module_utils/template`` - Add validation into path joins to detect unauthorized path traversals. +- ``zos_tso_command`` - Add example for executing explicitly a REXX script from a data set. +- ``zos_script`` - Add support for remote_tmp from the Ansible configuration to setup where temporary files will be created, replacing the module option tmp_path. Bugfixes -------- -- ``zos_copy`` - Update option limit to include LIBRARY as dest_dataset/suboption value. Documentation updated to reflect this change. -- ``zos_job_submit`` - Temporary files were created in tmp directory. Fix now ensures the deletion of files every time the module run. -- ``zos_job_submit`` - The last line of the jcl was missing in the input. Fix now ensures the presence of the full input in job_submit. +- ``zos_copy`` + + - Update option to include `LIBRARY` as dest_dataset/suboption value. Documentation updated to reflect this change. + - When copying an executable data set from controller to managed node, copy operation failed with an encoding error. Fix now avoids encoding when `executable` option is selected. + - When copying an executable data set with aliases and destination did not exist, destination data set was created with wrong attributes. Fix now creates destination data set with the same attributes as the source. + - When performing a copy operation to an existing file, the copied file resulted in having corrupted contents. Fix now implements a workaround to not use the specific copy routine that corrupts the file contents. +- ``zos_job_submit`` + + - Temporary files were created in tmp directory. Fix now ensures the deletion of files every time the module run. + - The last line of the jcl was missing in the input. Fix now ensures the presence of the full input in job_submit. - ``zos_lineinfile`` - A duplicate entry was made even if line was already present in the target file. Fix now prevents a duplicate entry if the line already exists in the target file. -- ``zos_operator`` - The last line of the operator was missing in the response of the module. The fix now ensures the presence of the full output of the operator. +- ``zos_operator`` + + - The last line of the operator was missing in the response of the module. The fix now ensures the presence of the full output of the operator. + - The module was ignoring the wait time argument. The module now passes the wait time argument to ZOAU. +- ``zos_operator_action_query`` - The module was ignoring the wait time argument. The module now passes the wait time argument to ZOAU. +- ``zos_unarchive`` - When zos_unarchive fails during unpack either with xmit or terse it does not clean the temporary data sets created. Fix now removes the temporary data sets. Known Issues ------------ @@ -55,6 +72,7 @@ Known Issues Availability ------------ +* `Automation Hub`_ * `Galaxy`_ * `GitHub`_ diff --git a/docs/source/requirements-single.rst b/docs/source/requirements-single.rst index 3f0b2b8e0..e31c9636a 100644 --- a/docs/source/requirements-single.rst +++ b/docs/source/requirements-single.rst @@ -12,7 +12,7 @@ Requirements The **IBM z/OS core collection** requires both a **control node** and **managed node** be configured with a minimum set of requirements. The control node is often referred to as the **controller** and the -managed node as the **host**. +managed node as the **host** or **target**. Control node ============ @@ -32,17 +32,19 @@ The managed z/OS node is the host that is managed by Ansible, as identified in the Ansible inventory. For the **IBM z/OS core collection** to manage the z/OS node, some dependencies are required to be installed on z/OS such as: +* `z/OS`_ * `z/OS`_ * `z/OS OpenSSH`_ * `z/OS® shell`_ * `IBM Open Enterprise SDK for Python`_ * `IBM Z Open Automation Utilities`_ - .. note:: - Each release of the IBM z/OS core collection depends on specific dependency - versions. For information on the dependencies or the versions, review the - `release notes`_ reference section. +.. note:: + + Each release of the IBM z/OS core collection depends on specific dependency + versions. For information on the dependencies or the versions, review the + `release notes`_ reference section. z/OS shell ---------- diff --git a/galaxy.yml b/galaxy.yml index b83b1014a..f7be530c7 100644 --- a/galaxy.yml +++ b/galaxy.yml @@ -6,7 +6,7 @@ namespace: ibm name: ibm_zos_core # The collection version -version: 1.8.0-beta.1 +version: 1.8.0 # Collection README file readme: README.md diff --git a/meta/ibm_zos_core_meta.yml b/meta/ibm_zos_core_meta.yml index 7a68a05bb..e1ee28246 100644 --- a/meta/ibm_zos_core_meta.yml +++ b/meta/ibm_zos_core_meta.yml @@ -1,5 +1,5 @@ name: ibm_zos_core -version: "1.8.0-beta.1" +version: "1.8.0" managed_requirements: - name: "IBM Open Enterprise SDK for Python" @@ -7,4 +7,4 @@ managed_requirements: - name: "Z Open Automation Utilities" version: - - "1.2.3" + - "1.2.4" diff --git a/plugins/action/zos_job_submit.py b/plugins/action/zos_job_submit.py index c28fcec76..12ec5514a 100644 --- a/plugins/action/zos_job_submit.py +++ b/plugins/action/zos_job_submit.py @@ -57,10 +57,14 @@ def run(self, tmp=None, task_vars=None): source = self._task.args.get("src", None) # Get a temporary file on the managed node - tempfile = self._execute_module( - module_name="tempfile", module_args=dict(state="file"), task_vars=task_vars, - ) - dest_path = tempfile.get("path") + dest_path = self._execute_module( + module_name="tempfile", module_args={}, task_vars=task_vars, + ).get("path") + # Calling execute_module from this step with tempfile leaves behind a tmpdir. + # This is called to ensure the proper removal. + tmpdir = self._connection._shell.tmpdir + if tmpdir: + self._remove_tmp_path(tmpdir) result["failed"] = True if source is None: @@ -166,14 +170,6 @@ def run(self, tmp=None, task_vars=None): ) else: result.update(dict(failed=True)) - if rendered_file: - os.remove(rendered_file) - if os.path.isfile(tmp_src): - self._connection.exec_command("rm -rf {0}".format(tmp_src)) - if os.path.isfile(dest_file): - self._connection.exec_command("rm -rf {0}".format(dest_file)) - if os.path.isfile(source_full): - self._connection.exec_command("rm -rf {0}".format(source_full)) else: result.update( diff --git a/plugins/module_utils/data_set.py b/plugins/module_utils/data_set.py index cae505804..12265e1b4 100644 --- a/plugins/module_utils/data_set.py +++ b/plugins/module_utils/data_set.py @@ -1,4 +1,4 @@ -# Copyright (c) IBM Corporation 2020, 2023 +# Copyright (c) IBM Corporation 2020 - 2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/plugins/module_utils/encode.py b/plugins/module_utils/encode.py index c36d0b272..26bb983b3 100644 --- a/plugins/module_utils/encode.py +++ b/plugins/module_utils/encode.py @@ -1,4 +1,4 @@ -# Copyright (c) IBM Corporation 2020, 2023 +# Copyright (c) IBM Corporation 2020 - 2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/plugins/module_utils/job.py b/plugins/module_utils/job.py index 94a65d8c3..1b8cb06f6 100644 --- a/plugins/module_utils/job.py +++ b/plugins/module_utils/job.py @@ -1,4 +1,4 @@ -# Copyright (c) IBM Corporation 2019, 2020, 2023 +# Copyright (c) IBM Corporation 2019 - 2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/plugins/module_utils/mvs_cmd.py b/plugins/module_utils/mvs_cmd.py index ec4955ac6..6331a1772 100644 --- a/plugins/module_utils/mvs_cmd.py +++ b/plugins/module_utils/mvs_cmd.py @@ -1,4 +1,4 @@ -# Copyright (c) IBM Corporation 2020, 2023 +# Copyright (c) IBM Corporation 2020 - 2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/plugins/module_utils/zoau_version_checker.py b/plugins/module_utils/zoau_version_checker.py index 41dd35276..12470ef19 100644 --- a/plugins/module_utils/zoau_version_checker.py +++ b/plugins/module_utils/zoau_version_checker.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2022, 2023 +# Copyright (c) IBM Corporation 2022 - 2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/plugins/modules/zos_blockinfile.py b/plugins/modules/zos_blockinfile.py index 1751c6472..7a2adf7cc 100644 --- a/plugins/modules/zos_blockinfile.py +++ b/plugins/modules/zos_blockinfile.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2020, 2022, 2023 +# Copyright (c) IBM Corporation 2020 - 2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/plugins/modules/zos_copy.py b/plugins/modules/zos_copy.py index a2e545d8b..5d68d78a5 100644 --- a/plugins/modules/zos_copy.py +++ b/plugins/modules/zos_copy.py @@ -2683,7 +2683,7 @@ def run_module(module, arg_def): # When the destination is a dataset, we'll normalize the source # file to UTF-8 for the record length computation as Python # generally uses UTF-8 as the default encoding. - if not is_binary and not is_uss: + if not is_binary and not is_uss and not executable: new_src = temp_path or src new_src = os.path.normpath(new_src) # Normalizing encoding when src is a USS file (only). @@ -2761,6 +2761,21 @@ def run_module(module, arg_def): # dest_data_set.type overrides `dest_ds_type` given precedence rules if dest_data_set and dest_data_set.get("type"): dest_ds_type = dest_data_set.get("type") + elif executable: + """ When executable is selected and dest_exists is false means an executable PDSE was copied to remote, + so we need to provide the correct dest_ds_type that will later be transformed into LIBRARY. + Not using LIBRARY at this step since there are many checks with dest_ds_type in data_set.DataSet.MVS_PARTITIONED + and LIBRARY is not in MVS_PARTITIONED frozen set.""" + dest_ds_type = "PDSE" + + if dest_data_set and (dest_data_set.get('record_format', '') == 'FBA' or dest_data_set.get('record_format', '') == 'VBA'): + dest_has_asa_chars = True + elif not dest_exists and asa_text: + dest_has_asa_chars = True + elif dest_exists and dest_ds_type not in data_set.DataSet.MVS_VSAM: + dest_attributes = datasets.listing(dest_name)[0] + if dest_attributes.recfm == 'FBA' or dest_attributes.recfm == 'VBA': + dest_has_asa_chars = True if dest_data_set and (dest_data_set.get('record_format', '') == 'FBA' or dest_data_set.get('record_format', '') == 'VBA'): dest_has_asa_chars = True @@ -3052,7 +3067,7 @@ def run_module(module, arg_def): # --------------------------------------------------------------------- # Copy to PDS/PDSE # --------------------------------------------------------------------- - elif dest_ds_type in data_set.DataSet.MVS_PARTITIONED: + elif dest_ds_type in data_set.DataSet.MVS_PARTITIONED or dest_ds_type == "LIBRARY": if not remote_src and not copy_member and os.path.isdir(temp_path): temp_path = os.path.join(validation.validate_safe_path(temp_path), validation.validate_safe_path(os.path.basename(src))) @@ -3272,6 +3287,7 @@ def main(): not module.params.get("encoding") and not module.params.get("remote_src") and not module.params.get("is_binary") + and not module.params.get("executable") ): module.params["encoding"] = { "from": module.params.get("local_charset"), diff --git a/plugins/modules/zos_job_submit.py b/plugins/modules/zos_job_submit.py index 11f0f3ccb..a099bd135 100644 --- a/plugins/modules/zos_job_submit.py +++ b/plugins/modules/zos_job_submit.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019, 2020, 2022, 2023 +# Copyright (c) IBM Corporation 2019 - 2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/plugins/modules/zos_operator.py b/plugins/modules/zos_operator.py index 273b0a867..ca6935163 100644 --- a/plugins/modules/zos_operator.py +++ b/plugins/modules/zos_operator.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019, 2020, 2022, 2023 +# Copyright (c) IBM Corporation 2019 - 2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/plugins/modules/zos_operator_action_query.py b/plugins/modules/zos_operator_action_query.py index ccf565626..a035cad33 100644 --- a/plugins/modules/zos_operator_action_query.py +++ b/plugins/modules/zos_operator_action_query.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019, 2020, 2023 +# Copyright (c) IBM Corporation 2019 - 2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/plugins/modules/zos_tso_command.py b/plugins/modules/zos_tso_command.py index 87b157318..28b033a90 100644 --- a/plugins/modules/zos_tso_command.py +++ b/plugins/modules/zos_tso_command.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019, 2020, 2023 +# Copyright (c) IBM Corporation 2019 - 2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/tests/functional/modules/test_zos_copy_func.py b/tests/functional/modules/test_zos_copy_func.py index b42dd9500..42a08890a 100644 --- a/tests/functional/modules/test_zos_copy_func.py +++ b/tests/functional/modules/test_zos_copy_func.py @@ -3428,6 +3428,137 @@ def test_copy_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): hosts.all.zos_data_set(name=dest_lib, state="absent") hosts.all.zos_data_set(name=dest_lib_aliases, state="absent") + +@pytest.mark.pdse +@pytest.mark.loadlib +@pytest.mark.aliases +@pytest.mark.parametrize("is_created", [False, True]) +def test_copy_local_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): + + hosts = ansible_zos_module + + cobol_src_pds = "USER.COBOL.SRC" + cobol_src_mem = "HELLOCBL" + cobol_src_mem2 = "HICBL2" + src_lib = "USER.LOAD.SRC" + dest_lib = "USER.LOAD.DEST" + pgm_mem = "HELLO" + pgm2_mem = "HELLO2" + + + try: + # allocate pds for cobol src code + hosts.all.zos_data_set( + name=cobol_src_pds, + state="present", + type="pds", + space_primary=2, + record_format="FB", + record_length=80, + block_size=3120, + replace=True, + ) + # allocate pds for src loadlib + hosts.all.zos_data_set( + name=src_lib, + state="present", + type="pdse", + record_format="U", + record_length=0, + block_size=32760, + space_primary=2, + space_type="M", + replace=True + ) + + # copy cobol src + hosts.all.zos_copy(content=COBOL_SRC.format(COBOL_PRINT_STR), dest='{0}({1})'.format(cobol_src_pds, cobol_src_mem)) + # copy cobol2 src + hosts.all.zos_copy(content=COBOL_SRC.format(COBOL_PRINT_STR2), dest='{0}({1})'.format(cobol_src_pds, cobol_src_mem2)) + + # run link-edit for pgm1 + link_rc = link_loadlib_from_cobol(hosts, cobol_src_pds, cobol_src_mem, src_lib, pgm_mem) + assert link_rc == 0 + # run link-edit for pgm2 + link_rc = link_loadlib_from_cobol(hosts, cobol_src_pds, cobol_src_mem2, src_lib, pgm2_mem, loadlib_alias_mem="ALIAS2") + assert link_rc == 0 + + # execute pgm to test pgm1 + validate_loadlib_pgm(hosts, steplib=src_lib, pgm_name=pgm_mem, expected_output_str=COBOL_PRINT_STR) + + # fetch loadlib into local + tmp_folder = tempfile.TemporaryDirectory(prefix="tmpfetch") + # fetch loadlib to local + fetch_result = hosts.all.zos_fetch(src=src_lib, dest=tmp_folder.name, is_binary=True) + for res in fetch_result.contacted.values(): + source_path = res.get("dest") + + if not is_created: + # ensure dest data sets absent for this variation of the test case. + hosts.all.zos_data_set(name=dest_lib, state="absent") + else: + # allocate dest loadlib to copy over without an alias. + hosts.all.zos_data_set( + name=dest_lib, + state="present", + type="pdse", + record_format="U", + record_length=0, + block_size=32760, + space_primary=2, + space_type="M", + replace=True + ) + + if not is_created: + # dest data set does not exist, specify it in dest_dataset param. + # copy src loadlib to dest library pds w/o aliases + copy_res = hosts.all.zos_copy( + src=source_path, + dest="{0}".format(dest_lib), + executable=True, + aliases=False, + dest_data_set={ + 'type': "PDSE", + 'record_format': "U", + 'record_length': 0, + 'block_size': 32760, + 'space_primary': 2, + 'space_type': "M", + } + ) + else: + # copy src loadlib to dest library pds w/o aliases + copy_res = hosts.all.zos_copy( + src=source_path, + dest="{0}".format(dest_lib), + executable=True, + aliases=False + ) + + for result in copy_res.contacted.values(): + assert result.get("msg") is None + assert result.get("changed") is True + assert result.get("dest") == "{0}".format(dest_lib) + + # check ALIAS keyword and name in mls output + verify_copy_mls = hosts.all.shell( + cmd="mls {0}".format(dest_lib), + executable=SHELL_EXECUTABLE + ) + for v_cp in verify_copy_mls.contacted.values(): + assert v_cp.get("rc") == 0 + stdout = v_cp.get("stdout") + assert stdout is not None + # number of members + assert len(stdout.splitlines()) == 2 + + finally: + hosts.all.zos_data_set(name=cobol_src_pds, state="absent") + hosts.all.zos_data_set(name=src_lib, state="absent") + hosts.all.zos_data_set(name=dest_lib, state="absent") + + @pytest.mark.pdse @pytest.mark.loadlib @pytest.mark.aliases From 54ebf936068e11605d25242b8008eb814f85618b Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Fri, 15 Dec 2023 13:22:40 -0600 Subject: [PATCH 253/495] Removed print statement in test --- tests/functional/modules/test_zos_job_submit_func.py | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/functional/modules/test_zos_job_submit_func.py b/tests/functional/modules/test_zos_job_submit_func.py index 9e2d6e400..44dfdbf01 100644 --- a/tests/functional/modules/test_zos_job_submit_func.py +++ b/tests/functional/modules/test_zos_job_submit_func.py @@ -343,7 +343,6 @@ def test_job_submit_LOCAL(ansible_zos_module): results = hosts.all.zos_job_submit(src=tmp_file.name, location="LOCAL", wait=True) for result in results.contacted.values(): - print(result) assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" assert result.get("jobs")[0].get("ret_code").get("code") == 0 assert result.get("changed") is True From 00561fbcc5884d4844606d9869ebaacc7d0da5ce Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Mon, 18 Dec 2023 11:16:54 -0600 Subject: [PATCH 254/495] fix undefined variable --- plugins/action/zos_job_submit.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/plugins/action/zos_job_submit.py b/plugins/action/zos_job_submit.py index 12ec5514a..7906dfa38 100644 --- a/plugins/action/zos_job_submit.py +++ b/plugins/action/zos_job_submit.py @@ -57,9 +57,10 @@ def run(self, tmp=None, task_vars=None): source = self._task.args.get("src", None) # Get a temporary file on the managed node - dest_path = self._execute_module( + tempfile = self._execute_module( module_name="tempfile", module_args={}, task_vars=task_vars, - ).get("path") + ) + dest_path = tempfile.get("path") # Calling execute_module from this step with tempfile leaves behind a tmpdir. # This is called to ensure the proper removal. tmpdir = self._connection._shell.tmpdir From f501463ab0c7674c26b11bd2e76e2276d44f193f Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Mon, 18 Dec 2023 11:36:22 -0600 Subject: [PATCH 255/495] Add changelog --- changelogs/fragments/1101-fix-undefined-var.yml | 2 ++ 1 file changed, 2 insertions(+) create mode 100644 changelogs/fragments/1101-fix-undefined-var.yml diff --git a/changelogs/fragments/1101-fix-undefined-var.yml b/changelogs/fragments/1101-fix-undefined-var.yml new file mode 100644 index 000000000..fc0c2be16 --- /dev/null +++ b/changelogs/fragments/1101-fix-undefined-var.yml @@ -0,0 +1,2 @@ +trivial: + - zos_job_submit - Fix undefined variable that got deleted during a conflicting merge. (https://github.com/ansible-collections/ibm_zos_core/pull/1101) \ No newline at end of file From f84389fbe0c0021a84402323efc009bc89a3a2ce Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Mon, 18 Dec 2023 11:42:42 -0600 Subject: [PATCH 256/495] Modified changelog --- changelogs/fragments/1101-fix-undefined-var.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/changelogs/fragments/1101-fix-undefined-var.yml b/changelogs/fragments/1101-fix-undefined-var.yml index fc0c2be16..1d9eeba3c 100644 --- a/changelogs/fragments/1101-fix-undefined-var.yml +++ b/changelogs/fragments/1101-fix-undefined-var.yml @@ -1,2 +1,3 @@ trivial: - - zos_job_submit - Fix undefined variable that got deleted during a conflicting merge. (https://github.com/ansible-collections/ibm_zos_core/pull/1101) \ No newline at end of file + - zos_job_submit - Fix undefined variable that got deleted during a conflicting merge. + (https://github.com/ansible-collections/ibm_zos_core/pull/1101). \ No newline at end of file From 753e1072b3136c669d7ac1a3c9809579683d9260 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <amgutierrezbenitez@hotmail.com> Date: Tue, 2 Jan 2024 14:38:39 -0600 Subject: [PATCH 257/495] Change in fetch --- tests/functional/modules/test_zos_fetch_func.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tests/functional/modules/test_zos_fetch_func.py b/tests/functional/modules/test_zos_fetch_func.py index bc1154de2..7ca003e16 100644 --- a/tests/functional/modules/test_zos_fetch_func.py +++ b/tests/functional/modules/test_zos_fetch_func.py @@ -161,6 +161,7 @@ def test_fetch_uss_file_present_on_local_machine(ansible_zos_module): def test_fetch_sequential_data_set_fixed_block(ansible_zos_module): hosts = ansible_zos_module + hosts.all.zos_data_set(name=TEST_PS, state="present", size="5m") params = dict(src=TEST_PS, dest="/tmp/", flat=True) dest_path = "/tmp/" + TEST_PS try: @@ -172,6 +173,7 @@ def test_fetch_sequential_data_set_fixed_block(ansible_zos_module): assert result.get("dest") == dest_path assert os.path.exists(dest_path) finally: + hosts.all.zos_data_set(name=TEST_PS, state="absent") if os.path.exists(dest_path): os.remove(dest_path) From da5b8c7292fd08bb6cfd18ceabcb7a024d2ecc3a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <amgutierrezbenitez@hotmail.com> Date: Tue, 2 Jan 2024 15:04:42 -0600 Subject: [PATCH 258/495] Modify fetch --- .../functional/modules/test_zos_fetch_func.py | 30 +++++++++++++++++-- 1 file changed, 27 insertions(+), 3 deletions(-) diff --git a/tests/functional/modules/test_zos_fetch_func.py b/tests/functional/modules/test_zos_fetch_func.py index 7ca003e16..b496e2750 100644 --- a/tests/functional/modules/test_zos_fetch_func.py +++ b/tests/functional/modules/test_zos_fetch_func.py @@ -12,9 +12,7 @@ # limitations under the License. from __future__ import absolute_import, division, print_function -from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.data_set import ( - extract_member_name -) + import os import shutil import stat @@ -79,6 +77,15 @@ /* """ +def extract_member_name(data_set): + start = data_set.find("(") + member = "" + for i in range(start + 1, len(data_set)): + if data_set[i] == ")": + break + member += data_set[i] + return member + def create_and_populate_test_ps_vb(ansible_zos_module): params=dict( name=TEST_PS_VB, @@ -162,6 +169,7 @@ def test_fetch_uss_file_present_on_local_machine(ansible_zos_module): def test_fetch_sequential_data_set_fixed_block(ansible_zos_module): hosts = ansible_zos_module hosts.all.zos_data_set(name=TEST_PS, state="present", size="5m") + hosts.all.zos_lineinfile(path=TEST_PS, line="unset ZOAU_ROOT", state="present") params = dict(src=TEST_PS, dest="/tmp/", flat=True) dest_path = "/tmp/" + TEST_PS try: @@ -199,6 +207,8 @@ def test_fetch_sequential_data_set_variable_block(ansible_zos_module): def test_fetch_partitioned_data_set(ansible_zos_module): hosts = ansible_zos_module + hosts.all.zos_data_set(name=TEST_PDS, state="present") + hosts.all.zos_lineinfile(path=TEST_PDS, line="unset ZOAU_ROOT", state="present") params = dict(src=TEST_PDS, dest="/tmp/", flat=True) dest_path = "/tmp/" + TEST_PDS try: @@ -211,6 +221,7 @@ def test_fetch_partitioned_data_set(ansible_zos_module): assert os.path.exists(dest_path) assert os.path.isdir(dest_path) finally: + hosts.all.zos_data_set(name=TEST_PDS, state="absent") if os.path.exists(dest_path): shutil.rmtree(dest_path) @@ -278,6 +289,9 @@ def test_fetch_vsam_empty_data_set(ansible_zos_module): def test_fetch_partitioned_data_set_member_in_binary_mode(ansible_zos_module): hosts = ansible_zos_module + hosts.all.zos_data_set(name=TEST_PDS, state="present") + hosts.all.zos_data_set(name=TEST_PDS_MEMBER, type="member") + hosts.all.zos_lineinfile(path=TEST_PDS_MEMBER, line="unset ZOAU_ROOT", state="present") params = dict( src=TEST_PDS_MEMBER, dest="/tmp/", flat=True, is_binary=True ) @@ -293,12 +307,15 @@ def test_fetch_partitioned_data_set_member_in_binary_mode(ansible_zos_module): assert os.path.exists(dest_path) assert os.path.isfile(dest_path) finally: + hosts.all.zos_data_set(name=TEST_PDS, state="absent") if os.path.exists(dest_path): os.remove(dest_path) def test_fetch_sequential_data_set_in_binary_mode(ansible_zos_module): hosts = ansible_zos_module + hosts.all.zos_data_set(name=TEST_PS, state="present") + hosts.all.zos_lineinfile(path=TEST_PS, line="unset ZOAU_ROOT", state="present") params = dict(src=TEST_PS, dest="/tmp/", flat=True, is_binary=True) dest_path = "/tmp/" + TEST_PS try: @@ -310,12 +327,15 @@ def test_fetch_sequential_data_set_in_binary_mode(ansible_zos_module): assert result.get("is_binary") is True assert os.path.exists(dest_path) finally: + hosts.all.zos_data_set(name=TEST_PS, state="absent") if os.path.exists(dest_path): os.remove(dest_path) def test_fetch_partitioned_data_set_binary_mode(ansible_zos_module): hosts = ansible_zos_module + hosts.all.zos_data_set(name=TEST_PDS, state="present") + hosts.all.zos_lineinfile(path=TEST_PDS, line="unset ZOAU_ROOT", state="present") params = dict(src=TEST_PDS, dest="/tmp/", flat=True, is_binary=True) dest_path = "/tmp/" + TEST_PDS try: @@ -328,6 +348,7 @@ def test_fetch_partitioned_data_set_binary_mode(ansible_zos_module): assert os.path.exists(dest_path) assert os.path.isdir(dest_path) finally: + hosts.all.zos_data_set(name=TEST_PDS, state="absent") if os.path.exists(dest_path): shutil.rmtree(dest_path) @@ -474,6 +495,8 @@ def test_fetch_mvs_data_set_missing_fails(ansible_zos_module): def test_fetch_sequential_data_set_replace_on_local_machine(ansible_zos_module): hosts = ansible_zos_module ds_name = TEST_PS + hosts.all.zos_data_set(name=TEST_PS, state="present") + hosts.all.zos_lineinfile(path=TEST_PS, line="unset ZOAU_ROOT", state="present") dest_path = "/tmp/" + ds_name with open(dest_path, "w") as infile: infile.write(DUMMY_DATA) @@ -487,6 +510,7 @@ def test_fetch_sequential_data_set_replace_on_local_machine(ansible_zos_module): assert result.get("module_stderr") is None assert checksum(dest_path, hash_func=sha256) != local_checksum finally: + hosts.all.zos_data_set(name=TEST_PS, state="absent") if os.path.exists(dest_path): os.remove(dest_path) From 9c7344f406753c5e848a876bc13ee67f992dd5f2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <amgutierrezbenitez@hotmail.com> Date: Tue, 2 Jan 2024 17:30:57 -0600 Subject: [PATCH 259/495] Fix fetch --- .../functional/modules/test_zos_fetch_func.py | 30 ++++++++++++------- 1 file changed, 20 insertions(+), 10 deletions(-) diff --git a/tests/functional/modules/test_zos_fetch_func.py b/tests/functional/modules/test_zos_fetch_func.py index b496e2750..62abd1cf8 100644 --- a/tests/functional/modules/test_zos_fetch_func.py +++ b/tests/functional/modules/test_zos_fetch_func.py @@ -168,8 +168,9 @@ def test_fetch_uss_file_present_on_local_machine(ansible_zos_module): def test_fetch_sequential_data_set_fixed_block(ansible_zos_module): hosts = ansible_zos_module - hosts.all.zos_data_set(name=TEST_PS, state="present", size="5m") - hosts.all.zos_lineinfile(path=TEST_PS, line="unset ZOAU_ROOT", state="present") + TEST_PS = "USER.TEST.FETCH" + hosts.all.zos_data_set(name=TEST_PS, state="present", type="SEQ", size="5m") + hosts.all.zos_blockinfile(src=TEST_PS, block=TEST_DATA) params = dict(src=TEST_PS, dest="/tmp/", flat=True) dest_path = "/tmp/" + TEST_PS try: @@ -207,8 +208,11 @@ def test_fetch_sequential_data_set_variable_block(ansible_zos_module): def test_fetch_partitioned_data_set(ansible_zos_module): hosts = ansible_zos_module - hosts.all.zos_data_set(name=TEST_PDS, state="present") - hosts.all.zos_lineinfile(path=TEST_PDS, line="unset ZOAU_ROOT", state="present") + TEST_PDS = "USER.TEST.FETCH" + TEST_PDS_MEMBER = TEST_PDS + '(MEM1)' + hosts.all.zos_data_set(name=TEST_PDS, state="present", type="PDSE") + hosts.all.zos_data_set(name=TEST_PDS_MEMBER, type="member") + hosts.all.zos_blockinfile(src=TEST_PDS, block=TEST_DATA) params = dict(src=TEST_PDS, dest="/tmp/", flat=True) dest_path = "/tmp/" + TEST_PDS try: @@ -291,7 +295,7 @@ def test_fetch_partitioned_data_set_member_in_binary_mode(ansible_zos_module): hosts = ansible_zos_module hosts.all.zos_data_set(name=TEST_PDS, state="present") hosts.all.zos_data_set(name=TEST_PDS_MEMBER, type="member") - hosts.all.zos_lineinfile(path=TEST_PDS_MEMBER, line="unset ZOAU_ROOT", state="present") + hosts.all.zos_blockinfile(src=TEST_PDS_MEMBER, block=TEST_DATA) params = dict( src=TEST_PDS_MEMBER, dest="/tmp/", flat=True, is_binary=True ) @@ -314,8 +318,9 @@ def test_fetch_partitioned_data_set_member_in_binary_mode(ansible_zos_module): def test_fetch_sequential_data_set_in_binary_mode(ansible_zos_module): hosts = ansible_zos_module - hosts.all.zos_data_set(name=TEST_PS, state="present") - hosts.all.zos_lineinfile(path=TEST_PS, line="unset ZOAU_ROOT", state="present") + TEST_PS = "USER.TEST.FETCH" + hosts.all.zos_data_set(name=TEST_PS, state="present", type="SEQ", size="5m") + hosts.all.zos_blockinfile(src=TEST_PS, block=TEST_DATA) params = dict(src=TEST_PS, dest="/tmp/", flat=True, is_binary=True) dest_path = "/tmp/" + TEST_PS try: @@ -334,8 +339,11 @@ def test_fetch_sequential_data_set_in_binary_mode(ansible_zos_module): def test_fetch_partitioned_data_set_binary_mode(ansible_zos_module): hosts = ansible_zos_module - hosts.all.zos_data_set(name=TEST_PDS, state="present") - hosts.all.zos_lineinfile(path=TEST_PDS, line="unset ZOAU_ROOT", state="present") + TEST_PDS = "USER.TEST.FETCH" + TEST_PDS_MEMBER = TEST_PDS + '(MEM1)' + hosts.all.zos_data_set(name=TEST_PDS, state="present", type="PDSE") + hosts.all.zos_data_set(name=TEST_PDS_MEMBER, type="member") + hosts.all.zos_blockinfile(src=TEST_PDS, block=TEST_DATA) params = dict(src=TEST_PDS, dest="/tmp/", flat=True, is_binary=True) dest_path = "/tmp/" + TEST_PDS try: @@ -494,9 +502,11 @@ def test_fetch_mvs_data_set_missing_fails(ansible_zos_module): def test_fetch_sequential_data_set_replace_on_local_machine(ansible_zos_module): hosts = ansible_zos_module + TEST_PS = "USER.TEST.FETCH" + hosts.all.zos_data_set(name=TEST_PS, state="present", type="SEQ", size="5m") ds_name = TEST_PS hosts.all.zos_data_set(name=TEST_PS, state="present") - hosts.all.zos_lineinfile(path=TEST_PS, line="unset ZOAU_ROOT", state="present") + hosts.all.zos_blockinfile(src=TEST_PS, block=TEST_DATA) dest_path = "/tmp/" + ds_name with open(dest_path, "w") as infile: infile.write(DUMMY_DATA) From d608196d1452e3eda01d23f5fe7583a86c9c5be1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <amgutierrezbenitez@hotmail.com> Date: Tue, 2 Jan 2024 18:29:54 -0600 Subject: [PATCH 260/495] Fix find and continue with fetch --- .../functional/modules/test_zos_fetch_func.py | 1 + .../functional/modules/test_zos_find_func.py | 31 +++++++++++++------ 2 files changed, 22 insertions(+), 10 deletions(-) diff --git a/tests/functional/modules/test_zos_fetch_func.py b/tests/functional/modules/test_zos_fetch_func.py index 62abd1cf8..c5b1d6a86 100644 --- a/tests/functional/modules/test_zos_fetch_func.py +++ b/tests/functional/modules/test_zos_fetch_func.py @@ -276,6 +276,7 @@ def test_fetch_vsam_data_set(ansible_zos_module): def test_fetch_vsam_empty_data_set(ansible_zos_module): hosts = ansible_zos_module + hosts.all.zos_data_set(name=TEST_EMPTY_VSAM, state="present", type="KSDS") params = dict(src=TEST_EMPTY_VSAM, dest="/tmp/", flat=True) dest_path = "/tmp/" + TEST_EMPTY_VSAM try: diff --git a/tests/functional/modules/test_zos_find_func.py b/tests/functional/modules/test_zos_find_func.py index 345927fe5..0c75ce91e 100644 --- a/tests/functional/modules/test_zos_find_func.py +++ b/tests/functional/modules/test_zos_find_func.py @@ -216,20 +216,31 @@ def test_find_data_sets_older_than_age(ansible_zos_module): def test_find_data_sets_larger_than_size(ansible_zos_module): hosts = ansible_zos_module - find_res = hosts.all.zos_find(patterns=['IMSTESTL.MQBATCH.*'], size='100k') - print(vars(find_res)) - for val in find_res.contacted.values(): - assert len(val.get('data_sets')) == 2 - assert val.get('matched') == 2 + TEST_PS1 = 'TEST.PS.ONE' + TEST_PS2 = 'TEST.PS.TWO' + try: + res = hosts.all.zos_data_set(name=TEST_PS1, state="present", size="5m") + res = hosts.all.zos_data_set(name=TEST_PS2, state="present", size="5m") + find_res = hosts.all.zos_find(patterns=['TEST.PS.*'], size="1k") + for val in find_res.contacted.values(): + assert len(val.get('data_sets')) == 2 + assert val.get('matched') == 2 + finally: + hosts.all.zos_data_set(name=TEST_PS1, state="absent") + hosts.all.zos_data_set(name=TEST_PS2, state="absent") def test_find_data_sets_smaller_than_size(ansible_zos_module): hosts = ansible_zos_module - find_res = hosts.all.zos_find(patterns=['IMSTESTL.MQBATCH.*'], size='-1m') - print(vars(find_res)) - for val in find_res.contacted.values(): - assert len(val.get('data_sets')) == 1 - assert val.get('matched') == 1 + TEST_PS = 'IMSTESTL.MQBATCH.PS' + try: + hosts.all.zos_data_set(name=TEST_PS, state="present", type="SEQ", size="1k") + find_res = hosts.all.zos_find(patterns=['IMSTESTL.MQBATCH.*'], size='-1m') + for val in find_res.contacted.values(): + assert len(val.get('data_sets')) == 1 + assert val.get('matched') == 1 + finally: + hosts.all.zos_data_set(name=TEST_PS, state="absent") def test_find_data_sets_in_volume(ansible_zos_module): From f470f2fb81d9a6b206ab132ea54e82b458947b8c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <amgutierrezbenitez@hotmail.com> Date: Tue, 2 Jan 2024 18:38:21 -0600 Subject: [PATCH 261/495] Catalog a vsam --- tests/functional/modules/test_zos_fetch_func.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/functional/modules/test_zos_fetch_func.py b/tests/functional/modules/test_zos_fetch_func.py index c5b1d6a86..459eb7b86 100644 --- a/tests/functional/modules/test_zos_fetch_func.py +++ b/tests/functional/modules/test_zos_fetch_func.py @@ -276,7 +276,7 @@ def test_fetch_vsam_data_set(ansible_zos_module): def test_fetch_vsam_empty_data_set(ansible_zos_module): hosts = ansible_zos_module - hosts.all.zos_data_set(name=TEST_EMPTY_VSAM, state="present", type="KSDS") + hosts.all.zos_data_set(name=TEST_EMPTY_VSAM, state="present", type="KSDS", state="cataloged", volumes="000000") params = dict(src=TEST_EMPTY_VSAM, dest="/tmp/", flat=True) dest_path = "/tmp/" + TEST_EMPTY_VSAM try: From c7369866eb7e94756f61d10f7cf36ceaf77a8222 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <amgutierrezbenitez@hotmail.com> Date: Tue, 2 Jan 2024 18:43:38 -0600 Subject: [PATCH 262/495] Catalog a vsam --- tests/functional/modules/test_zos_fetch_func.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/functional/modules/test_zos_fetch_func.py b/tests/functional/modules/test_zos_fetch_func.py index 459eb7b86..316f5fbdd 100644 --- a/tests/functional/modules/test_zos_fetch_func.py +++ b/tests/functional/modules/test_zos_fetch_func.py @@ -276,7 +276,7 @@ def test_fetch_vsam_data_set(ansible_zos_module): def test_fetch_vsam_empty_data_set(ansible_zos_module): hosts = ansible_zos_module - hosts.all.zos_data_set(name=TEST_EMPTY_VSAM, state="present", type="KSDS", state="cataloged", volumes="000000") + hosts.all.zos_data_set(name=TEST_EMPTY_VSAM, type="KSDS", state="cataloged", volumes="000000") params = dict(src=TEST_EMPTY_VSAM, dest="/tmp/", flat=True) dest_path = "/tmp/" + TEST_EMPTY_VSAM try: From dff9a222419fac105262060a37ef95afce33ffc7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <amgutierrezbenitez@hotmail.com> Date: Tue, 2 Jan 2024 18:44:06 -0600 Subject: [PATCH 263/495] Catalog a vsam --- tests/functional/modules/test_zos_fetch_func.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/functional/modules/test_zos_fetch_func.py b/tests/functional/modules/test_zos_fetch_func.py index 316f5fbdd..29a60754b 100644 --- a/tests/functional/modules/test_zos_fetch_func.py +++ b/tests/functional/modules/test_zos_fetch_func.py @@ -288,6 +288,7 @@ def test_fetch_vsam_empty_data_set(ansible_zos_module): assert result.get("dest") == dest_path assert os.path.exists(dest_path) finally: + hosts.all.zos_data_set(name=TEST_EMPTY_VSAM, state="absent") if os.path.exists(dest_path): os.remove(dest_path) From 2b6fd8a6354b270eec0c27cadae3c422fcbb3f0c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <amgutierrezbenitez@hotmail.com> Date: Wed, 3 Jan 2024 11:17:17 -0600 Subject: [PATCH 264/495] Add name to vsam --- tests/functional/modules/test_zos_fetch_func.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/functional/modules/test_zos_fetch_func.py b/tests/functional/modules/test_zos_fetch_func.py index 29a60754b..3e3c3fc28 100644 --- a/tests/functional/modules/test_zos_fetch_func.py +++ b/tests/functional/modules/test_zos_fetch_func.py @@ -276,6 +276,7 @@ def test_fetch_vsam_data_set(ansible_zos_module): def test_fetch_vsam_empty_data_set(ansible_zos_module): hosts = ansible_zos_module + TEST_EMPTY_VSAM = "TEST.VSAM.DATA" hosts.all.zos_data_set(name=TEST_EMPTY_VSAM, type="KSDS", state="cataloged", volumes="000000") params = dict(src=TEST_EMPTY_VSAM, dest="/tmp/", flat=True) dest_path = "/tmp/" + TEST_EMPTY_VSAM From e8db7353e6fc8072a845be74fc1af86ad8fbb5a3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <amgutierrezbenitez@hotmail.com> Date: Wed, 3 Jan 2024 11:53:19 -0600 Subject: [PATCH 265/495] Add name to vsam --- .../functional/modules/test_zos_fetch_func.py | 36 +++++++++++++++++-- 1 file changed, 33 insertions(+), 3 deletions(-) diff --git a/tests/functional/modules/test_zos_fetch_func.py b/tests/functional/modules/test_zos_fetch_func.py index 3e3c3fc28..b030f2167 100644 --- a/tests/functional/modules/test_zos_fetch_func.py +++ b/tests/functional/modules/test_zos_fetch_func.py @@ -110,6 +110,36 @@ def delete_test_ps_vb(ansible_zos_module): ansible_zos_module.all.zos_data_set(**params) +def create_vsam_data_set(hosts, name, ds_type, add_data=False, key_length=None, key_offset=None): + """Creates a new VSAM on the system. + + Arguments: + hosts (object) -- Ansible instance(s) that can call modules. + name (str) -- Name of the VSAM data set. + type (str) -- Type of the VSAM (KSDS, ESDS, RRDS, LDS) + add_data (bool, optional) -- Whether to add records to the VSAM. + key_length (int, optional) -- Key length (only for KSDS data sets). + key_offset (int, optional) -- Key offset (only for KSDS data sets). + """ + params = dict( + name=name, + type=ds_type, + state="present" + ) + if ds_type == "KSDS": + params["key_length"] = key_length + params["key_offset"] = key_offset + + hosts.all.zos_data_set(**params) + + if add_data: + record_src = "/tmp/zos_copy_vsam_record" + + hosts.all.zos_copy(content=VSAM_RECORDS, dest=record_src) + hosts.all.zos_encode(src=record_src, dest=name, encoding={"from": "ISO8859-1", "to": "IBM-1047"}) + hosts.all.file(path=record_src, state="absent") + + def test_fetch_uss_file_not_present_on_local_machine(ansible_zos_module): hosts = ansible_zos_module params = dict(src="/etc/profile", dest="/tmp/", flat=True) @@ -276,8 +306,8 @@ def test_fetch_vsam_data_set(ansible_zos_module): def test_fetch_vsam_empty_data_set(ansible_zos_module): hosts = ansible_zos_module - TEST_EMPTY_VSAM = "TEST.VSAM.DATA" - hosts.all.zos_data_set(name=TEST_EMPTY_VSAM, type="KSDS", state="cataloged", volumes="000000") + src_ds = "TEST.VSAM.DATA" + create_vsam_data_set(hosts, src_ds, "KSDS", add_data=True, key_length=12, key_offset=0) params = dict(src=TEST_EMPTY_VSAM, dest="/tmp/", flat=True) dest_path = "/tmp/" + TEST_EMPTY_VSAM try: @@ -289,7 +319,7 @@ def test_fetch_vsam_empty_data_set(ansible_zos_module): assert result.get("dest") == dest_path assert os.path.exists(dest_path) finally: - hosts.all.zos_data_set(name=TEST_EMPTY_VSAM, state="absent") + hosts.all.zos_data_set(name=src_ds, state="absent") if os.path.exists(dest_path): os.remove(dest_path) From c6c8459865ef879a0cf171b7227c3bcb944f6add Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <amgutierrezbenitez@hotmail.com> Date: Wed, 3 Jan 2024 12:02:06 -0600 Subject: [PATCH 266/495] Add name to vsam --- tests/functional/modules/test_zos_fetch_func.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/tests/functional/modules/test_zos_fetch_func.py b/tests/functional/modules/test_zos_fetch_func.py index b030f2167..a99c1cf18 100644 --- a/tests/functional/modules/test_zos_fetch_func.py +++ b/tests/functional/modules/test_zos_fetch_func.py @@ -77,6 +77,11 @@ /* """ +VSAM_RECORDS = """00000001A record +00000002A record +00000003A record +""" + def extract_member_name(data_set): start = data_set.find("(") member = "" From 75a1e348e71e54d73b87c7cf6bc0272330348f7b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <amgutierrezbenitez@hotmail.com> Date: Wed, 3 Jan 2024 12:15:40 -0600 Subject: [PATCH 267/495] Add name to vsam --- tests/functional/modules/test_zos_fetch_func.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/functional/modules/test_zos_fetch_func.py b/tests/functional/modules/test_zos_fetch_func.py index a99c1cf18..8f15d0fd8 100644 --- a/tests/functional/modules/test_zos_fetch_func.py +++ b/tests/functional/modules/test_zos_fetch_func.py @@ -312,9 +312,9 @@ def test_fetch_vsam_data_set(ansible_zos_module): def test_fetch_vsam_empty_data_set(ansible_zos_module): hosts = ansible_zos_module src_ds = "TEST.VSAM.DATA" - create_vsam_data_set(hosts, src_ds, "KSDS", add_data=True, key_length=12, key_offset=0) - params = dict(src=TEST_EMPTY_VSAM, dest="/tmp/", flat=True) - dest_path = "/tmp/" + TEST_EMPTY_VSAM + create_vsam_data_set(hosts, src_ds, "KSDS", key_length=12, key_offset=0) + params = dict(src=src_ds, dest="/tmp/", flat=True) + dest_path = "/tmp/" + src_ds try: results = hosts.all.zos_fetch(**params) for result in results.contacted.values(): From 65c48c0276ce0c12abc6e701b8f37c00cf43e419 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <amgutierrezbenitez@hotmail.com> Date: Wed, 3 Jan 2024 16:21:56 -0600 Subject: [PATCH 268/495] Delete and add names to pds with member --- tests/functional/modules/test_zos_fetch_func.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tests/functional/modules/test_zos_fetch_func.py b/tests/functional/modules/test_zos_fetch_func.py index 8f15d0fd8..9593d9307 100644 --- a/tests/functional/modules/test_zos_fetch_func.py +++ b/tests/functional/modules/test_zos_fetch_func.py @@ -331,6 +331,8 @@ def test_fetch_vsam_empty_data_set(ansible_zos_module): def test_fetch_partitioned_data_set_member_in_binary_mode(ansible_zos_module): hosts = ansible_zos_module + TEST_PDS = "TEST.DATASET.TEST" + TEST_PDS_MEMBER = TEST_PDS + "(MEM)" hosts.all.zos_data_set(name=TEST_PDS, state="present") hosts.all.zos_data_set(name=TEST_PDS_MEMBER, type="member") hosts.all.zos_blockinfile(src=TEST_PDS_MEMBER, block=TEST_DATA) From 5d495d0efb517ce4f198efc502ec82776f707126 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Wed, 3 Jan 2024 17:50:21 -0600 Subject: [PATCH 269/495] Remove other modules dependencies --- .../functional/modules/test_zos_fetch_func.py | 36 +++++++------------ 1 file changed, 12 insertions(+), 24 deletions(-) diff --git a/tests/functional/modules/test_zos_fetch_func.py b/tests/functional/modules/test_zos_fetch_func.py index 8f15d0fd8..ef33a9483 100644 --- a/tests/functional/modules/test_zos_fetch_func.py +++ b/tests/functional/modules/test_zos_fetch_func.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2020, 2021, 2023 +# Copyright (c) IBM Corporation 2020 - 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -30,12 +30,11 @@ """ -TEST_PS = "IMSTESTL.IMS01.DDCHKPT" +TEST_PS = "USER.PRIV.TEST" TEST_PS_VB = "USER.PRIV.PSVB" -TEST_PDS = "IMSTESTL.COMNUC" -TEST_PDS_MEMBER = "IMSTESTL.COMNUC(ATRQUERY)" +TEST_PDS = "USER.PRIV.TESTPDS" +TEST_PDS_MEMBER = "USER.PRIV.TESTPDS(ATRQUERY)" TEST_VSAM = "FETCH.TEST.VS" -TEST_EMPTY_VSAM = "IMSTESTL.LDS01.WADS0" FROM_ENCODING = "IBM-1047" TO_ENCODING = "ISO8859-1" USS_FILE = "/tmp/fetch.data" @@ -100,11 +99,7 @@ def create_and_populate_test_ps_vb(ansible_zos_module): block_size='3190' ) ansible_zos_module.all.zos_data_set(**params) - params = dict( - src=TEST_PS_VB, - block=TEST_DATA - ) - ansible_zos_module.all.zos_blockinfile(**params) + ansible_zos_module.all.shell(cmd="decho \"{0}\" \"{1}\"".format(TEST_DATA, TEST_PS_VB)) def delete_test_ps_vb(ansible_zos_module): @@ -115,7 +110,7 @@ def delete_test_ps_vb(ansible_zos_module): ansible_zos_module.all.zos_data_set(**params) -def create_vsam_data_set(hosts, name, ds_type, add_data=False, key_length=None, key_offset=None): +def create_vsam_data_set(hosts, name, ds_type, key_length=None, key_offset=None): """Creates a new VSAM on the system. Arguments: @@ -137,13 +132,6 @@ def create_vsam_data_set(hosts, name, ds_type, add_data=False, key_length=None, hosts.all.zos_data_set(**params) - if add_data: - record_src = "/tmp/zos_copy_vsam_record" - - hosts.all.zos_copy(content=VSAM_RECORDS, dest=record_src) - hosts.all.zos_encode(src=record_src, dest=name, encoding={"from": "ISO8859-1", "to": "IBM-1047"}) - hosts.all.file(path=record_src, state="absent") - def test_fetch_uss_file_not_present_on_local_machine(ansible_zos_module): hosts = ansible_zos_module @@ -205,7 +193,7 @@ def test_fetch_sequential_data_set_fixed_block(ansible_zos_module): hosts = ansible_zos_module TEST_PS = "USER.TEST.FETCH" hosts.all.zos_data_set(name=TEST_PS, state="present", type="SEQ", size="5m") - hosts.all.zos_blockinfile(src=TEST_PS, block=TEST_DATA) + hosts.all.shell(cmd="decho \"{0}\" \"{1}\"".format(TEST_DATA, TEST_PS)) params = dict(src=TEST_PS, dest="/tmp/", flat=True) dest_path = "/tmp/" + TEST_PS try: @@ -247,7 +235,7 @@ def test_fetch_partitioned_data_set(ansible_zos_module): TEST_PDS_MEMBER = TEST_PDS + '(MEM1)' hosts.all.zos_data_set(name=TEST_PDS, state="present", type="PDSE") hosts.all.zos_data_set(name=TEST_PDS_MEMBER, type="member") - hosts.all.zos_blockinfile(src=TEST_PDS, block=TEST_DATA) + hosts.all.shell(cmd="decho \"{0}\" \"{1}\"".format(TEST_DATA, TEST_PDS_MEMBER)) params = dict(src=TEST_PDS, dest="/tmp/", flat=True) dest_path = "/tmp/" + TEST_PDS try: @@ -333,7 +321,7 @@ def test_fetch_partitioned_data_set_member_in_binary_mode(ansible_zos_module): hosts = ansible_zos_module hosts.all.zos_data_set(name=TEST_PDS, state="present") hosts.all.zos_data_set(name=TEST_PDS_MEMBER, type="member") - hosts.all.zos_blockinfile(src=TEST_PDS_MEMBER, block=TEST_DATA) + hosts.all.shell(cmd="decho \"{0}\" \"{1}\"".format(TEST_DATA, TEST_PDS_MEMBER)) params = dict( src=TEST_PDS_MEMBER, dest="/tmp/", flat=True, is_binary=True ) @@ -358,7 +346,7 @@ def test_fetch_sequential_data_set_in_binary_mode(ansible_zos_module): hosts = ansible_zos_module TEST_PS = "USER.TEST.FETCH" hosts.all.zos_data_set(name=TEST_PS, state="present", type="SEQ", size="5m") - hosts.all.zos_blockinfile(src=TEST_PS, block=TEST_DATA) + hosts.all.shell(cmd="decho \"{0}\" \"{1}\"".format(TEST_DATA, TEST_PS)) params = dict(src=TEST_PS, dest="/tmp/", flat=True, is_binary=True) dest_path = "/tmp/" + TEST_PS try: @@ -381,7 +369,7 @@ def test_fetch_partitioned_data_set_binary_mode(ansible_zos_module): TEST_PDS_MEMBER = TEST_PDS + '(MEM1)' hosts.all.zos_data_set(name=TEST_PDS, state="present", type="PDSE") hosts.all.zos_data_set(name=TEST_PDS_MEMBER, type="member") - hosts.all.zos_blockinfile(src=TEST_PDS, block=TEST_DATA) + hosts.all.shell(cmd="decho \"{0}\" \"{1}\"".format(TEST_DATA, TEST_PDS_MEMBER)) params = dict(src=TEST_PDS, dest="/tmp/", flat=True, is_binary=True) dest_path = "/tmp/" + TEST_PDS try: @@ -544,7 +532,7 @@ def test_fetch_sequential_data_set_replace_on_local_machine(ansible_zos_module): hosts.all.zos_data_set(name=TEST_PS, state="present", type="SEQ", size="5m") ds_name = TEST_PS hosts.all.zos_data_set(name=TEST_PS, state="present") - hosts.all.zos_blockinfile(src=TEST_PS, block=TEST_DATA) + hosts.all.shell(cmd="decho \"{0}\" \"{1}\"".format(TEST_DATA, TEST_PS)) dest_path = "/tmp/" + ds_name with open(dest_path, "w") as infile: infile.write(DUMMY_DATA) From 4dbe17d0caa05f758cab1feaf5b47a6ccbb5f7be Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Wed, 3 Jan 2024 17:52:26 -0600 Subject: [PATCH 270/495] Remove hardcoded content --- tests/functional/modules/test_zos_fetch_func.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/functional/modules/test_zos_fetch_func.py b/tests/functional/modules/test_zos_fetch_func.py index b1bd2db5d..d60a82011 100644 --- a/tests/functional/modules/test_zos_fetch_func.py +++ b/tests/functional/modules/test_zos_fetch_func.py @@ -33,7 +33,7 @@ TEST_PS = "USER.PRIV.TEST" TEST_PS_VB = "USER.PRIV.PSVB" TEST_PDS = "USER.PRIV.TESTPDS" -TEST_PDS_MEMBER = "USER.PRIV.TESTPDS(ATRQUERY)" +TEST_PDS_MEMBER = "USER.PRIV.TESTPDS(MEM1)" TEST_VSAM = "FETCH.TEST.VS" FROM_ENCODING = "IBM-1047" TO_ENCODING = "ISO8859-1" From 8a0cf6c051c11e4dc90ea1ee342330a91bc2305a Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Wed, 3 Jan 2024 18:35:15 -0600 Subject: [PATCH 271/495] Modified variable definitions --- tests/functional/modules/test_zos_fetch_func.py | 6 ------ 1 file changed, 6 deletions(-) diff --git a/tests/functional/modules/test_zos_fetch_func.py b/tests/functional/modules/test_zos_fetch_func.py index d60a82011..cf942dc09 100644 --- a/tests/functional/modules/test_zos_fetch_func.py +++ b/tests/functional/modules/test_zos_fetch_func.py @@ -191,7 +191,6 @@ def test_fetch_uss_file_present_on_local_machine(ansible_zos_module): def test_fetch_sequential_data_set_fixed_block(ansible_zos_module): hosts = ansible_zos_module - TEST_PS = "USER.TEST.FETCH" hosts.all.zos_data_set(name=TEST_PS, state="present", type="SEQ", size="5m") hosts.all.shell(cmd="decho \"{0}\" \"{1}\"".format(TEST_DATA, TEST_PS)) params = dict(src=TEST_PS, dest="/tmp/", flat=True) @@ -231,8 +230,6 @@ def test_fetch_sequential_data_set_variable_block(ansible_zos_module): def test_fetch_partitioned_data_set(ansible_zos_module): hosts = ansible_zos_module - TEST_PDS = "USER.TEST.FETCH" - TEST_PDS_MEMBER = TEST_PDS + '(MEM1)' hosts.all.zos_data_set(name=TEST_PDS, state="present", type="PDSE") hosts.all.zos_data_set(name=TEST_PDS_MEMBER, type="member") hosts.all.shell(cmd="decho \"{0}\" \"{1}\"".format(TEST_DATA, TEST_PDS_MEMBER)) @@ -319,8 +316,6 @@ def test_fetch_vsam_empty_data_set(ansible_zos_module): def test_fetch_partitioned_data_set_member_in_binary_mode(ansible_zos_module): hosts = ansible_zos_module - TEST_PDS = "TEST.DATASET.TEST" - TEST_PDS_MEMBER = TEST_PDS + "(MEM)" hosts.all.zos_data_set(name=TEST_PDS, state="present") hosts.all.zos_data_set(name=TEST_PDS_MEMBER, type="member") hosts.all.shell(cmd="decho \"{0}\" \"{1}\"".format(TEST_DATA, TEST_PDS_MEMBER)) @@ -346,7 +341,6 @@ def test_fetch_partitioned_data_set_member_in_binary_mode(ansible_zos_module): def test_fetch_sequential_data_set_in_binary_mode(ansible_zos_module): hosts = ansible_zos_module - TEST_PS = "USER.TEST.FETCH" hosts.all.zos_data_set(name=TEST_PS, state="present", type="SEQ", size="5m") hosts.all.shell(cmd="decho \"{0}\" \"{1}\"".format(TEST_DATA, TEST_PS)) params = dict(src=TEST_PS, dest="/tmp/", flat=True, is_binary=True) From f91978b96f81f1fea1e19183d753939490d85190 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Wed, 3 Jan 2024 19:10:52 -0600 Subject: [PATCH 272/495] removed variable assignments --- tests/functional/modules/test_zos_fetch_func.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/tests/functional/modules/test_zos_fetch_func.py b/tests/functional/modules/test_zos_fetch_func.py index cf942dc09..c55162cd2 100644 --- a/tests/functional/modules/test_zos_fetch_func.py +++ b/tests/functional/modules/test_zos_fetch_func.py @@ -361,8 +361,6 @@ def test_fetch_sequential_data_set_in_binary_mode(ansible_zos_module): def test_fetch_partitioned_data_set_binary_mode(ansible_zos_module): hosts = ansible_zos_module - TEST_PDS = "USER.TEST.FETCH" - TEST_PDS_MEMBER = TEST_PDS + '(MEM1)' hosts.all.zos_data_set(name=TEST_PDS, state="present", type="PDSE") hosts.all.zos_data_set(name=TEST_PDS_MEMBER, type="member") hosts.all.shell(cmd="decho \"{0}\" \"{1}\"".format(TEST_DATA, TEST_PDS_MEMBER)) @@ -524,7 +522,6 @@ def test_fetch_mvs_data_set_missing_fails(ansible_zos_module): def test_fetch_sequential_data_set_replace_on_local_machine(ansible_zos_module): hosts = ansible_zos_module - TEST_PS = "USER.TEST.FETCH" hosts.all.zos_data_set(name=TEST_PS, state="present", type="SEQ", size="5m") ds_name = TEST_PS hosts.all.zos_data_set(name=TEST_PS, state="present") From 6fb2d464a8346e9713794b2a5713236d69e49eb1 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Wed, 3 Jan 2024 19:33:27 -0600 Subject: [PATCH 273/495] Changed IMSTESTL name --- tests/functional/modules/test_zos_find_func.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/functional/modules/test_zos_find_func.py b/tests/functional/modules/test_zos_find_func.py index 0c75ce91e..79df4efac 100644 --- a/tests/functional/modules/test_zos_find_func.py +++ b/tests/functional/modules/test_zos_find_func.py @@ -232,10 +232,10 @@ def test_find_data_sets_larger_than_size(ansible_zos_module): def test_find_data_sets_smaller_than_size(ansible_zos_module): hosts = ansible_zos_module - TEST_PS = 'IMSTESTL.MQBATCH.PS' + TEST_PS = 'USER.FIND.TEST' try: hosts.all.zos_data_set(name=TEST_PS, state="present", type="SEQ", size="1k") - find_res = hosts.all.zos_find(patterns=['IMSTESTL.MQBATCH.*'], size='-1m') + find_res = hosts.all.zos_find(patterns=['USER.FIND.*'], size='-1m') for val in find_res.contacted.values(): assert len(val.get('data_sets')) == 1 assert val.get('matched') == 1 From 9f3d982ec2bb1bc9a694c7676b68dc5fe153b0eb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <amgutierrezbenitez@hotmail.com> Date: Thu, 4 Jan 2024 12:04:51 -0600 Subject: [PATCH 274/495] Add fragment --- .../1158-Test-zos-fetch-find-remove-hardcoded-datasets.yml | 4 ++++ 1 file changed, 4 insertions(+) create mode 100644 changelogs/fragments/1158-Test-zos-fetch-find-remove-hardcoded-datasets.yml diff --git a/changelogs/fragments/1158-Test-zos-fetch-find-remove-hardcoded-datasets.yml b/changelogs/fragments/1158-Test-zos-fetch-find-remove-hardcoded-datasets.yml new file mode 100644 index 000000000..7a470d57c --- /dev/null +++ b/changelogs/fragments/1158-Test-zos-fetch-find-remove-hardcoded-datasets.yml @@ -0,0 +1,4 @@ +trivial: + - zos_fetch - remove hardcoded datasets and dependencies from test cases. + - zos_find - remove hardcoded datasets and dependencies from test cases. + (https://github.com/ansible-collections/ibm_zos_core/pull/1158). \ No newline at end of file From dd71ecbc9ee8752d9635155434fefe372ccba954 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Fri, 5 Jan 2024 09:39:26 -0600 Subject: [PATCH 275/495] [Bugfix][v1.9.0]short job name sends back a value error with a full stack trace as the msg (#1078) * Add types to better_args_parser * Remove unused re import * Manage spaces * Manage documentation * Manage documentation * Modify regex * Modify regex * Add fragment * Test cases * Change bool to str * Solve documentation * Remove runtime error * Remove re * Test case for owner and job id and add to changelog * Cover all cases --- ...ort_job_name_sends_back_a_value_error.yaml | 11 +++ plugins/module_utils/better_arg_parser.py | 30 +++++++ plugins/modules/zos_job_output.py | 20 +++++ plugins/modules/zos_job_query.py | 83 +++++-------------- .../modules/test_zos_job_output_func.py | 7 ++ .../modules/test_zos_job_query_func.py | 15 +++- 6 files changed, 104 insertions(+), 62 deletions(-) create mode 100644 changelogs/fragments/1078-short_job_name_sends_back_a_value_error.yaml diff --git a/changelogs/fragments/1078-short_job_name_sends_back_a_value_error.yaml b/changelogs/fragments/1078-short_job_name_sends_back_a_value_error.yaml new file mode 100644 index 000000000..dd9dc98a5 --- /dev/null +++ b/changelogs/fragments/1078-short_job_name_sends_back_a_value_error.yaml @@ -0,0 +1,11 @@ +bugfixes: + - zos_job_output - When passing a job ID or name less than 8 characters long, the module sent the full stack trace as the module's message. + Change now allows the use of a shorter job ID or name, as well as wildcards. + (https://github.com/ansible-collections/ibm_zos_core/pull/1078). + - zos_job_query - When passing a job ID or name less than 8 characters long, the module sent the full stack trace as the module's message. + Change now allows the use of a shorter job ID or name, as well as wildcards. + (https://github.com/ansible-collections/ibm_zos_core/pull/1078). + +minor_change: + - zos_job_output - When passing a job ID and owner the module take as mutually exclusive. Change now allows the use of a job ID and owner at the same time. + (https://github.com/ansible-collections/ibm_zos_core/pull/1078). \ No newline at end of file diff --git a/plugins/module_utils/better_arg_parser.py b/plugins/module_utils/better_arg_parser.py index 6262d4110..6720f8d10 100644 --- a/plugins/module_utils/better_arg_parser.py +++ b/plugins/module_utils/better_arg_parser.py @@ -148,6 +148,7 @@ def __init__(self, arg_name, contents, resolved_args, arg_defs): "data_set_or_path": self._data_set_or_path_type, "encoding": self._encoding_type, "dd": self._dd_type, + "job_identifier": self._job_identifier, } def handle_arg(self): @@ -743,6 +744,35 @@ def _call_arg_function(self, arg_function, contents): ) ) + # ---------------------------------------------------------------------------- # + # JOB ID AND JOB NAME NAMING RULES # + # ---------------------------------------------------------------------------- # + + def _job_identifier(self, contents, resolve_dependencies): + """Resolver for data_set type arguments. + A text string of up to 8 characters. + The first character must be a letter or a national (#, $, @) character. + Other characters can be letters, numbers, or national (#, $, @) characters. + If the text string contains #, $, or @, enclose the text string in single or double quotation marks. + + Arguments: + contents {str} -- The contents of the argument. + + Raises: + ValueError: When contents is invalid argument type + Returns: + str -- The arguments contents after any necessary operations. + """ + if not fullmatch( + r"(^[a-zA-Z$#@%}]{1}[0-9a-zA-Z$#@%*]{1,7})|(^['\*']{1})", + str(contents), + IGNORECASE, + ): + raise ValueError( + 'Invalid argument "{0}" for type "job_id or job_name".'.format(contents) + ) + return str(contents) + class BetterArgParser(object): def __init__(self, arg_dict): diff --git a/plugins/modules/zos_job_output.py b/plugins/modules/zos_job_output.py index ec4aa0313..40c7d61d0 100644 --- a/plugins/modules/zos_job_output.py +++ b/plugins/modules/zos_job_output.py @@ -417,6 +417,9 @@ from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.job import ( job_output, ) +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import ( + better_arg_parser +) def run_module(): @@ -429,6 +432,23 @@ def run_module(): module = AnsibleModule(argument_spec=module_args, supports_check_mode=True) + args_def = dict( + job_id=dict(type="job_identifier", required=False), + job_name=dict(type="job_identifier", required=False), + owner=dict(type="str", required=False), + ddname=dict(type="str", required=False), + ) + + try: + parser = better_arg_parser.BetterArgParser(args_def) + parsed_args = parser.parse_args(module.params) + module.params = parsed_args + except ValueError as err: + module.fail_json( + msg='Parameter verification failed.', + stderr=str(err) + ) + job_id = module.params.get("job_id") job_name = module.params.get("job_name") owner = module.params.get("owner") diff --git a/plugins/modules/zos_job_query.py b/plugins/modules/zos_job_query.py index cf94fa684..aaa72d9ab 100644 --- a/plugins/modules/zos_job_query.py +++ b/plugins/modules/zos_job_query.py @@ -258,10 +258,11 @@ from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.job import ( job_status, ) - +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import ( + better_arg_parser +) from ansible.module_utils.basic import AnsibleModule from ansible.module_utils._text import to_text -import re def run_module(): @@ -276,11 +277,29 @@ def run_module(): module = AnsibleModule(argument_spec=module_args, supports_check_mode=True) + args_def = dict( + job_name=dict(type="job_identifier", required=False), + owner=dict(type="str", required=False), + job_id=dict(type="job_identifier", required=False), + ) + + try: + parser = better_arg_parser.BetterArgParser(args_def) + parsed_args = parser.parse_args(module.params) + module.params = parsed_args + except ValueError as err: + module.fail_json( + msg='Parameter verification failed.', + stderr=str(err) + ) + if module.check_mode: return result try: - name, id, owner = validate_arguments(module.params) + name = module.params.get("job_name") + id = module.params.get("job_id") + owner = module.params.get("owner") jobs_raw = query_jobs(name, id, owner) if jobs_raw: jobs = parsing_jobs(jobs_raw) @@ -293,64 +312,6 @@ def run_module(): module.exit_json(**result) -# validate_arguments returns a tuple, so we don't have to rebuild the job_name string -def validate_arguments(params): - job_name_in = params.get("job_name") - - job_id = params.get("job_id") - - owner = params.get("owner") - if job_name_in or job_id: - if job_name_in and job_name_in != "*": - job_name_pattern = re.compile(r"^[a-zA-Z$#@%][0-9a-zA-Z$#@%]{0,7}$") - job_name_pattern_with_star = re.compile( - r"^[a-zA-Z$#@%][0-9a-zA-Z$#@%]{0,6}\*$" - ) - test_basic = job_name_pattern.search(job_name_in) - test_star = job_name_pattern_with_star.search(job_name_in) - # logic twist: test_result should be a non-null value from test_basic or test_star - test_result = test_basic - if test_star: - test_result = test_star - - job_name_short = "unused" - # if neither test_basic nor test_star were non-null, check if the string needed to be truncated to the first * - if not test_result: - ix = job_name_in.find("*") - if ix >= 0: - job_name_short = job_name_in[0:ix + 1] - test_result = job_name_pattern.search(job_name_short) - if not test_result: - test_result = job_name_pattern_with_star.search(job_name_short) - - # so now, fail if neither test_basic, test_star or test_base from job_name_short found a match - if not test_result: - raise RuntimeError("Unable to locate job name {0}.".format(job_name_in)) - - if job_id: - job_id_pattern = re.compile("(JOB|TSU|STC)[0-9]{5}|(J|T|S)[0-9]{7}$") - test_basic = job_id_pattern.search(job_id) - test_result = None - - if not test_basic: - ix = job_id.find("*") - if ix > 0: - # this differs from job_name, in that we'll drop the star for the search - job_id_short = job_id[0:ix] - - if job_id_short[0:3] in ['JOB', 'TSU', 'STC'] or job_id_short[0:1] in ['J', 'T', 'S']: - test_result = job_id_short - - if not test_basic and not test_result: - raise RuntimeError("Failed to validate the job id: " + job_id) - else: - raise RuntimeError("Argument Error:Either job name(s) or job id is required") - if job_id and owner: - raise RuntimeError("Argument Error:job id can not be co-exist with owner") - - return job_name_in, job_id, owner - - def query_jobs(job_name, job_id, owner): jobs = [] diff --git a/tests/functional/modules/test_zos_job_output_func.py b/tests/functional/modules/test_zos_job_output_func.py index 11b7cd90d..830828769 100644 --- a/tests/functional/modules/test_zos_job_output_func.py +++ b/tests/functional/modules/test_zos_job_output_func.py @@ -141,3 +141,10 @@ def test_zos_job_output_job_exists_with_filtered_ddname(ansible_zos_module): assert job.get("ddnames")[0].get("ddname") == dd_name finally: hosts.all.file(path=TEMP_PATH, state="absent") + + +def test_zos_job_submit_job_id_and_owner_included(ansible_zos_module): + hosts = ansible_zos_module + results = hosts.all.zos_job_output(job_id="STC00*", owner="MASTER") + for result in results.contacted.values(): + assert result.get("jobs") is not None diff --git a/tests/functional/modules/test_zos_job_query_func.py b/tests/functional/modules/test_zos_job_query_func.py index 7128f12a7..b7c412cd4 100644 --- a/tests/functional/modules/test_zos_job_query_func.py +++ b/tests/functional/modules/test_zos_job_query_func.py @@ -28,7 +28,6 @@ def test_zos_job_query_func(ansible_zos_module): hosts = ansible_zos_module results = hosts.all.zos_job_query(job_name="*", owner="*") - pprint(vars(results)) for result in results.contacted.values(): assert result.get("changed") is False assert result.get("jobs") is not None @@ -111,3 +110,17 @@ def test_zos_job_name_query_multi_wildcards_func(ansible_zos_module): finally: hosts.all.file(path=TEMP_PATH, state="absent") hosts.all.zos_data_set(name=NDATA_SET_NAME, state="absent") + + +def test_zos_job_id_query_short_ids_func(ansible_zos_module): + hosts = ansible_zos_module + qresults = hosts.all.zos_job_query(job_id="STC003") + for qresult in qresults.contacted.values(): + assert qresult.get("jobs") is not None + + +def test_zos_job_id_query_short_ids_with_wilcard_func(ansible_zos_module): + hosts = ansible_zos_module + qresults = hosts.all.zos_job_query(job_id="STC00*") + for qresult in qresults.contacted.values(): + assert qresult.get("jobs") is not None From dd8db396e78776d3fe60eb85aaed8032becf8d87 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Fri, 12 Jan 2024 20:51:21 -0600 Subject: [PATCH 276/495] [Enabler][zos_copy]Refactor calls to use new alias and execute options (#1163) * Refactor zos copy and test case fixed --- ...s_to_use_new_alias_and_execute_options.yml | 3 +++ plugins/modules/zos_copy.py | 19 +++---------------- .../functional/modules/test_zos_copy_func.py | 11 ++++++++--- 3 files changed, 14 insertions(+), 19 deletions(-) create mode 100644 changelogs/fragments/1163-Refactor_calls_to_use_new_alias_and_execute_options.yml diff --git a/changelogs/fragments/1163-Refactor_calls_to_use_new_alias_and_execute_options.yml b/changelogs/fragments/1163-Refactor_calls_to_use_new_alias_and_execute_options.yml new file mode 100644 index 000000000..6cd512427 --- /dev/null +++ b/changelogs/fragments/1163-Refactor_calls_to_use_new_alias_and_execute_options.yml @@ -0,0 +1,3 @@ +trivial: + - zos_copy - Change call to ZOAU python API by using a dictionary to arguments. + (https://github.com/ansible-collections/ibm_zos_core/pull/1163). \ No newline at end of file diff --git a/plugins/modules/zos_copy.py b/plugins/modules/zos_copy.py index 5d68d78a5..9d411f459 100644 --- a/plugins/modules/zos_copy.py +++ b/plugins/modules/zos_copy.py @@ -1543,16 +1543,12 @@ def _mvs_copy_to_uss( except FileExistsError: pass - opts = dict() - if self.executable: - opts["options"] = "-IX " - try: if src_member or src_ds_type in data_set.DataSet.MVS_SEQ: if self.asa_text: response = copy.copy_asa_mvs2uss(src, dest) elif self.executable: - response = datasets._copy(src, dest, None, **opts) + response = datasets._copy(src, dest, alias=True, executable=True) else: response = datasets._copy(src, dest) @@ -1565,7 +1561,7 @@ def _mvs_copy_to_uss( ) else: if self.executable: - response = datasets._copy(src, dest, None, **opts) + response = datasets._copy(src, dest, None, alias=True, executable=True) if response.rc != 0: raise CopyOperationError( @@ -1770,19 +1766,10 @@ def copy_to_member( if self.is_binary or self.asa_text: opts["options"] = "-B" - if self.aliases and not self.executable: - # lower case 'i' for text-based copy (dcp) - opts["options"] = "-i" - - if self.executable: - opts["options"] = "-X" - if self.aliases: - opts["options"] = "-IX" - if self.force_lock: opts["options"] += " -f" - response = datasets._copy(src, dest, None, **opts) + response = datasets._copy(src, dest, alias=self.aliases, executable=self.executable, **opts) rc, out, err = response.rc, response.stdout_response, response.stderr_response return dict( diff --git a/tests/functional/modules/test_zos_copy_func.py b/tests/functional/modules/test_zos_copy_func.py index 42a08890a..15e1cd499 100644 --- a/tests/functional/modules/test_zos_copy_func.py +++ b/tests/functional/modules/test_zos_copy_func.py @@ -3550,8 +3550,13 @@ def test_copy_local_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): assert v_cp.get("rc") == 0 stdout = v_cp.get("stdout") assert stdout is not None - # number of members - assert len(stdout.splitlines()) == 2 + # verify pgms remain executable + pgm_output_map = { + (dest_lib, pgm_mem, COBOL_PRINT_STR), + (dest_lib, pgm2_mem, COBOL_PRINT_STR2), + } + for steplib, pgm, output in pgm_output_map: + validate_loadlib_pgm(hosts, steplib=steplib, pgm_name=pgm, expected_output_str=output) finally: hosts.all.zos_data_set(name=cobol_src_pds, state="absent") @@ -3689,7 +3694,7 @@ def test_copy_pds_loadlib_to_uss_to_pds_loadlib(ansible_zos_module): ) # copy USS dir to dest library pds w aliases copy_res_aliases = hosts.all.zos_copy( - src="{0}{1}".format(uss_dir_path, src_lib.upper()), + src="{0}/{1}".format(uss_dir_path, src_lib.upper()), dest="{0}".format(dest_lib_aliases), remote_src=True, executable=True, From df4189bb0c031a97fca96ded365e78f44050752f Mon Sep 17 00:00:00 2001 From: Ivan Moreno <iamorenosoto@gmail.com> Date: Mon, 15 Jan 2024 13:21:29 -0700 Subject: [PATCH 277/495] Update ZOAU list to include v1.3.0 (#1166) --- scripts/mounts.env | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/scripts/mounts.env b/scripts/mounts.env index 050887102..7240eaaeb 100644 --- a/scripts/mounts.env +++ b/scripts/mounts.env @@ -41,7 +41,8 @@ zoau_mount_list_str="1:1.2.0:/zoau/v1.2.0:IMSTESTU.ZOAU.V120.ZFS "\ "13:1.2.3:/zoau/v1.2.3:IMSTESTU.ZOAU.V123.ZFS "\ "14:1.2.4:/zoau/v1.2.4:IMSTESTU.ZOAU.V124.ZFS "\ "15:1.2.5:/zoau/v1.2.5:IMSTESTU.ZOAU.V125.ZFS "\ -"16:latest:/zoau/latest:IMSTESTU.ZOAU.LATEST.ZFS " +"16:1.3.0:/zoau/v1.3.0:IMSTESTU.ZOAU.V130.ZFS "\ +"17:latest:/zoau/latest:IMSTESTU.ZOAU.LATEST.ZFS " # ------------------------------------------------------------------------------ # PYTHON MOUNT TABLE From 2900ffbbc8a0454b59700f4afeeab7e7508298e7 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Tue, 16 Jan 2024 09:55:02 -0600 Subject: [PATCH 278/495] [v1.10.0][zos_lineinfile] Removed zos_copy dependency from test cases (#1152) * Changed zos_copy to echo * Modified test case * Added changelog --- ...-lineinfile-remove-zos_copy-dependency.yml | 3 ++ .../modules/test_zos_lineinfile_func.py | 34 +++++++++---------- 2 files changed, 20 insertions(+), 17 deletions(-) create mode 100644 changelogs/fragments/1152-zos-lineinfile-remove-zos_copy-dependency.yml diff --git a/changelogs/fragments/1152-zos-lineinfile-remove-zos_copy-dependency.yml b/changelogs/fragments/1152-zos-lineinfile-remove-zos_copy-dependency.yml new file mode 100644 index 000000000..44015bbd9 --- /dev/null +++ b/changelogs/fragments/1152-zos-lineinfile-remove-zos_copy-dependency.yml @@ -0,0 +1,3 @@ +trivial: + - zos_lineinfile - remove zos_copy calls from test cases. + (https://github.com/ansible-collections/ibm_zos_core/pull/1152). diff --git a/tests/functional/modules/test_zos_lineinfile_func.py b/tests/functional/modules/test_zos_lineinfile_func.py index 754316ff3..94f94cb7a 100644 --- a/tests/functional/modules/test_zos_lineinfile_func.py +++ b/tests/functional/modules/test_zos_lineinfile_func.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2020, 2022, 2023 +# Copyright (c) IBM Corporation 2020 - 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -29,14 +29,13 @@ int main(int argc, char** argv) { char dsname[ strlen(argv[1]) + 4]; - sprintf(dsname, "//'%s'", argv[1]); + sprintf(dsname, \\\"//'%s'\\\", argv[1]); FILE* member; - member = fopen(dsname, "rb,type=record"); + member = fopen(dsname, \\\"rb,type=record\\\"); sleep(300); fclose(member); return 0; -} -""" +}""" call_c_jcl="""//PDSELOCK JOB MSGCLASS=A,MSGLEVEL=(1,1),NOTIFY=&SYSUID,REGION=0M //LOCKMEM EXEC PGM=BPXBATCH @@ -893,12 +892,12 @@ def test_ds_line_force(ansible_zos_module, dstype): for result in results.contacted.values(): assert int(result.get("stdout")) != 0 # copy/compile c program and copy jcl to hold data set lock for n seconds in background(&) - hosts.all.zos_copy(content=c_pgm, dest='/tmp/disp_shr/pdse-lock.c', force=True) - hosts.all.zos_copy( - content=call_c_jcl.format(DEFAULT_DATA_SET_NAME, MEMBER_1), - dest='/tmp/disp_shr/call_c_pgm.jcl', - force=True - ) + hosts.all.shell(cmd="echo \"{0}\" > {1}".format(c_pgm, '/tmp/disp_shr/pdse-lock.c')) + hosts.all.shell(cmd="echo \"{0}\" > {1}".format( + call_c_jcl.format( + DEFAULT_DATA_SET_NAME, + MEMBER_1), + '/tmp/disp_shr/call_c_pgm.jcl')) hosts.all.shell(cmd="xlc -o pdse-lock pdse-lock.c", chdir="/tmp/disp_shr/") hosts.all.shell(cmd="submit call_c_pgm.jcl", chdir="/tmp/disp_shr/") time.sleep(5) @@ -946,12 +945,13 @@ def test_ds_line_force_fail(ansible_zos_module, dstype): for result in results.contacted.values(): assert int(result.get("stdout")) != 0 # copy/compile c program and copy jcl to hold data set lock for n seconds in background(&) - hosts.all.zos_copy(content=c_pgm, dest='/tmp/disp_shr/pdse-lock.c', force=True) - hosts.all.zos_copy( - content=call_c_jcl.format(DEFAULT_DATA_SET_NAME, MEMBER_1), - dest='/tmp/disp_shr/call_c_pgm.jcl', - force=True - ) + hosts.all.file(path="/tmp/disp_shr", state='directory') + hosts.all.shell(cmd="echo \"{0}\" > {1}".format(c_pgm, '/tmp/disp_shr/pdse-lock.c')) + hosts.all.shell(cmd="echo \"{0}\" > {1}".format( + call_c_jcl.format( + DEFAULT_DATA_SET_NAME, + MEMBER_1), + '/tmp/disp_shr/call_c_pgm.jcl')) hosts.all.shell(cmd="xlc -o pdse-lock pdse-lock.c", chdir="/tmp/disp_shr/") hosts.all.shell(cmd="submit call_c_pgm.jcl", chdir="/tmp/disp_shr/") time.sleep(5) From f8ea3f02de1da551b847b132825eb761d69f4324 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Tue, 16 Jan 2024 09:55:21 -0600 Subject: [PATCH 279/495] [v1.10.0][zos_fetch] Remove zos_copy dependency from test cases (#1165) * Removed zos_copy from zos_fetch test cases * Added trailing char * Initial commit * Updated changelog * removed old fragment * Update 1165-remove-zos-copy-dep-from-zos-fetch.yml --- .../fragments/1165-remove-zos-copy-dep-from-zos-fetch.yml | 3 +++ tests/functional/modules/test_zos_fetch_func.py | 2 +- 2 files changed, 4 insertions(+), 1 deletion(-) create mode 100644 changelogs/fragments/1165-remove-zos-copy-dep-from-zos-fetch.yml diff --git a/changelogs/fragments/1165-remove-zos-copy-dep-from-zos-fetch.yml b/changelogs/fragments/1165-remove-zos-copy-dep-from-zos-fetch.yml new file mode 100644 index 000000000..9c8593c1a --- /dev/null +++ b/changelogs/fragments/1165-remove-zos-copy-dep-from-zos-fetch.yml @@ -0,0 +1,3 @@ +trivial: + - zos_fetch - Remove zos_copy dependency from zos_fetch test cases. + (https://github.com/ansible-collections/ibm_zos_core/pull/1165). diff --git a/tests/functional/modules/test_zos_fetch_func.py b/tests/functional/modules/test_zos_fetch_func.py index c55162cd2..3b4a9c371 100644 --- a/tests/functional/modules/test_zos_fetch_func.py +++ b/tests/functional/modules/test_zos_fetch_func.py @@ -263,7 +263,7 @@ def test_fetch_vsam_data_set(ansible_zos_module): hosts.all.zos_job_submit( src="{0}/SAMPLE".format(TEMP_JCL_PATH), location="USS", wait=True ) - hosts.all.zos_copy(content=TEST_DATA, dest=USS_FILE) + hosts.all.shell(cmd="echo \"{0}\c\" > {1}".format(TEST_DATA, USS_FILE)) hosts.all.zos_encode( src=USS_FILE, dest=TEST_VSAM, From 27c41131bc528bc0aa1499c9855743c6af122289 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Tue, 16 Jan 2024 09:57:51 -0600 Subject: [PATCH 280/495] [v1.10.0][zos_encode] Remove zos_copy dependency from test cases (#1157) * Removed zos_copy dependency * Added changelog --- .../fragments/1157-remove-zos-copy-from-zos-encode-tests.yml | 3 +++ tests/functional/modules/test_zos_encode_func.py | 3 +-- 2 files changed, 4 insertions(+), 2 deletions(-) create mode 100644 changelogs/fragments/1157-remove-zos-copy-from-zos-encode-tests.yml diff --git a/changelogs/fragments/1157-remove-zos-copy-from-zos-encode-tests.yml b/changelogs/fragments/1157-remove-zos-copy-from-zos-encode-tests.yml new file mode 100644 index 000000000..24f2802d5 --- /dev/null +++ b/changelogs/fragments/1157-remove-zos-copy-from-zos-encode-tests.yml @@ -0,0 +1,3 @@ +trivial: + - zos_encode - Remove zos_copy dependency from zos_encode test cases. + (https://github.com/ansible-collections/ibm_zos_core/pull/1157). diff --git a/tests/functional/modules/test_zos_encode_func.py b/tests/functional/modules/test_zos_encode_func.py index 5f1e8cfbf..7b7952387 100644 --- a/tests/functional/modules/test_zos_encode_func.py +++ b/tests/functional/modules/test_zos_encode_func.py @@ -898,8 +898,7 @@ def test_return_backup_name_on_module_success_and_failure(ansible_zos_module): hosts.all.zos_data_set(name=MVS_PS, state="absent") hosts.all.zos_data_set(name=BACKUP_DATA_SET, state="absent") hosts.all.zos_data_set(name=MVS_PS, state="present", type="seq") - hosts.all.shell(cmd="echo '{0}' > {1}".format(TEST_FILE_TEXT, TEMP_JCL_PATH)) - hosts.all.zos_copy(src=TEMP_JCL_PATH, dest=MVS_PS, remote_src=True) + hosts.all.shell(cmd="decho \"{0}\" \"{1}\"".format(TEST_FILE_TEXT, MVS_PS)) enc_ds = hosts.all.zos_encode( src=MVS_PS, encoding={ From 94180f534d58cfba251bb6c5ed1f4991c87a18cb Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Tue, 16 Jan 2024 10:00:10 -0600 Subject: [PATCH 281/495] [v1.10.0][zos_archive] Remove zos_copy depedency from test cases (#1156) * removed zos_copy from zos_archive tests * Added changelog --- .../1156-zos_archive-remove-zos_copy_dep.yml | 3 +++ tests/functional/modules/test_zos_archive_func.py | 14 ++++++-------- 2 files changed, 9 insertions(+), 8 deletions(-) create mode 100644 changelogs/fragments/1156-zos_archive-remove-zos_copy_dep.yml diff --git a/changelogs/fragments/1156-zos_archive-remove-zos_copy_dep.yml b/changelogs/fragments/1156-zos_archive-remove-zos_copy_dep.yml new file mode 100644 index 000000000..ea8aacee9 --- /dev/null +++ b/changelogs/fragments/1156-zos_archive-remove-zos_copy_dep.yml @@ -0,0 +1,3 @@ +trivial: + - zos_archive - Remove zos_copy dependency from zos_archive test cases. + (https://github.com/ansible-collections/ibm_zos_core/pull/1156). \ No newline at end of file diff --git a/tests/functional/modules/test_zos_archive_func.py b/tests/functional/modules/test_zos_archive_func.py index 2705a7137..32bedb4fe 100644 --- a/tests/functional/modules/test_zos_archive_func.py +++ b/tests/functional/modules/test_zos_archive_func.py @@ -43,9 +43,9 @@ int main(int argc, char** argv) { char dsname[ strlen(argv[1]) + 4]; - sprintf(dsname, "//'%s'", argv[1]); + sprintf(dsname, \\\"//'%s'\\\", argv[1]); FILE* member; - member = fopen(dsname, "rb,type=record"); + member = fopen(dsname, \\\"rb,type=record\\\"); sleep(300); fclose(member); return 0; @@ -857,12 +857,10 @@ def test_mvs_archive_single_dataset_force_lock(ansible_zos_module, format, data_ format_dict["format_options"] = dict(terse_pack="SPACK") # copy/compile c program and copy jcl to hold data set lock for n seconds in background(&) - hosts.all.zos_copy(content=c_pgm, dest='/tmp/disp_shr/pdse-lock.c', force=True) - hosts.all.zos_copy( - content=call_c_jcl.format(ds_to_write), - dest='/tmp/disp_shr/call_c_pgm.jcl', - force=True - ) + hosts.all.shell(cmd="echo \"{0}\" > {1}".format(c_pgm, '/tmp/disp_shr/pdse-lock.c')) + hosts.all.shell(cmd="echo \"{0}\" > {1}".format( + call_c_jcl.format(ds_to_write), + '/tmp/disp_shr/call_c_pgm.jcl')) hosts.all.shell(cmd="xlc -o pdse-lock pdse-lock.c", chdir="/tmp/disp_shr/") # submit jcl From 066e76bff72e907bd3698217c607b9fa11165653 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Mon, 22 Jan 2024 13:51:17 -0600 Subject: [PATCH 282/495] [v1.10.0][zos_blockinfile] Remove zos_copy from test cases (#1167) * Removed zos_copy from test cases * Added changelog * Added file creation --- ...ve-zos-copy-from-zos-blockinfile-tests.yml | 3 +++ .../modules/test_zos_blockinfile_func.py | 26 +++++++++---------- 2 files changed, 15 insertions(+), 14 deletions(-) create mode 100644 changelogs/fragments/1167-remove-zos-copy-from-zos-blockinfile-tests.yml diff --git a/changelogs/fragments/1167-remove-zos-copy-from-zos-blockinfile-tests.yml b/changelogs/fragments/1167-remove-zos-copy-from-zos-blockinfile-tests.yml new file mode 100644 index 000000000..d7fb725af --- /dev/null +++ b/changelogs/fragments/1167-remove-zos-copy-from-zos-blockinfile-tests.yml @@ -0,0 +1,3 @@ +trivial: + - zos_blockinfile - Remove zos_copy dependency from zos_blockinfile test cases. + (https://github.com/ansible-collections/ibm_zos_core/pull/1167). \ No newline at end of file diff --git a/tests/functional/modules/test_zos_blockinfile_func.py b/tests/functional/modules/test_zos_blockinfile_func.py index d768ad59d..b2e567dc1 100644 --- a/tests/functional/modules/test_zos_blockinfile_func.py +++ b/tests/functional/modules/test_zos_blockinfile_func.py @@ -29,9 +29,9 @@ int main(int argc, char** argv) { char dsname[ strlen(argv[1]) + 4]; - sprintf(dsname, "//'%s'", argv[1]); + sprintf(dsname, \\\"//'%s'\\\", argv[1]); FILE* member; - member = fopen(dsname, "rb,type=record"); + member = fopen(dsname, \\\"rb,type=record\\\"); sleep(300); fclose(member); return 0; @@ -1268,12 +1268,11 @@ def test_ds_block_insertafter_regex_force(ansible_zos_module, dstype): for result in results.contacted.values(): assert int(result.get("stdout")) != 0 # copy/compile c program and copy jcl to hold data set lock for n seconds in background(&) - hosts.all.zos_copy(content=c_pgm, dest='/tmp/disp_shr/pdse-lock.c', force=True) - hosts.all.zos_copy( - content=call_c_jcl.format(DEFAULT_DATA_SET_NAME, MEMBER_1), - dest='/tmp/disp_shr/call_c_pgm.jcl', - force=True - ) + hosts.all.file(path="/tmp/disp_shr/", state="directory") + hosts.all.shell(cmd="echo \"{0}\" > {1}".format(c_pgm, '/tmp/disp_shr/pdse-lock.c')) + hosts.all.shell(cmd="echo \"{0}\" > {1}".format( + call_c_jcl.format(DEFAULT_DATA_SET_NAME, MEMBER_1), + '/tmp/disp_shr/call_c_pgm.jcl')) hosts.all.shell(cmd="xlc -o pdse-lock pdse-lock.c", chdir="/tmp/disp_shr/") hosts.all.shell(cmd="submit call_c_pgm.jcl", chdir="/tmp/disp_shr/") time.sleep(5) @@ -1458,12 +1457,11 @@ def test_ds_block_insertafter_regex_fail(ansible_zos_module, dstype): for result in results.contacted.values(): assert int(result.get("stdout")) != 0 # copy/compile c program and copy jcl to hold data set lock for n seconds in background(&) - hosts.all.zos_copy(content=c_pgm, dest='/tmp/disp_shr/pdse-lock.c', force=True) - hosts.all.zos_copy( - content=call_c_jcl.format(DEFAULT_DATA_SET_NAME, MEMBER_1), - dest='/tmp/disp_shr/call_c_pgm.jcl', - force=True - ) + hosts.all.file(path="/tmp/disp_shr/", state="directory") + hosts.all.shell(cmd="echo \"{0}\" > {1}".format(c_pgm, '/tmp/disp_shr/pdse-lock.c')) + hosts.all.shell(cmd="echo \"{0}\" > {1}".format( + call_c_jcl.format(DEFAULT_DATA_SET_NAME, MEMBER_1), + '/tmp/disp_shr/call_c_pgm.jcl')) hosts.all.shell(cmd="xlc -o pdse-lock pdse-lock.c", chdir="/tmp/disp_shr/") hosts.all.shell(cmd="submit call_c_pgm.jcl", chdir="/tmp/disp_shr/") time.sleep(5) From 410925dfa16b6e658e772d7e37cbd8e98af540ea Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Mon, 22 Jan 2024 13:51:42 -0600 Subject: [PATCH 283/495] [v1.10.0][zos_lineinfile] Remove zos encode from zos lineinfile (#1179) * Started removing encoding from tests * WIP * WIP * Added conversion * Removed zos_encode * Added changelog * Removed unused code --- ...move-zos_encode-from_zos_lineinfile-tests.yml | 3 +++ .../modules/test_zos_lineinfile_func.py | 16 ++++++++-------- 2 files changed, 11 insertions(+), 8 deletions(-) create mode 100644 changelogs/fragments/1179-remove-zos_encode-from_zos_lineinfile-tests.yml diff --git a/changelogs/fragments/1179-remove-zos_encode-from_zos_lineinfile-tests.yml b/changelogs/fragments/1179-remove-zos_encode-from_zos_lineinfile-tests.yml new file mode 100644 index 000000000..a95e1c7e2 --- /dev/null +++ b/changelogs/fragments/1179-remove-zos_encode-from_zos_lineinfile-tests.yml @@ -0,0 +1,3 @@ +trivial: + - zos_lineinfile - Remove zos_encode dependency from zos_lineinfile test cases. + (https://github.com/ansible-collections/ibm_zos_core/pull/1179). \ No newline at end of file diff --git a/tests/functional/modules/test_zos_lineinfile_func.py b/tests/functional/modules/test_zos_lineinfile_func.py index 94f94cb7a..e415a76e8 100644 --- a/tests/functional/modules/test_zos_lineinfile_func.py +++ b/tests/functional/modules/test_zos_lineinfile_func.py @@ -242,7 +242,7 @@ def remove_ds_environment(ansible_zos_module, DS_NAME): # not supported data set types NS_DS_TYPE = ['ESDS', 'RRDS', 'LDS'] # The encoding will be only use on a few test -ENCODING = ['IBM-1047', 'ISO8859-1', 'UTF-8'] +ENCODING = [ 'ISO8859-1', 'UTF-8'] ######################### # USS test cases @@ -1005,7 +1005,7 @@ def test_ds_line_does_not_insert_repeated(ansible_zos_module, dstype): def test_uss_encoding(ansible_zos_module, encoding): hosts = ansible_zos_module insert_data = "Insert this string" - params = dict(insertafter="SIMPLE", line=insert_data, state="present") + params = dict(insertafter="SIMPLE", line=insert_data, state="present", encoding={"from":"IBM-1047", "to":encoding}) params["encoding"] = encoding full_path = TEST_FOLDER_LINEINFILE + inspect.stack()[0][3] content = "SIMPLE LINE TO VERIFY" @@ -1013,12 +1013,11 @@ def test_uss_encoding(ansible_zos_module, encoding): hosts.all.shell(cmd="mkdir -p {0}".format(TEST_FOLDER_LINEINFILE)) hosts.all.file(path=full_path, state="touch") hosts.all.shell(cmd="echo \"{0}\" > {1}".format(content, full_path)) - hosts.all.zos_encode(src=full_path, dest=full_path, from_encoding="IBM-1047", to_encoding=params["encoding"]) params["path"] = full_path results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - results = hosts.all.shell(cmd="cat {0}".format(params["path"])) + results = hosts.all.shell(cmd=f"iconv -f IBM-1047 -t {encoding} {full_path}") for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_ENCODING finally: @@ -1032,7 +1031,7 @@ def test_ds_encoding(ansible_zos_module, encoding, dstype): hosts = ansible_zos_module ds_type = dstype insert_data = "Insert this string" - params = dict(insertafter="SIMPLE", line=insert_data, state="present") + params = dict(insertafter="SIMPLE", line=insert_data, state="present", encoding={"from":"IBM-1047", "to":encoding}) params["encoding"] = encoding test_name = "DST13" temp_file = "/tmp/{0}".format(test_name) @@ -1040,7 +1039,7 @@ def test_ds_encoding(ansible_zos_module, encoding, dstype): content = "SIMPLE LINE TO VERIFY" try: hosts.all.shell(cmd="echo \"{0}\" > {1}".format(content, temp_file)) - hosts.all.zos_encode(src=temp_file, dest=temp_file, from_encoding="IBM-1047", to_encoding=params["encoding"]) + hosts.all.shell(cmd=f"iconv -f IBM-1047 -t {params['encoding']} temp_file > temp_file ") hosts.all.zos_data_set(name=ds_name, type=ds_type) if ds_type in ["PDS", "PDSE"]: ds_full_name = ds_name + "(MEM)" @@ -1055,9 +1054,10 @@ def test_ds_encoding(ansible_zos_module, encoding, dstype): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - hosts.all.zos_encode(src=ds_full_name, dest=ds_full_name, from_encoding=params["encoding"], to_encoding="IBM-1047") - results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) + hosts.all.shell(cmd=f"iconv -f {encoding} -t IBM-1047 \"{ds_full_name}\" > \"{ds_full_name}\" ") + results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(ds_full_name)) for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_ENCODING finally: remove_ds_environment(ansible_zos_module, ds_name) \ No newline at end of file From 95f8c23fb6856ac68e204709aef263e9ffa00b62 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Mon, 29 Jan 2024 19:37:33 -0600 Subject: [PATCH 284/495] [v1.10.0] [zos_copy] Enhance zos_copy performance when copying multiple PDS members (#1183) * [v1.9.0] [zos_copy] Enhancement/764/copy members (#1176) * Changed member copy into a bulk call * Modified copy to members * Cleaned code * Removed hardcoded content from zos_copy tests * Added fix for uss files * Added distinction between uss and mvs * Added alias fix * Moved the copy section to below * Modified for seq test * Added fix for copy dest lock * Added msgs for debugging * Added final changes to member copy * Added copy for when seq to pdse * Add changelog * Added a line into docs * Modified doc * Modified doc * Update changelog * Created a new changelog * Corrected typo --- changelogs/fragments/1183-copy-members.yml | 3 + plugins/modules/zos_copy.py | 61 +++++++++++++------ .../functional/modules/test_zos_copy_func.py | 11 ++-- 3 files changed, 52 insertions(+), 23 deletions(-) create mode 100644 changelogs/fragments/1183-copy-members.yml diff --git a/changelogs/fragments/1183-copy-members.yml b/changelogs/fragments/1183-copy-members.yml new file mode 100644 index 000000000..b0b0c7896 --- /dev/null +++ b/changelogs/fragments/1183-copy-members.yml @@ -0,0 +1,3 @@ +minor_changes: + - zos_copy - Improve zos_copy performance when copying multiple members from one PDS/E to another. + (https://github.com/ansible-collections/ibm_zos_core/pull/1183). diff --git a/plugins/modules/zos_copy.py b/plugins/modules/zos_copy.py index 9d411f459..e07b44a97 100644 --- a/plugins/modules/zos_copy.py +++ b/plugins/modules/zos_copy.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019 - 2023 +# Copyright (c) IBM Corporation 2019 - 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -283,6 +283,8 @@ - If C(src) is a directory and ends with "/", the contents of it will be copied into the root of C(dest). If it doesn't end with "/", the directory itself will be copied. + - If C(src) is a directory or a file, file names will be truncated and/or modified + to ensure a valid name for a data set or member. - If C(src) is a VSAM data set, C(dest) must also be a VSAM. - Wildcards can be used to copy multiple PDS/PDSE members to another PDS/PDSE. @@ -1705,33 +1707,56 @@ def copy_to_pdse( existing_members = datasets.list_members(dest) # fyi - this list includes aliases overwritten_members = [] new_members = [] + bulk_src_members = "" + result = dict() for src_member, destination_member in zip(src_members, dest_members): if destination_member in existing_members: overwritten_members.append(destination_member) else: new_members.append(destination_member) - + bulk_src_members += "{0} ".format(src_member) + + # Copy section + if src_ds_type == "USS" or self.asa_text or len(src_members) == 1: + """ + USS -> MVS : Was kept on member by member basis bc file names longer than 8 + characters will throw an error when copying to a PDS, because of the member name + character limit. + MVS -> MVS (asa only): This has to be copied on member by member basis bc OPUT + does not allow for bulk member copy or entire PDS to PDS copy. + """ + for src_member, destination_member in zip(src_members, dest_members): + result = self.copy_to_member( + src_member, + "{0}({1})".format(dest, destination_member), + src_ds_type + ) + else: + """ + MVS -> MVS + Copies a list of members into a PDS, using this list of members greatly + enhances performance of datasets_copy. + """ result = self.copy_to_member( - src_member, - "{0}({1})".format(dest, destination_member), + bulk_src_members, + dest, src_ds_type ) - if result["rc"] != 0: - msg = "Unable to copy source {0} to data set member {1}({2})".format( - new_src, - dest, - destination_member - ) - raise CopyOperationError( - msg=msg, - rc=result["rc"], - stdout=result["out"], - stderr=result["err"], - overwritten_members=overwritten_members, - new_members=new_members - ) + if result["rc"] != 0: + msg = "Unable to copy source {0} to {1}.".format( + new_src, + dest + ) + raise CopyOperationError( + msg=msg, + rc=result["rc"], + stdout=result["out"], + stderr=result["err"], + overwritten_members=overwritten_members, + new_members=new_members + ) def copy_to_member( self, diff --git a/tests/functional/modules/test_zos_copy_func.py b/tests/functional/modules/test_zos_copy_func.py index 15e1cd499..b6fee6689 100644 --- a/tests/functional/modules/test_zos_copy_func.py +++ b/tests/functional/modules/test_zos_copy_func.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2020, 2021, 2023 +# Copyright (c) IBM Corporation 2020 - 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -1958,7 +1958,7 @@ def test_ensure_copy_file_does_not_change_permission_on_dest(ansible_zos_module, @pytest.mark.seq -@pytest.mark.parametrize("ds_type", ["PDS", "PDSE", "SEQ"]) +@pytest.mark.parametrize("ds_type", [ "PDS", "PDSE", "SEQ"]) def test_copy_dest_lock(ansible_zos_module, ds_type): DATASET_1 = "USER.PRIVATE.TESTDS" DATASET_2 = "ADMI.PRIVATE.TESTDS" @@ -1971,8 +1971,8 @@ def test_copy_dest_lock(ansible_zos_module, ds_type): dest_data_set = DATASET_2 try: hosts = ansible_zos_module - hosts.all.zos_data_set(name=DATASET_1, state="present", type="pdse", replace=True) - hosts.all.zos_data_set(name=DATASET_2, state="present", type="pdse", replace=True) + hosts.all.zos_data_set(name=DATASET_1, state="present", type=ds_type, replace=True) + hosts.all.zos_data_set(name=DATASET_2, state="present", type=ds_type, replace=True) if ds_type == "PDS" or ds_type == "PDSE": hosts.all.zos_data_set(name=src_data_set, state="present", type="member", replace=True) hosts.all.zos_data_set(name=dest_data_set, state="present", type="member", replace=True) @@ -4323,9 +4323,10 @@ def test_copy_data_set_to_volume(ansible_zos_module, src_type): hosts = ansible_zos_module source = "USER.TEST.FUNCTEST.SRC" dest = "USER.TEST.FUNCTEST.DEST" - + source_member = "USER.TEST.FUNCTEST.SRC(MEMBER)" try: hosts.all.zos_data_set(name=source, type=src_type, state='present') + hosts.all.zos_data_set(name=source_member, type="member", state='present') copy_res = hosts.all.zos_copy( src=source, dest=dest, From ad72db6add10730163037ed7b5f8e7e12695e3d8 Mon Sep 17 00:00:00 2001 From: ketankelkar <ktnklkr@gmail.com> Date: Tue, 30 Jan 2024 10:01:26 -0800 Subject: [PATCH 285/495] zos_operator - ZOAU 1.3.0 migration (#1181) * adjust value of timeout param which the module measures in seconds to centiseconds for zoau v1.3.x+ migration Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename timeout param in helper functions to distinguish unit of measurement - timeout_s and timeout_c Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add changelog Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update copyright year Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> --- .../fragments/1181-zoau-migration-zos_operator.yml | 4 ++++ plugins/modules/zos_operator.py | 12 ++++++++---- 2 files changed, 12 insertions(+), 4 deletions(-) create mode 100644 changelogs/fragments/1181-zoau-migration-zos_operator.yml diff --git a/changelogs/fragments/1181-zoau-migration-zos_operator.yml b/changelogs/fragments/1181-zoau-migration-zos_operator.yml new file mode 100644 index 000000000..7c107de88 --- /dev/null +++ b/changelogs/fragments/1181-zoau-migration-zos_operator.yml @@ -0,0 +1,4 @@ +trivial: + - zos_operator - Update internal functions to account for the change to the + unit of measurement of `timeout` now in centiseconds. + (https://github.com/ansible-collections/ibm_zos_core/pull/1181). \ No newline at end of file diff --git a/plugins/modules/zos_operator.py b/plugins/modules/zos_operator.py index ca6935163..6281c5cd6 100644 --- a/plugins/modules/zos_operator.py +++ b/plugins/modules/zos_operator.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019 - 2023 +# Copyright (c) IBM Corporation 2019 - 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -172,9 +172,13 @@ ZOAU_API_VERSION = "1.2.0" -def execute_command(operator_cmd, timeout=1, *args, **kwargs): +def execute_command(operator_cmd, timeout_s=1, *args, **kwargs): + + # as of ZOAU v1.3.0, timeout is measured in centiseconds, therefore: + timeout_c = 100 * timeout_s + start = timer() - response = opercmd.execute(operator_cmd, timeout, *args, **kwargs) + response = opercmd.execute(operator_cmd, timeout=timeout_c, *args, **kwargs) end = timer() rc = response.rc stdout = response.stdout_response @@ -293,7 +297,7 @@ def run_operator_command(params): kwargs.update({"wait": True}) args = [] - rc, stdout, stderr, elapsed = execute_command(cmdtxt, timeout=wait_s, *args, **kwargs) + rc, stdout, stderr, elapsed = execute_command(cmdtxt, timeout_s=wait_s, *args, **kwargs) if rc > 0: message = "\nOut: {0}\nErr: {1}\nRan: {2}".format(stdout, stderr, cmdtxt) From 4caa946420ad64690407ebf768cee963100e6300 Mon Sep 17 00:00:00 2001 From: Ivan Moreno <iamorenosoto@gmail.com> Date: Tue, 30 Jan 2024 11:03:09 -0700 Subject: [PATCH 286/495] [v1.10.0] [Enabler] [job] ZOAU v1.3.0 migration for job.py (#1169) * Update calls to jobs * Update copyright years * Add changelog fragment * Rename changelog fragment * Fix references to None types * Update 1169-util-job-zoau-migration.yml --------- Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> --- .../1169-util-job-zoau-migration.yml | 3 + plugins/module_utils/job.py | 90 +++++++++++++------ 2 files changed, 66 insertions(+), 27 deletions(-) create mode 100644 changelogs/fragments/1169-util-job-zoau-migration.yml diff --git a/changelogs/fragments/1169-util-job-zoau-migration.yml b/changelogs/fragments/1169-util-job-zoau-migration.yml new file mode 100644 index 000000000..568aa9a4e --- /dev/null +++ b/changelogs/fragments/1169-util-job-zoau-migration.yml @@ -0,0 +1,3 @@ +trivial: + - module_utils/job.py - migrate code to use ZOAU v1.3.0. + (https://github.com/ansible-collections/ibm_zos_core/pull/1169). diff --git a/plugins/module_utils/job.py b/plugins/module_utils/job.py index 1b8cb06f6..3d7d80d68 100644 --- a/plugins/module_utils/job.py +++ b/plugins/module_utils/job.py @@ -1,4 +1,4 @@ -# Copyright (c) IBM Corporation 2019 - 2023 +# Copyright (c) IBM Corporation 2019 - 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -71,16 +71,27 @@ def job_output(job_id=None, owner=None, job_name=None, dd_name=None, dd_scan=Tru ) parser = BetterArgParser(arg_defs) - parsed_args = parser.parse_args( - {"job_id": job_id, "owner": owner, "job_name": job_name, "dd_name": dd_name} - ) + parsed_args = parser.parse_args({ + "job_id": job_id, + "owner": owner, + "job_name": job_name, + "dd_name": dd_name + }) job_id = parsed_args.get("job_id") or "*" job_name = parsed_args.get("job_name") or "*" owner = parsed_args.get("owner") or "*" dd_name = parsed_args.get("dd_name") or "" - job_detail = _get_job_status(job_id=job_id, owner=owner, job_name=job_name, - dd_name=dd_name, duration=duration, dd_scan=dd_scan, timeout=timeout, start_time=start_time) + job_detail = _get_job_status( + job_id=job_id, + owner=owner, + job_name=job_name, + dd_name=dd_name, + duration=duration, + dd_scan=dd_scan, + timeout=timeout, + start_time=start_time + ) # while ((job_detail is None or len(job_detail) == 0) and duration <= timeout): # current_time = timer() @@ -92,13 +103,22 @@ def job_output(job_id=None, owner=None, job_name=None, dd_name=None, dd_scan=Tru job_id = "" if job_id == "*" else job_id owner = "" if owner == "*" else owner job_name = "" if job_name == "*" else job_name - job_detail = _get_job_status(job_id=job_id, owner=owner, job_name=job_name, - dd_name=dd_name, dd_scan=dd_scan, duration=duration, timeout=timeout, start_time=start_time) + + job_detail = _get_job_status( + job_id=job_id, + owner=owner, + job_name=job_name, + dd_name=dd_name, + dd_scan=dd_scan, + duration=duration, + timeout=timeout, + start_time=start_time + ) return job_detail def _job_not_found(job_id, owner, job_name, dd_name): - # Note that the text in the msg_txt is used in test cases thus sensitive to change + # Note that the text in the msg_txt is used in test cases and thus sensitive to change jobs = [] if job_id != '*' and job_name != '*': job_not_found_msg = "{0} with the job_id {1}".format(job_name.upper(), job_id.upper()) @@ -170,13 +190,24 @@ def job_status(job_id=None, owner=None, job_name=None, dd_name=None): job_name = parsed_args.get("job_name") or "*" owner = parsed_args.get("owner") or "*" - job_status_result = _get_job_status(job_id=job_id, owner=owner, job_name=job_name, dd_scan=False) + job_status_result = _get_job_status( + job_id=job_id, + owner=owner, + job_name=job_name, + dd_scan=False + ) if len(job_status_result) == 0: job_id = "" if job_id == "*" else job_id job_name = "" if job_name == "*" else job_name owner = "" if owner == "*" else owner - job_status_result = _get_job_status(job_id=job_id, owner=owner, job_name=job_name, dd_scan=False) + + job_status_result = _get_job_status( + job_id=job_id, + owner=owner, + job_name=job_name, + dd_scan=False + ) return job_status_result @@ -223,16 +254,13 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=T # This will also help maintain compatibility with 1.2.3 final_entries = [] - kwargs = { - "job_id": job_id_temp, - } - entries = jobs.listing(**kwargs) + entries = jobs.fetch_multiple(job_id=job_id_temp) while ((entries is None or len(entries) == 0) and duration <= timeout): current_time = timer() duration = round(current_time - start_time) sleep(1) - entries = jobs.listing(**kwargs) + entries = jobs.fetch_multiple(job_id=job_id_temp) if entries: for entry in entries: @@ -243,30 +271,35 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=T if not fnmatch.fnmatch(entry.name, job_name): continue if job_id_temp is not None: - if not fnmatch.fnmatch(entry.id, job_id): + if not fnmatch.fnmatch(entry.job_id, job_id): continue job = {} - job["job_id"] = entry.id + job["job_id"] = entry.job_id job["job_name"] = entry.name job["subsystem"] = "" job["system"] = "" job["owner"] = entry.owner job["ret_code"] = {} - job["ret_code"]["msg"] = entry.status + " " + entry.rc - job["ret_code"]["msg_code"] = entry.rc + job["ret_code"]["msg"] = "{0} {1}".format(entry.status, entry.return_code) + job["ret_code"]["msg_code"] = entry.return_code job["ret_code"]["code"] = None - if len(entry.rc) > 0: - if entry.rc.isdigit(): - job["ret_code"]["code"] = int(entry.rc) + if entry.return_code and len(entry.return_code) > 0: + if entry.return_code.isdigit(): + job["ret_code"]["code"] = int(entry.return_code) job["ret_code"]["msg_text"] = entry.status # this section only works on zoau 1.2.3/+ vvv + # Beginning in ZOAU v1.3.0, the Job class changes svc_class to + # service_class. + if zoau_version_checker.is_zoau_version_higher_than("1.2.5"): + job["service_class"] = entry.service_class + elif zoau_version_checker.is_zoau_version_higher_than("1.2.2"): + job["svc_class"] = entry.svc_class if zoau_version_checker.is_zoau_version_higher_than("1.2.2"): job["job_class"] = entry.job_class - job["svc_class"] = entry.svc_class job["priority"] = entry.priority job["asid"] = entry.asid job["creation_date"] = str(entry.creation_datetime)[0:10] @@ -284,12 +317,12 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=T job["duration"] = duration if dd_scan: - list_of_dds = jobs.list_dds(entry.id) + list_of_dds = jobs.list_dds(entry.job_id) while ((list_of_dds is None or len(list_of_dds) == 0) and duration <= timeout): current_time = timer() duration = round(current_time - start_time) sleep(1) - list_of_dds = jobs.list_dds(entry.id) + list_of_dds = jobs.list_dds(entry.job_id) job["duration"] = duration @@ -335,7 +368,10 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=T if "stepname" in single_dd: if "dataset" in single_dd: tmpcont = jobs.read_output( - entry.id, single_dd["stepname"], single_dd["dataset"]) + entry.job_id, + single_dd["stepname"], + single_dd["dataset"] + ) dd["content"] = tmpcont.split("\n") job["ret_code"]["steps"].extend(_parse_steps(tmpcont)) From 433cfc0bf85dddd9afcb9c0f2729d0613366b13d Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Tue, 30 Jan 2024 12:05:19 -0600 Subject: [PATCH 287/495] [v1.10.0] [module_utils/copy.py] Implement ZOAU 1.3 migration changes into module_utils/copy.py (#1187) * Replaced zoau datasets import * Updated changelog * Updated changelog * Modified copyright year * Update 1187-migrate-module-utils-copy.yml --- .../fragments/1187-migrate-module-utils-copy.yml | 3 +++ plugins/module_utils/copy.py | 12 ++++++++++-- 2 files changed, 13 insertions(+), 2 deletions(-) create mode 100644 changelogs/fragments/1187-migrate-module-utils-copy.yml diff --git a/changelogs/fragments/1187-migrate-module-utils-copy.yml b/changelogs/fragments/1187-migrate-module-utils-copy.yml new file mode 100644 index 000000000..26157f9fc --- /dev/null +++ b/changelogs/fragments/1187-migrate-module-utils-copy.yml @@ -0,0 +1,3 @@ +trivial: + - module_utils/copy.py - migrate code to use ZOAU v1.3.0. + (https://github.com/ansible-collections/ibm_zos_core/pull/1187). diff --git a/plugins/module_utils/copy.py b/plugins/module_utils/copy.py index ac9e74758..71b47c974 100644 --- a/plugins/module_utils/copy.py +++ b/plugins/module_utils/copy.py @@ -1,4 +1,4 @@ -# Copyright (c) IBM Corporation 2019-2023 +# Copyright (c) IBM Corporation 2019-2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -283,7 +283,15 @@ def copy_asa_pds2uss(src, dest): str -- The stderr after the copy command executed successfully """ from os import path - from zoautil_py import datasets + import traceback + from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import ( + ZOAUImportError, + ) + + try: + from zoautil_py import datasets + except Exception: + datasets = ZOAUImportError(traceback.format_exc()) src = _validate_data_set_name(src) dest = _validate_path(dest) From 032d0d83bbd80b51d51cb880fb0914524b906211 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Tue, 30 Jan 2024 12:06:25 -0600 Subject: [PATCH 288/495] [v1.10.0] [module_utils/dd_statement.py] Implement ZOAU 1.3 migration changes into module_utils/dd_statement.py (#1190) * Migrated module_utils/dd_statement * Added changelog * Update 1190-migrate-module_utils-dd_statement.yml --- .../1190-migrate-module_utils-dd_statement.yml | 3 +++ plugins/module_utils/dd_statement.py | 12 ++++++------ 2 files changed, 9 insertions(+), 6 deletions(-) create mode 100644 changelogs/fragments/1190-migrate-module_utils-dd_statement.yml diff --git a/changelogs/fragments/1190-migrate-module_utils-dd_statement.yml b/changelogs/fragments/1190-migrate-module_utils-dd_statement.yml new file mode 100644 index 000000000..4bb3a582d --- /dev/null +++ b/changelogs/fragments/1190-migrate-module_utils-dd_statement.yml @@ -0,0 +1,3 @@ +trivial: + - module_utils/dd_statement.py - migrate code to use ZOAU v1.3.0. + (https://github.com/ansible-collections/ibm_zos_core/pull/1190). diff --git a/plugins/module_utils/dd_statement.py b/plugins/module_utils/dd_statement.py index d35f9e44e..57b7bcdad 100644 --- a/plugins/module_utils/dd_statement.py +++ b/plugins/module_utils/dd_statement.py @@ -1,4 +1,4 @@ -# Copyright (c) IBM Corporation 2020, 2023 +# Copyright (c) IBM Corporation 2020 - 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -10,11 +10,11 @@ # limitations under the License. from __future__ import absolute_import, division, print_function - +import traceback __metaclass__ = type from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import ( - MissingZOAUImport, + ZOAUImportError, ) from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.data_set import DataSet @@ -22,7 +22,7 @@ try: from zoautil_py import datasets except ImportError: - datasets = MissingZOAUImport() + datasets = ZOAUImportError(traceback.format_exc()) space_units = {"b": "", "kb": "k", "mb": "m", "gb": "g"} @@ -651,8 +651,8 @@ def __init__(self, tmphlq=None): if tmphlq: hlq = tmphlq else: - hlq = datasets.hlq() - name = datasets.tmp_name(hlq) + hlq = datasets.get_hlq() + name = datasets.tmp_name(high_level_qualifier=hlq) super().__init__(name) def __del__(self): From 7ef0c9e6ca142a6264348c96a7eb68f8b9b8a965 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Wed, 31 Jan 2024 13:52:39 -0600 Subject: [PATCH 289/495] [v1.10.0] [module_utils/backup.py] Implement ZOAU 1.3 migration changes into module_utils/backup.py (#1188) * Changed datasets call to zoau 1.3 * Updated changelog * Corrected changelog location and tag * Corrected copyright dates * Removed wrong exception raise * Removed unused var --- .../1188-migrate-module_utils-backup.yml | 3 ++ plugins/module_utils/backup.py | 31 +++++++++++-------- 2 files changed, 21 insertions(+), 13 deletions(-) create mode 100644 changelogs/fragments/1188-migrate-module_utils-backup.yml diff --git a/changelogs/fragments/1188-migrate-module_utils-backup.yml b/changelogs/fragments/1188-migrate-module_utils-backup.yml new file mode 100644 index 000000000..65945d06b --- /dev/null +++ b/changelogs/fragments/1188-migrate-module_utils-backup.yml @@ -0,0 +1,3 @@ +trivial: + - module_utils/backup.py - migrate code to use ZOAU v1.3.0. + (https://github.com/ansible-collections/ibm_zos_core/pull/1188). \ No newline at end of file diff --git a/plugins/module_utils/backup.py b/plugins/module_utils/backup.py index 28339d842..46f8669c5 100644 --- a/plugins/module_utils/backup.py +++ b/plugins/module_utils/backup.py @@ -1,4 +1,4 @@ -# Copyright (c) IBM Corporation 2020, 2022 +# Copyright (c) IBM Corporation 2020 - 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -22,8 +22,9 @@ import time from shutil import copy2, copytree, rmtree +import traceback from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import ( - MissingZOAUImport, + ZOAUImportError, ) from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.better_arg_parser import ( BetterArgParser, @@ -39,9 +40,10 @@ from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.mvs_cmd import iebcopy try: - from zoautil_py import datasets + from zoautil_py import datasets, exceptions except Exception: - datasets = MissingZOAUImport() + datasets = ZOAUImportError(traceback.format_exc()) + exceptions = ZOAUImportError(traceback.format_exc()) if PY3: from shlex import quote else: @@ -76,29 +78,32 @@ def mvs_file_backup(dsn, bk_dsn=None, tmphlq=None): bk_dsn = extract_dsname(dsn) + "({0})".format(temp_member_name()) bk_dsn = _validate_data_set_name(bk_dsn).upper() - response = datasets._copy(dsn, bk_dsn) - if response.rc != 0: + try: + datasets.copy(dsn, bk_dsn) + except exceptions.ZOAUException as copy_exception: raise BackupError( "Unable to backup {0} to {1}".format(dsn, bk_dsn), - rc=response.rc, - stdout=response.stdout_response, - stderr=response.stderr_response + rc=copy_exception.response.rc, + stdout=copy_exception.response.stdout_response, + stderr=copy_exception.response.stderr_response ) else: if not bk_dsn: if tmphlq: hlq = tmphlq else: - hlq = datasets.hlq() - bk_dsn = datasets.tmp_name(hlq) + hlq = datasets.get_hlq() + bk_dsn = datasets.tmp_name(high_level_qualifier=hlq) bk_dsn = _validate_data_set_name(bk_dsn).upper() # In case the backup ds is a member we trust that the PDS attributes are ok to fit the src content. # This should not delete a PDS just to create a backup member. # Otherwise, we allocate the appropiate space for the backup ds based on src. if is_member(bk_dsn): - cp_response = datasets._copy(dsn, bk_dsn) - cp_rc = cp_response.rc + try: + cp_rc = datasets.copy(dsn, bk_dsn) + except exceptions.ZOAUException as copy_exception: + cp_rc = copy_exception.response.rc else: cp_rc = _copy_ds(dsn, bk_dsn) From 79a1ce1a00fba1495f80b5ccd5db7f5e50825cac Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Wed, 31 Jan 2024 14:09:09 -0600 Subject: [PATCH 290/495] [v1.10.0][module_utils/data_set.py] Implement ZOAU 1.3 migration changes into module_utils/data_set.py (#1182) * Made changes to module utils * Added traceback import * Updated changelog * Changed year * Updated zoau import * Update 1182-migrate-module-utils-data-set.yml * Changed build zoau args to dataset type --- .../1182-migrate-module-utils-data-set.yml | 3 ++ plugins/module_utils/data_set.py | 45 ++++++++++++------- 2 files changed, 31 insertions(+), 17 deletions(-) create mode 100644 changelogs/fragments/1182-migrate-module-utils-data-set.yml diff --git a/changelogs/fragments/1182-migrate-module-utils-data-set.yml b/changelogs/fragments/1182-migrate-module-utils-data-set.yml new file mode 100644 index 000000000..857327254 --- /dev/null +++ b/changelogs/fragments/1182-migrate-module-utils-data-set.yml @@ -0,0 +1,3 @@ +trivial: + - module_utils/data_set.py - migrate code to use ZOAU v1.3.0. + (https://github.com/ansible-collections/ibm_zos_core/pull/1182). diff --git a/plugins/module_utils/data_set.py b/plugins/module_utils/data_set.py index 12265e1b4..8b02d77f4 100644 --- a/plugins/module_utils/data_set.py +++ b/plugins/module_utils/data_set.py @@ -1,4 +1,4 @@ -# Copyright (c) IBM Corporation 2020 - 2023 +# Copyright (c) IBM Corporation 2020 - 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -15,6 +15,7 @@ import re import tempfile +import traceback from os import path, walk from string import ascii_uppercase, digits from random import sample @@ -24,8 +25,8 @@ AnsibleModuleHelper, ) from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import ( - MissingZOAUImport, MissingImport, + ZOAUImportError, ) from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import ( @@ -39,9 +40,10 @@ vtoc = MissingImport("vtoc") try: - from zoautil_py import datasets + from zoautil_py import datasets, exceptions except ImportError: - datasets = MissingZOAUImport() + datasets = ZOAUImportError(traceback.format_exc()) + exceptions = ZOAUImportError(traceback.format_exc()) class DataSet(object): @@ -316,7 +318,11 @@ def allocate_model_data_set(ds_name, model, executable=False, asa_text=False, vo # Now adding special parameters for sequential and partitioned # data sets. if model_type not in DataSet.MVS_VSAM: - block_size = datasets.listing(model)[0].block_size + try: + data_set = datasets.list_datasets(model)[0] + except IndexError: + raise AttributeError("Could not retrieve model data set block size.") + block_size = data_set.block_size alloc_cmd = """{0} - BLKSIZE({1})""".format(alloc_cmd, block_size) @@ -500,7 +506,7 @@ def data_set_volume(name): DatasetVolumeError: When the function is unable to parse the value of VOLSER. """ - data_set_information = datasets.listing(name) + data_set_information = datasets.list_datasets(name) if len(data_set_information) > 0: return data_set_information[0].volume @@ -535,10 +541,10 @@ def data_set_type(name, volume=None): if not DataSet.data_set_exists(name, volume): return None - data_sets_found = datasets.listing(name) + data_sets_found = datasets.list_datasets(name) # Using the DSORG property when it's a sequential or partitioned - # dataset. VSAMs are not found by datasets.listing. + # dataset. VSAMs are not found by datasets.list_datasets. if len(data_sets_found) > 0: return data_sets_found[0].dsorg @@ -912,7 +918,7 @@ def _build_zoau_args(**kwargs): volumes = ",".join(volumes) if volumes else None kwargs["space_primary"] = primary kwargs["space_secondary"] = secondary - kwargs["type"] = type + kwargs["dataset_type"] = type kwargs["volumes"] = volumes kwargs.pop("space_type", None) renamed_args = {} @@ -946,7 +952,7 @@ def create( force=None, ): """A wrapper around zoautil_py - Dataset.create() to raise exceptions on failure. + datasets.create() to raise exceptions on failure. Reasonable default arguments will be set by ZOAU when necessary. Args: @@ -1007,17 +1013,22 @@ def create( """ original_args = locals() formatted_args = DataSet._build_zoau_args(**original_args) - response = datasets._create(**formatted_args) - if response.rc > 0: + try: + datasets.create(**formatted_args) + except (exceptions.ZOAUException, exceptions.DatasetVerificationError) as create_exception: raise DatasetCreateError( - name, response.rc, response.stdout_response + response.stderr_response + name, + create_exception.response.rc, + create_exception.response.stdout_response + create_exception.response.stderr_response ) - return response.rc + # With ZOAU 1.3 we switched from getting a ZOAUResponse obj to a Dataset obj, previously we returned + # response.rc now we just return 0 if nothing failed + return 0 @staticmethod def delete(name): """A wrapper around zoautil_py - Dataset.delete() to raise exceptions on failure. + datasets.delete() to raise exceptions on failure. Arguments: name (str) -- The name of the data set to delete. @@ -1056,7 +1067,7 @@ def create_member(name): @staticmethod def delete_member(name, force=False): """A wrapper around zoautil_py - Dataset.delete_members() to raise exceptions on failure. + datasets.delete_members() to raise exceptions on failure. Arguments: name (str) -- The name of the data set, including member name, to delete. @@ -1306,7 +1317,7 @@ def temp_name(hlq=""): str: The temporary data set name. """ if not hlq: - hlq = datasets.hlq() + hlq = datasets.get_hlq() temp_name = datasets.tmp_name(hlq) return temp_name From 0d889000ede8d6620ecc0be4c56b809a50db5275 Mon Sep 17 00:00:00 2001 From: Ivan Moreno <iamorenosoto@gmail.com> Date: Wed, 31 Jan 2024 13:09:45 -0700 Subject: [PATCH 291/495] [Enabler] [zos_copy] Remove zos_fetch call in loadlib test (#1184) * Remove zos_fetch call in loadlib test * Add changelog fragment * Change use of cp to dcp * Add delay to last zos_copy call * Change dcp call * Disable cleanup temporarily * Change tmp dir used * Change scp for sftp * Turn cleanup on once again * Removed print statement --------- Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> --- ...184-remove-zos-fetch-dep-from-zos-copy.yml | 3 ++ .../functional/modules/test_zos_copy_func.py | 34 +++++++++++++++---- 2 files changed, 31 insertions(+), 6 deletions(-) create mode 100644 changelogs/fragments/1184-remove-zos-fetch-dep-from-zos-copy.yml diff --git a/changelogs/fragments/1184-remove-zos-fetch-dep-from-zos-copy.yml b/changelogs/fragments/1184-remove-zos-fetch-dep-from-zos-copy.yml new file mode 100644 index 000000000..9085743d9 --- /dev/null +++ b/changelogs/fragments/1184-remove-zos-fetch-dep-from-zos-copy.yml @@ -0,0 +1,3 @@ +trivial: + - zos_copy - Remove zos_fetch dependency from zos_copy test cases. + (https://github.com/ansible-collections/ibm_zos_core/pull/1184). diff --git a/tests/functional/modules/test_zos_copy_func.py b/tests/functional/modules/test_zos_copy_func.py index b6fee6689..2cc11c9dd 100644 --- a/tests/functional/modules/test_zos_copy_func.py +++ b/tests/functional/modules/test_zos_copy_func.py @@ -3434,7 +3434,6 @@ def test_copy_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): @pytest.mark.aliases @pytest.mark.parametrize("is_created", [False, True]) def test_copy_local_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): - hosts = ansible_zos_module cobol_src_pds = "USER.COBOL.SRC" @@ -3444,6 +3443,7 @@ def test_copy_local_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): dest_lib = "USER.LOAD.DEST" pgm_mem = "HELLO" pgm2_mem = "HELLO2" + uss_location = "/tmp/loadlib" try: @@ -3487,11 +3487,32 @@ def test_copy_local_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): validate_loadlib_pgm(hosts, steplib=src_lib, pgm_name=pgm_mem, expected_output_str=COBOL_PRINT_STR) # fetch loadlib into local - tmp_folder = tempfile.TemporaryDirectory(prefix="tmpfetch") - # fetch loadlib to local - fetch_result = hosts.all.zos_fetch(src=src_lib, dest=tmp_folder.name, is_binary=True) - for res in fetch_result.contacted.values(): - source_path = res.get("dest") + # Copying the loadlib to USS. + hosts.all.file(name=uss_location, state='directory') + hosts.all.shell( + cmd=f"dcp -X -I \"{src_lib}\" {uss_location}", + executable=SHELL_EXECUTABLE + ) + + # Copying the remote loadlibs in USS to a local dir. + # This section ONLY handles ONE host, so if we ever use multiple hosts to + # test, we will need to update this code. + remote_user = hosts["options"]["user"] + # Removing a trailing comma because the framework saves the hosts list as a + # string instead of a list. + remote_host = hosts["options"]["inventory"].replace(",", "") + + tmp_folder = tempfile.TemporaryDirectory(prefix="tmpfetch") + cmd = [ + "sftp", + "-r", + f"{remote_user}@{remote_host}:{uss_location}", + f"{tmp_folder.name}" + ] + with subprocess.Popen(cmd, shell=False, stdout=subprocess.PIPE) as sftp_proc: + result = sftp_proc.stdout.read() + + source_path = os.path.join(tmp_folder.name, os.path.basename(uss_location)) if not is_created: # ensure dest data sets absent for this variation of the test case. @@ -3562,6 +3583,7 @@ def test_copy_local_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): hosts.all.zos_data_set(name=cobol_src_pds, state="absent") hosts.all.zos_data_set(name=src_lib, state="absent") hosts.all.zos_data_set(name=dest_lib, state="absent") + hosts.all.file(name=uss_location, state="absent") @pytest.mark.pdse From 2109a5cdc42f25bb9a39b53d5f2216bab00e714e Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Wed, 31 Jan 2024 14:10:16 -0600 Subject: [PATCH 292/495] [v1.10.0] [module_utils/encode.py] Implement ZOAU 1.3 migration changes into module_utils/encode.py (#1189) * Updated module_utils encode * Updated changelog * Update 1189-migrate-module_utils-encode.yml * Modified datasets.create call * Changed datasets.create call --- .../1189-migrate-module_utils-encode.yml | 3 +++ plugins/module_utils/encode.py | 24 +++++++++---------- 2 files changed, 15 insertions(+), 12 deletions(-) create mode 100644 changelogs/fragments/1189-migrate-module_utils-encode.yml diff --git a/changelogs/fragments/1189-migrate-module_utils-encode.yml b/changelogs/fragments/1189-migrate-module_utils-encode.yml new file mode 100644 index 000000000..d7f471847 --- /dev/null +++ b/changelogs/fragments/1189-migrate-module_utils-encode.yml @@ -0,0 +1,3 @@ +trivial: + - module_utils/encode.py - migrate code to use ZOAU v1.3.0. + (https://github.com/ansible-collections/ibm_zos_core/pull/1189). diff --git a/plugins/module_utils/encode.py b/plugins/module_utils/encode.py index 26bb983b3..195802583 100644 --- a/plugins/module_utils/encode.py +++ b/plugins/module_utils/encode.py @@ -1,4 +1,4 @@ -# Copyright (c) IBM Corporation 2020 - 2023 +# Copyright (c) IBM Corporation 2020 - 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -24,9 +24,10 @@ import os import re import locale +import traceback from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import ( - MissingZOAUImport, + ZOAUImportError, ) from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.better_arg_parser import ( BetterArgParser, @@ -39,7 +40,7 @@ try: from zoautil_py import datasets except Exception: - datasets = MissingZOAUImport() + datasets = ZOAUImportError(traceback.format_exc()) if PY3: @@ -188,24 +189,23 @@ def temp_data_set(self, reclen, space_u): str -- Name of the allocated data set Raises: - OSError: When any exception is raised during the data set allocation + ZOAUException: When any exception is raised during the data set allocation. + DatasetVerificationError: When the data set creation could not be verified. """ size = str(space_u * 2) + "K" if self.tmphlq: hlq = self.tmphlq else: - hlq = datasets.hlq() - temp_ps = datasets.tmp_name(hlq) - response = datasets._create( + hlq = datasets.get_hlq() + temp_ps = datasets.tmp_name(high_level_qualifier=hlq) + temporary_data_set = datasets.create( name=temp_ps, - type="SEQ", + dataset_type="SEQ", primary_space=size, record_format="VB", record_length=reclen, ) - if response.rc: - raise OSError("Failed when allocating temporary sequential data set!") - return temp_ps + return temporary_data_set.name def get_codeset(self): """Get the list of supported encodings from the USS command 'iconv -l' @@ -406,7 +406,7 @@ def mvs_convert_encoding( rc, out, err = copy.copy_pds2uss(src, temp_src) if src_type == "VSAM": reclen, space_u = self.listdsi_data_set(src.upper()) - # RDW takes the first 4 bytes or records in the VB format, hence we need to add an extra buffer to the vsam max recl. + # RDW takes the first 4 bytes in the VB format, hence we need to add an extra buffer to the vsam max recl. reclen += 4 temp_ps = self.temp_data_set(reclen, space_u) rc, out, err = copy.copy_vsam_ps(src.upper(), temp_ps) From f81108d5366f79667070bb5fbd604f2b83db4f77 Mon Sep 17 00:00:00 2001 From: ketankelkar <ktnklkr@gmail.com> Date: Thu, 1 Feb 2024 11:17:31 -0800 Subject: [PATCH 293/495] [v1.10.0] [zos_gather_facts] ZOAU 1.3 migration - zos_gather_facts (#1196) * update module to leverage zoau python api for zinfo Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add changelog fragment Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * address pep8 issues Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update catch-all error message Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> --- .../1196-zoau-migration-zos_gather_facts.yml | 4 + plugins/modules/zos_gather_facts.py | 87 ++++++++----------- .../modules/test_zos_gather_facts_func.py | 3 +- tests/unit/test_zos_gather_facts.py | 29 ++++--- 4 files changed, 56 insertions(+), 67 deletions(-) create mode 100644 changelogs/fragments/1196-zoau-migration-zos_gather_facts.yml diff --git a/changelogs/fragments/1196-zoau-migration-zos_gather_facts.yml b/changelogs/fragments/1196-zoau-migration-zos_gather_facts.yml new file mode 100644 index 000000000..03f39b535 --- /dev/null +++ b/changelogs/fragments/1196-zoau-migration-zos_gather_facts.yml @@ -0,0 +1,4 @@ +trivial: + - zos_gather_facts - Update module internally to leverage ZOAU python API + for zinfo. + (https://github.com/ansible-collections/ibm_zos_core/pull/1196). \ No newline at end of file diff --git a/plugins/modules/zos_gather_facts.py b/plugins/modules/zos_gather_facts.py index b7aeb7ee4..2ea7b0baf 100644 --- a/plugins/modules/zos_gather_facts.py +++ b/plugins/modules/zos_gather_facts.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2022, 2023 +# Copyright (c) IBM Corporation 2022 - 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -108,30 +108,38 @@ """ from fnmatch import fnmatch -import json +import traceback from ansible.module_utils.basic import AnsibleModule from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import ( zoau_version_checker ) +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import ( + ZOAUImportError, +) + +try: + from zoautil_py import zsystem +except ImportError: + zsystem = ZOAUImportError(traceback.format_exc()) + -def zinfo_cmd_string_builder(gather_subset): - """Builds a command string for 'zinfo' based off the gather_subset list. +def zinfo_facts_list_builder(gather_subset): + """Builds a list of strings to pass into 'zinfo' based off the + gather_subset list. Arguments: gather_subset {list} -- A list of subsets to pass in. Returns: - [str] -- A string that contains a command line argument for calling - zinfo with the appropriate options. + [list[str]] -- A list of strings that contains sanitized subsets. [None] -- An invalid value was received for the subsets. """ if gather_subset is None or 'all' in gather_subset: - return "zinfo -j -a" + return ["all"] # base value - zinfo_arg_string = "zinfo -j" + subsets_list = [] - # build full string for subset in gather_subset: # remove leading/trailing spaces subset = subset.strip() @@ -141,9 +149,9 @@ def zinfo_cmd_string_builder(gather_subset): # sanitize subset against malicious (probably alphanumeric only?) if not subset.isalnum(): return None - zinfo_arg_string += " -t " + subset + subsets_list.append(subset) - return zinfo_arg_string + return subsets_list def flatten_zinfo_json(zinfo_dict): @@ -214,59 +222,36 @@ def run_module(): if module.check_mode: module.exit_json(**result) - if not zoau_version_checker.is_zoau_version_higher_than("1.2.1"): + if not zoau_version_checker.is_zoau_version_higher_than("1.3.0"): module.fail_json( - ("The zos_gather_facts module requires ZOAU >= 1.2.1. Please " + ("The zos_gather_facts module requires ZOAU >= 1.3.0. Please " "upgrade the ZOAU version on the target node.") ) gather_subset = module.params['gather_subset'] - # build out zinfo command with correct options + # build out list of strings to pass to zinfo python api. # call this whether or not gather_subsets list is empty/valid/etc - # rely on the function to report back errors. Note the function only + # rely on the helper function to report back errors. Note the function only # returns None if there's malicious or improperly formatted subsets. - # Invalid subsets are caught when the actual zinfo command is run. - cmd = zinfo_cmd_string_builder(gather_subset) - if not cmd: + # Invalid subsets are caught when the actual zinfo function is run. + facts_list = zinfo_facts_list_builder(gather_subset) + if not facts_list: module.fail_json(msg="An invalid subset was passed to Ansible.") - rc, fcinfo_out, err = module.run_command(cmd, encoding=None) - - decode_str = fcinfo_out.decode('utf-8') - - # We DO NOT return a partial list. Instead we FAIL FAST since we are - # targeting automation -- quiet but well-intended error messages may easily - # be skipped - if rc != 0: - # there are 3 known error messages in zinfo, if neither gets - # triggered then we send out this generic zinfo error message. - err_msg = ('An exception has occurred in Z Open Automation Utilities ' - '(ZOAU) utility \'zinfo\'. See \'zinfo_err_msg\' for ' - 'additional details.') - # triggered by invalid optarg eg "zinfo -q" - if 'BGYSC5201E' in err.decode('utf-8'): - err_msg = ('Invalid call to zinfo. See \'zinfo_err_msg\' for ' - 'additional details.') - # triggered when optarg does not get expected arg eg "zinfo -t" - elif 'BGYSC5202E' in err.decode('utf-8'): - err_msg = ('Invalid call to zinfo. Possibly missing a valid subset' - ' See \'zinfo_err_msg\' for additional details.') - # triggered by illegal subset eg "zinfo -t abc" - elif 'BGYSC5203E' in err.decode('utf-8'): - err_msg = ('An invalid subset was detected. See \'zinfo_err_msg\' ' - 'for additional details.') - - module.fail_json(msg=err_msg, zinfo_err_msg=err) - zinfo_dict = {} # to track parsed zinfo facts. try: - zinfo_dict = json.loads(decode_str) - except json.JSONDecodeError: - # tell user something else for this error? This error is thrown when - # Python doesn't like the json string it parsed from zinfo. - module.fail_json(msg="Unsupported JSON format for the output.") + zinfo_dict = zsystem.zinfo(json=True, facts=facts_list) + except ValueError: + err_msg = 'An invalid subset was detected.' + module.fail_json(msg=err_msg) + except Exception as e: + err_msg = ( + 'An exception has occurred. Unable to gather facts. ' + 'See stderr for more details.' + ) + module.fail_json(msg=err_msg, stderr=str(e)) # remove zinfo subsets from parsed zinfo result, flatten by one level flattened_d = flatten_zinfo_json(zinfo_dict) diff --git a/tests/functional/modules/test_zos_gather_facts_func.py b/tests/functional/modules/test_zos_gather_facts_func.py index 1903f0cbd..f2861c596 100644 --- a/tests/functional/modules/test_zos_gather_facts_func.py +++ b/tests/functional/modules/test_zos_gather_facts_func.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2022 +# Copyright (c) IBM Corporation 2022 - 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -120,7 +120,6 @@ def test_with_gather_subset_bad(ansible_zos_module, gather_subset): for result in results.contacted.values(): assert result is not None - assert re.match(r'^BGYSC5203E', result.get('zinfo_err_msg')) assert re.match(r'^An invalid subset', result.get('msg')) diff --git a/tests/unit/test_zos_gather_facts.py b/tests/unit/test_zos_gather_facts.py index 84b90c186..a7ab4a803 100644 --- a/tests/unit/test_zos_gather_facts.py +++ b/tests/unit/test_zos_gather_facts.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2022 +# Copyright (c) IBM Corporation 2022 - 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -18,7 +18,6 @@ __metaclass__ = type import pytest -from mock import call # Used my some mock modules, should match import directly below IMPORT_NAME = "ibm_zos_core.plugins.modules.zos_gather_facts" @@ -26,30 +25,32 @@ # Tests for zos_father_facts helper functions test_data = [ - (["ipl"], "zinfo -j -t ipl"), - (["ipl "], "zinfo -j -t ipl"), - ([" ipl"], "zinfo -j -t ipl"), - (["ipl", "sys"], "zinfo -j -t ipl -t sys"), - (["all"], "zinfo -j -a"), - (None, "zinfo -j -a"), - (["ipl", "all", "sys"], "zinfo -j -a"), + (["ipl"], ["ipl"]), + (["ipl "], ["ipl"]), + ([" ipl"], ["ipl"]), + (["ipl", "sys"], ["ipl", "sys"]), + (["all"], ["all"]), + (None, ["all"]), + (["ipl", "all", "sys"], ["all"]), # function does not validate legal vs illegal subsets - (["asdf"], "zinfo -j -t asdf"), - ([""], None), # attemtped injection + (["asdf"], ["asdf"]), + ([""], None), (["ipl; cat /.bashrc"], None), # attemtped injection + # for now, 'all' with some other invalid subset resolves to 'all' + (["ipl", "all", "ipl; cat /.ssh/id_rsa"], ["all"]), ] @pytest.mark.parametrize("args,expected", test_data) -def test_zos_gather_facts_zinfo_cmd_string_builder( +def test_zos_gather_facts_zinfo_facts_list_builder( zos_import_mocker, args, expected): mocker, importer = zos_import_mocker zos_gather_facts = importer(IMPORT_NAME) try: - result = zos_gather_facts.zinfo_cmd_string_builder(args) -# # add more logic here as the function evolves. + result = zos_gather_facts.zinfo_facts_list_builder(args) + # add more logic here as the function evolves. except Exception: result = None assert result == expected From 3b3176b3fbae2f366b84883d01fc7f11a32f963f Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Fri, 2 Feb 2024 12:51:17 -0600 Subject: [PATCH 294/495] [v1.10.0] [zos_backup_restore]Added choices for space type (#1200) * Added choices for space type * Added changelog --- .../fragments/1200-zos_backup_restore-sanity-issues.yml | 4 ++++ plugins/modules/zos_backup_restore.py | 2 +- tests/sanity/ignore-2.14.txt | 1 - tests/sanity/ignore-2.15.txt | 1 - tests/sanity/ignore-2.16.txt | 1 - 5 files changed, 5 insertions(+), 4 deletions(-) create mode 100644 changelogs/fragments/1200-zos_backup_restore-sanity-issues.yml diff --git a/changelogs/fragments/1200-zos_backup_restore-sanity-issues.yml b/changelogs/fragments/1200-zos_backup_restore-sanity-issues.yml new file mode 100644 index 000000000..27d40f560 --- /dev/null +++ b/changelogs/fragments/1200-zos_backup_restore-sanity-issues.yml @@ -0,0 +1,4 @@ +trivial: + - zos_backup_restore - Added space type choices to argument spec to remove + validate-modules:doc-choices-do-not-match-spec. + (https://github.com/ansible-collections/ibm_zos_core/pull/1200). diff --git a/plugins/modules/zos_backup_restore.py b/plugins/modules/zos_backup_restore.py index 9d8560306..080c7efab 100644 --- a/plugins/modules/zos_backup_restore.py +++ b/plugins/modules/zos_backup_restore.py @@ -337,7 +337,7 @@ def main(): ), ), space=dict(type="int", required=False, aliases=["size"]), - space_type=dict(type="str", required=False, aliases=["unit"]), + space_type=dict(type="str", required=False, aliases=["unit"], choices=["K", "M", "G", "CYL", "TRK"]), volume=dict(type="str", required=False), full_volume=dict(type="bool", default=False), temp_volume=dict(type="str", required=False, aliases=["dest_volume"]), diff --git a/tests/sanity/ignore-2.14.txt b/tests/sanity/ignore-2.14.txt index 8099f00e0..89cf4db51 100644 --- a/tests/sanity/ignore-2.14.txt +++ b/tests/sanity/ignore-2.14.txt @@ -1,5 +1,4 @@ plugins/modules/zos_apf.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 -plugins/modules/zos_backup_restore.py validate-modules:doc-choices-do-not-match-spec # We use our own argument parser for advanced conditional and dependent arguments. plugins/modules/zos_backup_restore.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_blockinfile.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_copy.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 diff --git a/tests/sanity/ignore-2.15.txt b/tests/sanity/ignore-2.15.txt index 8099f00e0..89cf4db51 100644 --- a/tests/sanity/ignore-2.15.txt +++ b/tests/sanity/ignore-2.15.txt @@ -1,5 +1,4 @@ plugins/modules/zos_apf.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 -plugins/modules/zos_backup_restore.py validate-modules:doc-choices-do-not-match-spec # We use our own argument parser for advanced conditional and dependent arguments. plugins/modules/zos_backup_restore.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_blockinfile.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_copy.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 diff --git a/tests/sanity/ignore-2.16.txt b/tests/sanity/ignore-2.16.txt index 8099f00e0..89cf4db51 100644 --- a/tests/sanity/ignore-2.16.txt +++ b/tests/sanity/ignore-2.16.txt @@ -1,5 +1,4 @@ plugins/modules/zos_apf.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 -plugins/modules/zos_backup_restore.py validate-modules:doc-choices-do-not-match-spec # We use our own argument parser for advanced conditional and dependent arguments. plugins/modules/zos_backup_restore.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_blockinfile.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_copy.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 From 9824b0925f2777ad641f54a17f407e924a2f1936 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Tue, 6 Feb 2024 14:09:52 -0600 Subject: [PATCH 295/495] Cherry picked removed hard coded content from staging-v1.9.0-beta.1 (#1194) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Cherry picked removed hard coded content from staging-v1.9.0-beta.1 Added preferred volumes changes [v1.10.0] [zos_copy] Enhance zos_copy performance when copying multiple PDS members (#1183) * [v1.9.0] [zos_copy] Enhancement/764/copy members (#1176) * Changed member copy into a bulk call * Modified copy to members * Cleaned code * Removed hardcoded content from zos_copy tests * Added fix for uss files * Added distinction between uss and mvs * Added alias fix * Moved the copy section to below * Modified for seq test * Added fix for copy dest lock * Added msgs for debugging * Added final changes to member copy * Added copy for when seq to pdse * Add changelog * Added a line into docs * Modified doc * Modified doc * Update changelog * Created a new changelog * Corrected typo Fix for empty volumes on test_config Added comment to remind uncomment test case in the future Add more validation Add more validation Add more validation Add to config volumes and remove upper case variable name * Added keyword into tests --------- Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> --- tests/conftest.py | 12 +- tests/functional/modules/test_zos_apf_func.py | 432 ++++++++++++------ .../modules/test_zos_archive_func.py | 270 ++++++----- .../modules/test_zos_backup_restore.py | 191 ++++---- .../modules/test_zos_blockinfile_func.py | 118 ++--- .../functional/modules/test_zos_copy_func.py | 235 +++++----- .../modules/test_zos_data_set_func.py | 260 ++++++----- .../modules/test_zos_encode_func.py | 200 +++++--- .../functional/modules/test_zos_fetch_func.py | 92 ++-- .../functional/modules/test_zos_find_func.py | 17 +- .../modules/test_zos_job_query_func.py | 6 +- .../modules/test_zos_job_submit_func.py | 56 ++- .../modules/test_zos_lineinfile_func.py | 111 ++--- .../functional/modules/test_zos_mount_func.py | 57 ++- .../modules/test_zos_mvs_raw_func.py | 273 ++++++----- .../modules/test_zos_tso_command_func.py | 15 +- .../modules/test_zos_unarchive_func.py | 223 ++++----- tests/helpers/dataset.py | 48 ++ tests/helpers/volumes.py | 121 +++++ 19 files changed, 1649 insertions(+), 1088 deletions(-) create mode 100644 tests/helpers/dataset.py create mode 100644 tests/helpers/volumes.py diff --git a/tests/conftest.py b/tests/conftest.py index 506214f29..c8513ad37 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -12,9 +12,9 @@ from __future__ import absolute_import, division, print_function __metaclass__ = type - import pytest from ibm_zos_core.tests.helpers.ztest import ZTestHelper +from ibm_zos_core.tests.helpers.volumes import get_volumes import sys from mock import MagicMock import importlib @@ -84,6 +84,14 @@ def ansible_zos_module(request, z_python_interpreter): except Exception: pass + # Call of the class by the class ls_Volume (volumes.py file) as many times needed + # one time the array is filled +@pytest.fixture(scope="session") +def volumes_on_systems(ansible_zos_module, request): + """ Call the pytest-ansible plugin to check volumes on the system and work properly a list by session.""" + path = request.config.getoption("--zinventory") + list_Volumes = get_volumes(ansible_zos_module, path) + yield list_Volumes # * We no longer edit sys.modules directly to add zoautil_py mock # * because automatic teardown is not performed, leading to mock pollution @@ -108,4 +116,4 @@ def perform_imports(imports): newimp = [importlib.import_module(x) for x in imports] return newimp - yield (mocker, perform_imports) + yield (mocker, perform_imports) \ No newline at end of file diff --git a/tests/functional/modules/test_zos_apf_func.py b/tests/functional/modules/test_zos_apf_func.py index f53ee7592..3c3d96ab2 100644 --- a/tests/functional/modules/test_zos_apf_func.py +++ b/tests/functional/modules/test_zos_apf_func.py @@ -12,6 +12,8 @@ # limitations under the License. from __future__ import absolute_import, division, print_function +from ibm_zos_core.tests.helpers.dataset import get_tmp_ds_name +from ibm_zos_core.tests.helpers.volumes import Volume_Handler from shellescape import quote from pprint import pprint import os @@ -20,37 +22,6 @@ __metaclass__ = type - -TEST_INFO = dict( - test_add_del=dict( - library="", state="present", force_dynamic=True - ), - test_add_del_with_tmp_hlq_option=dict( - library="", state="present", force_dynamic=True, tmp_hlq="", persistent=dict( - data_set_name="", backup=True - ) - ), - test_add_del_volume=dict( - library="", volume=" ", state="present", force_dynamic=True - ), - test_add_del_persist=dict( - library="", persistent=dict(data_set_name="", marker="/* {mark} BLOCK */"), state="present", force_dynamic=True - ), - test_add_del_volume_persist=dict( - library="", volume=" ", persistent=dict(data_set_name="", marker="/* {mark} BLOCK */"), state="present", force_dynamic=True - ), - test_batch_add_del=dict( - batch=[dict(library="", volume=" "), dict(library="", volume=" "), dict(library="", volume=" ")], - persistent=dict(data_set_name="", marker="/* {mark} BLOCK */"), state="present", force_dynamic=True - ), - test_operation_list=dict( - operation="list" - ), - test_operation_list_with_filter=dict( - operation="list", library="" - ) -) - add_expected = """/*BEGINAPFLIST*/ /*BEGINBLOCK*/ APFADDDSNAME({0})VOLUME({1}) @@ -74,63 +45,40 @@ del_expected = """/*BEGINAPFLIST*/ /*ENDAPFLIST*/""" - -def run_shell_cmd(hosts, cmdStr): - results = hosts.all.shell(cmd=cmdStr) - pprint(vars(results)) - for result in results.contacted.values(): - out = result.get("stdout") - return out - - -def persistds_create(hosts): - cmdStr = "mvstmp APFTEST.PRST" - prstds = run_shell_cmd(hosts, cmdStr)[:30] - cmdStr = "dtouch -tseq {0}".format(prstds) - run_shell_cmd(hosts, cmdStr) - return prstds - - -def persistds_delele(hosts, ds): - cmdStr = "drm {0}".format(ds) - run_shell_cmd(hosts, cmdStr) - - -def set_test_env(hosts, test_info): - # results = hosts.all.zos_data_set(name=ds, type="SEQ") - cmdStr = "mvstmp APFTEST" - ds = run_shell_cmd(hosts, cmdStr)[:25] - cmdStr = "dtouch -tseq {0}".format(ds) - run_shell_cmd(hosts, cmdStr) - test_info['library'] = ds - if test_info.get('volume'): - cmdStr = "dls -l " + ds + " | awk '{print $5}' " - vol = run_shell_cmd(hosts, cmdStr) - test_info['volume'] = vol - if test_info.get('persistent'): - test_info['persistent']['data_set_name'] = persistds_create(hosts) - - def clean_test_env(hosts, test_info): - # hosts.all.zos_data_set(name=test_info['library'], state='absent') cmdStr = "drm {0}".format(test_info['library']) - run_shell_cmd(hosts, cmdStr) + hosts.all.shell(cmd=cmdStr) if test_info.get('persistent'): - # hosts.all.zos_data_set(name=test_info['persistent']['data_set_name'], state='absent') - persistds_delele(hosts, test_info['persistent']['data_set_name']) + cmdStr = "drm {0}".format(test_info['persistent']['data_set_name']) + hosts.all.shell(cmd=cmdStr) def test_add_del(ansible_zos_module): hosts = ansible_zos_module - test_info = TEST_INFO['test_add_del'] - set_test_env(hosts, test_info) + test_info = dict(library="", state="present", force_dynamic=True) + ds = get_tmp_ds_name(3,2) + hosts.all.shell(cmd="dtouch -tseq {0}".format(ds)) + test_info['library'] = ds + if test_info.get('volume') is not None: + cmdStr = "dls -l " + ds + " | awk '{print $5}' " + results = hosts.all.shell(cmd=cmdStr) + for result in results.contacted.values(): + vol = result.get("stdout") + test_info['volume'] = vol + if test_info.get('persistent'): + cmdStr = "mvstmp APFTEST.PRST" + results = hosts.all.shell(cmd=cmdStr) + for result in results.contacted.values(): + prstds = result.get("stdout") + prstds = prstds[:30] + cmdStr = "dtouch -tseq {0}".format(prstds) + hosts.all.shell(cmd=cmdStr) + test_info['persistent']['data_set_name'] = prstds results = hosts.all.zos_apf(**test_info) - pprint(vars(results)) for result in results.contacted.values(): assert result.get("rc") == 0 test_info['state'] = 'absent' results = hosts.all.zos_apf(**test_info) - pprint(vars(results)) for result in results.contacted.values(): assert result.get("rc") == 0 clean_test_env(hosts, test_info) @@ -139,17 +87,32 @@ def test_add_del(ansible_zos_module): def test_add_del_with_tmp_hlq_option(ansible_zos_module): hosts = ansible_zos_module tmphlq = "TMPHLQ" - test_info = TEST_INFO['test_add_del_with_tmp_hlq_option'] + test_info = dict(library="", state="present", force_dynamic=True, tmp_hlq="", persistent=dict(data_set_name="", backup=True)) test_info['tmp_hlq'] = tmphlq - set_test_env(hosts, test_info) + ds = get_tmp_ds_name(3,2) + hosts.all.shell(cmd="dtouch -tseq {0}".format(ds)) + test_info['library'] = ds + if test_info.get('volume') is not None: + cmdStr = "dls -l " + ds + " | awk '{print $5}' " + results = hosts.all.shell(cmd=cmdStr) + for result in results.contacted.values(): + vol = result.get("stdout") + test_info['volume'] = vol + if test_info.get('persistent'): + cmdStr = "mvstmp APFTEST.PRST" + results = hosts.all.shell(cmd=cmdStr) + for result in results.contacted.values(): + prstds = result.get("stdout") + prstds = prstds[:30] + cmdStr = "dtouch -tseq {0}".format(prstds) + hosts.all.shell(cmd=cmdStr) + test_info['persistent']['data_set_name'] = prstds results = hosts.all.zos_apf(**test_info) - pprint(vars(results)) for result in results.contacted.values(): assert result.get("rc") == 0 assert result.get("backup_name")[:6] == tmphlq test_info['state'] = 'absent' results = hosts.all.zos_apf(**test_info) - pprint(vars(results)) for result in results.contacted.values(): assert result.get("rc") == 0 clean_test_env(hosts, test_info) @@ -157,15 +120,30 @@ def test_add_del_with_tmp_hlq_option(ansible_zos_module): def test_add_del_volume(ansible_zos_module): hosts = ansible_zos_module - test_info = TEST_INFO['test_add_del_volume'] - set_test_env(hosts, test_info) + test_info = dict(library="", volume="", state="present", force_dynamic=True) + ds = get_tmp_ds_name(1,1) + hosts.all.shell(cmd="dtouch -tseq {0}".format(ds)) + test_info['library'] = ds + if test_info.get('volume') is not None: + cmdStr = "dls -l " + ds + " | awk '{print $5}' " + results = hosts.all.shell(cmd=cmdStr) + for result in results.contacted.values(): + vol = result.get("stdout") + test_info['volume'] = vol + if test_info.get('persistent'): + cmdStr = "mvstmp APFTEST.PRST" + results = hosts.all.shell(cmd=cmdStr) + for result in results.contacted.values(): + prstds = result.get("stdout") + prstds = prstds[:30] + cmdStr = "dtouch -tseq {0}".format(prstds) + hosts.all.shell(cmd=cmdStr) + test_info['persistent']['data_set_name'] = prstds results = hosts.all.zos_apf(**test_info) - pprint(vars(results)) for result in results.contacted.values(): assert result.get("rc") == 0 test_info['state'] = 'absent' results = hosts.all.zos_apf(**test_info) - pprint(vars(results)) for result in results.contacted.values(): assert result.get("rc") == 0 clean_test_env(hosts, test_info) @@ -200,65 +178,111 @@ def test_add_del_persist(ansible_zos_module): def test_add_del_volume_persist(ansible_zos_module): hosts = ansible_zos_module - test_info = TEST_INFO['test_add_del_volume_persist'] - set_test_env(hosts, test_info) + test_info = dict(library="", volume="", persistent=dict(data_set_name="", marker="/* {mark} BLOCK */"), state="present", force_dynamic=True) + ds = get_tmp_ds_name(1,1) + hosts.all.shell(cmd="dtouch -tseq {0}".format(ds)) + test_info['library'] = ds + if test_info.get('volume') is not None: + cmdStr = "dls -l " + ds + " | awk '{print $5}' " + results = hosts.all.shell(cmd=cmdStr) + for result in results.contacted.values(): + vol = result.get("stdout") + test_info['volume'] = vol + if test_info.get('persistent'): + cmdStr = "mvstmp APFTEST.PRST" + results = hosts.all.shell(cmd=cmdStr) + for result in results.contacted.values(): + prstds = result.get("stdout") + prstds = prstds[:30] + cmdStr = "dtouch -tseq {0}".format(prstds) + hosts.all.shell(cmd=cmdStr) + test_info['persistent']['data_set_name'] = prstds results = hosts.all.zos_apf(**test_info) - pprint(vars(results)) for result in results.contacted.values(): assert result.get("rc") == 0 add_exptd = add_expected.format(test_info['library'], test_info['volume']) add_exptd = add_exptd.replace(" ", "") cmdStr = "cat \"//'{0}'\" ".format(test_info['persistent']['data_set_name']) - actual = run_shell_cmd(hosts, cmdStr).replace(" ", "") + results = hosts.all.shell(cmd=cmdStr) + for result in results.contacted.values(): + actual = result.get("stdout") + actual = actual.replace(" ", "") assert actual == add_exptd test_info['state'] = 'absent' results = hosts.all.zos_apf(**test_info) - pprint(vars(results)) for result in results.contacted.values(): assert result.get("rc") == 0 del_exptd = del_expected.replace(" ", "") cmdStr = "cat \"//'{0}'\" ".format(test_info['persistent']['data_set_name']) - actual = run_shell_cmd(hosts, cmdStr).replace(" ", "") + results = hosts.all.shell(cmd=cmdStr) + for result in results.contacted.values(): + actual = result.get("stdout") + actual = actual.replace(" ", "") assert actual == del_exptd clean_test_env(hosts, test_info) - -def test_batch_add_del(ansible_zos_module): - hosts = ansible_zos_module - test_info = TEST_INFO['test_batch_add_del'] - for item in test_info['batch']: - set_test_env(hosts, item) - test_info['persistent']['data_set_name'] = persistds_create(hosts) - results = hosts.all.zos_apf(**test_info) - pprint(vars(results)) - for result in results.contacted.values(): - assert result.get("rc") == 0 - add_exptd = add_batch_expected.format(test_info['batch'][0]['library'], test_info['batch'][0]['volume'], - test_info['batch'][1]['library'], test_info['batch'][1]['volume'], - test_info['batch'][2]['library'], test_info['batch'][2]['volume']) - add_exptd = add_exptd.replace(" ", "") - cmdStr = "cat \"//'{0}'\" ".format(test_info['persistent']['data_set_name']) - actual = run_shell_cmd(hosts, cmdStr).replace(" ", "") - assert actual == add_exptd - test_info['state'] = 'absent' - results = hosts.all.zos_apf(**test_info) - pprint(vars(results)) - for result in results.contacted.values(): - assert result.get("rc") == 0 - del_exptd = del_expected.replace(" ", "") - cmdStr = "cat \"//'{0}'\" ".format(test_info['persistent']['data_set_name']) - actual = run_shell_cmd(hosts, cmdStr).replace(" ", "") - assert actual == del_exptd - for item in test_info['batch']: - clean_test_env(hosts, item) - persistds_delele(hosts, test_info['persistent']['data_set_name']) +""" +keyword: ENABLE-FOR-1-3 +Test commented because there is a failure in ZOAU 1.2.x, that should be fixed in 1.3.x, so +whoever works in issue https://github.com/ansible-collections/ibm_zos_core/issues/726 +should uncomment this test as part of the validation process. +""" +#def test_batch_add_del(ansible_zos_module): +# hosts = ansible_zos_module +# test_info = dict( +# batch=[dict(library="", volume=" "), dict(library="", volume=" "), dict(library="", volume=" ")], +# persistent=dict(data_set_name="", marker="/* {mark} BLOCK */"), state="present", force_dynamic=True +# ) +# for item in test_info['batch']: +# ds = get_tmp_ds_name(1,1) +# hosts.all.shell(cmd="dtouch {0}".format(ds)) +# item['library'] = ds +# cmdStr = "dls -l " + ds + " | awk '{print $5}' " +# results = hosts.all.shell(cmd=cmdStr) +# for result in results.contacted.values(): +# vol = result.get("stdout") +# item['volume'] = vol +# prstds = get_tmp_ds_name(5,5) +# cmdStr = "dtouch {0}".format(prstds) +# hosts.all.shell(cmd=cmdStr) +# test_info['persistent']['data_set_name'] = prstds +# hosts.all.shell(cmd="echo \"{0}\" > {1}".format("Hello World, Here's Jhonny", prstds)) +# results = hosts.all.zos_apf(**test_info) +# pprint(vars(results)) +# for result in results.contacted.values(): +# assert result.get("rc") == 0 +# add_exptd = add_batch_expected.format(test_info['batch'][0]['library'], test_info['batch'][0]['volume'], +# test_info['batch'][1]['library'], test_info['batch'][1]['volume'], +# test_info['batch'][2]['library'], test_info['batch'][2]['volume']) +# add_exptd = add_exptd.replace(" ", "") +# cmdStr = "cat \"//'{0}'\" ".format(test_info['persistent']['data_set_name']) +# results = hosts.all.shell(cmd=cmdStr) +# for result in results.contacted.values(): +# actual = result.get("stdout") +# actual = actual.replace(" ", "") +# assert actual == add_exptd +# test_info['state'] = 'absent' +# results = hosts.all.zos_apf(**test_info) +# pprint(vars(results)) +# for result in results.contacted.values(): +# assert result.get("rc") == 0 +# del_exptd = del_expected.replace(" ", "") +# cmdStr = "cat \"//'{0}'\" ".format(test_info['persistent']['data_set_name']) +# results = hosts.all.shell(cmd=cmdStr) +# for result in results.contacted.values(): +# actual = result.get("stdout") +# actual = actual.replace(" ", "") +# assert actual == del_exptd +# for item in test_info['batch']: +# clean_test_env(hosts, item) +# cmdStr = "drm {0}".format(test_info['persistent']['data_set_name']) +# hosts.all.shell(cmd=cmdStr) def test_operation_list(ansible_zos_module): hosts = ansible_zos_module - test_info = TEST_INFO['test_operation_list'] + test_info = dict(operation="list") results = hosts.all.zos_apf(**test_info) - pprint(vars(results)) for result in results.contacted.values(): listJson = result.get("stdout") import json @@ -269,14 +293,30 @@ def test_operation_list(ansible_zos_module): def test_operation_list_with_filter(ansible_zos_module): hosts = ansible_zos_module - test_info = TEST_INFO['test_add_del'] + test_info = dict(library="", state="present", force_dynamic=True) test_info['state'] = 'present' - set_test_env(hosts, test_info) + ds = get_tmp_ds_name(3,2) + hosts.all.shell(cmd="dtouch -tseq {0}".format(ds)) + test_info['library'] = ds + if test_info.get('volume') is not None: + cmdStr = "dls -l " + ds + " | awk '{print $5}' " + results = hosts.all.shell(cmd=cmdStr) + for result in results.contacted.values(): + vol = result.get("stdout") + test_info['volume'] = vol + if test_info.get('persistent'): + cmdStr = "mvstmp APFTEST.PRST" + results = hosts.all.shell(cmd=cmdStr) + for result in results.contacted.values(): + prstds = result.get("stdout") + prstds = prstds[:30] + cmdStr = "dtouch -tseq {0}".format(prstds) + hosts.all.shell(cmd=cmdStr) + test_info['persistent']['data_set_name'] = prstds hosts.all.zos_apf(**test_info) - ti = TEST_INFO['test_operation_list_with_filter'] + ti = dict(operation="list", library="") ti['library'] = "APFTEST.*" results = hosts.all.zos_apf(**ti) - pprint(vars(results)) for result in results.contacted.values(): listFiltered = result.get("stdout") assert test_info['library'] in listFiltered @@ -291,15 +331,30 @@ def test_operation_list_with_filter(ansible_zos_module): def test_add_already_present(ansible_zos_module): hosts = ansible_zos_module - test_info = TEST_INFO['test_add_del'] + test_info = dict(library="", state="present", force_dynamic=True) test_info['state'] = 'present' - set_test_env(hosts, test_info) + ds = get_tmp_ds_name(3,2) + hosts.all.shell(cmd="dtouch -tseq {0}".format(ds)) + test_info['library'] = ds + if test_info.get('volume') is not None: + cmdStr = "dls -l " + ds + " | awk '{print $5}' " + results = hosts.all.shell(cmd=cmdStr) + for result in results.contacted.values(): + vol = result.get("stdout") + test_info['volume'] = vol + if test_info.get('persistent'): + cmdStr = "mvstmp APFTEST.PRST" + results = hosts.all.shell(cmd=cmdStr) + for result in results.contacted.values(): + prstds = result.get("stdout") + prstds = prstds[:30] + cmdStr = "dtouch -tseq {0}".format(prstds) + hosts.all.shell(cmd=cmdStr) + test_info['persistent']['data_set_name'] = prstds results = hosts.all.zos_apf(**test_info) - pprint(vars(results)) for result in results.contacted.values(): assert result.get("rc") == 0 results = hosts.all.zos_apf(**test_info) - pprint(vars(results)) for result in results.contacted.values(): # Return code 16 if ZOAU < 1.2.0 and RC is 8 if ZOAU >= 1.2.0 assert result.get("rc") == 16 or result.get("rc") == 8 @@ -310,11 +365,27 @@ def test_add_already_present(ansible_zos_module): def test_del_not_present(ansible_zos_module): hosts = ansible_zos_module - test_info = TEST_INFO['test_add_del'] - set_test_env(hosts, test_info) + test_info = dict(library="", state="present", force_dynamic=True) + ds = get_tmp_ds_name(1,1) + hosts.all.shell(cmd="dtouch -tseq {0}".format(ds)) + test_info['library'] = ds + if test_info.get('volume') is not None: + cmdStr = "dls -l " + ds + " | awk '{print $5}' " + results = hosts.all.shell(cmd=cmdStr) + for result in results.contacted.values(): + vol = result.get("stdout") + test_info['volume'] = vol + if test_info.get('persistent'): + cmdStr = "mvstmp APFTEST.PRST" + results = hosts.all.shell(cmd=cmdStr) + for result in results.contacted.values(): + prstds = result.get("stdout") + prstds = prstds[:30] + cmdStr = "dtouch -tseq {0}".format(prstds) + hosts.all.shell(cmd=cmdStr) + test_info['persistent']['data_set_name'] = prstds test_info['state'] = 'absent' results = hosts.all.zos_apf(**test_info) - pprint(vars(results)) for result in results.contacted.values(): # Return code 16 if ZOAU < 1.2.0 and RC is 8 if ZOAU >= 1.2.0 assert result.get("rc") == 16 or result.get("rc") == 8 @@ -323,10 +394,9 @@ def test_del_not_present(ansible_zos_module): def test_add_not_found(ansible_zos_module): hosts = ansible_zos_module - test_info = TEST_INFO['test_add_del'] + test_info = dict(library="", state="present", force_dynamic=True) test_info['library'] = 'APFTEST.FOO.BAR' results = hosts.all.zos_apf(**test_info) - pprint(vars(results)) for result in results.contacted.values(): # Return code 16 if ZOAU < 1.2.0 and RC is 8 if ZOAU >= 1.2.0 assert result.get("rc") == 16 or result.get("rc") == 8 @@ -334,12 +404,28 @@ def test_add_not_found(ansible_zos_module): def test_add_with_wrong_volume(ansible_zos_module): hosts = ansible_zos_module - test_info = TEST_INFO['test_add_del_volume'] + test_info = dict(library="", volume="", state="present", force_dynamic=True) test_info['state'] = 'present' - set_test_env(hosts, test_info) + ds = get_tmp_ds_name(3,2) + hosts.all.shell(cmd="dtouch -tseq {0}".format(ds)) + test_info['library'] = ds + if test_info.get('volume') is not None: + cmdStr = "dls -l " + ds + " | awk '{print $5}' " + results = hosts.all.shell(cmd=cmdStr) + for result in results.contacted.values(): + vol = result.get("stdout") + test_info['volume'] = vol + if test_info.get('persistent'): + cmdStr = "mvstmp APFTEST.PRST" + results = hosts.all.shell(cmd=cmdStr) + for result in results.contacted.values(): + prstds = result.get("stdout") + prstds = prstds[:30] + cmdStr = "dtouch -tseq {0}".format(prstds) + hosts.all.shell(cmd=cmdStr) + test_info['persistent']['data_set_name'] = prstds test_info['volume'] = 'T12345' results = hosts.all.zos_apf(**test_info) - pprint(vars(results)) for result in results.contacted.values(): # Return code 16 if ZOAU < 1.2.0 and RC is 8 if ZOAU >= 1.2.0 assert result.get("rc") == 16 or result.get("rc") == 8 @@ -348,13 +434,29 @@ def test_add_with_wrong_volume(ansible_zos_module): def test_persist_invalid_ds_format(ansible_zos_module): hosts = ansible_zos_module - test_info = TEST_INFO['test_add_del_persist'] + test_info = dict(library="", persistent=dict(data_set_name="", marker="/* {mark} BLOCK */"), state="present", force_dynamic=True) test_info['state'] = 'present' - set_test_env(hosts, test_info) + ds = get_tmp_ds_name(3,2) + hosts.all.shell(cmd="dtouch -tseq {0}".format(ds)) + test_info['library'] = ds + if test_info.get('volume') is not None: + cmdStr = "dls -l " + ds + " | awk '{print $5}' " + results = hosts.all.shell(cmd=cmdStr) + for result in results.contacted.values(): + vol = result.get("stdout") + test_info['volume'] = vol + if test_info.get('persistent'): + cmdStr = "mvstmp APFTEST.PRST" + results = hosts.all.shell(cmd=cmdStr) + for result in results.contacted.values(): + prstds = result.get("stdout") + prstds = prstds[:30] + cmdStr = "dtouch -tseq {0}".format(prstds) + hosts.all.shell(cmd=cmdStr) + test_info['persistent']['data_set_name'] = prstds cmdStr = "decho \"some text to test persistent data_set format validattion.\" \"{0}\"".format(test_info['persistent']['data_set_name']) - run_shell_cmd(hosts, cmdStr) + hosts.all.shell(cmd=cmdStr) results = hosts.all.zos_apf(**test_info) - pprint(vars(results)) for result in results.contacted.values(): assert result.get("rc") == 8 clean_test_env(hosts, test_info) @@ -362,12 +464,28 @@ def test_persist_invalid_ds_format(ansible_zos_module): def test_persist_invalid_marker(ansible_zos_module): hosts = ansible_zos_module - test_info = TEST_INFO['test_add_del_persist'] + test_info = dict(library="", persistent=dict(data_set_name="", marker="/* {mark} BLOCK */"), state="present", force_dynamic=True) test_info['state'] = 'present' - set_test_env(hosts, test_info) + ds = get_tmp_ds_name(3,2) + hosts.all.shell(cmd="dtouch -tseq {0}".format(ds)) + test_info['library'] = ds + if test_info.get('volume') is not None: + cmdStr = "dls -l " + ds + " | awk '{print $5}' " + results = hosts.all.shell(cmd=cmdStr) + for result in results.contacted.values(): + vol = result.get("stdout") + test_info['volume'] = vol + if test_info.get('persistent'): + cmdStr = "mvstmp APFTEST.PRST" + results = hosts.all.shell(cmd=cmdStr) + for result in results.contacted.values(): + prstds = result.get("stdout") + prstds = prstds[:30] + cmdStr = "dtouch -tseq {0}".format(prstds) + hosts.all.shell(cmd=cmdStr) + test_info['persistent']['data_set_name'] = prstds test_info['persistent']['marker'] = "# Invalid marker format" results = hosts.all.zos_apf(**test_info) - pprint(vars(results)) for result in results.contacted.values(): assert result.get("rc") == 4 clean_test_env(hosts, test_info) @@ -375,12 +493,28 @@ def test_persist_invalid_marker(ansible_zos_module): def test_persist_invalid_marker_len(ansible_zos_module): hosts = ansible_zos_module - test_info = TEST_INFO['test_add_del_persist'] + test_info = dict(library="", persistent=dict(data_set_name="", marker="/* {mark} BLOCK */"), state="present", force_dynamic=True) test_info['state'] = 'present' - set_test_env(hosts, test_info) + ds = get_tmp_ds_name(3,2) + hosts.all.shell(cmd="dtouch -tseq {0}".format(ds)) + test_info['library'] = ds + if test_info.get('volume') is not None: + cmdStr = "dls -l " + ds + " | awk '{print $5}' " + results = hosts.all.shell(cmd=cmdStr) + for result in results.contacted.values(): + vol = result.get("stdout") + test_info['volume'] = vol + if test_info.get('persistent'): + cmdStr = "mvstmp APFTEST.PRST" + results = hosts.all.shell(cmd=cmdStr) + for result in results.contacted.values(): + prstds = result.get("stdout") + prstds = prstds[:30] + cmdStr = "dtouch -tseq {0}".format(prstds) + hosts.all.shell(cmd=cmdStr) + test_info['persistent']['data_set_name'] = prstds test_info['persistent']['marker'] = "/* {mark} This is a awfully lo%70sng marker */" % ("o") results = hosts.all.zos_apf(**test_info) - pprint(vars(results)) for result in results.contacted.values(): assert result.get("msg") == 'marker length may not exceed 72 characters' - clean_test_env(hosts, test_info) + clean_test_env(hosts, test_info) \ No newline at end of file diff --git a/tests/functional/modules/test_zos_archive_func.py b/tests/functional/modules/test_zos_archive_func.py index 32bedb4fe..a9bfd658c 100644 --- a/tests/functional/modules/test_zos_archive_func.py +++ b/tests/functional/modules/test_zos_archive_func.py @@ -14,6 +14,7 @@ from __future__ import absolute_import, division, print_function +from ibm_zos_core.tests.helpers.dataset import get_tmp_ds_name import time import pytest @@ -22,13 +23,9 @@ SHELL_EXECUTABLE = "/bin/sh" USS_TEMP_DIR = "/tmp/archive" USS_TEST_FILES = { f"{USS_TEMP_DIR}/foo.txt" : "foo sample content", - f"{USS_TEMP_DIR}/bar.txt": "bar sample content", + f"{USS_TEMP_DIR}/bar.txt": "bar sample content", f"{USS_TEMP_DIR}/empty.txt":""} USS_EXCLUSION_FILE = f"{USS_TEMP_DIR}/foo.txt" -TEST_PS = "USER.PRIVATE.TESTDS" -TEST_PDS = "USER.PRIVATE.TESTPDS" -HLQ = "USER" -MVS_DEST_ARCHIVE = "USER.PRIVATE.ARCHIVE" USS_DEST_ARCHIVE = "testarchive.dzp" @@ -331,6 +328,7 @@ def test_uss_archive_remove_targets(ansible_zos_module, format): - test_mvs_archive_multiple_data_sets_with_missing """ +@pytest.mark.ds @pytest.mark.parametrize( "format", [ "terse", @@ -338,9 +336,9 @@ def test_uss_archive_remove_targets(ansible_zos_module, format): ]) @pytest.mark.parametrize( "data_set", [ - dict(name=TEST_PS, dstype="SEQ", members=[""]), - dict(name=TEST_PDS, dstype="PDS", members=["MEM1", "MEM2", "MEM3"]), - dict(name=TEST_PDS, dstype="PDSE", members=["MEM1", "MEM2", "MEM3"]), + dict(dstype="SEQ", members=[""]), + dict(dstype="PDS", members=["MEM1", "MEM2", "MEM3"]), + dict(dstype="PDSE", members=["MEM1", "MEM2", "MEM3"]), ] ) @pytest.mark.parametrize( @@ -352,12 +350,15 @@ def test_uss_archive_remove_targets(ansible_zos_module, format): def test_mvs_archive_single_dataset(ansible_zos_module, format, data_set, record_length, record_format): try: hosts = ansible_zos_module + src_data_set = get_tmp_ds_name() + archive_data_set = get_tmp_ds_name() + HLQ = "ANSIBLE" # Clean env - hosts.all.zos_data_set(name=data_set.get("name"), state="absent") - hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") + hosts.all.zos_data_set(name=src_data_set, state="absent") + hosts.all.zos_data_set(name=archive_data_set, state="absent") # Create source data set hosts.all.zos_data_set( - name=data_set.get("name"), + name=src_data_set, type=data_set.get("dstype"), state="present", record_length=record_length, @@ -368,7 +369,7 @@ def test_mvs_archive_single_dataset(ansible_zos_module, format, data_set, record if data_set.get("dstype") in ["PDS", "PDSE"]: for member in data_set.get("members"): hosts.all.zos_data_set( - name=f"{data_set.get('name')}({member})", + name=f"{src_data_set}({member})", type="member", state="present" ) @@ -380,33 +381,33 @@ def test_mvs_archive_single_dataset(ansible_zos_module, format, data_set, record test_line = "a" * record_length for member in data_set.get("members"): if member == "": - ds_to_write = f"{data_set.get('name')}" + ds_to_write = f"{src_data_set}" else: - ds_to_write = f"{data_set.get('name')}({member})" + ds_to_write = f"{src_data_set}({member})" hosts.all.shell(cmd=f"decho '{test_line}' \"{ds_to_write}\"") format_dict = dict(name=format) if format == "terse": format_dict["format_options"] = dict(terse_pack="SPACK") archive_result = hosts.all.zos_archive( - src=data_set.get("name"), - dest=MVS_DEST_ARCHIVE, + src=src_data_set, + dest=archive_data_set, format=format_dict, ) # assert response is positive for result in archive_result.contacted.values(): assert result.get("changed") is True - assert result.get("dest") == MVS_DEST_ARCHIVE - assert data_set.get("name") in result.get("archived") + assert result.get("dest") == archive_data_set + assert src_data_set in result.get("archived") cmd_result = hosts.all.shell(cmd = "dls {0}.*".format(HLQ)) for c_result in cmd_result.contacted.values(): - assert MVS_DEST_ARCHIVE in c_result.get("stdout") + assert archive_data_set in c_result.get("stdout") finally: - hosts.all.zos_data_set(name=data_set.get("name"), state="absent") - hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") - + hosts.all.zos_data_set(name=src_data_set, state="absent") + hosts.all.zos_data_set(name=archive_data_set, state="absent") +@pytest.mark.ds @pytest.mark.parametrize( "format", [ "terse", @@ -414,9 +415,9 @@ def test_mvs_archive_single_dataset(ansible_zos_module, format, data_set, record ]) @pytest.mark.parametrize( "data_set", [ - dict(name=TEST_PS, dstype="SEQ", members=[""]), - dict(name=TEST_PDS, dstype="PDS", members=["MEM1", "MEM2", "MEM3"]), - dict(name=TEST_PDS, dstype="PDSE", members=["MEM1", "MEM2", "MEM3"]), + dict(dstype="SEQ", members=[""]), + dict(dstype="PDS", members=["MEM1", "MEM2", "MEM3"]), + dict(dstype="PDSE", members=["MEM1", "MEM2", "MEM3"]), ] ) @pytest.mark.parametrize( @@ -428,12 +429,15 @@ def test_mvs_archive_single_dataset(ansible_zos_module, format, data_set, record def test_mvs_archive_single_dataset_use_adrdssu(ansible_zos_module, format, data_set, record_length, record_format): try: hosts = ansible_zos_module + archive_data_set = get_tmp_ds_name() + src_data_set = get_tmp_ds_name() + HLQ = "ANSIBLE" # Clean env - hosts.all.zos_data_set(name=data_set.get("name"), state="absent") - hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") + hosts.all.zos_data_set(name=src_data_set, state="absent") + hosts.all.zos_data_set(name=archive_data_set, state="absent") # Create source data set hosts.all.zos_data_set( - name=data_set.get("name"), + name=src_data_set, type=data_set.get("dstype"), state="present", record_length=record_length, @@ -444,7 +448,7 @@ def test_mvs_archive_single_dataset_use_adrdssu(ansible_zos_module, format, data if data_set.get("dstype") in ["PDS", "PDSE"]: for member in data_set.get("members"): hosts.all.zos_data_set( - name=f"{data_set.get('name')}({member})", + name=f"{src_data_set}({member})", type="member", state="present" ) @@ -456,9 +460,9 @@ def test_mvs_archive_single_dataset_use_adrdssu(ansible_zos_module, format, data test_line = "a" * record_length for member in data_set.get("members"): if member == "": - ds_to_write = f"{data_set.get('name')}" + ds_to_write = f"{src_data_set}" else: - ds_to_write = f"{data_set.get('name')}({member})" + ds_to_write = f"{src_data_set}({member})" hosts.all.shell(cmd=f"decho '{test_line}' \"{ds_to_write}\"") format_dict = dict(name=format) @@ -466,23 +470,24 @@ def test_mvs_archive_single_dataset_use_adrdssu(ansible_zos_module, format, data if format == "terse": format_dict["format_options"].update(terse_pack="SPACK") archive_result = hosts.all.zos_archive( - src=data_set.get("name"), - dest=MVS_DEST_ARCHIVE, + src=src_data_set, + dest=archive_data_set, format=format_dict, ) # assert response is positive for result in archive_result.contacted.values(): assert result.get("changed") is True - assert result.get("dest") == MVS_DEST_ARCHIVE - assert data_set.get("name") in result.get("archived") + assert result.get("dest") == archive_data_set + assert src_data_set in result.get("archived") cmd_result = hosts.all.shell(cmd = "dls {0}.*".format(HLQ)) for c_result in cmd_result.contacted.values(): - assert MVS_DEST_ARCHIVE in c_result.get("stdout") + assert archive_data_set in c_result.get("stdout") finally: - hosts.all.zos_data_set(name=data_set.get("name"), state="absent") - hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") + hosts.all.zos_data_set(name=src_data_set, state="absent") + hosts.all.zos_data_set(name=archive_data_set, state="absent") +@pytest.mark.ds @pytest.mark.parametrize( "format", [ "terse", @@ -490,20 +495,23 @@ def test_mvs_archive_single_dataset_use_adrdssu(ansible_zos_module, format, data ]) @pytest.mark.parametrize( "data_set", [ - dict(name=TEST_PS, dstype="SEQ", members=[""]), - dict(name=TEST_PDS, dstype="PDS", members=["MEM1", "MEM2", "MEM3"]), - dict(name=TEST_PDS, dstype="PDSE", members=["MEM1", "MEM2", "MEM3"]), + dict(dstype="SEQ", members=[""]), + dict(dstype="PDS", members=["MEM1", "MEM2", "MEM3"]), + dict(dstype="PDSE", members=["MEM1", "MEM2", "MEM3"]), ] ) def test_mvs_archive_single_data_set_remove_target(ansible_zos_module, format, data_set): try: hosts = ansible_zos_module + archive_data_set = get_tmp_ds_name() + src_data_set = get_tmp_ds_name() + HLQ = "ANSIBLE" # Clean env - hosts.all.zos_data_set(name=data_set.get("name"), state="absent") - hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") + hosts.all.zos_data_set(name=src_data_set, state="absent") + hosts.all.zos_data_set(name=archive_data_set, state="absent") # Create source data set hosts.all.zos_data_set( - name=data_set.get("name"), + name=src_data_set, type=data_set.get("dstype"), state="present", record_format="FB", @@ -513,7 +521,7 @@ def test_mvs_archive_single_data_set_remove_target(ansible_zos_module, format, d if data_set.get("dstype") in ["PDS", "PDSE"]: for member in data_set.get("members"): hosts.all.zos_data_set( - name=f"{data_set.get('name')}({member})", + name=f"{src_data_set}({member})", type="member", state="present" ) @@ -521,34 +529,36 @@ def test_mvs_archive_single_data_set_remove_target(ansible_zos_module, format, d test_line = "this is a test line" for member in data_set.get("members"): if member == "": - ds_to_write = f"{data_set.get('name')}" + ds_to_write = f"{src_data_set}" else: - ds_to_write = f"{data_set.get('name')}({member})" + ds_to_write = f"{src_data_set}({member})" hosts.all.shell(cmd=f"decho '{test_line}' \"{ds_to_write}\"") format_dict = dict(name=format) if format == "terse": format_dict["format_options"] = dict(terse_pack="SPACK") archive_result = hosts.all.zos_archive( - src=data_set.get("name"), - dest=MVS_DEST_ARCHIVE, + src=src_data_set, + dest=archive_data_set, format=format_dict, remove=True, ) # assert response is positive for result in archive_result.contacted.values(): + print(result) assert result.get("changed") is True - assert result.get("dest") == MVS_DEST_ARCHIVE - assert data_set.get("name") in result.get("archived") + assert result.get("dest") == archive_data_set + assert src_data_set in result.get("archived") cmd_result = hosts.all.shell(cmd = "dls {0}.*".format(HLQ)) for c_result in cmd_result.contacted.values(): - assert MVS_DEST_ARCHIVE in c_result.get("stdout") - assert data_set.get("name") != c_result.get("stdout") + assert archive_data_set in c_result.get("stdout") + assert src_data_set != c_result.get("stdout") finally: - hosts.all.zos_data_set(name=data_set.get("name"), state="absent") - hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") + hosts.all.zos_data_set(name=src_data_set, state="absent") + hosts.all.zos_data_set(name=archive_data_set, state="absent") +@pytest.mark.ds @pytest.mark.parametrize( "format", [ "terse", @@ -556,17 +566,19 @@ def test_mvs_archive_single_data_set_remove_target(ansible_zos_module, format, d ]) @pytest.mark.parametrize( "data_set", [ - dict(name=TEST_PS, dstype="SEQ"), - dict(name=TEST_PDS, dstype="PDS"), - dict(name=TEST_PDS, dstype="PDSE"), + dict(dstype="SEQ"), + dict(dstype="PDS"), + dict(dstype="PDSE"), ] ) -def test_mvs_archive_multiple_data_sets(ansible_zos_module, format, data_set ): +def test_mvs_archive_multiple_data_sets(ansible_zos_module, format, data_set): try: hosts = ansible_zos_module - + archive_data_set = get_tmp_ds_name() + src_data_set = get_tmp_ds_name(5, 4) + HLQ = "ANSIBLE" target_ds_list = create_multiple_data_sets(ansible_zos_module=hosts, - base_name=data_set.get("name"), + base_name=src_data_set, n=3, type=data_set.get("dstype")) ds_to_write = target_ds_list @@ -591,25 +603,25 @@ def test_mvs_archive_multiple_data_sets(ansible_zos_module, format, data_set ): format_dict["format_options"].update(terse_pack="SPACK") format_dict["format_options"].update(use_adrdssu=True) archive_result = hosts.all.zos_archive( - src="{0}*".format(data_set.get("name")), - dest=MVS_DEST_ARCHIVE, + src="{0}*".format(src_data_set), + dest=archive_data_set, format=format_dict, ) # assert response is positive for result in archive_result.contacted.values(): assert result.get("changed") is True - assert result.get("dest") == MVS_DEST_ARCHIVE + assert result.get("dest") == archive_data_set for ds in target_ds_list: assert ds.get("name") in result.get("archived") cmd_result = hosts.all.shell(cmd = "dls {0}.*".format(HLQ)) for c_result in cmd_result.contacted.values(): - assert MVS_DEST_ARCHIVE in c_result.get("stdout") + assert archive_data_set in c_result.get("stdout") finally: - hosts.all.shell(cmd="drm {0}*".format(data_set.get("name"))) - hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") - + hosts.all.shell(cmd="drm {0}*".format(src_data_set)) + hosts.all.zos_data_set(name=archive_data_set, state="absent") +@pytest.mark.ds @pytest.mark.parametrize( "format", [ "terse", @@ -617,17 +629,19 @@ def test_mvs_archive_multiple_data_sets(ansible_zos_module, format, data_set ): ]) @pytest.mark.parametrize( "data_set", [ - dict(name=TEST_PS, dstype="SEQ"), - dict(name=TEST_PDS, dstype="PDS"), - dict(name=TEST_PDS, dstype="PDSE"), + dict(dstype="SEQ"), + dict(dstype="PDS"), + dict(dstype="PDSE"), ] ) -def test_mvs_archive_multiple_data_sets_with_exclusion(ansible_zos_module, format, data_set ): +def test_mvs_archive_multiple_data_sets_with_exclusion(ansible_zos_module, format, data_set): try: hosts = ansible_zos_module - + archive_data_set = get_tmp_ds_name() + src_data_set = get_tmp_ds_name(5, 4) + HLQ = "ANSIBLE" target_ds_list = create_multiple_data_sets(ansible_zos_module=hosts, - base_name=data_set.get("name"), + base_name=src_data_set, n=3, type=data_set.get("dstype")) ds_to_write = target_ds_list @@ -651,10 +665,10 @@ def test_mvs_archive_multiple_data_sets_with_exclusion(ansible_zos_module, forma if format == "terse": format_dict["format_options"].update(terse_pack="SPACK") format_dict["format_options"].update(use_adrdssu=True) - exclude = "{0}1".format(data_set.get("name")) + exclude = "{0}1".format(src_data_set) archive_result = hosts.all.zos_archive( - src="{0}*".format(data_set.get("name")), - dest=MVS_DEST_ARCHIVE, + src="{0}*".format(src_data_set), + dest=archive_data_set, format=format_dict, exclude=exclude, ) @@ -662,7 +676,7 @@ def test_mvs_archive_multiple_data_sets_with_exclusion(ansible_zos_module, forma # assert response is positive for result in archive_result.contacted.values(): assert result.get("changed") is True - assert result.get("dest") == MVS_DEST_ARCHIVE + assert result.get("dest") == archive_data_set for ds in target_ds_list: if ds.get("name") == exclude: assert exclude not in result.get("archived") @@ -670,12 +684,12 @@ def test_mvs_archive_multiple_data_sets_with_exclusion(ansible_zos_module, forma assert ds.get("name") in result.get("archived") cmd_result = hosts.all.shell(cmd = "dls {0}.*".format(HLQ)) for c_result in cmd_result.contacted.values(): - assert MVS_DEST_ARCHIVE in c_result.get("stdout") + assert archive_data_set in c_result.get("stdout") finally: - hosts.all.shell(cmd="drm {0}*".format(data_set.get("name"))) - hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") - + hosts.all.shell(cmd="drm {0}*".format(src_data_set)) + hosts.all.zos_data_set(name=archive_data_set, state="absent") +@pytest.mark.ds @pytest.mark.parametrize( "format", [ "terse", @@ -683,17 +697,19 @@ def test_mvs_archive_multiple_data_sets_with_exclusion(ansible_zos_module, forma ]) @pytest.mark.parametrize( "data_set", [ - dict(name=TEST_PS, dstype="SEQ"), - dict(name=TEST_PDS, dstype="PDS"), - dict(name=TEST_PDS, dstype="PDSE"), + dict(dstype="SEQ"), + dict(dstype="PDS"), + dict(dstype="PDSE"), ] ) -def test_mvs_archive_multiple_data_sets_and_remove(ansible_zos_module, format, data_set ): +def test_mvs_archive_multiple_data_sets_and_remove(ansible_zos_module, format, data_set): try: hosts = ansible_zos_module - + archive_data_set = get_tmp_ds_name() + src_data_set = get_tmp_ds_name(5, 4) + HLQ = "ANSIBLE" target_ds_list = create_multiple_data_sets(ansible_zos_module=hosts, - base_name=data_set.get("name"), + base_name=src_data_set, n=3, type=data_set.get("dstype")) ds_to_write = target_ds_list @@ -718,8 +734,8 @@ def test_mvs_archive_multiple_data_sets_and_remove(ansible_zos_module, format, d format_dict["format_options"].update(terse_pack="SPACK") format_dict["format_options"].update(use_adrdssu=True) archive_result = hosts.all.zos_archive( - src="{0}*".format(data_set.get("name")), - dest=MVS_DEST_ARCHIVE, + src="{0}*".format(src_data_set), + dest=archive_data_set, format=format_dict, remove=True, ) @@ -727,18 +743,18 @@ def test_mvs_archive_multiple_data_sets_and_remove(ansible_zos_module, format, d # assert response is positive for result in archive_result.contacted.values(): assert result.get("changed") is True - assert result.get("dest") == MVS_DEST_ARCHIVE + assert result.get("dest") == archive_data_set cmd_result = hosts.all.shell(cmd = "dls {0}.*".format(HLQ)) for c_result in cmd_result.contacted.values(): - assert MVS_DEST_ARCHIVE in c_result.get("stdout") + assert archive_data_set in c_result.get("stdout") for ds in target_ds_list: assert ds.get("name") in result.get("archived") assert ds.get("name") not in c_result.get("stdout") finally: - hosts.all.shell(cmd="drm {0}*".format(data_set.get("name"))) - hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") - + hosts.all.shell(cmd="drm {0}*".format(src_data_set)) + hosts.all.zos_data_set(name=archive_data_set, state="absent") +@pytest.mark.ds @pytest.mark.parametrize( "format", [ "terse", @@ -746,17 +762,19 @@ def test_mvs_archive_multiple_data_sets_and_remove(ansible_zos_module, format, d ]) @pytest.mark.parametrize( "data_set", [ - dict(name=TEST_PS, dstype="SEQ"), - dict(name=TEST_PDS, dstype="PDS"), - dict(name=TEST_PDS, dstype="PDSE"), + dict(dstype="SEQ"), + dict(dstype="PDS"), + dict(dstype="PDSE"), ] ) -def test_mvs_archive_multiple_data_sets_with_missing(ansible_zos_module, format, data_set ): +def test_mvs_archive_multiple_data_sets_with_missing(ansible_zos_module, format, data_set): try: hosts = ansible_zos_module - + archive_data_set = get_tmp_ds_name() + src_data_set = get_tmp_ds_name(5, 4) + HLQ = "ANSIBLE" target_ds_list = create_multiple_data_sets(ansible_zos_module=hosts, - base_name=data_set.get("name"), + base_name=src_data_set, n=3, type=data_set.get("dstype")) ds_to_write = target_ds_list @@ -777,7 +795,7 @@ def test_mvs_archive_multiple_data_sets_with_missing(ansible_zos_module, format, hosts.all.shell(cmd="decho '{0}' \"{1}\"".format(test_line, ds.get("name"))) # Remove ds to make sure is missing - missing_ds = data_set.get("name")+"1" + missing_ds = src_data_set+"1" hosts.all.zos_data_set(name=missing_ds, state="absent") path_list = [ds.get("name") for ds in target_ds_list] @@ -787,14 +805,14 @@ def test_mvs_archive_multiple_data_sets_with_missing(ansible_zos_module, format, format_dict["format_options"].update(use_adrdssu=True) archive_result = hosts.all.zos_archive( src=path_list, - dest=MVS_DEST_ARCHIVE, + dest=archive_data_set, format=format_dict, ) # assert response is positive for result in archive_result.contacted.values(): assert result.get("changed") is True - assert result.get("dest") == MVS_DEST_ARCHIVE + assert result.get("dest") == archive_data_set assert result.get("dest_state") == STATE_INCOMPLETE assert missing_ds in result.get("missing") for ds in target_ds_list: @@ -804,12 +822,13 @@ def test_mvs_archive_multiple_data_sets_with_missing(ansible_zos_module, format, assert ds.get("name") in result.get("archived") cmd_result = hosts.all.shell(cmd = "dls {0}.*".format(HLQ)) for c_result in cmd_result.contacted.values(): - assert MVS_DEST_ARCHIVE in c_result.get("stdout") + assert archive_data_set in c_result.get("stdout") finally: - hosts.all.shell(cmd="drm {0}*".format(data_set.get("name"))) - hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") + hosts.all.shell(cmd="drm {0}*".format(src_data_set)) + hosts.all.zos_data_set(name=archive_data_set, state="absent") +@pytest.mark.ds @pytest.mark.parametrize( "format", [ "terse", @@ -817,20 +836,23 @@ def test_mvs_archive_multiple_data_sets_with_missing(ansible_zos_module, format, ]) @pytest.mark.parametrize( "data_set", [ - dict(name=TEST_PS, dstype="SEQ", members=[""]), - dict(name=TEST_PDS, dstype="PDS", members=["MEM1", "MEM2"]), - dict(name=TEST_PDS, dstype="PDSE", members=["MEM1", "MEM2"]), + dict(dstype="SEQ", members=[""]), + dict(dstype="PDS", members=["MEM1", "MEM2"]), + dict(dstype="PDSE", members=["MEM1", "MEM2"]), ] ) -def test_mvs_archive_single_dataset_force_lock(ansible_zos_module, format, data_set,): +def test_mvs_archive_single_dataset_force_lock(ansible_zos_module, format, data_set): try: hosts = ansible_zos_module + archive_data_set = get_tmp_ds_name() + src_data_set = get_tmp_ds_name(5, 4) + HLQ = "ANSIBLE" # Clean env - hosts.all.zos_data_set(name=data_set.get("name"), state="absent") - hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") + hosts.all.zos_data_set(name=src_data_set, state="absent") + hosts.all.zos_data_set(name=archive_data_set, state="absent") # Create source data set hosts.all.zos_data_set( - name=data_set.get("name"), + name=src_data_set, type=data_set.get("dstype"), state="present", replace=True, @@ -839,7 +861,7 @@ def test_mvs_archive_single_dataset_force_lock(ansible_zos_module, format, data_ if data_set.get("dstype") in ["PDS", "PDSE"]: for member in data_set.get("members"): hosts.all.zos_data_set( - name=f"{data_set.get('name')}({member})", + name=f"{src_data_set}({member})", type="member", state="present" ) @@ -847,9 +869,9 @@ def test_mvs_archive_single_dataset_force_lock(ansible_zos_module, format, data_ test_line = "this is a test line" for member in data_set.get("members"): if member == "": - ds_to_write = f"{data_set.get('name')}" + ds_to_write = f"{src_data_set}" else: - ds_to_write = f"{data_set.get('name')}({member})" + ds_to_write = f"{src_data_set}({member})" hosts.all.shell(cmd=f"decho '{test_line}' \"{ds_to_write}\"") format_dict = dict(name=format) @@ -870,19 +892,19 @@ def test_mvs_archive_single_dataset_force_lock(ansible_zos_module, format, data_ time.sleep(5) archive_result = hosts.all.zos_archive( - src=data_set.get("name"), - dest=MVS_DEST_ARCHIVE, + src=src_data_set, + dest=archive_data_set, format=format_dict, ) # assert response is positive for result in archive_result.contacted.values(): assert result.get("changed") is True - assert result.get("dest") == MVS_DEST_ARCHIVE - assert data_set.get("name") in result.get("archived") + assert result.get("dest") == archive_data_set + assert src_data_set in result.get("archived") cmd_result = hosts.all.shell(cmd = "dls {0}.*".format(HLQ)) for c_result in cmd_result.contacted.values(): - assert MVS_DEST_ARCHIVE in c_result.get("stdout") + assert archive_data_set in c_result.get("stdout") finally: # extract pid @@ -893,5 +915,5 @@ def test_mvs_archive_single_dataset_force_lock(ansible_zos_module, format, data_ hosts.all.shell(cmd="kill 9 {0}".format(pid.strip())) # clean up c code/object/executable files, jcl hosts.all.shell(cmd='rm -r /tmp/disp_shr') - hosts.all.zos_data_set(name=data_set.get("name"), state="absent") - hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") + hosts.all.zos_data_set(name=src_data_set, state="absent") + hosts.all.zos_data_set(name=archive_data_set, state="absent") diff --git a/tests/functional/modules/test_zos_backup_restore.py b/tests/functional/modules/test_zos_backup_restore.py index 6231e0902..1b44ec124 100644 --- a/tests/functional/modules/test_zos_backup_restore.py +++ b/tests/functional/modules/test_zos_backup_restore.py @@ -15,19 +15,12 @@ __metaclass__ = type +from ibm_zos_core.tests.helpers.dataset import get_tmp_ds_name import pytest from re import search, IGNORECASE, MULTILINE import string import random -VOLUME = "222222" -VOLUME2 = "222222" -VOLUME_TO_BACKUP = VOLUME -BIG_VOLUME = "DSHRL1" -BIG_VOLUME2 = "DSHRL2" -DATA_SET_NAME = "USER.PRIVATE.TESTDS" -DATA_SET_NAME2 = "USER.PRIVATE.TESTDS2" -DATA_SET_PATTERN = "USER.PRIVATE.*" DATA_SET_CONTENTS = "HELLO world" DATA_SET_QUALIFIER = "{0}.PRIVATE.TESTDS" DATA_SET_QUALIFIER2 = "{0}.PRIVATE.TESTDS2" @@ -137,6 +130,7 @@ def assert_data_set_or_file_does_not_exist(hosts, name): def assert_data_set_exists(hosts, data_set_name): results = hosts.all.shell("dls '{0}'".format(data_set_name.upper())) for result in results.contacted.values(): + print(result) found = search( "^{0}$".format(data_set_name), result.get("stdout"), IGNORECASE | MULTILINE ) @@ -183,7 +177,7 @@ def assert_file_does_not_exist(hosts, path): # Start of tests # # ---------------------------------------------------------------------------- # - +@pytest.mark.ds @pytest.mark.parametrize( "backup_name,overwrite,recover", [ @@ -199,16 +193,17 @@ def assert_file_does_not_exist(hosts, path): ) def test_backup_of_data_set(ansible_zos_module, backup_name, overwrite, recover): hosts = ansible_zos_module + data_set_name = get_tmp_ds_name() try: if not overwrite: delete_data_set_or_file(hosts, backup_name) - delete_data_set_or_file(hosts, DATA_SET_NAME) + delete_data_set_or_file(hosts, data_set_name) create_sequential_data_set_with_contents( - hosts, DATA_SET_NAME, DATA_SET_CONTENTS + hosts, data_set_name, DATA_SET_CONTENTS ) results = hosts.all.zos_backup_restore( operation="backup", - data_sets=dict(include=DATA_SET_NAME), + data_sets=dict(include=data_set_name), backup_name=backup_name, overwrite=overwrite, recover=recover, @@ -216,7 +211,7 @@ def test_backup_of_data_set(ansible_zos_module, backup_name, overwrite, recover) assert_module_did_not_fail(results) assert_data_set_or_file_exists(hosts, backup_name) finally: - delete_data_set_or_file(hosts, DATA_SET_NAME) + delete_data_set_or_file(hosts, data_set_name) delete_data_set_or_file(hosts, backup_name) @@ -233,15 +228,16 @@ def test_backup_of_data_set_when_backup_dest_exists( ansible_zos_module, backup_name, overwrite ): hosts = ansible_zos_module + data_set_name = get_tmp_ds_name() try: create_data_set_or_file_with_contents(hosts, backup_name, DATA_SET_CONTENTS) assert_data_set_or_file_exists(hosts, backup_name) create_sequential_data_set_with_contents( - hosts, DATA_SET_NAME, DATA_SET_CONTENTS + hosts, data_set_name, DATA_SET_CONTENTS ) results = hosts.all.zos_backup_restore( operation="backup", - data_sets=dict(include=DATA_SET_NAME), + data_sets=dict(include=data_set_name), backup_name=backup_name, overwrite=overwrite, ) @@ -251,7 +247,7 @@ def test_backup_of_data_set_when_backup_dest_exists( assert_module_failed(results) assert_data_set_or_file_exists(hosts, backup_name) finally: - delete_data_set_or_file(hosts, DATA_SET_NAME) + delete_data_set_or_file(hosts, data_set_name) delete_data_set_or_file(hosts, backup_name) @@ -272,16 +268,16 @@ def test_backup_and_restore_of_data_set( ansible_zos_module, backup_name, overwrite, recover ): hosts = ansible_zos_module + data_set_name = get_tmp_ds_name() try: - delete_data_set_or_file(hosts, DATA_SET_NAME) - delete_data_set_or_file(hosts, DATA_SET_RESTORE_LOCATION) + delete_data_set_or_file(hosts, data_set_name) delete_data_set_or_file(hosts, backup_name) create_sequential_data_set_with_contents( - hosts, DATA_SET_NAME, DATA_SET_CONTENTS + hosts, data_set_name, DATA_SET_CONTENTS ) results = hosts.all.zos_backup_restore( operation="backup", - data_sets=dict(include=DATA_SET_NAME), + data_sets=dict(include=data_set_name), backup_name=backup_name, overwrite=overwrite, recover=recover, @@ -295,9 +291,8 @@ def test_backup_and_restore_of_data_set( overwrite=overwrite, ) assert_module_did_not_fail(results) - assert_data_set_exists(hosts, DATA_SET_RESTORE_LOCATION) finally: - delete_data_set_or_file(hosts, DATA_SET_NAME) + delete_data_set_or_file(hosts, data_set_name) delete_data_set_or_file(hosts, DATA_SET_RESTORE_LOCATION) delete_data_set_or_file(hosts, backup_name) @@ -321,16 +316,16 @@ def test_backup_and_restore_of_data_set_various_space_measurements( ansible_zos_module, backup_name, space, space_type ): hosts = ansible_zos_module + data_set_name = get_tmp_ds_name() try: - delete_data_set_or_file(hosts, DATA_SET_NAME) - delete_data_set_or_file(hosts, DATA_SET_RESTORE_LOCATION) + delete_data_set_or_file(hosts, data_set_name) delete_data_set_or_file(hosts, backup_name) create_sequential_data_set_with_contents( - hosts, DATA_SET_NAME, DATA_SET_CONTENTS + hosts, data_set_name, DATA_SET_CONTENTS ) args = dict( operation="backup", - data_sets=dict(include=DATA_SET_NAME), + data_sets=dict(include=data_set_name), backup_name=backup_name, overwrite=True, space=space, @@ -351,9 +346,8 @@ def test_backup_and_restore_of_data_set_various_space_measurements( args["space_type"] = space_type results = hosts.all.zos_backup_restore(**args) assert_module_did_not_fail(results) - assert_data_set_exists(hosts, DATA_SET_RESTORE_LOCATION) finally: - delete_data_set_or_file(hosts, DATA_SET_NAME) + delete_data_set_or_file(hosts, data_set_name) delete_data_set_or_file(hosts, DATA_SET_RESTORE_LOCATION) delete_data_set_or_file(hosts, backup_name) @@ -371,16 +365,16 @@ def test_backup_and_restore_of_data_set_when_restore_location_exists( ansible_zos_module, backup_name, overwrite ): hosts = ansible_zos_module + data_set_name = get_tmp_ds_name() try: - delete_data_set_or_file(hosts, DATA_SET_NAME) - delete_data_set_or_file(hosts, DATA_SET_RESTORE_LOCATION) + delete_data_set_or_file(hosts, data_set_name) delete_data_set_or_file(hosts, backup_name) create_sequential_data_set_with_contents( - hosts, DATA_SET_NAME, DATA_SET_CONTENTS + hosts, data_set_name, DATA_SET_CONTENTS ) results = hosts.all.zos_backup_restore( operation="backup", - data_sets=dict(include=DATA_SET_NAME), + data_sets=dict(include=data_set_name), backup_name=backup_name, ) assert_module_did_not_fail(results) @@ -391,7 +385,6 @@ def test_backup_and_restore_of_data_set_when_restore_location_exists( hlq=NEW_HLQ, ) assert_module_did_not_fail(results) - assert_data_set_exists(hosts, DATA_SET_RESTORE_LOCATION) results = hosts.all.zos_backup_restore( operation="restore", backup_name=backup_name, @@ -403,35 +396,67 @@ def test_backup_and_restore_of_data_set_when_restore_location_exists( else: assert_module_failed(results) finally: - delete_data_set_or_file(hosts, DATA_SET_NAME) + delete_data_set_or_file(hosts, data_set_name) delete_data_set_or_file(hosts, DATA_SET_RESTORE_LOCATION) delete_data_set_or_file(hosts, backup_name) -@pytest.mark.parametrize( - "data_set_include", - [ - [DATA_SET_NAME, DATA_SET_NAME2], - DATA_SET_PATTERN, - ], -) -def test_backup_and_restore_of_multiple_data_sets(ansible_zos_module, data_set_include): +def test_backup_and_restore_of_multiple_data_sets(ansible_zos_module): hosts = ansible_zos_module + data_set_name = get_tmp_ds_name() + data_set_name2 = get_tmp_ds_name() + data_set_include = [data_set_name, data_set_name2] try: - delete_data_set_or_file(hosts, DATA_SET_NAME) - delete_data_set_or_file(hosts, DATA_SET_NAME2) + delete_data_set_or_file(hosts, data_set_name) + delete_data_set_or_file(hosts, data_set_name2) + delete_data_set_or_file(hosts, DATA_SET_BACKUP_LOCATION) + create_sequential_data_set_with_contents( + hosts, data_set_name, DATA_SET_CONTENTS + ) + create_sequential_data_set_with_contents( + hosts, data_set_name2, DATA_SET_CONTENTS + ) + results = hosts.all.zos_backup_restore( + operation="backup", + data_sets=dict(include=data_set_include), + backup_name=DATA_SET_BACKUP_LOCATION, + ) + assert_module_did_not_fail(results) + assert_data_set_or_file_exists(hosts, DATA_SET_BACKUP_LOCATION) + results = hosts.all.zos_backup_restore( + operation="restore", + backup_name=DATA_SET_BACKUP_LOCATION, + overwrite=True, + recover=True, + hlq=NEW_HLQ, + ) + assert_module_did_not_fail(results) + finally: + delete_data_set_or_file(hosts, data_set_name) + delete_data_set_or_file(hosts, data_set_name2) delete_data_set_or_file(hosts, DATA_SET_RESTORE_LOCATION) delete_data_set_or_file(hosts, DATA_SET_RESTORE_LOCATION2) delete_data_set_or_file(hosts, DATA_SET_BACKUP_LOCATION) + + +def test_backup_and_restore_of_multiple_data_sets_by_hlq(ansible_zos_module): + hosts = ansible_zos_module + data_set_name = get_tmp_ds_name() + data_set_name2 = get_tmp_ds_name() + data_sets_hlq = "ANSIBLE.**" + try: + delete_data_set_or_file(hosts, data_set_name) + delete_data_set_or_file(hosts, data_set_name2) + delete_data_set_or_file(hosts, DATA_SET_BACKUP_LOCATION) create_sequential_data_set_with_contents( - hosts, DATA_SET_NAME, DATA_SET_CONTENTS + hosts, data_set_name, DATA_SET_CONTENTS ) create_sequential_data_set_with_contents( - hosts, DATA_SET_NAME2, DATA_SET_CONTENTS + hosts, data_set_name2, DATA_SET_CONTENTS ) results = hosts.all.zos_backup_restore( operation="backup", - data_sets=dict(include=data_set_include), + data_sets=dict(include=data_sets_hlq), backup_name=DATA_SET_BACKUP_LOCATION, ) assert_module_did_not_fail(results) @@ -444,11 +469,10 @@ def test_backup_and_restore_of_multiple_data_sets(ansible_zos_module, data_set_i hlq=NEW_HLQ, ) assert_module_did_not_fail(results) - assert_data_set_exists(hosts, DATA_SET_RESTORE_LOCATION) - assert_data_set_exists(hosts, DATA_SET_RESTORE_LOCATION2) + assert_data_set_exists(hosts, DATA_SET_BACKUP_LOCATION) finally: - delete_data_set_or_file(hosts, DATA_SET_NAME) - delete_data_set_or_file(hosts, DATA_SET_NAME2) + delete_data_set_or_file(hosts, data_set_name) + delete_data_set_or_file(hosts, data_set_name2) delete_data_set_or_file(hosts, DATA_SET_RESTORE_LOCATION) delete_data_set_or_file(hosts, DATA_SET_RESTORE_LOCATION2) delete_data_set_or_file(hosts, DATA_SET_BACKUP_LOCATION) @@ -456,21 +480,23 @@ def test_backup_and_restore_of_multiple_data_sets(ansible_zos_module, data_set_i def test_backup_and_restore_exclude_from_pattern(ansible_zos_module): hosts = ansible_zos_module + data_set_name = get_tmp_ds_name() + data_set_name2 = get_tmp_ds_name() try: - delete_data_set_or_file(hosts, DATA_SET_NAME) - delete_data_set_or_file(hosts, DATA_SET_NAME2) + delete_data_set_or_file(hosts, data_set_name) + delete_data_set_or_file(hosts, data_set_name2) delete_data_set_or_file(hosts, DATA_SET_RESTORE_LOCATION) delete_data_set_or_file(hosts, DATA_SET_RESTORE_LOCATION2) delete_data_set_or_file(hosts, DATA_SET_BACKUP_LOCATION) create_sequential_data_set_with_contents( - hosts, DATA_SET_NAME, DATA_SET_CONTENTS + hosts, data_set_name, DATA_SET_CONTENTS ) create_sequential_data_set_with_contents( - hosts, DATA_SET_NAME2, DATA_SET_CONTENTS + hosts, data_set_name2, DATA_SET_CONTENTS ) results = hosts.all.zos_backup_restore( operation="backup", - data_sets=dict(include=DATA_SET_PATTERN, exclude=DATA_SET_NAME2), + data_sets=dict(include="ANSIBLE.**", exclude=data_set_name2), backup_name=DATA_SET_BACKUP_LOCATION, ) assert_module_did_not_fail(results) @@ -483,11 +509,11 @@ def test_backup_and_restore_exclude_from_pattern(ansible_zos_module): hlq=NEW_HLQ, ) assert_module_did_not_fail(results) - assert_data_set_exists(hosts, DATA_SET_RESTORE_LOCATION) + assert_data_set_exists(hosts, DATA_SET_BACKUP_LOCATION) assert_data_set_does_not_exist(hosts, DATA_SET_RESTORE_LOCATION2) finally: - delete_data_set_or_file(hosts, DATA_SET_NAME) - delete_data_set_or_file(hosts, DATA_SET_NAME2) + delete_data_set_or_file(hosts, data_set_name) + delete_data_set_or_file(hosts, data_set_name2) delete_data_set_or_file(hosts, DATA_SET_RESTORE_LOCATION) delete_data_set_or_file(hosts, DATA_SET_RESTORE_LOCATION2) delete_data_set_or_file(hosts, DATA_SET_BACKUP_LOCATION) @@ -534,32 +560,34 @@ def test_backup_of_data_set_when_data_set_does_not_exist( ansible_zos_module, backup_name ): hosts = ansible_zos_module + data_set_name = get_tmp_ds_name() try: - delete_data_set_or_file(hosts, DATA_SET_NAME) + delete_data_set_or_file(hosts, data_set_name) delete_data_set_or_file(hosts, backup_name) results = hosts.all.zos_backup_restore( operation="backup", - data_sets=dict(include=DATA_SET_NAME), + data_sets=dict(include=data_set_name), backup_name=backup_name, ) assert_module_failed(results) assert_data_set_or_file_does_not_exist(hosts, backup_name) finally: - delete_data_set_or_file(hosts, DATA_SET_NAME) + delete_data_set_or_file(hosts, data_set_name) delete_data_set_or_file(hosts, backup_name) def test_backup_of_data_set_when_volume_does_not_exist(ansible_zos_module): hosts = ansible_zos_module + data_set_name = get_tmp_ds_name() try: - delete_data_set_or_file(hosts, DATA_SET_NAME) + delete_data_set_or_file(hosts, data_set_name) delete_data_set_or_file(hosts, DATA_SET_BACKUP_LOCATION) create_sequential_data_set_with_contents( - hosts, DATA_SET_NAME, DATA_SET_CONTENTS + hosts, data_set_name, DATA_SET_CONTENTS ) results = hosts.all.zos_backup_restore( operation="backup", - data_sets=dict(include=DATA_SET_NAME), + data_sets=dict(include=data_set_name), # volume=get_unused_volume_serial(hosts), volume="@@@@", backup_name=DATA_SET_BACKUP_LOCATION, @@ -567,22 +595,23 @@ def test_backup_of_data_set_when_volume_does_not_exist(ansible_zos_module): assert_module_failed(results) assert_data_set_does_not_exist(hosts, DATA_SET_BACKUP_LOCATION) finally: - delete_data_set_or_file(hosts, DATA_SET_NAME) + delete_data_set_or_file(hosts, data_set_name) delete_data_set_or_file(hosts, DATA_SET_BACKUP_LOCATION) def test_restore_of_data_set_when_volume_does_not_exist(ansible_zos_module): hosts = ansible_zos_module + data_set_name = get_tmp_ds_name() try: - delete_data_set_or_file(hosts, DATA_SET_NAME) + delete_data_set_or_file(hosts, data_set_name) delete_data_set_or_file(hosts, DATA_SET_RESTORE_LOCATION) delete_data_set_or_file(hosts, DATA_SET_BACKUP_LOCATION) create_sequential_data_set_with_contents( - hosts, DATA_SET_NAME, DATA_SET_CONTENTS + hosts, data_set_name, DATA_SET_CONTENTS ) results = hosts.all.zos_backup_restore( operation="backup", - data_sets=dict(include=DATA_SET_NAME), + data_sets=dict(include=data_set_name), backup_name=DATA_SET_BACKUP_LOCATION, ) assert_module_did_not_fail(results) @@ -597,7 +626,7 @@ def test_restore_of_data_set_when_volume_does_not_exist(ansible_zos_module): assert_module_failed(results) assert_data_set_does_not_exist(hosts, DATA_SET_RESTORE_LOCATION) finally: - delete_data_set_or_file(hosts, DATA_SET_NAME) + delete_data_set_or_file(hosts, data_set_name) delete_data_set_or_file(hosts, DATA_SET_RESTORE_LOCATION) delete_data_set_or_file(hosts, DATA_SET_BACKUP_LOCATION) @@ -606,15 +635,15 @@ def test_restore_of_data_set_when_volume_does_not_exist(ansible_zos_module): # hosts = ansible_zos_module # try: # delete_data_set_or_file(hosts, DATA_SET_BACKUP_LOCATION) -# delete_data_set_or_file(hosts, DATA_SET_NAME) -# delete_data_set_or_file(hosts, DATA_SET_NAME2) +# delete_data_set_or_file(hosts, data_set_name) +# delete_data_set_or_file(hosts, data_set_name2) # delete_data_set_or_file(hosts, DATA_SET_RESTORE_LOCATION) # delete_data_set_or_file(hosts, DATA_SET_RESTORE_LOCATION2) # create_sequential_data_set_with_contents( -# hosts, DATA_SET_NAME, DATA_SET_CONTENTS, VOLUME +# hosts, data_set_name, DATA_SET_CONTENTS, VOLUME # ) # create_sequential_data_set_with_contents( -# hosts, DATA_SET_NAME2, DATA_SET_CONTENTS, VOLUME2 +# hosts, data_set_name2, DATA_SET_CONTENTS, VOLUME2 # ) # results = hosts.all.zos_backup_restore( # operation="backup", @@ -636,8 +665,8 @@ def test_restore_of_data_set_when_volume_does_not_exist(ansible_zos_module): # assert_data_set_exists(hosts, DATA_SET_RESTORE_LOCATION) # assert_data_set_does_not_exist(hosts, DATA_SET_RESTORE_LOCATION2) # finally: -# delete_data_set_or_file(hosts, DATA_SET_NAME) -# delete_data_set_or_file(hosts, DATA_SET_NAME2) +# delete_data_set_or_file(hosts, data_set_name) +# delete_data_set_or_file(hosts, data_set_name2) # delete_data_set_or_file(hosts, DATA_SET_RESTORE_LOCATION) # delete_data_set_or_file(hosts, DATA_SET_RESTORE_LOCATION2) # delete_data_set_or_file(hosts, DATA_SET_BACKUP_LOCATION) @@ -647,9 +676,9 @@ def test_restore_of_data_set_when_volume_does_not_exist(ansible_zos_module): # hosts = ansible_zos_module # try: # delete_data_set_or_file(hosts, DATA_SET_BACKUP_LOCATION) -# delete_data_set_or_file(hosts, DATA_SET_NAME) +# delete_data_set_or_file(hosts, data_set_name) # create_sequential_data_set_with_contents( -# hosts, DATA_SET_NAME, DATA_SET_CONTENTS, VOLUME +# hosts, data_set_name, DATA_SET_CONTENTS, VOLUME # ) # results = hosts.all.zos_backup_restore( # operation="backup", @@ -663,7 +692,7 @@ def test_restore_of_data_set_when_volume_does_not_exist(ansible_zos_module): # ) # assert_module_did_not_fail(results) # assert_data_set_or_file_exists(hosts, DATA_SET_BACKUP_LOCATION) -# delete_data_set_or_file(hosts, DATA_SET_NAME) +# delete_data_set_or_file(hosts, data_set_name) # results = hosts.all.zos_backup_restore( # operation="restore", # backup_name=DATA_SET_BACKUP_LOCATION, @@ -675,7 +704,7 @@ def test_restore_of_data_set_when_volume_does_not_exist(ansible_zos_module): # space_type="M", # ) # assert_module_did_not_fail(results) -# assert_data_set_exists_on_volume(hosts, DATA_SET_NAME, VOLUME) +# assert_data_set_exists_on_volume(hosts, data_set_name, VOLUME) # finally: -# delete_data_set_or_file(hosts, DATA_SET_NAME) +# delete_data_set_or_file(hosts, data_set_name) # delete_data_set_or_file(hosts, DATA_SET_BACKUP_LOCATION) diff --git a/tests/functional/modules/test_zos_blockinfile_func.py b/tests/functional/modules/test_zos_blockinfile_func.py index b2e567dc1..39d04639f 100644 --- a/tests/functional/modules/test_zos_blockinfile_func.py +++ b/tests/functional/modules/test_zos_blockinfile_func.py @@ -12,15 +12,16 @@ # limitations under the License. from __future__ import absolute_import, division, print_function +from ibm_zos_core.tests.helpers.dataset import get_tmp_ds_name from shellescape import quote import time import re import pytest import inspect +import os __metaclass__ = type -DEFAULT_DATA_SET_NAME = "USER.PRIVATE.TESTDS" TEST_FOLDER_BLOCKINFILE = "/tmp/ansible-core-tests/zos_blockinfile/" c_pgm="""#include <stdio.h> @@ -939,9 +940,8 @@ def test_ds_block_insertafter_regex(ansible_zos_module, dstype): hosts = ansible_zos_module ds_type = dstype params = dict(insertafter="ZOAU_ROOT=", block="ZOAU_ROOT=/mvsutil-develop_dsed\nZOAU_HOME=\\$ZOAU_ROOT\nZOAU_DIR=\\$ZOAU_ROOT", state="present") - test_name = "DST1" - temp_file = "/tmp/{0}".format(test_name) - ds_name = test_name.upper() + "." + ds_type + ds_name = get_tmp_ds_name() + temp_file = "/tmp/" + ds_name content = TEST_CONTENT try: ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) @@ -962,9 +962,8 @@ def test_ds_block_insertbefore_regex(ansible_zos_module, dstype): hosts = ansible_zos_module ds_type = dstype params = dict(insertbefore="ZOAU_ROOT=", block="unset ZOAU_ROOT\nunset ZOAU_HOME\nunset ZOAU_DIR", state="present") - test_name = "DST2" - temp_file = "/tmp/{0}".format(test_name) - ds_name = test_name.upper() + "." + ds_type + ds_name = get_tmp_ds_name() + temp_file = "/tmp/" + ds_name content = TEST_CONTENT try: ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) @@ -985,9 +984,8 @@ def test_ds_block_insertafter_eof(ansible_zos_module, dstype): hosts = ansible_zos_module ds_type = dstype params = dict(insertafter="EOF", block="export ZOAU_ROOT\nexport ZOAU_HOME\nexport ZOAU_DIR", state="present") - test_name = "DST3" - temp_file = "/tmp/{0}".format(test_name) - ds_name = test_name.upper() + "." + ds_type + ds_name = get_tmp_ds_name() + temp_file = "/tmp/" + ds_name content = TEST_CONTENT try: ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) @@ -1008,9 +1006,8 @@ def test_ds_block_insertbefore_bof(ansible_zos_module, dstype): hosts = ansible_zos_module ds_type = dstype params = dict(insertbefore="BOF", block="# this is file is for setting env vars", state="present") - test_name = "DST4" - temp_file = "/tmp/{0}".format(test_name) - ds_name = test_name.upper() + "." + ds_type + ds_name = get_tmp_ds_name() + temp_file = "/tmp/" + ds_name content = TEST_CONTENT try: ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) @@ -1031,9 +1028,8 @@ def test_ds_block_replace_insertafter_regex(ansible_zos_module, dstype): hosts = ansible_zos_module ds_type = dstype params = dict(insertafter="PYTHON_HOME=", block="ZOAU_ROOT=/mvsutil-develop_dsed\nZOAU_HOME=\\$ZOAU_ROOT\nZOAU_DIR=\\$ZOAU_ROOT", state="present") - test_name = "DST5" - temp_file = "/tmp/{0}".format(test_name) - ds_name = test_name.upper() + "." + ds_type + ds_name = get_tmp_ds_name() + temp_file = "/tmp/" + ds_name content = TEST_CONTENT_DEFAULTMARKER try: ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) @@ -1054,9 +1050,8 @@ def test_ds_block_replace_insertbefore_regex(ansible_zos_module, dstype): hosts = ansible_zos_module ds_type = dstype params = dict(insertbefore="PYTHON_HOME=", block="unset ZOAU_ROOT\nunset ZOAU_HOME\nunset ZOAU_DIR", state="present") - test_name = "DST6" - temp_file = "/tmp/{0}".format(test_name) - ds_name = test_name.upper() + "." + ds_type + ds_name = get_tmp_ds_name() + temp_file = "/tmp/" + ds_name content = TEST_CONTENT_DEFAULTMARKER try: ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) @@ -1077,9 +1072,8 @@ def test_ds_block_replace_insertafter_eof(ansible_zos_module, dstype): hosts = ansible_zos_module ds_type = dstype params = dict(insertafter="EOF", block="export ZOAU_ROOT\nexport ZOAU_HOME\nexport ZOAU_DIR", state="present") - test_name = "DST7" - temp_file = "/tmp/{0}".format(test_name) - ds_name = test_name.upper() + "." + ds_type + ds_name = get_tmp_ds_name() + temp_file = "/tmp/" + ds_name content = TEST_CONTENT_DEFAULTMARKER try: ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) @@ -1100,9 +1094,8 @@ def test_ds_block_replace_insertbefore_bof(ansible_zos_module, dstype): hosts = ansible_zos_module ds_type = dstype params = dict(insertbefore="BOF", block="# this is file is for setting env vars", state="present") - test_name = "DST8" - temp_file = "/tmp/{0}".format(test_name) - ds_name = test_name.upper() + "." + ds_type + ds_name = get_tmp_ds_name() + temp_file = "/tmp/" + ds_name content = TEST_CONTENT_DEFAULTMARKER try: ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) @@ -1123,9 +1116,8 @@ def test_ds_block_absent(ansible_zos_module, dstype): hosts = ansible_zos_module ds_type = dstype params = dict(block="", state="absent") - test_name = "DST9" - temp_file = "/tmp/{0}".format(test_name) - ds_name = test_name.upper() + "." + ds_type + ds_name = get_tmp_ds_name() + temp_file = "/tmp/" + ds_name content = TEST_CONTENT_DEFAULTMARKER try: ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) @@ -1147,17 +1139,10 @@ def test_ds_tmp_hlq_option(ansible_zos_module): ds_type = "SEQ" params=dict(insertafter="EOF", block="export ZOAU_ROOT\n", state="present", backup=True, tmp_hlq="TMPHLQ") kwargs = dict(backup_name=r"TMPHLQ\..") - test_name = "DST10" - temp_file = "/tmp/zos_lineinfile/" + test_name content = TEST_CONTENT try: - hosts.all.shell(cmd="mkdir -p {0}".format("/tmp/zos_lineinfile/")) - results = hosts.all.shell(cmd='hlq') - for result in results.contacted.values(): - hlq = result.get("stdout") - if len(hlq) > 8: - hlq = hlq[:8] - ds_full_name = hlq + "." + test_name.upper() + "." + ds_type + ds_full_name = get_tmp_ds_name() + temp_file = "/tmp/" + ds_full_name hosts.all.zos_data_set(name=ds_full_name, type=ds_type, replace=True) hosts.all.shell(cmd="echo \"{0}\" > {1}".format(content, temp_file)) cmdStr = "cp {0} \"//'{1}'\" ".format(quote(temp_file), ds_full_name) @@ -1182,9 +1167,8 @@ def test_ds_block_insert_with_indentation_level_specified(ansible_zos_module, ds hosts = ansible_zos_module ds_type = dstype params = dict(insertafter="EOF", block="export ZOAU_ROOT\nexport ZOAU_HOME\nexport ZOAU_DIR", state="present", indentation=16) - test_name = "DST11" - temp_file = "/tmp/{0}".format(test_name) - ds_name = test_name.upper() + "." + ds_type + ds_name = get_tmp_ds_name() + temp_file = "/tmp/" + ds_name content = TEST_CONTENT try: ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) @@ -1209,9 +1193,8 @@ def test_ds_block_insertafter_eof_with_backup(ansible_zos_module, dstype, backup params = dict(block="export ZOAU_ROOT\nexport ZOAU_HOME\nexport ZOAU_DIR", state="present", backup=True) if backup_name: params["backup_name"] = backup_name - test_name = "DST12" - temp_file = "/tmp/{0}".format(test_name) - ds_name = test_name.upper() + "." + ds_type + ds_name = get_tmp_ds_name() + temp_file = "/tmp/" + ds_name content = TEST_CONTENT try: ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) @@ -1238,21 +1221,22 @@ def test_ds_block_insertafter_eof_with_backup(ansible_zos_module, dstype, backup def test_ds_block_insertafter_regex_force(ansible_zos_module, dstype): hosts = ansible_zos_module ds_type = dstype + default_data_set_name = get_tmp_ds_name() params = dict(path="",insertafter="ZOAU_ROOT=", block="ZOAU_ROOT=/mvsutil-develop_dsed\nZOAU_HOME=\\$ZOAU_ROOT\nZOAU_DIR=\\$ZOAU_ROOT", state="present", force=True) MEMBER_1, MEMBER_2 = "MEM1", "MEM2" TEMP_FILE = "/tmp/{0}".format(MEMBER_2) content = TEST_CONTENT if ds_type == "SEQ": - params["path"] = DEFAULT_DATA_SET_NAME+".{0}".format(MEMBER_2) + params["path"] = default_data_set_name+".{0}".format(MEMBER_2) else: - params["path"] = DEFAULT_DATA_SET_NAME+"({0})".format(MEMBER_2) + params["path"] = default_data_set_name+"({0})".format(MEMBER_2) try: # set up: - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="present", type=ds_type, replace=True) + hosts.all.zos_data_set(name=default_data_set_name, state="present", type=ds_type, replace=True) hosts.all.shell(cmd="echo \"{0}\" > {1}".format(content, TEMP_FILE)) hosts.all.zos_data_set( batch=[ - { "name": DEFAULT_DATA_SET_NAME + "({0})".format(MEMBER_1), + { "name": default_data_set_name + "({0})".format(MEMBER_1), "type": "member", "state": "present", "replace": True, }, { "name": params["path"], "type": "member", "state": "present", "replace": True, }, @@ -1271,7 +1255,7 @@ def test_ds_block_insertafter_regex_force(ansible_zos_module, dstype): hosts.all.file(path="/tmp/disp_shr/", state="directory") hosts.all.shell(cmd="echo \"{0}\" > {1}".format(c_pgm, '/tmp/disp_shr/pdse-lock.c')) hosts.all.shell(cmd="echo \"{0}\" > {1}".format( - call_c_jcl.format(DEFAULT_DATA_SET_NAME, MEMBER_1), + call_c_jcl.format(default_data_set_name, MEMBER_1), '/tmp/disp_shr/call_c_pgm.jcl')) hosts.all.shell(cmd="xlc -o pdse-lock pdse-lock.c", chdir="/tmp/disp_shr/") hosts.all.shell(cmd="submit call_c_pgm.jcl", chdir="/tmp/disp_shr/") @@ -1289,7 +1273,7 @@ def test_ds_block_insertafter_regex_force(ansible_zos_module, dstype): pid = list(ps_list_res.contacted.values())[0].get('stdout').strip().split(' ')[0] hosts.all.shell(cmd="kill 9 {0}".format(pid.strip())) hosts.all.shell(cmd='rm -r /tmp/disp_shr') - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") + hosts.all.zos_data_set(name=default_data_set_name, state="absent") ######################### # Encoding tests @@ -1301,7 +1285,7 @@ def test_uss_encoding(ansible_zos_module, encoding): insert_data = "Insert this string" params = dict(insertafter="SIMPLE", block=insert_data, state="present") params["encoding"] = encoding - full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] + full_path = TEST_FOLDER_BLOCKINFILE + encoding content = "SIMPLE LINE TO VERIFY" try: hosts.all.shell(cmd="mkdir -p {0}".format(TEST_FOLDER_BLOCKINFILE)) @@ -1318,6 +1302,7 @@ def test_uss_encoding(ansible_zos_module, encoding): finally: remove_uss_environment(ansible_zos_module) + @pytest.mark.ds @pytest.mark.parametrize("dstype", DS_TYPE) @pytest.mark.parametrize("encoding", ["IBM-1047"]) @@ -1327,9 +1312,8 @@ def test_ds_encoding(ansible_zos_module, encoding, dstype): insert_data = "Insert this string" params = dict(insertafter="SIMPLE", block=insert_data, state="present") params["encoding"] = encoding - test_name = "DST13" - temp_file = "/tmp/{0}".format(test_name) - ds_name = test_name.upper() + "." + ds_type + ds_name = get_tmp_ds_name() + temp_file = "/tmp/" + ds_name content = "SIMPLE LINE TO VERIFY" try: hosts.all.shell(cmd="echo \"{0}\" > {1}".format(content, temp_file)) @@ -1354,6 +1338,8 @@ def test_ds_encoding(ansible_zos_module, encoding, dstype): assert result.get("stdout") == EXPECTED_ENCODING finally: remove_ds_environment(ansible_zos_module, ds_name) + + ######################### # Negative tests ######################### @@ -1375,9 +1361,8 @@ def test_ds_block_insertafter_nomatch_eof_insert(ansible_zos_module): ds_type = 'SEQ' params=dict(insertafter="EOF", block="export ZOAU_ROOT\nexport ZOAU_HOME\nexport ZOAU_DIR", state="present") params["insertafter"] = 'SOME_NON_EXISTING_PATTERN' - test_name = "DST13" - temp_file = "/tmp/{0}".format(test_name) - ds_name = test_name.upper() + "." + ds_type + ds_name = get_tmp_ds_name() + temp_file = "/tmp/" + ds_name content = TEST_CONTENT try: ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) @@ -1409,14 +1394,10 @@ def test_ds_not_supported(ansible_zos_module, dstype): hosts = ansible_zos_module ds_type = dstype params = dict(insertafter="ZOAU_ROOT=", block="ZOAU_ROOT=/mvsutil-develop_dsed\nZOAU_HOME=\\$ZOAU_ROOT\nZOAU_DIR=\\$ZOAU_ROOT", state="present") - test_name = "DST14" - ds_name = test_name.upper() + "." + ds_type + ds_name = get_tmp_ds_name() + temp_file = "/tmp/" + ds_name try: - results = hosts.all.shell(cmd='hlq') - for result in results.contacted.values(): - hlq = result.get("stdout") - assert len(hlq) <= 8 or hlq != '' - ds_name = test_name.upper() + "." + ds_type + ds_name = ds_name.upper() + "." + ds_type results = hosts.all.zos_data_set(name=ds_name, type=ds_type, replace='yes') for result in results.contacted.values(): assert result.get("changed") is True @@ -1434,18 +1415,19 @@ def test_ds_not_supported(ansible_zos_module, dstype): def test_ds_block_insertafter_regex_fail(ansible_zos_module, dstype): hosts = ansible_zos_module ds_type = dstype + default_data_set_name = get_tmp_ds_name() params = dict(path="", insertafter="ZOAU_ROOT=", block="ZOAU_ROOT=/mvsutil-develop_dsed\nZOAU_HOME=\\$ZOAU_ROOT\nZOAU_DIR=\\$ZOAU_ROOT", state="present", force=False) MEMBER_1, MEMBER_2 = "MEM1", "MEM2" TEMP_FILE = "/tmp/{0}".format(MEMBER_2) - params["path"] = DEFAULT_DATA_SET_NAME+"({0})".format(MEMBER_2) + params["path"] = default_data_set_name+"({0})".format(MEMBER_2) content = TEST_CONTENT try: # set up: - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="present", type=ds_type, replace=True) + hosts.all.zos_data_set(name=default_data_set_name, state="present", type=ds_type, replace=True) hosts.all.shell(cmd="echo \"{0}\" > {1}".format(content, TEMP_FILE)) hosts.all.zos_data_set( batch=[ - { "name": DEFAULT_DATA_SET_NAME + "({0})".format(MEMBER_1), + { "name": default_data_set_name + "({0})".format(MEMBER_1), "type": "member", "state": "present", "replace": True, }, { "name": params["path"], "type": "member", "state": "present", "replace": True, }, @@ -1460,7 +1442,7 @@ def test_ds_block_insertafter_regex_fail(ansible_zos_module, dstype): hosts.all.file(path="/tmp/disp_shr/", state="directory") hosts.all.shell(cmd="echo \"{0}\" > {1}".format(c_pgm, '/tmp/disp_shr/pdse-lock.c')) hosts.all.shell(cmd="echo \"{0}\" > {1}".format( - call_c_jcl.format(DEFAULT_DATA_SET_NAME, MEMBER_1), + call_c_jcl.format(default_data_set_name, MEMBER_1), '/tmp/disp_shr/call_c_pgm.jcl')) hosts.all.shell(cmd="xlc -o pdse-lock pdse-lock.c", chdir="/tmp/disp_shr/") hosts.all.shell(cmd="submit call_c_pgm.jcl", chdir="/tmp/disp_shr/") @@ -1475,4 +1457,4 @@ def test_ds_block_insertafter_regex_fail(ansible_zos_module, dstype): pid = list(ps_list_res.contacted.values())[0].get('stdout').strip().split(' ')[0] hosts.all.shell(cmd="kill 9 {0}".format(pid.strip())) hosts.all.shell(cmd='rm -r /tmp/disp_shr') - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") \ No newline at end of file + hosts.all.zos_data_set(name=default_data_set_name, state="absent") diff --git a/tests/functional/modules/test_zos_copy_func.py b/tests/functional/modules/test_zos_copy_func.py index 2cc11c9dd..1cb3cb7cb 100644 --- a/tests/functional/modules/test_zos_copy_func.py +++ b/tests/functional/modules/test_zos_copy_func.py @@ -22,7 +22,8 @@ from tempfile import mkstemp import subprocess - +from ibm_zos_core.tests.helpers.volumes import Volume_Handler +from ibm_zos_core.tests.helpers.dataset import get_tmp_ds_name __metaclass__ = type @@ -229,6 +230,7 @@ //STDERR DD SYSOUT=* //""" + def populate_dir(dir_path): for i in range(5): with open(dir_path + "/" + "file" + str(i + 1), "w") as infile: @@ -1557,7 +1559,7 @@ def test_copy_template_file_with_non_default_markers(ansible_zos_module): @pytest.mark.template def test_copy_template_file_to_dataset(ansible_zos_module): hosts = ansible_zos_module - dest_dataset = "USER.TEST.TEMPLATE" + dest_dataset = get_tmp_ds_name() temp_dir = tempfile.mkdtemp() try: @@ -1610,7 +1612,7 @@ def test_copy_asa_file_to_asa_sequential(ansible_zos_module): hosts = ansible_zos_module try: - dest = "USER.ASA.SEQ" + dest = get_tmp_ds_name() hosts.all.zos_data_set(name=dest, state="absent") copy_result = hosts.all.zos_copy( @@ -1644,7 +1646,7 @@ def test_copy_asa_file_to_asa_partitioned(ansible_zos_module): hosts = ansible_zos_module try: - dest = "USER.ASA.PDSE" + dest = get_tmp_ds_name() hosts.all.zos_data_set(name=dest, state="absent") full_dest = "{0}(TEST)".format(dest) @@ -1678,7 +1680,7 @@ def test_copy_seq_data_set_to_seq_asa(ansible_zos_module): hosts = ansible_zos_module try: - src = "USER.SRC.SEQ" + src = get_tmp_ds_name() hosts.all.zos_data_set( name=src, state="present", @@ -1686,7 +1688,7 @@ def test_copy_seq_data_set_to_seq_asa(ansible_zos_module): replace=True ) - dest = "USER.ASA.SEQ" + dest = get_tmp_ds_name() hosts.all.zos_data_set(name=dest, state="absent") hosts.all.zos_copy( @@ -1727,7 +1729,7 @@ def test_copy_seq_data_set_to_partitioned_asa(ansible_zos_module): hosts = ansible_zos_module try: - src = "USER.SRC.SEQ" + src = get_tmp_ds_name() hosts.all.zos_data_set( name=src, state="present", @@ -1735,7 +1737,7 @@ def test_copy_seq_data_set_to_partitioned_asa(ansible_zos_module): replace=True ) - dest = "USER.ASA.PDSE" + dest = get_tmp_ds_name() full_dest = "{0}(MEMBER)".format(dest) hosts.all.zos_data_set(name=dest, state="absent") @@ -1777,7 +1779,7 @@ def test_copy_partitioned_data_set_to_seq_asa(ansible_zos_module): hosts = ansible_zos_module try: - src = "USER.SRC.PDSE" + src = get_tmp_ds_name() full_src = "{0}(MEMBER)".format(src) hosts.all.zos_data_set( name=src, @@ -1786,7 +1788,7 @@ def test_copy_partitioned_data_set_to_seq_asa(ansible_zos_module): replace=True ) - dest = "USER.ASA.SEQ" + dest = get_tmp_ds_name() hosts.all.zos_data_set(name=dest, state="absent") hosts.all.zos_copy( @@ -1827,7 +1829,7 @@ def test_copy_partitioned_data_set_to_partitioned_asa(ansible_zos_module): hosts = ansible_zos_module try: - src = "USER.SRC.PDSE" + src = get_tmp_ds_name() full_src = "{0}(MEMBER)".format(src) hosts.all.zos_data_set( name=src, @@ -1836,7 +1838,7 @@ def test_copy_partitioned_data_set_to_partitioned_asa(ansible_zos_module): replace=True ) - dest = "USER.ASA.PDSE" + dest = get_tmp_ds_name() full_dest = "{0}(MEMBER)".format(dest) hosts.all.zos_data_set(name=dest, state="absent") @@ -1878,7 +1880,7 @@ def test_copy_asa_data_set_to_text_file(ansible_zos_module): hosts = ansible_zos_module try: - src = "USER.ASA.SRC" + src = get_tmp_ds_name() hosts.all.zos_data_set( name=src, state="present", @@ -1960,19 +1962,20 @@ def test_ensure_copy_file_does_not_change_permission_on_dest(ansible_zos_module, @pytest.mark.seq @pytest.mark.parametrize("ds_type", [ "PDS", "PDSE", "SEQ"]) def test_copy_dest_lock(ansible_zos_module, ds_type): - DATASET_1 = "USER.PRIVATE.TESTDS" - DATASET_2 = "ADMI.PRIVATE.TESTDS" - MEMBER_1 = "MEM1" + hosts = ansible_zos_module + data_set_1 = get_tmp_ds_name() + data_set_2 = get_tmp_ds_name() + member_1 = "MEM1" if ds_type == "PDS" or ds_type == "PDSE": - src_data_set = DATASET_1 + "({0})".format(MEMBER_1) - dest_data_set = DATASET_2 + "({0})".format(MEMBER_1) + src_data_set = data_set_1 + "({0})".format(member_1) + dest_data_set = data_set_2 + "({0})".format(member_1) else: - src_data_set = DATASET_1 - dest_data_set = DATASET_2 + src_data_set = data_set_1 + dest_data_set = data_set_2 try: hosts = ansible_zos_module - hosts.all.zos_data_set(name=DATASET_1, state="present", type=ds_type, replace=True) - hosts.all.zos_data_set(name=DATASET_2, state="present", type=ds_type, replace=True) + hosts.all.zos_data_set(name=data_set_1, state="present", type=ds_type, replace=True) + hosts.all.zos_data_set(name=data_set_2, state="present", type=ds_type, replace=True) if ds_type == "PDS" or ds_type == "PDSE": hosts.all.zos_data_set(name=src_data_set, state="present", type="member", replace=True) hosts.all.zos_data_set(name=dest_data_set, state="present", type="member", replace=True) @@ -2025,15 +2028,15 @@ def test_copy_dest_lock(ansible_zos_module, ds_type): # clean up c code/object/executable files, jcl hosts.all.shell(cmd='rm -r /tmp/disp_shr') # remove pdse - hosts.all.zos_data_set(name=DATASET_1, state="absent") - hosts.all.zos_data_set(name=DATASET_2, state="absent") + hosts.all.zos_data_set(name=data_set_1, state="absent") + hosts.all.zos_data_set(name=data_set_2, state="absent") @pytest.mark.uss @pytest.mark.seq def test_copy_file_record_length_to_sequential_data_set(ansible_zos_module): hosts = ansible_zos_module - dest = "USER.TEST.SEQ.FUNCTEST" + dest = get_tmp_ds_name() fd, src = tempfile.mkstemp() os.close(fd) @@ -2086,7 +2089,7 @@ def test_copy_file_record_length_to_sequential_data_set(ansible_zos_module): @pytest.mark.seq def test_copy_file_crlf_endings_to_sequential_data_set(ansible_zos_module): hosts = ansible_zos_module - dest = "USER.TEST.SEQ.FUNCTEST" + dest = get_tmp_ds_name() fd, src = tempfile.mkstemp() os.close(fd) @@ -2141,7 +2144,7 @@ def test_copy_file_crlf_endings_to_sequential_data_set(ansible_zos_module): @pytest.mark.seq def test_copy_local_binary_file_without_encoding_conversion(ansible_zos_module): hosts = ansible_zos_module - dest = "USER.TEST.SEQ.FUNCTEST" + dest = get_tmp_ds_name() fd, src = tempfile.mkstemp() os.close(fd) @@ -2172,7 +2175,7 @@ def test_copy_local_binary_file_without_encoding_conversion(ansible_zos_module): def test_copy_remote_binary_file_without_encoding_conversion(ansible_zos_module): hosts = ansible_zos_module src = "/tmp/zos_copy_binary_file" - dest = "USER.TEST.SEQ.FUNCTEST" + dest = get_tmp_ds_name() try: hosts.all.zos_data_set(name=dest, state="absent") @@ -2221,7 +2224,7 @@ def test_copy_remote_binary_file_without_encoding_conversion(ansible_zos_module) ]) def test_copy_file_to_non_existing_sequential_data_set(ansible_zos_module, src): hosts = ansible_zos_module - dest = "USER.TEST.SEQ.FUNCTEST" + dest = get_tmp_ds_name() try: hosts.all.zos_data_set(name=dest, state="absent") @@ -2260,7 +2263,7 @@ def test_copy_file_to_non_existing_sequential_data_set(ansible_zos_module, src): ]) def test_copy_file_to_empty_sequential_data_set(ansible_zos_module, src): hosts = ansible_zos_module - dest = "USER.TEST.SEQ.FUNCTEST" + dest = get_tmp_ds_name() try: hosts.all.zos_data_set(name=dest, type="seq", state="present") @@ -2288,7 +2291,7 @@ def test_copy_file_to_empty_sequential_data_set(ansible_zos_module, src): ]) def test_copy_file_to_non_empty_sequential_data_set(ansible_zos_module, src): hosts = ansible_zos_module - dest = "USER.TEST.SEQ.FUNCTEST" + dest = get_tmp_ds_name() try: hosts.all.zos_data_set(name=dest, type="seq", state="absent") @@ -2400,7 +2403,7 @@ def test_copy_ps_to_existing_uss_dir(ansible_zos_module): def test_copy_ps_to_non_existing_ps(ansible_zos_module): hosts = ansible_zos_module src_ds = TEST_PS - dest = "USER.TEST.SEQ.FUNCTEST" + dest = get_tmp_ds_name() try: hosts.all.zos_data_set(name=dest, state="absent") @@ -2426,7 +2429,7 @@ def test_copy_ps_to_non_existing_ps(ansible_zos_module): def test_copy_ps_to_empty_ps(ansible_zos_module, force): hosts = ansible_zos_module src_ds = TEST_PS - dest = "USER.TEST.SEQ.FUNCTEST" + dest = get_tmp_ds_name() try: hosts.all.zos_data_set(name=dest, type="seq", state="present") @@ -2452,7 +2455,7 @@ def test_copy_ps_to_empty_ps(ansible_zos_module, force): def test_copy_ps_to_non_empty_ps(ansible_zos_module, force): hosts = ansible_zos_module src_ds = TEST_PS - dest = "USER.TEST.SEQ.FUNCTEST" + dest = get_tmp_ds_name() try: hosts.all.zos_data_set(name=dest, type="seq", state="absent") @@ -2483,7 +2486,7 @@ def test_copy_ps_to_non_empty_ps(ansible_zos_module, force): def test_copy_ps_to_non_empty_ps_with_special_chars(ansible_zos_module, force): hosts = ansible_zos_module src_ds = TEST_PS - dest = "USER.TEST.SEQ.FUNCTEST" + dest = get_tmp_ds_name() try: hosts.all.zos_data_set(name=dest, type="seq", state="absent") @@ -2514,7 +2517,7 @@ def test_copy_ps_to_non_empty_ps_with_special_chars(ansible_zos_module, force): def test_backup_sequential_data_set(ansible_zos_module, backup): hosts = ansible_zos_module src = "/etc/profile" - dest = "USER.TEST.SEQ.FUNCTEST" + dest = get_tmp_ds_name() try: hosts.all.zos_data_set(name=dest, type="seq", state="present") @@ -2556,7 +2559,7 @@ def test_backup_sequential_data_set(ansible_zos_module, backup): ]) def test_copy_file_to_non_existing_member(ansible_zos_module, src): hosts = ansible_zos_module - data_set = "USER.TEST.PDS.FUNCTEST" + data_set = get_tmp_ds_name() dest = "{0}(PROFILE)".format(data_set) try: @@ -2602,7 +2605,7 @@ def test_copy_file_to_non_existing_member(ansible_zos_module, src): ]) def test_copy_file_to_existing_member(ansible_zos_module, src): hosts = ansible_zos_module - data_set = "USER.TEST.PDS.FUNCTEST" + data_set = get_tmp_ds_name() dest = "{0}(PROFILE)".format(data_set) try: @@ -2653,9 +2656,9 @@ def test_copy_file_to_existing_member(ansible_zos_module, src): ]) def test_copy_data_set_to_non_existing_member(ansible_zos_module, args): hosts = ansible_zos_module - src_data_set = "USER.TEST.PDS.SOURCE" + src_data_set = get_tmp_ds_name() src = src_data_set if args["type"] == "seq" else "{0}(TEST)".format(src_data_set) - dest_data_set = "USER.TEST.PDS.FUNCTEST" + dest_data_set = get_tmp_ds_name() dest = "{0}(MEMBER)".format(dest_data_set) try: @@ -2700,9 +2703,9 @@ def test_copy_data_set_to_non_existing_member(ansible_zos_module, args): ]) def test_copy_data_set_to_existing_member(ansible_zos_module, args): hosts = ansible_zos_module - src_data_set = "USER.TEST.PDS.SOURCE" + src_data_set = get_tmp_ds_name() src = src_data_set if args["type"] == "seq" else "{0}(TEST)".format(src_data_set) - dest_data_set = "USER.TEST.PDS.FUNCTEST" + dest_data_set = get_tmp_ds_name() dest = "{0}(MEMBER)".format(dest_data_set) try: @@ -2746,7 +2749,7 @@ def test_copy_data_set_to_existing_member(ansible_zos_module, args): @pytest.mark.parametrize("is_remote", [False, True]) def test_copy_file_to_non_existing_pdse(ansible_zos_module, is_remote): hosts = ansible_zos_module - dest = "USER.TEST.PDS.FUNCTEST" + dest = get_tmp_ds_name() dest_path = "{0}(PROFILE)".format(dest) src_file = "/etc/profile" @@ -2775,7 +2778,7 @@ def test_copy_file_to_non_existing_pdse(ansible_zos_module, is_remote): def test_copy_dir_to_non_existing_pdse(ansible_zos_module): hosts = ansible_zos_module src_dir = "/tmp/testdir" - dest = "USER.TEST.PDSE.FUNCTEST" + dest = get_tmp_ds_name() try: hosts.all.file(path=src_dir, state="directory") @@ -2804,7 +2807,7 @@ def test_copy_dir_to_non_existing_pdse(ansible_zos_module): @pytest.mark.pdse def test_copy_dir_crlf_endings_to_non_existing_pdse(ansible_zos_module): hosts = ansible_zos_module - dest = "USER.TEST.PDSE.FUNCTEST" + dest = get_tmp_ds_name() temp_path = tempfile.mkdtemp() src_basename = "source/" @@ -2839,7 +2842,7 @@ def test_copy_dir_crlf_endings_to_non_existing_pdse(ansible_zos_module): def test_copy_dir_to_existing_pdse(ansible_zos_module, src_type): hosts = ansible_zos_module src_dir = "/tmp/testdir" - dest = "USER.TEST.PDS.FUNCTEST" + dest = get_tmp_ds_name() try: hosts.all.file(path=src_dir, state="directory") @@ -2877,9 +2880,9 @@ def test_copy_dir_to_existing_pdse(ansible_zos_module, src_type): @pytest.mark.parametrize("src_type", ["seq", "pds", "pdse"]) def test_copy_data_set_to_non_existing_pdse(ansible_zos_module, src_type): hosts = ansible_zos_module - src_data_set = "USER.TEST.PDS.SOURCE" + src_data_set = get_tmp_ds_name() src = src_data_set if src_type == "seq" else "{0}(TEST)".format(src_data_set) - dest_data_set = "USER.TEST.PDS.FUNCTEST" + dest_data_set = get_tmp_ds_name() dest = "{0}(MEMBER)".format(dest_data_set) try: @@ -2922,8 +2925,8 @@ def test_copy_data_set_to_non_existing_pdse(ansible_zos_module, src_type): ]) def test_copy_pds_to_existing_pds(ansible_zos_module, args): hosts = ansible_zos_module - src = "USER.TEST.PDS.SRC" - dest = "USER.TEST.PDS.DEST" + src = get_tmp_ds_name() + dest = get_tmp_ds_name() try: populate_partitioned_data_set(hosts, src, args["src_type"]) @@ -2957,11 +2960,12 @@ def test_copy_pds_to_existing_pds(ansible_zos_module, args): def test_copy_pds_loadlib_member_to_pds_loadlib_member(ansible_zos_module, is_created): hosts = ansible_zos_module # This dataset and member should be available on any z/OS system. - cobol_src_pds = "USER.COBOL.SRC" + mlq_size = 3 + cobol_src_pds = get_tmp_ds_name(mlq_size) cobol_src_mem = "HELLOCBL" - src_lib = "USER.LOAD.SRC" - dest_lib = "USER.LOAD.DEST" - dest_lib_aliases = "USER.LOAD.DEST.ALIASES" + src_lib = get_tmp_ds_name(mlq_size) + dest_lib = get_tmp_ds_name(mlq_size) + dest_lib_aliases = get_tmp_ds_name(mlq_size) pgm_mem = "HELLO" pgm_mem_alias = "ALIAS1" try: @@ -3091,14 +3095,14 @@ def test_copy_pds_loadlib_member_to_pds_loadlib_member(ansible_zos_module, is_cr @pytest.mark.uss def test_copy_pds_loadlib_member_to_uss_to_loadlib(ansible_zos_module): hosts = ansible_zos_module - - cobol_src_pds = "USER.COBOL.SRC" + mlq_s=3 + cobol_src_pds = get_tmp_ds_name(mlq_s) cobol_src_mem = "HELLOCBL" - src_lib = "USER.LOAD.SRC" - dest_lib = "USER.LOAD.DEST" + src_lib = get_tmp_ds_name(mlq_s) + dest_lib = get_tmp_ds_name(mlq_s) pgm_mem = "HELLO" - dest_lib_aliases = "USER.LOAD.DEST.ALIASES" + dest_lib_aliases = get_tmp_ds_name(mlq_s) pgm_mem_alias = "ALIAS1" uss_dest = "/tmp/HELLO" @@ -3241,19 +3245,17 @@ def test_copy_pds_loadlib_member_to_uss_to_loadlib(ansible_zos_module): def test_copy_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): hosts = ansible_zos_module - - cobol_src_pds = "USER.COBOL.SRC" + mlq_size = 3 + cobol_src_pds = get_tmp_ds_name(mlq_size) cobol_src_mem = "HELLOCBL" cobol_src_mem2 = "HICBL2" - src_lib = "USER.LOAD.SRC" - dest_lib = "USER.LOAD.DEST" - dest_lib_aliases = "USER.LOAD.DEST.ALIASES" + src_lib = get_tmp_ds_name(mlq_size) + dest_lib = get_tmp_ds_name(mlq_size) + dest_lib_aliases = get_tmp_ds_name(mlq_size) pgm_mem = "HELLO" pgm2_mem = "HELLO2" pgm_mem_alias = "ALIAS1" pgm2_mem_alias = "ALIAS2" - - try: # allocate pds for cobol src code hosts.all.zos_data_set( @@ -3435,12 +3437,12 @@ def test_copy_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): @pytest.mark.parametrize("is_created", [False, True]) def test_copy_local_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): hosts = ansible_zos_module - - cobol_src_pds = "USER.COBOL.SRC" + mlq_s = 3 + cobol_src_pds = get_tmp_ds_name(mlq_s) cobol_src_mem = "HELLOCBL" cobol_src_mem2 = "HICBL2" - src_lib = "USER.LOAD.SRC" - dest_lib = "USER.LOAD.DEST" + src_lib = get_tmp_ds_name(mlq_s) + dest_lib = get_tmp_ds_name(mlq_s) pgm_mem = "HELLO" pgm2_mem = "HELLO2" uss_location = "/tmp/loadlib" @@ -3593,13 +3595,13 @@ def test_copy_local_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): def test_copy_pds_loadlib_to_uss_to_pds_loadlib(ansible_zos_module): hosts = ansible_zos_module - - cobol_src_pds = "USER.COBOL.SRC" + mlq_s=3 + cobol_src_pds = get_tmp_ds_name(mlq_s) cobol_src_mem = "HELLOCBL" cobol_src_mem2 = "HICBL2" - src_lib = "USER.LOAD.SRC" - dest_lib = "USER.LOAD.DEST" - dest_lib_aliases = "USER.LOAD.DEST.ALIASES" + src_lib = get_tmp_ds_name(mlq_s) + dest_lib = get_tmp_ds_name(mlq_s) + dest_lib_aliases = get_tmp_ds_name(mlq_s) pgm_mem = "HELLO" pgm2_mem = "HELLO2" pgm_mem_alias = "ALIAS1" @@ -3781,7 +3783,7 @@ def test_copy_pds_loadlib_to_uss_to_pds_loadlib(ansible_zos_module): hosts.all.zos_data_set(name=dest_lib_aliases, state="absent") hosts.all.file(path=uss_dir_path, state="absent") - +#Special case to call a program @pytest.mark.uss def test_copy_executables_uss_to_uss(ansible_zos_module): hosts= ansible_zos_module @@ -3815,8 +3817,9 @@ def test_copy_executables_uss_to_uss(ansible_zos_module): def test_copy_executables_uss_to_member(ansible_zos_module, is_created): hosts= ansible_zos_module src= "/tmp/c/hello_world.c" + mlq_size = 3 src_jcl_call= "/tmp/c/call_hw_pgm.jcl" - dest = "USER.LOAD.DEST" + dest = get_tmp_ds_name(mlq_size) member = "HELLOSRC" try: generate_executable_uss(hosts, src, src_jcl_call) @@ -3856,7 +3859,7 @@ def test_copy_executables_uss_to_member(ansible_zos_module, is_created): @pytest.mark.pdse -def test_copy_pds_member_with_system_symbol(ansible_zos_module,): +def test_copy_pds_member_with_system_symbol(ansible_zos_module): """This test is for bug #543 in GitHub. In some versions of ZOAU, datasets.listing can't handle system symbols in volume names and therefore fails to get details from a dataset. @@ -3869,7 +3872,7 @@ def test_copy_pds_member_with_system_symbol(ansible_zos_module,): # The volume for this dataset should use a system symbol. # This dataset and member should be available on any z/OS system. src = "SYS1.SAMPLIB(IZUPRM00)" - dest = "USER.TEST.PDS.DEST" + dest = get_tmp_ds_name() try: hosts.all.zos_data_set( @@ -3903,10 +3906,10 @@ def test_copy_pds_member_with_system_symbol(ansible_zos_module,): @pytest.mark.pdse def test_copy_multiple_data_set_members(ansible_zos_module): hosts = ansible_zos_module - src = "USER.FUNCTEST.SRC.PDS" + src = get_tmp_ds_name() src_wildcard = "{0}(ABC*)".format(src) - dest = "USER.FUNCTEST.DEST.PDS" + dest = get_tmp_ds_name() member_list = ["MEMBER1", "ABCXYZ", "ABCASD"] ds_list = ["{0}({1})".format(src, member) for member in member_list] @@ -3949,9 +3952,9 @@ def test_copy_multiple_data_set_members_in_loop(ansible_zos_module): issue was discovered in https://github.com/ansible-collections/ibm_zos_core/issues/560. """ hosts = ansible_zos_module - src = "USER.FUNCTEST.SRC.PDS" + src = get_tmp_ds_name() - dest = "USER.FUNCTEST.DEST.PDS" + dest = get_tmp_ds_name() member_list = ["MEMBER1", "ABCXYZ", "ABCASD"] src_ds_list = ["{0}({1})".format(src, member) for member in member_list] dest_ds_list = ["{0}({1})".format(dest, member) for member in member_list] @@ -3994,7 +3997,7 @@ def test_copy_multiple_data_set_members_in_loop(ansible_zos_module): @pytest.mark.parametrize("ds_type", ["pds", "pdse"]) def test_copy_member_to_non_existing_uss_file(ansible_zos_module, ds_type): hosts = ansible_zos_module - data_set = "USER.TEST.PDSE.SOURCE" + data_set = get_tmp_ds_name() src = "{0}(MEMBER)".format(data_set) dest = "/tmp/member" @@ -4036,7 +4039,7 @@ def test_copy_member_to_non_existing_uss_file(ansible_zos_module, ds_type): ]) def test_copy_member_to_existing_uss_file(ansible_zos_module, args): hosts = ansible_zos_module - data_set = "USER.TEST.PDSE.SOURCE" + data_set = get_tmp_ds_name() src = "{0}(MEMBER)".format(data_set) dest = "/tmp/member" @@ -4079,7 +4082,7 @@ def test_copy_member_to_existing_uss_file(ansible_zos_module, args): @pytest.mark.parametrize("src_type", ["pds", "pdse"]) def test_copy_pdse_to_uss_dir(ansible_zos_module, src_type): hosts = ansible_zos_module - src_ds = "USER.TEST.FUNCTEST" + src_ds = get_tmp_ds_name() dest = "/tmp/" dest_path = "/tmp/{0}".format(src_ds) @@ -4124,7 +4127,7 @@ def test_copy_pdse_to_uss_dir(ansible_zos_module, src_type): @pytest.mark.parametrize("src_type", ["pds", "pdse"]) def test_copy_member_to_uss_dir(ansible_zos_module, src_type): hosts = ansible_zos_module - src_ds = "USER.TEST.FUNCTEST" + src_ds = get_tmp_ds_name() src = "{0}(MEMBER)".format(src_ds) dest = "/tmp/" dest_path = "/tmp/MEMBER" @@ -4170,9 +4173,9 @@ def test_copy_member_to_uss_dir(ansible_zos_module, src_type): @pytest.mark.parametrize("src_type", ["pds", "pdse"]) def test_copy_member_to_non_existing_seq_data_set(ansible_zos_module, src_type): hosts = ansible_zos_module - src_ds = "USER.TEST.PDS.SOURCE" + src_ds = get_tmp_ds_name() src = "{0}(MEMBER)".format(src_ds) - dest = "USER.TEST.SEQ.FUNCTEST" + dest = get_tmp_ds_name() try: hosts.all.zos_data_set(name=dest, state="absent") @@ -4210,9 +4213,9 @@ def test_copy_member_to_non_existing_seq_data_set(ansible_zos_module, src_type): ]) def test_copy_member_to_existing_seq_data_set(ansible_zos_module, args): hosts = ansible_zos_module - src_ds = "USER.TEST.PDS.SOURCE" + src_ds = get_tmp_ds_name() src = "{0}(MEMBER)".format(src_ds) - dest = "USER.TEST.SEQ.FUNCTEST" + dest = get_tmp_ds_name() try: hosts.all.zos_data_set(name=dest, type="seq", state="present", replace=True) @@ -4252,7 +4255,7 @@ def test_copy_member_to_existing_seq_data_set(ansible_zos_module, args): def test_copy_file_to_member_convert_encoding(ansible_zos_module, dest_type): hosts = ansible_zos_module src = "/etc/profile" - dest = "USER.TEST.PDS.FUNCTEST" + dest = get_tmp_ds_name() try: hosts.all.zos_data_set( @@ -4299,7 +4302,7 @@ def test_copy_file_to_member_convert_encoding(ansible_zos_module, dest_type): def test_backup_pds(ansible_zos_module, args): hosts = ansible_zos_module src = tempfile.mkdtemp() - dest = "USER.TEST.PDS.FUNCTEST" + dest = get_tmp_ds_name() members = ["FILE1", "FILE2", "FILE3", "FILE4", "FILE5"] backup_name = None @@ -4341,11 +4344,16 @@ def test_backup_pds(ansible_zos_module, args): @pytest.mark.seq @pytest.mark.pdse @pytest.mark.parametrize("src_type", ["seq", "pds", "pdse"]) -def test_copy_data_set_to_volume(ansible_zos_module, src_type): +def test_copy_data_set_to_volume(ansible_zos_module, volumes_on_systems, src_type): hosts = ansible_zos_module - source = "USER.TEST.FUNCTEST.SRC" - dest = "USER.TEST.FUNCTEST.DEST" - source_member = "USER.TEST.FUNCTEST.SRC(MEMBER)" + source = get_tmp_ds_name() + dest = get_tmp_ds_name() + volumes = Volume_Handler(volumes_on_systems) + volume_1 = volumes.get_available_vol() + if volume_1 == "SCR03": + volume = volumes.get_available_vol() + volumes.free_vol(volume_1) + volume_1 = volume try: hosts.all.zos_data_set(name=source, type=src_type, state='present') hosts.all.zos_data_set(name=source_member, type="member", state='present') @@ -4353,7 +4361,7 @@ def test_copy_data_set_to_volume(ansible_zos_module, src_type): src=source, dest=dest, remote_src=True, - volume='000000' + volume=volume_1 ) for cp in copy_res.contacted.values(): @@ -4368,7 +4376,7 @@ def test_copy_data_set_to_volume(ansible_zos_module, src_type): for cv in check_vol.contacted.values(): assert cv.get('rc') == 0 - assert "000000" in cv.get('stdout') + assert volume_1 in cv.get('stdout') finally: hosts.all.zos_data_set(name=source, state='absent') hosts.all.zos_data_set(name=dest, state='absent') @@ -4378,7 +4386,7 @@ def test_copy_data_set_to_volume(ansible_zos_module, src_type): def test_copy_ksds_to_non_existing_ksds(ansible_zos_module): hosts = ansible_zos_module src_ds = TEST_VSAM_KSDS - dest_ds = "USER.TEST.VSAM.KSDS" + dest_ds = get_tmp_ds_name() try: copy_res = hosts.all.zos_copy(src=src_ds, dest=dest_ds, remote_src=True) @@ -4403,8 +4411,8 @@ def test_copy_ksds_to_non_existing_ksds(ansible_zos_module): @pytest.mark.parametrize("force", [False, True]) def test_copy_ksds_to_existing_ksds(ansible_zos_module, force): hosts = ansible_zos_module - src_ds = "USER.TEST.VSAM.SOURCE" - dest_ds = "USER.TEST.VSAM.KSDS" + src_ds = get_tmp_ds_name() + dest_ds = get_tmp_ds_name() try: create_vsam_data_set(hosts, src_ds, "KSDS", add_data=True, key_length=12, key_offset=0) @@ -4438,8 +4446,8 @@ def test_copy_ksds_to_existing_ksds(ansible_zos_module, force): @pytest.mark.parametrize("backup", [None, "USER.TEST.VSAM.KSDS.BACK"]) def test_backup_ksds(ansible_zos_module, backup): hosts = ansible_zos_module - src = "USER.TEST.VSAM.SOURCE" - dest = "USER.TEST.VSAM.KSDS" + src = get_tmp_ds_name() + dest = get_tmp_ds_name() backup_name = None try: @@ -4486,17 +4494,19 @@ def test_backup_ksds(ansible_zos_module, backup): @pytest.mark.vsam -def test_copy_ksds_to_volume(ansible_zos_module): +def test_copy_ksds_to_volume(ansible_zos_module, volumes_on_systems): hosts = ansible_zos_module src_ds = TEST_VSAM_KSDS - dest_ds = "USER.TEST.VSAM.KSDS" + dest_ds = get_tmp_ds_name() + volumes = Volume_Handler(volumes_on_systems) + volume_1 = volumes.get_available_vol() try: copy_res = hosts.all.zos_copy( src=src_ds, dest=dest_ds, remote_src=True, - volume="000000" + volume=volume_1 ) verify_copy = get_listcat_information(hosts, dest_ds, "ksds") @@ -4511,16 +4521,17 @@ def test_copy_ksds_to_volume(ansible_zos_module): output = "\n".join(dd_names[0]["content"]) assert "IN-CAT" in output assert re.search(r"\bINDEXED\b", output) - assert re.search(r"\b000000\b", output) + assert re.search(r"\b{0}\b".format(volume_1), output) finally: hosts.all.zos_data_set(name=dest_ds, state="absent") -def test_dest_data_set_parameters(ansible_zos_module): +def test_dest_data_set_parameters(ansible_zos_module, volumes_on_systems): hosts = ansible_zos_module src = "/etc/profile" - dest = "USER.TEST.DEST" - volume = "000000" + dest = get_tmp_ds_name() + volumes = Volume_Handler(volumes_on_systems) + volume = volumes.get_available_vol() space_primary = 3 space_secondary = 2 space_type = "K" @@ -4612,7 +4623,7 @@ def test_ensure_tmp_cleanup(ansible_zos_module): @pytest.mark.parametrize("force", [False, True]) def test_copy_uss_file_to_existing_sequential_data_set_twice_with_tmphlq_option(ansible_zos_module, force): hosts = ansible_zos_module - dest = "USER.TEST.SEQ.FUNCTEST" + dest = get_tmp_ds_name() src_file = "/etc/profile" tmphlq = "TMPHLQ" try: diff --git a/tests/functional/modules/test_zos_data_set_func.py b/tests/functional/modules/test_zos_data_set_func.py index 0a3972646..f5568f55e 100644 --- a/tests/functional/modules/test_zos_data_set_func.py +++ b/tests/functional/modules/test_zos_data_set_func.py @@ -21,6 +21,8 @@ from pipes import quote from pprint import pprint +from ibm_zos_core.tests.helpers.volumes import Volume_Handler +from ibm_zos_core.tests.helpers.dataset import get_tmp_ds_name # TODO: determine if data set names need to be more generic for testcases # TODO: add additional tests to check additional data set creation parameter combinations @@ -34,10 +36,6 @@ ("lds"), ] -VOLUME_000000 = "000000" -VOLUME_222222 = "222222" -DEFAULT_DATA_SET_NAME = "USER.PRIVATE.TESTDS" -DEFAULT_DATA_SET_NAME_WITH_MEMBER = "USER.PRIVATE.TESTDS(TESTME)" TEMP_PATH = "/tmp/jcl" ECHO_COMMAND = "echo {0} > {1}/SAMPLE" @@ -47,16 +45,16 @@ //STEP1 EXEC PGM=IDCAMS //SYSPRINT DD SYSOUT=A //SYSIN DD * - DEFINE CLUSTER (NAME(USER.PRIVATE.TESTDS) - + DEFINE CLUSTER (NAME({1}) - INDEXED - KEYS(6 1) - RECSZ(80 80) - TRACKS(1,1) - CISZ(4096) - FREESPACE(3 3) - - VOLUMES(000000) ) - - DATA (NAME(USER.PRIVATE.TESTDS.DATA)) - - INDEX (NAME(USER.PRIVATE.TESTDS.INDEX)) + VOLUMES({0}) ) - + DATA (NAME({1}.DATA)) - + INDEX (NAME({1}.INDEX)) /* """ @@ -65,14 +63,14 @@ //STEP1 EXEC PGM=IDCAMS //SYSPRINT DD SYSOUT=A //SYSIN DD * - DEFINE CLUSTER (NAME('USER.PRIVATE.TESTDS') - + DEFINE CLUSTER (NAME('{1}') - NUMBERED - RECSZ(80 80) - TRACKS(1,1) - REUSE - FREESPACE(3 3) - - VOLUMES(000000) ) - - DATA (NAME('USER.PRIVATE.TESTDS.DATA')) + VOLUMES({0}) ) - + DATA (NAME('{1}.DATA')) /* """ @@ -81,14 +79,14 @@ //STEP1 EXEC PGM=IDCAMS //SYSPRINT DD SYSOUT=A //SYSIN DD * - DEFINE CLUSTER (NAME('USER.PRIVATE.TESTDS') - + DEFINE CLUSTER (NAME('{1}') - NONINDEXED - RECSZ(80 80) - TRACKS(1,1) - CISZ(4096) - FREESPACE(3 3) - - VOLUMES(000000) ) - - DATA (NAME('USER.PRIVATE.TESTDS.DATA')) + VOLUMES({0}) ) - + DATA (NAME('{1}.DATA')) /* """ @@ -97,12 +95,12 @@ //STEP1 EXEC PGM=IDCAMS //SYSPRINT DD SYSOUT=A //SYSIN DD * - DEFINE CLUSTER (NAME('USER.PRIVATE.TESTDS') - + DEFINE CLUSTER (NAME('{1}') - LINEAR - TRACKS(1,1) - CISZ(4096) - - VOLUMES(000000) ) - - DATA (NAME(USER.PRIVATE.TESTDS.DATA)) + VOLUMES({0}) ) - + DATA (NAME({1}.DATA)) /* """ @@ -113,9 +111,9 @@ //SYSPRINT DD SYSOUT=A //SYSIN DD * ALLOC - - DSNAME('USER.PRIVATE.TESTDS') - + DSNAME('{1}') - NEW - - VOL(000000) - + VOL({0}) - DSNTYPE(PDS) /* """ @@ -136,7 +134,6 @@ def retrieve_data_set_names(results): for result in results.contacted.values(): if len(result.get("names", [])) > 0: for name in result.get("names"): - if name.lower() != DEFAULT_DATA_SET_NAME.lower(): data_set_names.append(name) return data_set_names @@ -147,17 +144,21 @@ def print_results(results): @pytest.mark.parametrize( "jcl", [PDS_CREATE_JCL, KSDS_CREATE_JCL, RRDS_CREATE_JCL, ESDS_CREATE_JCL, LDS_CREATE_JCL], + ids=['PDS_CREATE_JCL', 'KSDS_CREATE_JCL', 'RRDS_CREATE_JCL', 'ESDS_CREATE_JCL', 'LDS_CREATE_JCL'] ) -def test_data_set_catalog_and_uncatalog(ansible_zos_module, jcl): +def test_data_set_catalog_and_uncatalog(ansible_zos_module, jcl, volumes_on_systems): + hosts = ansible_zos_module + volumes = Volume_Handler(volumes_on_systems) + volume_1 = volumes.get_available_vol() + dataset = get_tmp_ds_name(2, 2) try: - hosts = ansible_zos_module hosts.all.zos_data_set( - name=DEFAULT_DATA_SET_NAME, state="cataloged", volumes=VOLUME_000000 + name=dataset, state="cataloged", volumes=volume_1 ) - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") + hosts.all.zos_data_set(name=dataset, state="absent") hosts.all.file(path=TEMP_PATH, state="directory") - hosts.all.shell(cmd=ECHO_COMMAND.format(quote(jcl), TEMP_PATH)) + hosts.all.shell(cmd=ECHO_COMMAND.format(quote(jcl.format(volume_1, dataset)), TEMP_PATH)) results = hosts.all.zos_job_submit( src=TEMP_PATH + "/SAMPLE", location="USS", wait=True, wait_time_s=30 ) @@ -169,22 +170,22 @@ def test_data_set_catalog_and_uncatalog(ansible_zos_module, jcl): results = hosts.all.zos_job_output(job_id=submitted_job_id) assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" # verify first uncatalog was performed - results = hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="uncataloged") + results = hosts.all.zos_data_set(name=dataset, state="uncataloged") for result in results.contacted.values(): assert result.get("changed") is True # verify second uncatalog shows uncatalog already performed - results = hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="uncataloged") + results = hosts.all.zos_data_set(name=dataset, state="uncataloged") for result in results.contacted.values(): assert result.get("changed") is False # recatalog the data set results = hosts.all.zos_data_set( - name=DEFAULT_DATA_SET_NAME, state="cataloged", volumes=VOLUME_000000 + name=dataset, state="cataloged", volumes=volume_1 ) for result in results.contacted.values(): assert result.get("changed") is True # verify second catalog shows catalog already performed results = hosts.all.zos_data_set( - name=DEFAULT_DATA_SET_NAME, state="cataloged", volumes=VOLUME_000000 + name=dataset, state="cataloged", volumes=volume_1 ) for result in results.contacted.values(): assert result.get("changed") is False @@ -192,23 +193,27 @@ def test_data_set_catalog_and_uncatalog(ansible_zos_module, jcl): # clean up hosts.all.file(path=TEMP_PATH, state="absent") # Added volumes to force a catalog in case they were somehow uncataloged to avoid an duplicate on volume error - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent", volumes=[VOLUME_000000, VOLUME_222222]) + hosts.all.zos_data_set(name=dataset, state="absent", volumes=volume_1) @pytest.mark.parametrize( "jcl", [PDS_CREATE_JCL, KSDS_CREATE_JCL, RRDS_CREATE_JCL, ESDS_CREATE_JCL, LDS_CREATE_JCL], + ids=['PDS_CREATE_JCL', 'KSDS_CREATE_JCL', 'RRDS_CREATE_JCL', 'ESDS_CREATE_JCL', 'LDS_CREATE_JCL'] ) -def test_data_set_present_when_uncataloged(ansible_zos_module, jcl): +def test_data_set_present_when_uncataloged(ansible_zos_module, jcl, volumes_on_systems): + hosts = ansible_zos_module + volumes = Volume_Handler(volumes_on_systems) + volume_1 = volumes.get_available_vol() + dataset = get_tmp_ds_name(2, 2) try: - hosts = ansible_zos_module hosts.all.zos_data_set( - name=DEFAULT_DATA_SET_NAME, state="cataloged", volumes=VOLUME_000000 + name=dataset, state="cataloged", volumes=volume_1 ) - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") + hosts.all.zos_data_set(name=dataset, state="absent") hosts.all.file(path=TEMP_PATH, state="directory") - hosts.all.shell(cmd=ECHO_COMMAND.format(quote(jcl), TEMP_PATH)) + hosts.all.shell(cmd=ECHO_COMMAND.format(quote(jcl.format(volume_1, dataset)), TEMP_PATH)) results = hosts.all.zos_job_submit( src=TEMP_PATH + "/SAMPLE", location="USS", wait=True ) @@ -217,39 +222,43 @@ def test_data_set_present_when_uncataloged(ansible_zos_module, jcl): assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" # ensure data set present results = hosts.all.zos_data_set( - name=DEFAULT_DATA_SET_NAME, state="present", volumes=VOLUME_000000 + name=dataset, state="present", volumes=volume_1 ) for result in results.contacted.values(): assert result.get("changed") is False # uncatalog the data set - results = hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="uncataloged") + results = hosts.all.zos_data_set(name=dataset, state="uncataloged") for result in results.contacted.values(): assert result.get("changed") is True # ensure data set present results = hosts.all.zos_data_set( - name=DEFAULT_DATA_SET_NAME, state="present", volumes=VOLUME_000000 + name=dataset, state="present", volumes=volume_1 ) for result in results.contacted.values(): assert result.get("changed") is True finally: hosts.all.file(path=TEMP_PATH, state="absent") - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent", volumes=VOLUME_000000) + hosts.all.zos_data_set(name=dataset, state="absent", volumes=volume_1) @pytest.mark.parametrize( "jcl", [PDS_CREATE_JCL, KSDS_CREATE_JCL, RRDS_CREATE_JCL, ESDS_CREATE_JCL, LDS_CREATE_JCL], + ids=['PDS_CREATE_JCL', 'KSDS_CREATE_JCL', 'RRDS_CREATE_JCL', 'ESDS_CREATE_JCL', 'LDS_CREATE_JCL'] ) -def test_data_set_replacement_when_uncataloged(ansible_zos_module, jcl): +def test_data_set_replacement_when_uncataloged(ansible_zos_module, jcl, volumes_on_systems): + hosts = ansible_zos_module + volumes = Volume_Handler(volumes_on_systems) + volume = volumes.get_available_vol() + dataset = get_tmp_ds_name(2, 2) try: - hosts = ansible_zos_module hosts.all.zos_data_set( - name=DEFAULT_DATA_SET_NAME, state="cataloged", volumes=VOLUME_000000 + name=dataset, state="cataloged", volumes=volume ) - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") + hosts.all.zos_data_set(name=dataset, state="absent") hosts.all.file(path=TEMP_PATH, state="directory") - hosts.all.shell(cmd=ECHO_COMMAND.format(quote(jcl), TEMP_PATH)) + hosts.all.shell(cmd=ECHO_COMMAND.format(quote(jcl.format(volume, dataset)), TEMP_PATH)) results = hosts.all.zos_job_submit( src=TEMP_PATH + "/SAMPLE", location="USS", wait=True ) @@ -258,42 +267,46 @@ def test_data_set_replacement_when_uncataloged(ansible_zos_module, jcl): assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" # ensure data set present results = hosts.all.zos_data_set( - name=DEFAULT_DATA_SET_NAME, state="present", volumes=VOLUME_000000 + name=dataset, state="present", volumes=volume ) for result in results.contacted.values(): assert result.get("changed") is False # uncatalog the data set - results = hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="uncataloged") + results = hosts.all.zos_data_set(name=dataset, state="uncataloged") for result in results.contacted.values(): assert result.get("changed") is True # ensure data set present results = hosts.all.zos_data_set( - name=DEFAULT_DATA_SET_NAME, + name=dataset, state="present", - volumes=VOLUME_000000, + volumes=volume, replace=True, ) for result in results.contacted.values(): assert result.get("changed") is True finally: hosts.all.file(path=TEMP_PATH, state="absent") - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") + hosts.all.zos_data_set(name=dataset, state="absent") @pytest.mark.parametrize( "jcl", [PDS_CREATE_JCL, KSDS_CREATE_JCL, RRDS_CREATE_JCL, ESDS_CREATE_JCL, LDS_CREATE_JCL], + ids=['PDS_CREATE_JCL', 'KSDS_CREATE_JCL', 'RRDS_CREATE_JCL', 'ESDS_CREATE_JCL', 'LDS_CREATE_JCL'] ) -def test_data_set_absent_when_uncataloged(ansible_zos_module, jcl): +def test_data_set_absent_when_uncataloged(ansible_zos_module, jcl, volumes_on_systems): try: + volumes = Volume_Handler(volumes_on_systems) + volume_1 = volumes.get_available_vol() hosts = ansible_zos_module + dataset = get_tmp_ds_name(2, 2) hosts.all.zos_data_set( - name=DEFAULT_DATA_SET_NAME, state="cataloged", volumes=VOLUME_000000 + name=dataset, state="cataloged", volumes=volume_1 ) - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") + hosts.all.zos_data_set(name=dataset, state="absent") hosts.all.file(path=TEMP_PATH, state="directory") - hosts.all.shell(cmd=ECHO_COMMAND.format(quote(jcl), TEMP_PATH)) + hosts.all.shell(cmd=ECHO_COMMAND.format(quote(jcl.format(volume_1, dataset)), TEMP_PATH)) results = hosts.all.zos_job_submit( src=TEMP_PATH + "/SAMPLE", location="USS", wait=True ) @@ -301,32 +314,37 @@ def test_data_set_absent_when_uncataloged(ansible_zos_module, jcl): for result in results.contacted.values(): assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" # uncatalog the data set - results = hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="uncataloged") + results = hosts.all.zos_data_set(name=dataset, state="uncataloged") for result in results.contacted.values(): assert result.get("changed") is True # ensure data set absent results = hosts.all.zos_data_set( - name=DEFAULT_DATA_SET_NAME, state="absent", volumes=VOLUME_000000 + name=dataset, state="absent", volumes=volume_1 ) for result in results.contacted.values(): assert result.get("changed") is True finally: hosts.all.file(path=TEMP_PATH, state="absent") - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") + hosts.all.zos_data_set(name=dataset, state="absent") @pytest.mark.parametrize( "jcl", [PDS_CREATE_JCL, KSDS_CREATE_JCL, RRDS_CREATE_JCL, ESDS_CREATE_JCL, LDS_CREATE_JCL], + ids=['PDS_CREATE_JCL', 'KSDS_CREATE_JCL', 'RRDS_CREATE_JCL', 'ESDS_CREATE_JCL', 'LDS_CREATE_JCL'] ) -def test_data_set_absent_when_uncataloged_and_same_name_cataloged_is_present(ansible_zos_module, jcl): +def test_data_set_absent_when_uncataloged_and_same_name_cataloged_is_present(ansible_zos_module, jcl, volumes_on_systems): + volumes = Volume_Handler(volumes_on_systems) + volume_1 = volumes.get_available_vol() + volume_2 = volumes.get_available_vol() hosts = ansible_zos_module - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="cataloged", volumes=VOLUME_000000) + dataset = get_tmp_ds_name(2, 2) + hosts.all.zos_data_set(name=dataset, state="cataloged", volumes=volume_1) - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") + hosts.all.zos_data_set(name=dataset, state="absent") hosts.all.file(path=TEMP_PATH, state="directory") - hosts.all.shell(cmd=ECHO_COMMAND.format(quote(jcl), TEMP_PATH)) + hosts.all.shell(cmd=ECHO_COMMAND.format(quote(jcl.format(volume_1, dataset)), TEMP_PATH)) results =hosts.all.zos_job_submit(src=TEMP_PATH + "/SAMPLE", location="USS", wait=True) # verify data set creation was successful @@ -334,15 +352,14 @@ def test_data_set_absent_when_uncataloged_and_same_name_cataloged_is_present(ans assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" # uncatalog the data set - results = hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="uncataloged") + results = hosts.all.zos_data_set(name=dataset, state="uncataloged") for result in results.contacted.values(): assert result.get("changed") is True # Create the same dataset name in different volume - jcl = jcl.replace(VOLUME_000000, VOLUME_222222) hosts.all.file(path=TEMP_PATH + "/SAMPLE", state="absent") - hosts.all.shell(cmd=ECHO_COMMAND.format(quote(jcl), TEMP_PATH)) + hosts.all.shell(cmd=ECHO_COMMAND.format(quote(jcl.format(volume_2, dataset)), TEMP_PATH)) results = hosts.all.zos_job_submit(src=TEMP_PATH + "/SAMPLE", location="USS", wait=True) # verify data set creation was successful @@ -352,11 +369,10 @@ def test_data_set_absent_when_uncataloged_and_same_name_cataloged_is_present(ans hosts.all.file(path=TEMP_PATH, state="absent") # ensure data set absent - results = hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent", volumes=VOLUME_000000) + results = hosts.all.zos_data_set(name=dataset, state="absent", volumes=volume_1) for result in results.contacted.values(): assert result.get("changed") is True - results = hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") for result in results.contacted.values(): assert result.get("changed") is True @@ -365,59 +381,63 @@ def test_data_set_absent_when_uncataloged_and_same_name_cataloged_is_present(ans def test_data_set_creation_when_present_no_replace(ansible_zos_module, dstype): try: hosts = ansible_zos_module + dataset = get_tmp_ds_name(2, 2) hosts.all.zos_data_set( - name=DEFAULT_DATA_SET_NAME, state="present", type=dstype, replace=True + name=dataset, state="present", type=dstype, replace=True ) results = hosts.all.zos_data_set( - name=DEFAULT_DATA_SET_NAME, state="present", type=dstype + name=dataset, state="present", type=dstype ) - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") + hosts.all.zos_data_set(name=dataset, state="absent") for result in results.contacted.values(): assert result.get("changed") is False assert result.get("module_stderr") is None finally: - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") + hosts.all.zos_data_set(name=dataset, state="absent") @pytest.mark.parametrize("dstype", data_set_types) def test_data_set_creation_when_present_replace(ansible_zos_module, dstype): try: hosts = ansible_zos_module + dataset = get_tmp_ds_name(2, 2) hosts.all.zos_data_set( - name=DEFAULT_DATA_SET_NAME, state="present", type=dstype, replace=True + name=dataset, state="present", type=dstype, replace=True ) results = hosts.all.zos_data_set( - name=DEFAULT_DATA_SET_NAME, state="present", type=dstype, replace=True + name=dataset, state="present", type=dstype, replace=True ) - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") + hosts.all.zos_data_set(name=dataset, state="absent") for result in results.contacted.values(): assert result.get("changed") is True assert result.get("module_stderr") is None finally: - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") + hosts.all.zos_data_set(name=dataset, state="absent") @pytest.mark.parametrize("dstype", data_set_types) def test_data_set_creation_when_absent(ansible_zos_module, dstype): try: hosts = ansible_zos_module - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") + dataset = get_tmp_ds_name(2, 2) + hosts.all.zos_data_set(name=dataset, state="absent") results = hosts.all.zos_data_set( - name=DEFAULT_DATA_SET_NAME, state="present", type=dstype + name=dataset, state="present", type=dstype ) - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") + hosts.all.zos_data_set(name=dataset, state="absent") for result in results.contacted.values(): assert result.get("changed") is True assert result.get("module_stderr") is None finally: - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") + hosts.all.zos_data_set(name=dataset, state="absent") @pytest.mark.parametrize("dstype", data_set_types) def test_data_set_deletion_when_present(ansible_zos_module, dstype): hosts = ansible_zos_module - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="present", type=dstype) - results = hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") + dataset = get_tmp_ds_name(2, 2) + hosts.all.zos_data_set(name=dataset, state="present", type=dstype) + results = hosts.all.zos_data_set(name=dataset, state="absent") for result in results.contacted.values(): assert result.get("changed") is True assert result.get("module_stderr") is None @@ -425,8 +445,9 @@ def test_data_set_deletion_when_present(ansible_zos_module, dstype): def test_data_set_deletion_when_absent(ansible_zos_module): hosts = ansible_zos_module - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") - results = hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") + dataset = get_tmp_ds_name(2, 2) + hosts.all.zos_data_set(name=dataset, state="absent") + results = hosts.all.zos_data_set(name=dataset, state="absent") for result in results.contacted.values(): assert result.get("changed") is False assert result.get("module_stderr") is None @@ -435,40 +456,42 @@ def test_data_set_deletion_when_absent(ansible_zos_module): def test_batch_data_set_creation_and_deletion(ansible_zos_module): try: hosts = ansible_zos_module + dataset = get_tmp_ds_name(2, 2) results = hosts.all.zos_data_set( batch=[ - {"name": DEFAULT_DATA_SET_NAME, "state": "absent"}, - {"name": DEFAULT_DATA_SET_NAME, "type": "pds", "state": "present"}, - {"name": DEFAULT_DATA_SET_NAME, "state": "absent"}, + {"name": dataset, "state": "absent"}, + {"name": dataset, "type": "pds", "state": "present"}, + {"name": dataset, "state": "absent"}, ] ) for result in results.contacted.values(): assert result.get("changed") is True assert result.get("module_stderr") is None finally: - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") + hosts.all.zos_data_set(name=dataset, state="absent") def test_batch_data_set_and_member_creation(ansible_zos_module): try: hosts = ansible_zos_module + dataset = get_tmp_ds_name(2, 2) results = hosts.all.zos_data_set( batch=[ - {"name": DEFAULT_DATA_SET_NAME, "type": "pds", "directory_blocks": 5}, - {"name": DEFAULT_DATA_SET_NAME + "(newmem1)", "type": "member"}, + {"name": dataset, "type": "pds", "directory_blocks": 5}, + {"name": dataset + "(newmem1)", "type": "member"}, { - "name": DEFAULT_DATA_SET_NAME + "(newmem2)", + "name": dataset + "(newmem2)", "type": "member", "state": "present", }, - {"name": DEFAULT_DATA_SET_NAME, "state": "absent"}, + {"name": dataset, "state": "absent"}, ] ) for result in results.contacted.values(): assert result.get("changed") is True assert result.get("module_stderr") is None finally: - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") + hosts.all.zos_data_set(name=dataset, state="absent") c_pgm="""#include <stdio.h> @@ -499,7 +522,7 @@ def test_data_member_force_delete(ansible_zos_module): MEMBER_1, MEMBER_2, MEMBER_3, MEMBER_4 = "MEM1", "MEM2", "MEM3", "MEM4" try: hosts = ansible_zos_module - + DEFAULT_DATA_SET_NAME = get_tmp_ds_name(2, 2) # set up: # create pdse results = hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="present", type="pdse", replace=True) @@ -611,6 +634,8 @@ def test_data_member_force_delete(ansible_zos_module): def test_repeated_operations(ansible_zos_module): try: hosts = ansible_zos_module + DEFAULT_DATA_SET_NAME = get_tmp_ds_name(2, 2) + DEFAULT_DATA_SET_NAME_WITH_MEMBER = DEFAULT_DATA_SET_NAME + "(MEM)" results = hosts.all.zos_data_set( name=DEFAULT_DATA_SET_NAME, type="PDS", @@ -669,9 +694,13 @@ def test_repeated_operations(ansible_zos_module): hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") -def test_multi_volume_creation_uncatalog_and_catalog_nonvsam(ansible_zos_module): +def test_multi_volume_creation_uncatalog_and_catalog_nonvsam(ansible_zos_module, volumes_on_systems): + volumes = Volume_Handler(volumes_on_systems) + volume_1 = volumes.get_available_vol() + volume_2 = volumes.get_available_vol() try: hosts = ansible_zos_module + DEFAULT_DATA_SET_NAME = get_tmp_ds_name(2, 2) hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") results = hosts.all.zos_data_set( name=DEFAULT_DATA_SET_NAME, @@ -679,7 +708,7 @@ def test_multi_volume_creation_uncatalog_and_catalog_nonvsam(ansible_zos_module) space_primary=5, space_type="CYL", record_length=15, - volumes=[VOLUME_000000, VOLUME_222222], + volumes=[volume_1, volume_2], ) for result in results.contacted.values(): assert result.get("changed") is True @@ -693,7 +722,7 @@ def test_multi_volume_creation_uncatalog_and_catalog_nonvsam(ansible_zos_module) results = hosts.all.zos_data_set( name=DEFAULT_DATA_SET_NAME, state="cataloged", - volumes=[VOLUME_000000, VOLUME_222222], + volumes=[volume_1, volume_2], ) for result in results.contacted.values(): assert result.get("changed") is True @@ -702,9 +731,13 @@ def test_multi_volume_creation_uncatalog_and_catalog_nonvsam(ansible_zos_module) hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") -def test_multi_volume_creation_uncatalog_and_catalog_vsam(ansible_zos_module): +def test_multi_volume_creation_uncatalog_and_catalog_vsam(ansible_zos_module, volumes_on_systems): + volumes = Volume_Handler(volumes_on_systems) + volume_1 = volumes.get_available_vol() + volume_2 = volumes.get_available_vol() try: hosts = ansible_zos_module + DEFAULT_DATA_SET_NAME = get_tmp_ds_name(2, 2) hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") results = hosts.all.zos_data_set( name=DEFAULT_DATA_SET_NAME, @@ -713,7 +746,7 @@ def test_multi_volume_creation_uncatalog_and_catalog_vsam(ansible_zos_module): key_offset=0, space_primary=5, space_type="CYL", - volumes=[VOLUME_000000, VOLUME_222222], + volumes=[volume_1, volume_2], ) for result in results.contacted.values(): assert result.get("changed") is True @@ -727,7 +760,7 @@ def test_multi_volume_creation_uncatalog_and_catalog_vsam(ansible_zos_module): results = hosts.all.zos_data_set( name=DEFAULT_DATA_SET_NAME, state="cataloged", - volumes=[VOLUME_000000, VOLUME_222222], + volumes=[volume_1, volume_2], ) for result in results.contacted.values(): assert result.get("changed") is True @@ -736,16 +769,19 @@ def test_multi_volume_creation_uncatalog_and_catalog_vsam(ansible_zos_module): hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") -def test_data_set_old_aliases(ansible_zos_module): +def test_data_set_old_aliases(ansible_zos_module, volumes_on_systems): + volumes = Volume_Handler(volumes_on_systems) + volume_1 = volumes.get_available_vol() try: hosts = ansible_zos_module + DEFAULT_DATA_SET_NAME = get_tmp_ds_name(2, 2) hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") results = hosts.all.zos_data_set( name=DEFAULT_DATA_SET_NAME, state="present", format="fb", size="5m", - volume=VOLUME_000000, + volume=volume_1, ) for result in results.contacted.values(): assert result.get("changed") is True @@ -779,6 +815,7 @@ def test_data_set_temp_data_set_name(ansible_zos_module): def test_data_set_temp_data_set_name_batch(ansible_zos_module): try: hosts = ansible_zos_module + DEFAULT_DATA_SET_NAME = get_tmp_ds_name() hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") results = hosts.all.zos_data_set( batch=[ @@ -791,14 +828,18 @@ def test_data_set_temp_data_set_name_batch(ansible_zos_module): dict( state="present", ), - dict(name=DEFAULT_DATA_SET_NAME, state="present"), + dict( + name=DEFAULT_DATA_SET_NAME, + state="present" + ), ] ) hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") data_set_names = retrieve_data_set_names(results) - assert len(data_set_names) == 3 + assert len(data_set_names) == 4 for name in data_set_names: - results2 = hosts.all.zos_data_set(name=name, state="absent") + if name != DEFAULT_DATA_SET_NAME: + results2 = hosts.all.zos_data_set(name=name, state="absent") for result in results2.contacted.values(): assert result.get("changed") is True assert result.get("module_stderr") is None @@ -819,7 +860,7 @@ def test_data_set_temp_data_set_name_batch(ansible_zos_module): def test_filesystem_create_and_mount(ansible_zos_module, filesystem): fulltest = True hosts = ansible_zos_module - + DEFAULT_DATA_SET_NAME = get_tmp_ds_name(1, 1) try: hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") @@ -876,6 +917,7 @@ def test_filesystem_create_and_mount(ansible_zos_module, filesystem): def test_data_set_creation_zero_values(ansible_zos_module): try: hosts = ansible_zos_module + DEFAULT_DATA_SET_NAME = get_tmp_ds_name(2, 2) results = hosts.all.zos_data_set( name=DEFAULT_DATA_SET_NAME, state="present", @@ -895,15 +937,16 @@ def test_data_set_creation_zero_values(ansible_zos_module): def test_data_set_creation_with_tmp_hlq(ansible_zos_module): try: - tmphlq = "TMPHLQ" + tmphlq = "ANSIBLE" hosts = ansible_zos_module + DEFAULT_DATA_SET_NAME = get_tmp_ds_name(2, 2) results = hosts.all.zos_data_set(state="present", tmp_hlq=tmphlq) dsname = None for result in results.contacted.values(): assert result.get("changed") is True assert result.get("module_stderr") is None for dsname in result.get("names"): - assert dsname[:6] == tmphlq + assert dsname[:7] == tmphlq finally: if dsname: hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") @@ -912,16 +955,19 @@ def test_data_set_creation_with_tmp_hlq(ansible_zos_module): "formats", ["F","FB", "VB", "FBA", "VBA", "U"], ) -def test_data_set_f_formats(ansible_zos_module, formats): +def test_data_set_f_formats(ansible_zos_module, formats, volumes_on_systems): + volumes = Volume_Handler(volumes_on_systems) + volume_1 = volumes.get_available_vol() try: hosts = ansible_zos_module + DEFAULT_DATA_SET_NAME = get_tmp_ds_name(2, 2) hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") results = hosts.all.zos_data_set( name=DEFAULT_DATA_SET_NAME, state="present", format=formats, size="5m", - volume=VOLUME_000000, + volume=volume_1, ) for result in results.contacted.values(): assert result.get("changed") is True diff --git a/tests/functional/modules/test_zos_encode_func.py b/tests/functional/modules/test_zos_encode_func.py index 7b7952387..5d58f2435 100644 --- a/tests/functional/modules/test_zos_encode_func.py +++ b/tests/functional/modules/test_zos_encode_func.py @@ -15,12 +15,13 @@ from shellescape import quote from pprint import pprint from os import path +from ibm_zos_core.tests.helpers.dataset import get_tmp_ds_name __metaclass__ = type -USS_FILE = "/tmp/encode.data" +USS_FILE = "/tmp/encode_data" USS_NONE_FILE = "/tmp/none" -USS_DEST_FILE = "/tmp/converted.data" +USS_DEST_FILE = "/tmp/converted_data" USS_PATH = "/tmp/src" USS_DEST_PATH = "/tmp/dest" MVS_PS = "encode.ps" @@ -48,18 +49,17 @@ //STEP1 EXEC PGM=IDCAMS //SYSPRINT DD SYSOUT=A //SYSIN DD * - DELETE ENCODE.TEST.VS SET MAXCC=0 DEFINE CLUSTER - - (NAME(ENCODE.TEST.VS) - + (NAME({0}) - INDEXED - KEYS(4 0) - RECSZ(80 80) - RECORDS(100) - SHAREOPTIONS(2 3) - VOLUMES(000000) ) - - DATA (NAME(ENCODE.TEST.VS.DATA)) - - INDEX (NAME(ENCODE.TEST.VS.INDEX)) + DATA (NAME({0}.DATA)) - + INDEX (NAME({0}.INDEX)) /* """ @@ -77,25 +77,62 @@ /* """ +VSAM_RECORDS = """00000001A record +00000002A record +00000003A record +""" + +def create_vsam_data_set(hosts, name, ds_type, add_data=False, key_length=None, key_offset=None): + """Creates a new VSAM on the system. + + Arguments: + hosts (object) -- Ansible instance(s) that can call modules. + name (str) -- Name of the VSAM data set. + type (str) -- Type of the VSAM (KSDS, ESDS, RRDS, LDS) + add_data (bool, optional) -- Whether to add records to the VSAM. + key_length (int, optional) -- Key length (only for KSDS data sets). + key_offset (int, optional) -- Key offset (only for KSDS data sets). + """ + params = dict( + name=name, + type=ds_type, + state="present" + ) + if ds_type == "KSDS": + params["key_length"] = key_length + params["key_offset"] = key_offset + + hosts.all.zos_data_set(**params) + + if add_data: + record_src = "/tmp/zos_copy_vsam_record" + + hosts.all.shell(cmd="echo {0} >> {1}".format(quote(VSAM_RECORDS), record_src)) + hosts.all.zos_encode(src=record_src, dest=name, encoding={"from": "ISO8859-1", "to": "IBM-1047"}) + hosts.all.file(path=record_src, state="absent") def test_uss_encoding_conversion_with_invalid_encoding(ansible_zos_module): hosts = ansible_zos_module - results = hosts.all.zos_encode( - src=USS_FILE, - encoding={ - "from": INVALID_ENCODING, - "to": TO_ENCODING, - }, - ) - pprint(vars(results)) - for result in results.contacted.values(): - assert result.get("msg") is not None - assert result.get("backup_name") is None - assert result.get("changed") is False + try: + hosts.all.copy(content=TEST_DATA, dest=USS_FILE) + results = hosts.all.zos_encode( + src=USS_FILE, + encoding={ + "from": INVALID_ENCODING, + "to": TO_ENCODING, + }, + ) + for result in results.contacted.values(): + assert result.get("msg") is not None + assert result.get("backup_name") is None + assert result.get("changed") is False + finally: + hosts.all.file(path=USS_FILE, state="absent") def test_uss_encoding_conversion_with_the_same_encoding(ansible_zos_module): hosts = ansible_zos_module + hosts.all.copy(content=TEST_DATA, dest=USS_FILE) results = hosts.all.zos_encode( src=USS_FILE, encoding={ @@ -108,6 +145,7 @@ def test_uss_encoding_conversion_with_the_same_encoding(ansible_zos_module): assert result.get("msg") is not None assert result.get("backup_name") is None assert result.get("changed") is False + hosts.all.file(path=USS_FILE, state="absent") def test_uss_encoding_conversion_without_dest(ansible_zos_module): @@ -165,6 +203,8 @@ def test_uss_encoding_conversion_when_dest_not_exists_01(ansible_zos_module): def test_uss_encoding_conversion_when_dest_not_exists_02(ansible_zos_module): hosts = ansible_zos_module + MVS_PS = get_tmp_ds_name() + MVS_NONE_PS = get_tmp_ds_name() hosts.all.zos_data_set(name=MVS_PS, state="absent") hosts.all.zos_data_set(name=MVS_PS, state="present", type="seq") hosts.all.zos_data_set(name=MVS_NONE_PS, state="absent") @@ -182,6 +222,8 @@ def test_uss_encoding_conversion_when_dest_not_exists_02(ansible_zos_module): assert result.get("dest") == MVS_NONE_PS assert result.get("backup_name") is None assert result.get("changed") is False + hosts.all.zos_data_set(name=MVS_PS, state="absent") + hosts.all.zos_data_set(name=MVS_NONE_PS, state="absent") def test_uss_encoding_conversion_uss_file_to_uss_file(ansible_zos_module): @@ -197,7 +239,6 @@ def test_uss_encoding_conversion_uss_file_to_uss_file(ansible_zos_module): "to": FROM_ENCODING, }, ) - pprint(vars(results)) for result in results.contacted.values(): assert result.get("src") == USS_FILE assert result.get("dest") == USS_DEST_FILE @@ -256,7 +297,6 @@ def test_uss_encoding_conversion_uss_path_to_uss_path(ansible_zos_module): }, backup=True, ) - pprint(vars(results)) for result in results.contacted.values(): assert result.get("src") == USS_PATH assert result.get("dest") == USS_DEST_PATH @@ -277,6 +317,7 @@ def test_uss_encoding_conversion_uss_path_to_uss_path(ansible_zos_module): def test_uss_encoding_conversion_uss_file_to_mvs_ps(ansible_zos_module): try: hosts = ansible_zos_module + MVS_PS = get_tmp_ds_name() hosts.all.copy(content=TEST_DATA, dest=USS_FILE) hosts.all.zos_data_set(name=MVS_PS, state="present", type="seq") results = hosts.all.zos_encode( @@ -287,7 +328,6 @@ def test_uss_encoding_conversion_uss_file_to_mvs_ps(ansible_zos_module): "to": FROM_ENCODING, }, ) - pprint(vars(results)) for result in results.contacted.values(): assert result.get("src") == USS_FILE assert result.get("dest") == MVS_PS @@ -295,11 +335,15 @@ def test_uss_encoding_conversion_uss_file_to_mvs_ps(ansible_zos_module): assert result.get("changed") is True finally: hosts.all.file(path=USS_FILE, state="absent") + hosts.all.zos_data_set(name=MVS_PS, state="absent") def test_uss_encoding_conversion_mvs_ps_to_uss_file(ansible_zos_module): try: hosts = ansible_zos_module + MVS_PS = get_tmp_ds_name() + hosts.all.zos_data_set(name=MVS_PS, state="present", type="seq") + hosts.all.copy(content=TEST_DATA, dest=MVS_PS) hosts.all.copy(content="test", dest=USS_DEST_FILE) results = hosts.all.zos_encode( src=MVS_PS, @@ -310,7 +354,6 @@ def test_uss_encoding_conversion_mvs_ps_to_uss_file(ansible_zos_module): }, backup=True, ) - pprint(vars(results)) for result in results.contacted.values(): assert result.get("src") == MVS_PS assert result.get("dest") == USS_DEST_FILE @@ -323,11 +366,13 @@ def test_uss_encoding_conversion_mvs_ps_to_uss_file(ansible_zos_module): finally: hosts.all.file(path=USS_DEST_FILE, state="absent") hosts.all.file(path=result.get("backup_name"), state="absent") + hosts.all.zos_data_set(name=MVS_PS, state="absent") def test_uss_encoding_conversion_uss_file_to_mvs_pds(ansible_zos_module): try: hosts = ansible_zos_module + MVS_PDS = get_tmp_ds_name() hosts.all.copy(content=TEST_DATA, dest=USS_FILE) hosts.all.zos_data_set(name=MVS_PDS, state="present", type="pds", record_length=TEST_DATA_RECORD_LENGTH) results = hosts.all.zos_encode( @@ -338,7 +383,6 @@ def test_uss_encoding_conversion_uss_file_to_mvs_pds(ansible_zos_module): "to": FROM_ENCODING, }, ) - pprint(vars(results)) for result in results.contacted.values(): assert result.get("src") == USS_FILE assert result.get("dest") == MVS_PDS @@ -346,16 +390,19 @@ def test_uss_encoding_conversion_uss_file_to_mvs_pds(ansible_zos_module): assert result.get("changed") is True finally: hosts.all.file(path=USS_FILE, state="absent") + hosts.all.zos_data_set(name=MVS_PDS, state="absent") def test_uss_encoding_conversion_uss_file_to_mvs_pds_member(ansible_zos_module): try: hosts = ansible_zos_module + MVS_PDS = get_tmp_ds_name() + MVS_PDS_MEMBER = MVS_PDS + '(MEM)' hosts.all.copy(content=TEST_DATA, dest=USS_FILE) + hosts.all.zos_data_set(name=MVS_PDS, state="present", type="pds", record_length=TEST_DATA_RECORD_LENGTH) results = hosts.all.zos_data_set( name=MVS_PDS_MEMBER, type="member", state="present" ) - pprint(vars(results)) for result in results.contacted.values(): # documentation will return changed=False if ds exists and replace=False.. # assert result.get("changed") is True @@ -368,7 +415,6 @@ def test_uss_encoding_conversion_uss_file_to_mvs_pds_member(ansible_zos_module): "to": FROM_ENCODING, }, ) - pprint(vars(results)) for result in results.contacted.values(): assert result.get("src") == USS_FILE assert result.get("dest") == MVS_PDS_MEMBER @@ -376,11 +422,19 @@ def test_uss_encoding_conversion_uss_file_to_mvs_pds_member(ansible_zos_module): assert result.get("changed") is True finally: hosts.all.file(path=USS_FILE, state="absent") + hosts.all.zos_data_set(name=MVS_PDS, state="absent") def test_uss_encoding_conversion_mvs_pds_member_to_uss_file(ansible_zos_module): try: hosts = ansible_zos_module + MVS_PDS = get_tmp_ds_name() + MVS_PDS_MEMBER = MVS_PDS + '(MEM)' + hosts.all.zos_data_set(name=MVS_PDS, state="present", type="pds", record_length=TEST_DATA_RECORD_LENGTH) + hosts.all.zos_data_set( + name=MVS_PDS_MEMBER, type="member", state="present" + ) + hosts.all.copy(content=TEST_DATA, dest=MVS_PDS_MEMBER) hosts.all.copy(content="test", dest=USS_DEST_FILE) results = hosts.all.zos_encode( src=MVS_PDS_MEMBER, @@ -391,7 +445,6 @@ def test_uss_encoding_conversion_mvs_pds_member_to_uss_file(ansible_zos_module): }, backup=True, ) - pprint(vars(results)) for result in results.contacted.values(): assert result.get("src") == MVS_PDS_MEMBER assert result.get("dest") == USS_DEST_FILE @@ -404,11 +457,13 @@ def test_uss_encoding_conversion_mvs_pds_member_to_uss_file(ansible_zos_module): finally: hosts.all.file(path=USS_DEST_FILE, state="absent") hosts.all.file(path=result.get("backup_name"), state="absent") + hosts.all.zos_data_set(name=MVS_PDS, state="absent") def test_uss_encoding_conversion_uss_path_to_mvs_pds(ansible_zos_module): try: hosts = ansible_zos_module + MVS_PDS = get_tmp_ds_name() hosts.all.file(path=USS_PATH, state="directory") hosts.all.copy(content=TEST_DATA, dest=USS_PATH + "/encode1") hosts.all.copy(content=TEST_DATA, dest=USS_PATH + "/encode2") @@ -421,19 +476,11 @@ def test_uss_encoding_conversion_uss_path_to_mvs_pds(ansible_zos_module): "to": FROM_ENCODING, }, ) - pprint(vars(results)) for result in results.contacted.values(): assert result.get("src") == USS_PATH assert result.get("dest") == MVS_PDS assert result.get("backup_name") is None assert result.get("changed") is True - finally: - hosts.all.file(path=USS_PATH, state="absent") - - -def test_uss_encoding_conversion_mvs_pds_to_uss_path(ansible_zos_module): - try: - hosts = ansible_zos_module hosts.all.file(path=USS_DEST_PATH, state="directory") results = hosts.all.zos_encode( src=MVS_PDS, @@ -443,8 +490,8 @@ def test_uss_encoding_conversion_mvs_pds_to_uss_path(ansible_zos_module): "to": FROM_ENCODING, }, ) - pprint(vars(results)) for result in results.contacted.values(): + assert result.get("src") == MVS_PDS assert result.get("dest") == USS_DEST_PATH assert result.get("backup_name") is None @@ -455,11 +502,22 @@ def test_uss_encoding_conversion_mvs_pds_to_uss_path(ansible_zos_module): assert FROM_ENCODING in result.get("stdout") assert "untagged" not in result.get("stdout") finally: + hosts.all.file(path=USS_PATH, state="absent") + hosts.all.zos_data_set(name=MVS_PDS, state="absent") hosts.all.file(path=USS_DEST_PATH, state="absent") def test_uss_encoding_conversion_mvs_ps_to_mvs_pds_member(ansible_zos_module): hosts = ansible_zos_module + MVS_PDS = get_tmp_ds_name() + MVS_PDS_MEMBER = MVS_PDS + '(MEM)' + MVS_PS = get_tmp_ds_name() + hosts.all.zos_data_set(name=MVS_PS, state="present", type="seq") + hosts.all.shell(cmd="cp {0} \"//'{1}'\" ".format(quote(TEST_DATA), MVS_PS)) + hosts.all.zos_data_set(name=MVS_PDS, state="present", type="pds") + hosts.all.zos_data_set( + name=MVS_PDS_MEMBER, type="member", state="present" + ) results = hosts.all.zos_encode( src=MVS_PS, dest=MVS_PDS_MEMBER, @@ -468,21 +526,23 @@ def test_uss_encoding_conversion_mvs_ps_to_mvs_pds_member(ansible_zos_module): "to": TO_ENCODING, }, ) - pprint(vars(results)) for result in results.contacted.values(): + print(result) assert result.get("src") == MVS_PS assert result.get("dest") == MVS_PDS_MEMBER assert result.get("backup_name") is None assert result.get("changed") is True - + hosts.all.zos_data_set(name=MVS_PS, state="absent") + hosts.all.zos_data_set(name=MVS_PDS, state="absent") def test_uss_encoding_conversion_uss_file_to_mvs_vsam(ansible_zos_module): try: hosts = ansible_zos_module + MVS_VS = get_tmp_ds_name(3) hosts.all.copy(content=TEST_DATA, dest=USS_FILE) hosts.all.file(path=TEMP_JCL_PATH, state="directory") hosts.all.shell( - cmd="echo {0} > {1}/SAMPLE".format(quote(KSDS_CREATE_JCL), TEMP_JCL_PATH) + cmd="echo {0} > {1}/SAMPLE".format(quote(KSDS_CREATE_JCL.format(MVS_VS)), TEMP_JCL_PATH) ) results = hosts.all.zos_job_submit( src="{0}/SAMPLE".format(TEMP_JCL_PATH), location="USS", wait=True @@ -500,7 +560,6 @@ def test_uss_encoding_conversion_uss_file_to_mvs_vsam(ansible_zos_module): "to": FROM_ENCODING, }, ) - pprint(vars(results)) for result in results.contacted.values(): assert result.get("src") == USS_FILE assert result.get("dest") == MVS_VS @@ -509,12 +568,16 @@ def test_uss_encoding_conversion_uss_file_to_mvs_vsam(ansible_zos_module): finally: hosts.all.file(path=TEMP_JCL_PATH, state="absent") hosts.all.file(path=USS_FILE, state="absent") + hosts.all.zos_data_set(name=MVS_VS, state="absent") def test_uss_encoding_conversion_mvs_vsam_to_uss_file(ansible_zos_module): try: hosts = ansible_zos_module - hosts.all.copy(content="test", dest=USS_DEST_FILE) + mlq_size = 3 + MVS_VS = get_tmp_ds_name(mlq_size) + create_vsam_data_set(hosts, MVS_VS, "KSDS", add_data=True, key_length=12, key_offset=0) + hosts.all.file(path=USS_DEST_FILE, state="touch") results = hosts.all.zos_encode( src=MVS_VS, dest=USS_DEST_FILE, @@ -524,7 +587,6 @@ def test_uss_encoding_conversion_mvs_vsam_to_uss_file(ansible_zos_module): }, backup=True, ) - pprint(vars(results)) for result in results.contacted.values(): assert result.get("src") == MVS_VS assert result.get("dest") == USS_DEST_FILE @@ -542,10 +604,14 @@ def test_uss_encoding_conversion_mvs_vsam_to_uss_file(ansible_zos_module): finally: hosts.all.file(path=USS_DEST_FILE, state="absent") hosts.all.file(path=result.get("backup_name"), state="absent") + hosts.all.zos_data_set(name=MVS_VS, state="absent") def test_uss_encoding_conversion_mvs_vsam_to_mvs_ps(ansible_zos_module): hosts = ansible_zos_module + MVS_PS = get_tmp_ds_name() + MVS_VS = get_tmp_ds_name() + create_vsam_data_set(hosts, MVS_VS, "KSDS", add_data=True, key_length=12, key_offset=0) hosts.all.zos_data_set(name=MVS_PS, state="absent") hosts.all.zos_data_set(name=MVS_PS, state="present", type="seq", record_length=TEST_DATA_RECORD_LENGTH) results = hosts.all.zos_encode( @@ -556,16 +622,25 @@ def test_uss_encoding_conversion_mvs_vsam_to_mvs_ps(ansible_zos_module): "to": TO_ENCODING, }, ) - pprint(vars(results)) for result in results.contacted.values(): assert result.get("src") == MVS_VS assert result.get("dest") == MVS_PS assert result.get("backup_name") is None assert result.get("changed") is True + hosts.all.zos_data_set(name=MVS_VS, state="absent") + hosts.all.zos_data_set(name=MVS_PS, state="absent") def test_uss_encoding_conversion_mvs_vsam_to_mvs_pds_member(ansible_zos_module): hosts = ansible_zos_module + MVS_VS = get_tmp_ds_name() + MVS_PDS = get_tmp_ds_name() + create_vsam_data_set(hosts, MVS_VS, "KSDS", add_data=True, key_length=12, key_offset=0) + MVS_PDS_MEMBER = MVS_PDS + '(MEM)' + hosts.all.zos_data_set(name=MVS_PDS, state="present", type="pds", record_length=TEST_DATA_RECORD_LENGTH) + hosts.all.zos_data_set( + name=MVS_PDS_MEMBER, type="member", state="present" + ) results = hosts.all.zos_encode( src=MVS_VS, dest=MVS_PDS_MEMBER, @@ -575,31 +650,35 @@ def test_uss_encoding_conversion_mvs_vsam_to_mvs_pds_member(ansible_zos_module): }, ) hosts.all.zos_data_set(name=MVS_PDS, state="absent") - pprint(vars(results)) for result in results.contacted.values(): + print(result) assert result.get("src") == MVS_VS assert result.get("dest") == MVS_PDS_MEMBER assert result.get("backup_name") is None assert result.get("changed") is True + hosts.all.zos_data_set(name=MVS_VS, state="absent") + hosts.all.zos_data_set(name=MVS_PDS, state="absent") def test_uss_encoding_conversion_mvs_ps_to_mvs_vsam(ansible_zos_module): try: hosts = ansible_zos_module + MVS_VS = get_tmp_ds_name(3) + MVS_PS = get_tmp_ds_name() + hosts.all.zos_data_set(name=MVS_PS, state="present", type="seq") hosts.all.file(path=TEMP_JCL_PATH, state="directory") hosts.all.shell( - cmd="echo {0} > {1}/SAMPLE".format(quote(KSDS_CREATE_JCL), TEMP_JCL_PATH) + cmd="echo {0} > {1}/SAMPLE".format(quote(KSDS_CREATE_JCL.format(MVS_VS)), TEMP_JCL_PATH) ) results = hosts.all.zos_job_submit( src="{0}/SAMPLE".format(TEMP_JCL_PATH), location="USS", wait=True ) - print("test_uss_encoding_conversion_mvs_ps_to_mvs_vsam") - pprint(vars(results)) for result in results.contacted.values(): assert result.get("jobs") is not None assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" assert result.get("jobs")[0].get("ret_code").get("code") == 0 assert result.get("changed") is True + #hosts.all.zos_copy(content=TEST_DATA, dest=MVS_PS) results = hosts.all.zos_encode( src=MVS_PS, dest=MVS_VS, @@ -608,7 +687,6 @@ def test_uss_encoding_conversion_mvs_ps_to_mvs_vsam(ansible_zos_module): "to": FROM_ENCODING, }, ) - pprint(vars(results)) for result in results.contacted.values(): assert result.get("src") == MVS_PS assert result.get("dest") == MVS_VS @@ -617,11 +695,13 @@ def test_uss_encoding_conversion_mvs_ps_to_mvs_vsam(ansible_zos_module): finally: hosts.all.file(path=TEMP_JCL_PATH, state="absent") hosts.all.zos_data_set(name=MVS_PS, state="absent") + hosts.all.zos_data_set(name=MVS_VS, state="absent") def test_pds_backup(ansible_zos_module): try: hosts = ansible_zos_module + MVS_PDS = get_tmp_ds_name() hosts.all.zos_data_set(name=BACKUP_DATA_SET, state="absent") hosts.all.zos_data_set(name=MVS_PDS, state="absent") hosts.all.zos_data_set(name=MVS_PDS, state="present", type="pds") @@ -649,6 +729,7 @@ def test_pds_backup(ansible_zos_module): def test_pds_backup_with_tmp_hlq_option(ansible_zos_module): try: hosts = ansible_zos_module + MVS_PDS = get_tmp_ds_name() tmphlq = "TMPHLQ" hosts.all.zos_data_set(name=BACKUP_DATA_SET, state="absent") hosts.all.zos_data_set(name=MVS_PDS, state="absent") @@ -682,6 +763,7 @@ def test_pds_backup_with_tmp_hlq_option(ansible_zos_module): def test_ps_backup(ansible_zos_module): try: hosts = ansible_zos_module + MVS_PS = get_tmp_ds_name() hosts.all.zos_data_set(name=BACKUP_DATA_SET, state="absent") hosts.all.zos_data_set(name=MVS_PS, state="absent") hosts.all.zos_data_set(name=MVS_PS, state="present", type="seq") @@ -708,6 +790,8 @@ def test_ps_backup(ansible_zos_module): def test_vsam_backup(ansible_zos_module): try: hosts = ansible_zos_module + MVS_VS = get_tmp_ds_name() + MVS_PS = get_tmp_ds_name() hosts.all.zos_data_set(name=BACKUP_DATA_SET, state="absent") hosts.all.zos_data_set(name=MVS_VS, state="absent") hosts.all.zos_data_set(name=MVS_PS, state="absent") @@ -716,7 +800,7 @@ def test_vsam_backup(ansible_zos_module): ) hosts.all.file(path=TEMP_JCL_PATH, state="directory") hosts.all.shell( - cmd="echo {0} > {1}/SAMPLE".format(quote(KSDS_CREATE_JCL), TEMP_JCL_PATH) + cmd="echo {0} > {1}/SAMPLE".format(quote(KSDS_CREATE_JCL.format(MVS_VS)), TEMP_JCL_PATH) ) hosts.all.zos_job_submit( src="{0}/SAMPLE".format(TEMP_JCL_PATH), location="USS", wait=True @@ -741,12 +825,6 @@ def test_vsam_backup(ansible_zos_module): "to": TO_ENCODING, }, ) - contents = hosts.all.shell(cmd="cat \"//'{0}'\"".format(MVS_PS)) - content1 = "" - hosts.all.zos_data_set(name=MVS_PS, state="absent") - for content in contents.contacted.values(): - content1 = content.get("stdout") - print(contents.contacted.values()) hosts.all.zos_encode( src=MVS_VS, encoding={ @@ -767,13 +845,6 @@ def test_vsam_backup(ansible_zos_module): "to": TO_ENCODING, }, ) - - contents = hosts.all.shell(cmd="cat \"//'{0}'\"".format(MVS_PS)) - content2 = "" - print(contents.contacted.values()) - for content in contents.contacted.values(): - content2 = content.get("stdout") - assert content1 and (content1 == content2) finally: hosts.all.zos_data_set(name=MVS_PS, state="absent") hosts.all.zos_data_set(name=MVS_VS, state="absent") @@ -784,6 +855,7 @@ def test_vsam_backup(ansible_zos_module): def test_uss_backup_entire_folder_to_default_backup_location(ansible_zos_module): try: hosts = ansible_zos_module + MVS_PDS = get_tmp_ds_name() hosts.all.zos_data_set(name=BACKUP_DATA_SET, state="absent") # create and fill PDS hosts.all.zos_data_set(name=MVS_PDS, state="absent") @@ -844,10 +916,11 @@ def test_uss_backup_entire_folder_to_default_backup_location(ansible_zos_module) def test_uss_backup_entire_folder_to_default_backup_location_compressed( - ansible_zos_module, + ansible_zos_module ): try: hosts = ansible_zos_module + MVS_PDS = get_tmp_ds_name() hosts.all.zos_data_set(name=BACKUP_DATA_SET, state="absent") # create and fill PDS hosts.all.zos_data_set(name=MVS_PDS, state="absent") @@ -895,6 +968,7 @@ def test_uss_backup_entire_folder_to_default_backup_location_compressed( def test_return_backup_name_on_module_success_and_failure(ansible_zos_module): try: hosts = ansible_zos_module + MVS_PS = get_tmp_ds_name() hosts.all.zos_data_set(name=MVS_PS, state="absent") hosts.all.zos_data_set(name=BACKUP_DATA_SET, state="absent") hosts.all.zos_data_set(name=MVS_PS, state="present", type="seq") @@ -930,4 +1004,4 @@ def test_return_backup_name_on_module_success_and_failure(ansible_zos_module): finally: hosts.all.file(path=TEMP_JCL_PATH, state="absent") hosts.all.zos_data_set(name=MVS_PS, state="absent") - hosts.all.zos_data_set(name=BACKUP_DATA_SET, state="absent") + hosts.all.zos_data_set(name=BACKUP_DATA_SET, state="absent") \ No newline at end of file diff --git a/tests/functional/modules/test_zos_fetch_func.py b/tests/functional/modules/test_zos_fetch_func.py index 3b4a9c371..357540876 100644 --- a/tests/functional/modules/test_zos_fetch_func.py +++ b/tests/functional/modules/test_zos_fetch_func.py @@ -21,6 +21,9 @@ from ansible.utils.hashing import checksum from shellescape import quote +from ibm_zos_core.tests.helpers.volumes import Volume_Handler +from ibm_zos_core.tests.helpers.dataset import get_tmp_ds_name + __metaclass__ = type @@ -29,12 +32,6 @@ DUMMY DATA == LINE 03 == """ - -TEST_PS = "USER.PRIV.TEST" -TEST_PS_VB = "USER.PRIV.PSVB" -TEST_PDS = "USER.PRIV.TESTPDS" -TEST_PDS_MEMBER = "USER.PRIV.TESTPDS(MEM1)" -TEST_VSAM = "FETCH.TEST.VS" FROM_ENCODING = "IBM-1047" TO_ENCODING = "ISO8859-1" USS_FILE = "/tmp/fetch.data" @@ -48,18 +45,17 @@ //STEP1 EXEC PGM=IDCAMS //SYSPRINT DD SYSOUT=A //SYSIN DD * - DELETE FETCH.TEST.VS SET MAXCC=0 DEFINE CLUSTER - - (NAME(FETCH.TEST.VS) - + (NAME({1}) - INDEXED - KEYS(4 0) - RECSZ(200 200) - RECORDS(100) - SHAREOPTIONS(2 3) - - VOLUMES(000000) ) - - DATA (NAME(FETCH.TEST.VS.DATA)) - - INDEX (NAME(FETCH.TEST.VS.INDEX)) + VOLUMES({0}) ) - + DATA (NAME({1}.DATA)) - + INDEX (NAME({1}.INDEX)) /* """ KSDS_REPRO_JCL = """//DOREPRO JOB (T043JM,JM00,1,0,0,0),'CREATE KSDS',CLASS=R, @@ -90,21 +86,21 @@ def extract_member_name(data_set): member += data_set[i] return member -def create_and_populate_test_ps_vb(ansible_zos_module): +def create_and_populate_test_ps_vb(ansible_zos_module, name): params=dict( - name=TEST_PS_VB, + name=name, type='SEQ', record_format='VB', record_length='3180', block_size='3190' ) ansible_zos_module.all.zos_data_set(**params) - ansible_zos_module.all.shell(cmd="decho \"{0}\" \"{1}\"".format(TEST_DATA, TEST_PS_VB)) + ansible_zos_module.all.shell(cmd="decho \"{0}\" \"{1}\"".format(TEST_DATA, name)) -def delete_test_ps_vb(ansible_zos_module): +def delete_test_ps_vb(ansible_zos_module, name): params=dict( - name=TEST_PS_VB, + name=name, state='absent' ) ansible_zos_module.all.zos_data_set(**params) @@ -191,6 +187,7 @@ def test_fetch_uss_file_present_on_local_machine(ansible_zos_module): def test_fetch_sequential_data_set_fixed_block(ansible_zos_module): hosts = ansible_zos_module + TEST_PS = get_tmp_ds_name() hosts.all.zos_data_set(name=TEST_PS, state="present", type="SEQ", size="5m") hosts.all.shell(cmd="decho \"{0}\" \"{1}\"".format(TEST_DATA, TEST_PS)) params = dict(src=TEST_PS, dest="/tmp/", flat=True) @@ -211,7 +208,8 @@ def test_fetch_sequential_data_set_fixed_block(ansible_zos_module): def test_fetch_sequential_data_set_variable_block(ansible_zos_module): hosts = ansible_zos_module - create_and_populate_test_ps_vb(ansible_zos_module) + TEST_PS_VB = get_tmp_ds_name(3) + create_and_populate_test_ps_vb(ansible_zos_module, TEST_PS_VB) params = dict(src=TEST_PS_VB, dest="/tmp/", flat=True) dest_path = "/tmp/" + TEST_PS_VB try: @@ -225,12 +223,14 @@ def test_fetch_sequential_data_set_variable_block(ansible_zos_module): finally: if os.path.exists(dest_path): os.remove(dest_path) - delete_test_ps_vb(ansible_zos_module) + delete_test_ps_vb(ansible_zos_module, TEST_PS_VB) def test_fetch_partitioned_data_set(ansible_zos_module): hosts = ansible_zos_module + TEST_PDS = get_tmp_ds_name() hosts.all.zos_data_set(name=TEST_PDS, state="present", type="PDSE") + TEST_PDS_MEMBER = TEST_PDS + "(MEM)" hosts.all.zos_data_set(name=TEST_PDS_MEMBER, type="member") hosts.all.shell(cmd="decho \"{0}\" \"{1}\"".format(TEST_DATA, TEST_PDS_MEMBER)) params = dict(src=TEST_PDS, dest="/tmp/", flat=True) @@ -250,30 +250,33 @@ def test_fetch_partitioned_data_set(ansible_zos_module): shutil.rmtree(dest_path) -def test_fetch_vsam_data_set(ansible_zos_module): +def test_fetch_vsam_data_set(ansible_zos_module, volumes_on_systems): hosts = ansible_zos_module - TEMP_JCL_PATH = "/tmp/ansible" - dest_path = "/tmp/" + TEST_VSAM + temp_jcl_path = "/tmp/ansible" + test_vsam = get_tmp_ds_name() + dest_path = "/tmp/" + test_vsam + volumes = Volume_Handler(volumes_on_systems) + volume_1 = volumes.get_available_vol() try: # start by creating the vsam dataset (could use a helper instead? ) - hosts.all.file(path=TEMP_JCL_PATH, state="directory") + hosts.all.file(path=temp_jcl_path, state="directory") hosts.all.shell( - cmd="echo {0} > {1}/SAMPLE".format(quote(KSDS_CREATE_JCL), TEMP_JCL_PATH) + cmd="echo {0} > {1}/SAMPLE".format(quote(KSDS_CREATE_JCL.format(volume_1, test_vsam)), temp_jcl_path) ) hosts.all.zos_job_submit( - src="{0}/SAMPLE".format(TEMP_JCL_PATH), location="USS", wait=True + src="{0}/SAMPLE".format(temp_jcl_path), location="USS", wait=True ) hosts.all.shell(cmd="echo \"{0}\c\" > {1}".format(TEST_DATA, USS_FILE)) hosts.all.zos_encode( src=USS_FILE, - dest=TEST_VSAM, + dest=test_vsam, encoding={ "from": FROM_ENCODING, "to": TO_ENCODING, }, ) - params = dict(src=TEST_VSAM, dest="/tmp/", flat=True, is_binary=True) + params = dict(src=test_vsam, dest="/tmp/", flat=True, is_binary=True) results = hosts.all.zos_fetch(**params) for result in results.contacted.values(): assert result.get("changed") is True @@ -291,7 +294,7 @@ def test_fetch_vsam_data_set(ansible_zos_module): None os.remove(dest_path) hosts.all.file(path=USS_FILE, state="absent") - hosts.all.file(path=TEMP_JCL_PATH, state="absent") + hosts.all.file(path=temp_jcl_path, state="absent") def test_fetch_vsam_empty_data_set(ansible_zos_module): @@ -316,7 +319,9 @@ def test_fetch_vsam_empty_data_set(ansible_zos_module): def test_fetch_partitioned_data_set_member_in_binary_mode(ansible_zos_module): hosts = ansible_zos_module + TEST_PDS = get_tmp_ds_name() hosts.all.zos_data_set(name=TEST_PDS, state="present") + TEST_PDS_MEMBER = TEST_PDS + "(MEM)" hosts.all.zos_data_set(name=TEST_PDS_MEMBER, type="member") hosts.all.shell(cmd="decho \"{0}\" \"{1}\"".format(TEST_DATA, TEST_PDS_MEMBER)) params = dict( @@ -341,6 +346,7 @@ def test_fetch_partitioned_data_set_member_in_binary_mode(ansible_zos_module): def test_fetch_sequential_data_set_in_binary_mode(ansible_zos_module): hosts = ansible_zos_module + TEST_PS = get_tmp_ds_name() hosts.all.zos_data_set(name=TEST_PS, state="present", type="SEQ", size="5m") hosts.all.shell(cmd="decho \"{0}\" \"{1}\"".format(TEST_DATA, TEST_PS)) params = dict(src=TEST_PS, dest="/tmp/", flat=True, is_binary=True) @@ -361,7 +367,9 @@ def test_fetch_sequential_data_set_in_binary_mode(ansible_zos_module): def test_fetch_partitioned_data_set_binary_mode(ansible_zos_module): hosts = ansible_zos_module + TEST_PDS = get_tmp_ds_name() hosts.all.zos_data_set(name=TEST_PDS, state="present", type="PDSE") + TEST_PDS_MEMBER = TEST_PDS + "(MEM)" hosts.all.zos_data_set(name=TEST_PDS_MEMBER, type="member") hosts.all.shell(cmd="decho \"{0}\" \"{1}\"".format(TEST_DATA, TEST_PDS_MEMBER)) params = dict(src=TEST_PDS, dest="/tmp/", flat=True, is_binary=True) @@ -383,7 +391,7 @@ def test_fetch_partitioned_data_set_binary_mode(ansible_zos_module): def test_fetch_sequential_data_set_empty(ansible_zos_module): hosts = ansible_zos_module - src = "USER.TEST.EMPTY.SEQ" + src = get_tmp_ds_name() params = dict(src=src, dest="/tmp/", flat=True) dest_path = "/tmp/" + src try: @@ -404,7 +412,7 @@ def test_fetch_sequential_data_set_empty(ansible_zos_module): def test_fetch_partitioned_data_set_empty_fails(ansible_zos_module): hosts = ansible_zos_module - pds_name = "ZOS.FETCH.TEST.PDS" + pds_name = get_tmp_ds_name() hosts.all.zos_data_set( name=pds_name, type="pds", @@ -425,7 +433,7 @@ def test_fetch_partitioned_data_set_empty_fails(ansible_zos_module): def test_fetch_partitioned_data_set_member_empty(ansible_zos_module): hosts = ansible_zos_module - pds_name = "ZOS.FETCH.TEST.PDS" + pds_name = get_tmp_ds_name() hosts.all.zos_data_set( name=pds_name, type="pds", @@ -434,8 +442,9 @@ def test_fetch_partitioned_data_set_member_empty(ansible_zos_module): record_format="fba", record_length=25, ) + hosts.all.zos_data_set(name=pds_name, type="pds") hosts.all.zos_data_set(name=pds_name + "(MYDATA)", type="MEMBER", replace="yes") - params = dict(src="ZOS.FETCH.TEST.PDS(MYDATA)", dest="/tmp/", flat=True) + params = dict(src=pds_name + "(MYDATA)", dest="/tmp/", flat=True) dest_path = "/tmp/MYDATA" try: results = hosts.all.zos_fetch(**params) @@ -482,8 +491,9 @@ def test_fetch_missing_uss_file_fails(ansible_zos_module): def test_fetch_missing_mvs_data_set_does_not_fail(ansible_zos_module): hosts = ansible_zos_module + src = get_tmp_ds_name() params = dict( - src="FETCH.TEST.DATA.SET", dest="/tmp/", flat=True, fail_on_missing=False + src=src, dest="/tmp/", flat=True, fail_on_missing=False ) try: results = hosts.all.zos_fetch(**params) @@ -498,6 +508,7 @@ def test_fetch_missing_mvs_data_set_does_not_fail(ansible_zos_module): def test_fetch_partitioned_data_set_member_missing_fails(ansible_zos_module): hosts = ansible_zos_module + TEST_PDS = get_tmp_ds_name() params = dict(src=TEST_PDS + "(DUMMY)", dest="/tmp/", flat=True) try: results = hosts.all.zos_fetch(**params) @@ -510,7 +521,8 @@ def test_fetch_partitioned_data_set_member_missing_fails(ansible_zos_module): def test_fetch_mvs_data_set_missing_fails(ansible_zos_module): hosts = ansible_zos_module - params = dict(src="ZOS.FETCH.TEST.PDS", dest="/tmp/", flat=True) + src = get_tmp_ds_name() + params = dict(src=src, dest="/tmp/", flat=True) try: results = hosts.all.zos_fetch(**params) for result in results.contacted.values(): @@ -522,6 +534,7 @@ def test_fetch_mvs_data_set_missing_fails(ansible_zos_module): def test_fetch_sequential_data_set_replace_on_local_machine(ansible_zos_module): hosts = ansible_zos_module + TEST_PS = get_tmp_ds_name() hosts.all.zos_data_set(name=TEST_PS, state="present", type="SEQ", size="5m") ds_name = TEST_PS hosts.all.zos_data_set(name=TEST_PS, state="present") @@ -546,7 +559,7 @@ def test_fetch_sequential_data_set_replace_on_local_machine(ansible_zos_module): def test_fetch_partitioned_data_set_replace_on_local_machine(ansible_zos_module): hosts = ansible_zos_module - pds_name = "ZOS.FETCH.TEST.PDS" + pds_name = get_tmp_ds_name() dest_path = "/tmp/" + pds_name full_path = dest_path + "/MYDATA" hosts.all.zos_data_set( @@ -596,6 +609,7 @@ def test_fetch_uss_file_insufficient_write_permission_fails(ansible_zos_module): def test_fetch_pds_dir_insufficient_write_permission_fails(ansible_zos_module): hosts = ansible_zos_module + TEST_PDS = get_tmp_ds_name() dest_path = "/tmp/" + TEST_PDS os.mkdir(dest_path) os.chmod(dest_path, stat.S_IREAD) @@ -611,12 +625,14 @@ def test_fetch_pds_dir_insufficient_write_permission_fails(ansible_zos_module): def test_fetch_use_data_set_qualifier(ansible_zos_module): hosts = ansible_zos_module - dest_path = "/tmp/TEST.USER.QUAL" - hosts.all.zos_data_set(name="OMVSADM.TEST.USER.QUAL", type="seq", state="present") - params = dict(src="TEST.USER.QUAL", dest="/tmp/", flat=True, use_qualifier=True) + src = get_tmp_ds_name()[:25] + dest_path = "/tmp/"+ src + hosts.all.zos_data_set(name="OMVSADM." + src, type="seq", state="present") + params = dict(src=src, dest="/tmp/", flat=True, use_qualifier=True) try: results = hosts.all.zos_fetch(**params) for result in results.contacted.values(): + print(result) assert result.get("changed") is True assert result.get("data_set_type") == "Sequential" assert result.get("module_stderr") is None @@ -624,7 +640,7 @@ def test_fetch_use_data_set_qualifier(ansible_zos_module): finally: if os.path.exists(dest_path): os.remove(dest_path) - hosts.all.zos_data_set(src="OMVSADM.TEST.USER.QUAL", state="absent") + hosts.all.zos_data_set(src="OMVSADM." + src, state="absent") def test_fetch_flat_create_dirs(ansible_zos_module, z_python_interpreter): diff --git a/tests/functional/modules/test_zos_find_func.py b/tests/functional/modules/test_zos_find_func.py index 79df4efac..50782be0b 100644 --- a/tests/functional/modules/test_zos_find_func.py +++ b/tests/functional/modules/test_zos_find_func.py @@ -14,6 +14,8 @@ __metaclass__ = type +from ibm_zos_core.tests.helpers.volumes import Volume_Handler + SEQ_NAMES = [ "TEST.FIND.SEQ.FUNCTEST.FIRST", "TEST.FIND.SEQ.FUNCTEST.SECOND", @@ -280,15 +282,18 @@ def test_find_vsam_pattern(ansible_zos_module): ) -def test_find_vsam_in_volume(ansible_zos_module): +def test_find_vsam_in_volume(ansible_zos_module, volumes_on_systems): hosts = ansible_zos_module - alternate_vsam = "TEST.FIND.ALTER.VSAM" + volumes = Volume_Handler(volumes_on_systems) + volume_1 = volumes.get_available_vol() + volume_2 = volumes.get_available_vol() + alternate_vsam = "TEST.FIND.VSAM.SECOND" try: for vsam in VSAM_NAMES: - create_vsam_ksds(vsam, hosts, volume="222222") - create_vsam_ksds(alternate_vsam, hosts, volume="000000") + create_vsam_ksds(vsam, hosts, volume=volume_1) + create_vsam_ksds(alternate_vsam, hosts, volume=volume_2) find_res = hosts.all.zos_find( - patterns=['TEST.FIND.*.*.*'], volumes=['222222'], resource_type='cluster' + patterns=['TEST.FIND.*.*.*'], volumes=[volume_1], resource_type='cluster' ) for val in find_res.contacted.values(): assert len(val.get('data_sets')) == 1 @@ -355,4 +360,4 @@ def test_find_mixed_members_from_pds_paths(ansible_zos_module): finally: hosts.all.zos_data_set( batch=[dict(name=i, state='absent') for i in PDS_NAMES] - ) + ) \ No newline at end of file diff --git a/tests/functional/modules/test_zos_job_query_func.py b/tests/functional/modules/test_zos_job_query_func.py index b7c412cd4..c0dc5bdca 100644 --- a/tests/functional/modules/test_zos_job_query_func.py +++ b/tests/functional/modules/test_zos_job_query_func.py @@ -23,6 +23,7 @@ from shellescape import quote import tempfile +from ibm_zos_core.tests.helpers.dataset import get_tmp_ds_name # Make sure job list * returns something def test_zos_job_query_func(ansible_zos_module): @@ -45,14 +46,12 @@ def test_zos_job_query_func(ansible_zos_module): """ TEMP_PATH = "/tmp/jcl" -JDATA_SET_NAME = "imstestl.ims1.testq1" -NDATA_SET_NAME = "imstestl.ims1.testq2" -DEFAULT_VOLUME = "000000" # test to show multi wildcard in Job_id query won't crash the search def test_zos_job_id_query_multi_wildcards_func(ansible_zos_module): try: hosts = ansible_zos_module + JDATA_SET_NAME = get_tmp_ds_name() hosts.all.file(path=TEMP_PATH, state="directory") hosts.all.shell( cmd="echo {0} > {1}/SAMPLE".format(quote(JCLQ_FILE_CONTENTS), TEMP_PATH) @@ -85,6 +84,7 @@ def test_zos_job_id_query_multi_wildcards_func(ansible_zos_module): def test_zos_job_name_query_multi_wildcards_func(ansible_zos_module): try: hosts = ansible_zos_module + NDATA_SET_NAME = get_tmp_ds_name() hosts.all.file(path=TEMP_PATH, state="directory") hosts.all.shell( cmd="echo {0} > {1}/SAMPLE".format(quote(JCLQ_FILE_CONTENTS), TEMP_PATH) diff --git a/tests/functional/modules/test_zos_job_submit_func.py b/tests/functional/modules/test_zos_job_submit_func.py index 44dfdbf01..0fe6a59b9 100644 --- a/tests/functional/modules/test_zos_job_submit_func.py +++ b/tests/functional/modules/test_zos_job_submit_func.py @@ -20,8 +20,9 @@ import pytest import re import os -from pprint import pprint +from ibm_zos_core.tests.helpers.volumes import Volume_Handler +from ibm_zos_core.tests.helpers.dataset import get_tmp_ds_name # ############################################################################## # Configure the job card as needed, most common keyword parameters: @@ -259,25 +260,24 @@ //STEP1 EXEC PGM=BPXBATCH,PARM='PGM /bin/sleep 5'""" TEMP_PATH = "/tmp/jcl" -DATA_SET_NAME = "imstestl.ims1.test05" DATA_SET_NAME_SPECIAL_CHARS = "imstestl.im@1.xxx05" -DEFAULT_VOLUME = "000000" def test_job_submit_PDS(ansible_zos_module): try: hosts = ansible_zos_module + data_set_name = get_tmp_ds_name() hosts.all.file(path=TEMP_PATH, state="directory") hosts.all.shell( cmd="echo {0} > {1}/SAMPLE".format(quote(JCL_FILE_CONTENTS), TEMP_PATH) ) hosts.all.zos_data_set( - name=DATA_SET_NAME, state="present", type="pds", replace=True + name=data_set_name, state="present", type="pds", replace=True ) hosts.all.shell( - cmd="cp {0}/SAMPLE \"//'{1}(SAMPLE)'\"".format(TEMP_PATH, DATA_SET_NAME) + cmd="cp {0}/SAMPLE \"//'{1}(SAMPLE)'\"".format(TEMP_PATH, data_set_name) ) results = hosts.all.zos_job_submit( - src="{0}(SAMPLE)".format(DATA_SET_NAME), location="DATA_SET", wait=True + src="{0}(SAMPLE)".format(data_set_name), location="DATA_SET", wait=True ) for result in results.contacted.values(): assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" @@ -285,7 +285,7 @@ def test_job_submit_PDS(ansible_zos_module): assert result.get("changed") is True finally: hosts.all.file(path=TEMP_PATH, state="absent") - hosts.all.zos_data_set(name=DATA_SET_NAME, state="absent") + hosts.all.zos_data_set(name=data_set_name, state="absent") def test_job_submit_PDS_special_characters(ansible_zos_module): @@ -374,9 +374,12 @@ def test_job_submit_LOCAL_BADJCL(ansible_zos_module): assert re.search(r'completion code', repr(result.get("msg"))) -def test_job_submit_PDS_volume(ansible_zos_module): +def test_job_submit_PDS_volume(ansible_zos_module, volumes_on_systems): try: hosts = ansible_zos_module + data_set_name = get_tmp_ds_name() + volumes = Volume_Handler(volumes_on_systems) + volume_1 = volumes.get_available_vol() hosts.all.file(path=TEMP_PATH, state="directory") hosts.all.shell( @@ -384,30 +387,31 @@ def test_job_submit_PDS_volume(ansible_zos_module): ) hosts.all.zos_data_set( - name=DATA_SET_NAME, state="present", type="pds", replace=True, volumes=DEFAULT_VOLUME + name=data_set_name, state="present", type="pds", replace=True, volumes=volume_1 ) hosts.all.shell( - cmd="cp {0}/SAMPLE \"//'{1}(SAMPLE)'\"".format(TEMP_PATH, DATA_SET_NAME) + cmd="cp {0}/SAMPLE \"//'{1}(SAMPLE)'\"".format(TEMP_PATH, data_set_name) ) hosts.all.zos_data_set( - name=DATA_SET_NAME, state="uncataloged", type="pds" + name=data_set_name, state="uncataloged", type="pds" ) - results = hosts.all.zos_job_submit(src=DATA_SET_NAME+"(SAMPLE)", location="DATA_SET", volume=DEFAULT_VOLUME) + results = hosts.all.zos_job_submit(src=data_set_name+"(SAMPLE)", location="DATA_SET", volume=volume_1) for result in results.contacted.values(): assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" assert result.get("jobs")[0].get("ret_code").get("code") == 0 assert result.get('changed') is True finally: hosts.all.file(path=TEMP_PATH, state="absent") - hosts.all.zos_data_set(name=DATA_SET_NAME, state="absent") + hosts.all.zos_data_set(name=data_set_name, state="absent") def test_job_submit_PDS_5_SEC_JOB_WAIT_15(ansible_zos_module): try: hosts = ansible_zos_module + data_set_name = get_tmp_ds_name() hosts.all.file(path=TEMP_PATH, state="directory") wait_time_s = 15 @@ -416,15 +420,15 @@ def test_job_submit_PDS_5_SEC_JOB_WAIT_15(ansible_zos_module): ) hosts.all.zos_data_set( - name=DATA_SET_NAME, state="present", type="pds", replace=True + name=data_set_name, state="present", type="pds", replace=True ) hosts.all.shell( - cmd="cp {0}/BPXSLEEP \"//'{1}(BPXSLEEP)'\"".format(TEMP_PATH, DATA_SET_NAME) + cmd="cp {0}/BPXSLEEP \"//'{1}(BPXSLEEP)'\"".format(TEMP_PATH, data_set_name) ) hosts = ansible_zos_module - results = hosts.all.zos_job_submit(src=DATA_SET_NAME+"(BPXSLEEP)", + results = hosts.all.zos_job_submit(src=data_set_name+"(BPXSLEEP)", location="DATA_SET", wait_time_s=wait_time_s) for result in results.contacted.values(): @@ -434,12 +438,13 @@ def test_job_submit_PDS_5_SEC_JOB_WAIT_15(ansible_zos_module): assert result.get('duration') <= wait_time_s finally: hosts.all.file(path=TEMP_PATH, state="absent") - hosts.all.zos_data_set(name=DATA_SET_NAME, state="absent") + hosts.all.zos_data_set(name=data_set_name, state="absent") def test_job_submit_PDS_30_SEC_JOB_WAIT_60(ansible_zos_module): try: hosts = ansible_zos_module + data_set_name = get_tmp_ds_name() hosts.all.file(path=TEMP_PATH, state="directory") wait_time_s = 60 @@ -448,15 +453,15 @@ def test_job_submit_PDS_30_SEC_JOB_WAIT_60(ansible_zos_module): ) hosts.all.zos_data_set( - name=DATA_SET_NAME, state="present", type="pds", replace=True + name=data_set_name, state="present", type="pds", replace=True ) hosts.all.shell( - cmd="cp {0}/BPXSLEEP \"//'{1}(BPXSLEEP)'\"".format(TEMP_PATH, DATA_SET_NAME) + cmd="cp {0}/BPXSLEEP \"//'{1}(BPXSLEEP)'\"".format(TEMP_PATH, data_set_name) ) hosts = ansible_zos_module - results = hosts.all.zos_job_submit(src=DATA_SET_NAME+"(BPXSLEEP)", + results = hosts.all.zos_job_submit(src=data_set_name+"(BPXSLEEP)", location="DATA_SET", wait_time_s=wait_time_s) for result in results.contacted.values(): @@ -466,12 +471,13 @@ def test_job_submit_PDS_30_SEC_JOB_WAIT_60(ansible_zos_module): assert result.get('duration') <= wait_time_s finally: hosts.all.file(path=TEMP_PATH, state="absent") - hosts.all.zos_data_set(name=DATA_SET_NAME, state="absent") + hosts.all.zos_data_set(name=data_set_name, state="absent") def test_job_submit_PDS_30_SEC_JOB_WAIT_10_negative(ansible_zos_module): """This submits a 30 second job and only waits 10 seconds""" try: hosts = ansible_zos_module + data_set_name = get_tmp_ds_name() hosts.all.file(path=TEMP_PATH, state="directory") wait_time_s = 10 @@ -480,15 +486,15 @@ def test_job_submit_PDS_30_SEC_JOB_WAIT_10_negative(ansible_zos_module): ) hosts.all.zos_data_set( - name=DATA_SET_NAME, state="present", type="pds", replace=True + name=data_set_name, state="present", type="pds", replace=True ) hosts.all.shell( - cmd="cp {0}/BPXSLEEP \"//'{1}(BPXSLEEP)'\"".format(TEMP_PATH, DATA_SET_NAME) + cmd="cp {0}/BPXSLEEP \"//'{1}(BPXSLEEP)'\"".format(TEMP_PATH, data_set_name) ) hosts = ansible_zos_module - results = hosts.all.zos_job_submit(src=DATA_SET_NAME+"(BPXSLEEP)", + results = hosts.all.zos_job_submit(src=data_set_name+"(BPXSLEEP)", location="DATA_SET", wait_time_s=wait_time_s) for result in results.contacted.values(): @@ -499,7 +505,7 @@ def test_job_submit_PDS_30_SEC_JOB_WAIT_10_negative(ansible_zos_module): assert re.search(r'exceeded', repr(result.get("msg"))) finally: hosts.all.file(path=TEMP_PATH, state="absent") - hosts.all.zos_data_set(name=DATA_SET_NAME, state="absent") + hosts.all.zos_data_set(name=data_set_name, state="absent") @pytest.mark.parametrize("args", [ diff --git a/tests/functional/modules/test_zos_lineinfile_func.py b/tests/functional/modules/test_zos_lineinfile_func.py index e415a76e8..256a21c71 100644 --- a/tests/functional/modules/test_zos_lineinfile_func.py +++ b/tests/functional/modules/test_zos_lineinfile_func.py @@ -18,9 +18,10 @@ import pytest import inspect +from ibm_zos_core.tests.helpers.dataset import get_tmp_ds_name + __metaclass__ = type -DEFAULT_DATA_SET_NAME = "USER.PRIVATE.TESTDS" TEST_FOLDER_LINEINFILE = "/tmp/ansible-core-tests/zos_lineinfile/" c_pgm="""#include <stdio.h> @@ -549,9 +550,8 @@ def test_ds_line_insertafter_regex(ansible_zos_module, dstype): hosts = ansible_zos_module ds_type = dstype params = dict(insertafter="ZOAU_ROOT=", line="ZOAU_ROOT=/mvsutil-develop_dsed", state="present") - test_name = "DST1" - temp_file = "/tmp/{0}".format(test_name) - ds_name = test_name.upper() + "." + ds_type + ds_name = get_tmp_ds_name() + temp_file = "/tmp/" + ds_name content = TEST_CONTENT try: ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) @@ -572,9 +572,8 @@ def test_ds_line_insertbefore_regex(ansible_zos_module, dstype): hosts = ansible_zos_module ds_type = dstype params = dict(insertbefore="ZOAU_ROOT=", line="unset ZOAU_ROOT", state="present") - test_name = "DST2" - temp_file = "/tmp/{0}".format(test_name) - ds_name = test_name.upper() + "." + ds_type + ds_name = get_tmp_ds_name() + temp_file = "/tmp/" + ds_name content = TEST_CONTENT try: ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) @@ -595,9 +594,8 @@ def test_ds_line_insertafter_eof(ansible_zos_module, dstype): hosts = ansible_zos_module ds_type = dstype params = dict(insertafter="EOF", line="export 'ZOAU_ROOT'", state="present") - test_name = "DST3" - temp_file = "/tmp/{0}".format(test_name) - ds_name = test_name.upper() + "." + ds_type + ds_name = get_tmp_ds_name() + temp_file = "/tmp/" + ds_name content = TEST_CONTENT try: ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) @@ -617,9 +615,8 @@ def test_ds_line_insertbefore_bof(ansible_zos_module, dstype): hosts = ansible_zos_module ds_type = dstype params = dict(insertbefore="BOF", line="# this is file is for setting env vars", state="present") - test_name = "DST4" - temp_file = "/tmp/{0}".format(test_name) - ds_name = test_name.upper() + "." + ds_type + ds_name = get_tmp_ds_name() + temp_file = "/tmp/" + ds_name content = TEST_CONTENT try: ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) @@ -640,9 +637,8 @@ def test_ds_line_replace_match_insertafter_ignore(ansible_zos_module, dstype): hosts = ansible_zos_module ds_type = dstype params = dict(regexp="ZOAU_ROOT=", insertafter="PATH=", line="ZOAU_ROOT=/mvsutil-develop_dsed", state="present") - test_name = "DST5" - temp_file = "/tmp/{0}".format(test_name) - ds_name = test_name.upper() + "." + ds_type + ds_name = get_tmp_ds_name() + temp_file = "/tmp/" + ds_name content = TEST_CONTENT try: ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) @@ -663,9 +659,8 @@ def test_ds_line_replace_match_insertbefore_ignore(ansible_zos_module, dstype): hosts = ansible_zos_module ds_type = dstype params = dict(regexp="ZOAU_ROOT=", insertbefore="PATH=", line="unset ZOAU_ROOT", state="present") - test_name = "DST6" - temp_file = "/tmp/{0}".format(test_name) - ds_name = test_name.upper() + "." + ds_type + ds_name = get_tmp_ds_name() + temp_file = "/tmp/" + ds_name content = TEST_CONTENT try: ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) @@ -686,9 +681,8 @@ def test_ds_line_replace_nomatch_insertafter_match(ansible_zos_module, dstype): hosts = ansible_zos_module ds_type = dstype params = dict(regexp="abcxyz", insertafter="ZOAU_ROOT=", line="ZOAU_ROOT=/mvsutil-develop_dsed", state="present") - test_name = "DST7" - temp_file = "/tmp/{0}".format(test_name) - ds_name = test_name.upper() + "." + ds_type + ds_name = get_tmp_ds_name() + temp_file = "/tmp/" + ds_name content = TEST_CONTENT try: ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) @@ -709,9 +703,8 @@ def test_ds_line_replace_nomatch_insertbefore_match(ansible_zos_module, dstype): hosts = ansible_zos_module ds_type = dstype params = dict(regexp="abcxyz", insertbefore="ZOAU_ROOT=", line="unset ZOAU_ROOT", state="present") - test_name = "DST8" - temp_file = "/tmp/{0}".format(test_name) - ds_name = test_name.upper() + "." + ds_type + ds_name = get_tmp_ds_name() + temp_file = "/tmp/" + ds_name content = TEST_CONTENT try: ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) @@ -732,9 +725,8 @@ def test_ds_line_replace_nomatch_insertafter_nomatch(ansible_zos_module, dstype) hosts = ansible_zos_module ds_type = dstype params = dict(regexp="abcxyz", insertafter="xyzijk", line="ZOAU_ROOT=/mvsutil-develop_dsed", state="present") - test_name = "DST9" - temp_file = "/tmp/{0}".format(test_name) - ds_name = test_name.upper() + "." + ds_type + ds_name = get_tmp_ds_name() + temp_file = "/tmp/" + ds_name content = TEST_CONTENT try: ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) @@ -755,9 +747,8 @@ def test_ds_line_replace_nomatch_insertbefore_nomatch(ansible_zos_module, dstype hosts = ansible_zos_module ds_type = dstype params = dict(regexp="abcxyz", insertbefore="xyzijk", line="unset ZOAU_ROOT", state="present") - test_name = "DST10" - temp_file = "/tmp/{0}".format(test_name) - ds_name = test_name.upper() + "." + ds_type + ds_name = get_tmp_ds_name() + temp_file = "/tmp/" + ds_name content = TEST_CONTENT try: ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) @@ -778,9 +769,8 @@ def test_ds_line_absent(ansible_zos_module, dstype): hosts = ansible_zos_module ds_type = dstype params = dict(regexp="ZOAU_ROOT=", line="", state="absent") - test_name = "DST11" - temp_file = "/tmp/{0}".format(test_name) - ds_name = test_name.upper() + "." + ds_type + ds_name = get_tmp_ds_name() + temp_file = "/tmp/" + ds_name content = TEST_CONTENT try: ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) @@ -802,17 +792,10 @@ def test_ds_tmp_hlq_option(ansible_zos_module): ds_type = "SEQ" kwargs = dict(backup_name=r"TMPHLQ\..") params = dict(insertafter="EOF", line="export ZOAU_ROOT", state="present", backup=True, tmp_hlq="TMPHLQ") - test_name = "DST12" - temp_file = "/tmp/zos_lineinfile/" + test_name content = TEST_CONTENT try: - hosts.all.shell(cmd="mkdir -p {0}".format("/tmp/zos_lineinfile/")) - results = hosts.all.shell(cmd='hlq') - for result in results.contacted.values(): - hlq = result.get("stdout") - if len(hlq) > 8: - hlq = hlq[:8] - ds_full_name = hlq + "." + test_name.upper() + "." + ds_type + ds_full_name = get_tmp_ds_name() + temp_file = "/tmp/" + ds_full_name hosts.all.zos_data_set(name=ds_full_name, type=ds_type, replace=True) hosts.all.shell(cmd="echo \"{0}\" > {1}".format(content, temp_file)) cmdStr = "cp {0} \"//'{1}'\" ".format(quote(temp_file), ds_full_name) @@ -837,14 +820,8 @@ def test_ds_not_supported(ansible_zos_module, dstype): hosts = ansible_zos_module ds_type = dstype params = dict(path="", regexp="ZOAU_ROOT=", line="ZOAU_ROOT=/mvsutil-develop_dsed", state="present") - test_name = "DST13" - ds_name = test_name.upper() + "." + ds_type try: - results = hosts.all.shell(cmd='hlq') - for result in results.contacted.values(): - hlq = result.get("stdout") - assert len(hlq) <= 8 or hlq != '' - ds_name = test_name.upper() + "." + ds_type + ds_name = get_tmp_ds_name() + "." + ds_type results = hosts.all.zos_data_set(name=ds_name, type=ds_type, replace='yes') for result in results.contacted.values(): assert result.get("changed") is True @@ -862,21 +839,22 @@ def test_ds_not_supported(ansible_zos_module, dstype): def test_ds_line_force(ansible_zos_module, dstype): hosts = ansible_zos_module ds_type = dstype + default_data_set_name = get_tmp_ds_name() params = dict(path="", regexp="ZOAU_ROOT=", line="ZOAU_ROOT=/mvsutil-develop_dsed", state="present", force="True") MEMBER_1, MEMBER_2 = "MEM1", "MEM2" TEMP_FILE = "/tmp/{0}".format(MEMBER_2) content = TEST_CONTENT if ds_type == "SEQ": - params["path"] = DEFAULT_DATA_SET_NAME+".{0}".format(MEMBER_2) + params["path"] = default_data_set_name+".{0}".format(MEMBER_2) else: - params["path"] = DEFAULT_DATA_SET_NAME+"({0})".format(MEMBER_2) + params["path"] = default_data_set_name+"({0})".format(MEMBER_2) try: # set up: - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="present", type=ds_type, replace=True) + hosts.all.zos_data_set(name=default_data_set_name, state="present", type=ds_type, replace=True) hosts.all.shell(cmd="echo \"{0}\" > {1}".format(content, TEMP_FILE)) hosts.all.zos_data_set( batch=[ - { "name": DEFAULT_DATA_SET_NAME + "({0})".format(MEMBER_1), + { "name": default_data_set_name + "({0})".format(MEMBER_1), "type": "member", "state": "present", "replace": True, }, { "name": params["path"], "type": "member", "state": "present", "replace": True, }, @@ -895,7 +873,7 @@ def test_ds_line_force(ansible_zos_module, dstype): hosts.all.shell(cmd="echo \"{0}\" > {1}".format(c_pgm, '/tmp/disp_shr/pdse-lock.c')) hosts.all.shell(cmd="echo \"{0}\" > {1}".format( call_c_jcl.format( - DEFAULT_DATA_SET_NAME, + default_data_set_name, MEMBER_1), '/tmp/disp_shr/call_c_pgm.jcl')) hosts.all.shell(cmd="xlc -o pdse-lock pdse-lock.c", chdir="/tmp/disp_shr/") @@ -914,7 +892,7 @@ def test_ds_line_force(ansible_zos_module, dstype): pid = list(ps_list_res.contacted.values())[0].get('stdout').strip().split(' ')[0] hosts.all.shell(cmd="kill 9 {0}".format(pid.strip())) hosts.all.shell(cmd='rm -r /tmp/disp_shr') - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") + hosts.all.zos_data_set(name=default_data_set_name, state="absent") @pytest.mark.ds @@ -922,18 +900,19 @@ def test_ds_line_force(ansible_zos_module, dstype): def test_ds_line_force_fail(ansible_zos_module, dstype): hosts = ansible_zos_module ds_type = dstype + default_data_set_name = get_tmp_ds_name() params = dict(path="", regexp="ZOAU_ROOT=", line="ZOAU_ROOT=/mvsutil-develop_dsed", state="present", force="False") MEMBER_1, MEMBER_2 = "MEM1", "MEM2" TEMP_FILE = "/tmp/{0}".format(MEMBER_2) - params["path"] = DEFAULT_DATA_SET_NAME+"({0})".format(MEMBER_2) + params["path"] = default_data_set_name + "({0})".format(MEMBER_2) content = TEST_CONTENT try: # set up: - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="present", type=ds_type, replace=True) + hosts.all.zos_data_set(name=default_data_set_name, state="present", type=ds_type, replace=True) hosts.all.shell(cmd="echo \"{0}\" > {1}".format(content, TEMP_FILE)) hosts.all.zos_data_set( batch=[ - { "name": DEFAULT_DATA_SET_NAME + "({0})".format(MEMBER_1), + { "name": default_data_set_name + "({0})".format(MEMBER_1), "type": "member", "state": "present", "replace": True, }, { "name": params["path"], "type": "member", "state": "present", "replace": True, }, @@ -949,7 +928,7 @@ def test_ds_line_force_fail(ansible_zos_module, dstype): hosts.all.shell(cmd="echo \"{0}\" > {1}".format(c_pgm, '/tmp/disp_shr/pdse-lock.c')) hosts.all.shell(cmd="echo \"{0}\" > {1}".format( call_c_jcl.format( - DEFAULT_DATA_SET_NAME, + default_data_set_name, MEMBER_1), '/tmp/disp_shr/call_c_pgm.jcl')) hosts.all.shell(cmd="xlc -o pdse-lock pdse-lock.c", chdir="/tmp/disp_shr/") @@ -965,7 +944,7 @@ def test_ds_line_force_fail(ansible_zos_module, dstype): pid = list(ps_list_res.contacted.values())[0].get('stdout').strip().split(' ')[0] hosts.all.shell(cmd="kill 9 {0}".format(pid.strip())) hosts.all.shell(cmd='rm -r /tmp/disp_shr') - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") + hosts.all.zos_data_set(name=default_data_set_name, state="absent") @pytest.mark.ds @@ -974,9 +953,8 @@ def test_ds_line_does_not_insert_repeated(ansible_zos_module, dstype): hosts = ansible_zos_module ds_type = dstype params = dict(line='ZOAU_ROOT=/usr/lpp/zoautil/v100', state="present") - test_name = "DST15" - temp_file = "/tmp/{0}".format(test_name) - ds_name = test_name.upper() + "." + ds_type + ds_name = get_tmp_ds_name() + temp_file = "/tmp/" + ds_name content = TEST_CONTENT try: ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) @@ -1033,9 +1011,8 @@ def test_ds_encoding(ansible_zos_module, encoding, dstype): insert_data = "Insert this string" params = dict(insertafter="SIMPLE", line=insert_data, state="present", encoding={"from":"IBM-1047", "to":encoding}) params["encoding"] = encoding - test_name = "DST13" - temp_file = "/tmp/{0}".format(test_name) - ds_name = test_name.upper() + "." + ds_type + ds_name = get_tmp_ds_name() + temp_file = "/tmp/" + ds_name content = "SIMPLE LINE TO VERIFY" try: hosts.all.shell(cmd="echo \"{0}\" > {1}".format(content, temp_file)) diff --git a/tests/functional/modules/test_zos_mount_func.py b/tests/functional/modules/test_zos_mount_func.py index 4021af625..8883ddebc 100644 --- a/tests/functional/modules/test_zos_mount_func.py +++ b/tests/functional/modules/test_zos_mount_func.py @@ -17,6 +17,9 @@ MissingZOAUImport, ) +from ibm_zos_core.tests.helpers.volumes import Volume_Handler +from ibm_zos_core.tests.helpers.dataset import get_tmp_ds_name + try: from zoautil_py import Datasets except Exception: @@ -66,7 +69,7 @@ def populate_tmpfile(): return tmp_file_filename -def create_sourcefile(hosts): +def create_sourcefile(hosts, volume): starter = get_sysname(hosts).split(".")[0].upper() if len(starter) < 2: starter = "IMSTESTU" @@ -83,7 +86,7 @@ def create_sourcefile(hosts): hosts.all.shell( cmd="zfsadm define -aggregate " + thisfile - + " -volumes 222222 -cylinders 200 1", + + " -volumes {0} -cylinders 200 1".format(volume), executable=SHELL_EXECUTABLE, stdin="", ) @@ -95,9 +98,11 @@ def create_sourcefile(hosts): return thisfile -def test_basic_mount(ansible_zos_module): +def test_basic_mount(ansible_zos_module, volumes_on_systems): hosts = ansible_zos_module - srcfn = create_sourcefile(hosts) + volumes = Volume_Handler(volumes_on_systems) + volume_1 = volumes.get_available_vol() + srcfn = create_sourcefile(hosts, volume_1) try: mount_result = hosts.all.zos_mount( src=srcfn, path="/pythonx", fs_type="ZFS", state="mounted" @@ -116,9 +121,12 @@ def test_basic_mount(ansible_zos_module): hosts.all.file(path="/pythonx/", state="absent") -def test_double_mount(ansible_zos_module): + +def test_double_mount(ansible_zos_module, volumes_on_systems): hosts = ansible_zos_module - srcfn = create_sourcefile(hosts) + volumes = Volume_Handler(volumes_on_systems) + volume_1 = volumes.get_available_vol() + srcfn = create_sourcefile(hosts, volume_1) try: hosts.all.zos_mount(src=srcfn, path="/pythonx", fs_type="ZFS", state="mounted") # The duplication here is intentional... want to make sure it is seen @@ -139,9 +147,11 @@ def test_double_mount(ansible_zos_module): hosts.all.file(path="/pythonx/", state="absent") -def test_remount(ansible_zos_module): +def test_remount(ansible_zos_module, volumes_on_systems): hosts = ansible_zos_module - srcfn = create_sourcefile(hosts) + volumes = Volume_Handler(volumes_on_systems) + volume_1 = volumes.get_available_vol() + srcfn = create_sourcefile(hosts, volume_1) try: hosts.all.zos_mount(src=srcfn, path="/pythonx", fs_type="ZFS", state="mounted") mount_result = hosts.all.zos_mount( @@ -160,9 +170,11 @@ def test_remount(ansible_zos_module): hosts.all.file(path="/pythonx/", state="absent") -def test_basic_mount_with_bpx_nocomment_nobackup(ansible_zos_module): +def test_basic_mount_with_bpx_nocomment_nobackup(ansible_zos_module, volumes_on_systems): hosts = ansible_zos_module - srcfn = create_sourcefile(hosts) + volumes = Volume_Handler(volumes_on_systems) + volume_1 = volumes.get_available_vol() + srcfn = create_sourcefile(hosts, volume_1) tmp_file_filename = "/tmp/testfile.txt" @@ -177,8 +189,8 @@ def test_basic_mount_with_bpx_nocomment_nobackup(ansible_zos_module): stdin="", ) - dest = "USER.TEST.BPX.PDS" - dest_path = "USER.TEST.BPX.PDS(AUTO1)" + dest = get_tmp_ds_name() + dest_path = dest + "(AUTO1)" hosts.all.zos_data_set( name=dest, @@ -229,9 +241,11 @@ def test_basic_mount_with_bpx_nocomment_nobackup(ansible_zos_module): ) -def test_basic_mount_with_bpx_comment_backup(ansible_zos_module): +def test_basic_mount_with_bpx_comment_backup(ansible_zos_module, volumes_on_systems): hosts = ansible_zos_module - srcfn = create_sourcefile(hosts) + volumes = Volume_Handler(volumes_on_systems) + volume_1 = volumes.get_available_vol() + srcfn = create_sourcefile(hosts, volume_1) tmp_file_filename = "/tmp/testfile.txt" @@ -258,9 +272,9 @@ def test_basic_mount_with_bpx_comment_backup(ansible_zos_module): print("\n====================================================\n") - dest = "USER.TEST.BPX.PDS" - dest_path = "USER.TEST.BPX.PDS(AUTO2)" - back_dest_path = "USER.TEST.BPX.PDS(AUTO2BAK)" + dest = get_tmp_ds_name() + dest_path = dest + "(AUTO2)" + back_dest_path = dest + "(AUTO2BAK)" hosts.all.zos_data_set( name=dest, @@ -347,10 +361,11 @@ def test_basic_mount_with_bpx_comment_backup(ansible_zos_module): record_length=80, ) - -def test_basic_mount_with_tmp_hlq_option(ansible_zos_module): +def test_basic_mount_with_tmp_hlq_option(ansible_zos_module, volumes_on_systems): hosts = ansible_zos_module - srcfn = create_sourcefile(hosts) + volumes = Volume_Handler(volumes_on_systems) + volume_1 = volumes.get_available_vol() + srcfn = create_sourcefile(hosts, volume_1) try: mount_result = hosts.all.zos_mount( src=srcfn, path="/pythonx", fs_type="ZFS", state="mounted" @@ -361,7 +376,7 @@ def test_basic_mount_with_tmp_hlq_option(ansible_zos_module): assert result.get("changed") is True finally: tmphlq = "TMPHLQ" - persist_data_set = "MTEST.TEST.PERSIST" + persist_data_set = get_tmp_ds_name() hosts.all.zos_data_set(name=persist_data_set, state="present", type="SEQ") unmount_result = hosts.all.zos_mount( src=srcfn, diff --git a/tests/functional/modules/test_zos_mvs_raw_func.py b/tests/functional/modules/test_zos_mvs_raw_func.py index 72bf0bd0a..fd20a6a92 100644 --- a/tests/functional/modules/test_zos_mvs_raw_func.py +++ b/tests/functional/modules/test_zos_mvs_raw_func.py @@ -18,10 +18,11 @@ import pytest from pprint import pprint +from ibm_zos_core.tests.helpers.volumes import Volume_Handler +from ibm_zos_core.tests.helpers.dataset import get_tmp_ds_name + +DATASET = "" EXISTING_DATA_SET = "user.private.proclib" -DEFAULT_DATA_SET = "user.private.rawds" -DEFAULT_DATA_SET_2 = "user.private.rawds2" -DEFAULT_DATA_SET_WITH_MEMBER = "{0}(mem1)".format(DEFAULT_DATA_SET) DEFAULT_PATH = "/tmp/testdir" DEFAULT_PATH_WITH_FILE = "{0}/testfile".format(DEFAULT_PATH) DEFAULT_DD = "MYDD" @@ -29,7 +30,6 @@ SYSPRINT_DD = "SYSPRINT" IDCAMS_STDIN = " LISTCAT ENTRIES('{0}')".format(EXISTING_DATA_SET.upper()) IDCAMS_INVALID_STDIN = " hello world #$!@%!#$!@``~~^$*%" -DEFAULT_VOLUME = "000000" # ---------------------------------------------------------------------------- # @@ -51,7 +51,8 @@ def test_failing_name_format(ansible_zos_module): def test_disposition_new(ansible_zos_module): try: hosts = ansible_zos_module - hosts.all.zos_data_set(name=DEFAULT_DATA_SET, state="absent") + default_data_set = get_tmp_ds_name() + hosts.all.zos_data_set(name=default_data_set, state="absent") results = hosts.all.zos_mvs_raw( program_name="idcams", auth=True, @@ -59,7 +60,7 @@ def test_disposition_new(ansible_zos_module): dict( dd_data_set=dict( dd_name=SYSPRINT_DD, - data_set_name=DEFAULT_DATA_SET, + data_set_name=default_data_set, disposition="new", type="seq", return_content=dict(type="text"), @@ -73,7 +74,7 @@ def test_disposition_new(ansible_zos_module): assert result.get("ret_code", {}).get("code", -1) == 0 assert len(result.get("dd_names", [])) > 0 finally: - results = hosts.all.zos_data_set(name=DEFAULT_DATA_SET, state="absent") + results = hosts.all.zos_data_set(name=default_data_set, state="absent") @pytest.mark.parametrize( @@ -83,8 +84,9 @@ def test_disposition_new(ansible_zos_module): def test_dispositions_for_existing_data_set(ansible_zos_module, disposition): try: hosts = ansible_zos_module + default_data_set = get_tmp_ds_name() hosts.all.zos_data_set( - name=DEFAULT_DATA_SET, type="seq", state="present", replace=True + name=default_data_set, type="seq", state="present", replace=True ) results = hosts.all.zos_mvs_raw( program_name="idcams", @@ -93,7 +95,7 @@ def test_dispositions_for_existing_data_set(ansible_zos_module, disposition): dict( dd_data_set=dict( dd_name=SYSPRINT_DD, - data_set_name=DEFAULT_DATA_SET, + data_set_name=default_data_set, disposition=disposition, return_content=dict(type="text"), ), @@ -106,14 +108,17 @@ def test_dispositions_for_existing_data_set(ansible_zos_module, disposition): assert result.get("ret_code", {}).get("code", -1) == 0 assert len(result.get("dd_names", [])) > 0 finally: - results = hosts.all.zos_data_set(name=DEFAULT_DATA_SET, state="absent") + results = hosts.all.zos_data_set(name=default_data_set, state="absent") -def test_list_cat_for_existing_data_set_with_tmp_hlq_option(ansible_zos_module): +def test_list_cat_for_existing_data_set_with_tmp_hlq_option(ansible_zos_module, volumes_on_systems): hosts = ansible_zos_module tmphlq = "TMPHLQ" + volumes = Volume_Handler(volumes_on_systems) + default_volume = volumes.get_available_vol() + default_data_set = get_tmp_ds_name()[:25] hosts.all.zos_data_set( - name=DEFAULT_DATA_SET, type="seq", state="present", replace=True + name=default_data_set, type="seq", state="present", replace=True ) results = hosts.all.zos_mvs_raw( program_name="idcams", @@ -123,7 +128,7 @@ def test_list_cat_for_existing_data_set_with_tmp_hlq_option(ansible_zos_module): dict( dd_data_set=dict( dd_name=SYSPRINT_DD, - data_set_name=DEFAULT_DATA_SET, + data_set_name=default_data_set, disposition="new", return_content=dict(type="text"), replace=True, @@ -132,7 +137,7 @@ def test_list_cat_for_existing_data_set_with_tmp_hlq_option(ansible_zos_module): space_primary=5, space_secondary=1, space_type="m", - volumes=DEFAULT_VOLUME, + volumes=default_volume, record_format="fb" ), ), @@ -145,7 +150,7 @@ def test_list_cat_for_existing_data_set_with_tmp_hlq_option(ansible_zos_module): assert len(result.get("dd_names", [])) > 0 for backup in result.get("backups"): backup.get("backup_name")[:6] == tmphlq - results = hosts.all.zos_data_set(name=DEFAULT_DATA_SET, state="absent") + results = hosts.all.zos_data_set(name=default_data_set, state="absent") for result in results.contacted.values(): pprint(result) assert result.get("changed", False) is True @@ -155,7 +160,9 @@ def test_list_cat_for_existing_data_set_with_tmp_hlq_option(ansible_zos_module): def test_new_disposition_for_data_set_members(ansible_zos_module): try: hosts = ansible_zos_module - hosts.all.zos_data_set(name=DEFAULT_DATA_SET, state="absent") + default_data_set = get_tmp_ds_name() + DEFAULT_DATA_SET_WITH_MEMBER = default_data_set + '(MEM)' + hosts.all.zos_data_set(name=default_data_set, state="absent") results = hosts.all.zos_mvs_raw( program_name="idcams", auth=True, @@ -177,7 +184,7 @@ def test_new_disposition_for_data_set_members(ansible_zos_module): pprint(result) assert result.get("ret_code", {}).get("code", -1) == 8 finally: - hosts.all.zos_data_set(name=DEFAULT_DATA_SET, state="absent") + hosts.all.zos_data_set(name=default_data_set, state="absent") @pytest.mark.parametrize( @@ -187,8 +194,10 @@ def test_new_disposition_for_data_set_members(ansible_zos_module): def test_dispositions_for_existing_data_set_members(ansible_zos_module, disposition): try: hosts = ansible_zos_module + default_data_set = get_tmp_ds_name() + DEFAULT_DATA_SET_WITH_MEMBER = default_data_set + '(MEM)' hosts.all.zos_data_set( - name=DEFAULT_DATA_SET, type="pds", state="present", replace=True + name=default_data_set, type="pds", state="present", replace=True ) results = hosts.all.zos_mvs_raw( program_name="idcams", @@ -210,22 +219,25 @@ def test_dispositions_for_existing_data_set_members(ansible_zos_module, disposit assert result.get("ret_code", {}).get("code", -1) == 0 assert len(result.get("dd_names", [])) > 0 finally: - hosts.all.zos_data_set(name=DEFAULT_DATA_SET, state="absent") + hosts.all.zos_data_set(name=default_data_set, state="absent") @pytest.mark.parametrize( "normal_disposition,changed", [("keep", True), ("delete", True), ("catalog", True), ("uncatalog", True)], ) -def test_normal_dispositions_data_set(ansible_zos_module, normal_disposition, changed): +def test_normal_dispositions_data_set(ansible_zos_module, normal_disposition, changed, volumes_on_systems): try: hosts = ansible_zos_module + volumes = Volume_Handler(volumes_on_systems) + volume_1 = volumes.get_available_vol() + default_data_set = get_tmp_ds_name() results = hosts.all.zos_data_set( - name=DEFAULT_DATA_SET, + name=default_data_set, type="seq", state="present", replace=True, - volumes=[DEFAULT_VOLUME], + volumes=[volume_1], ) results = hosts.all.zos_mvs_raw( program_name="idcams", @@ -234,10 +246,10 @@ def test_normal_dispositions_data_set(ansible_zos_module, normal_disposition, ch dict( dd_data_set=dict( dd_name=SYSPRINT_DD, - data_set_name=DEFAULT_DATA_SET, + data_set_name=default_data_set, disposition="shr", disposition_normal=normal_disposition, - volumes=[DEFAULT_VOLUME], + volumes=[volume_1], return_content=dict(type="text"), ), ), @@ -249,7 +261,7 @@ def test_normal_dispositions_data_set(ansible_zos_module, normal_disposition, ch assert result.get("ret_code", {}).get("code", -1) == 0 assert len(result.get("dd_names", [])) > 0 finally: - results = hosts.all.zos_data_set(name=DEFAULT_DATA_SET, state="absent") + results = hosts.all.zos_data_set(name=default_data_set, state="absent") @pytest.mark.parametrize( @@ -265,7 +277,8 @@ def test_normal_dispositions_data_set(ansible_zos_module, normal_disposition, ch def test_space_types(ansible_zos_module, space_type, primary, secondary, expected): try: hosts = ansible_zos_module - hosts.all.zos_data_set(name=DEFAULT_DATA_SET, state="absent") + default_data_set = get_tmp_ds_name() + hosts.all.zos_data_set(name=default_data_set, state="absent") results = hosts.all.zos_mvs_raw( program_name="idcams", auth=True, @@ -273,7 +286,7 @@ def test_space_types(ansible_zos_module, space_type, primary, secondary, expecte dict( dd_data_set=dict( dd_name=SYSPRINT_DD, - data_set_name=DEFAULT_DATA_SET, + data_set_name=default_data_set, disposition="new", type="seq", space_primary=primary, @@ -286,7 +299,7 @@ def test_space_types(ansible_zos_module, space_type, primary, secondary, expecte ], ) - results2 = hosts.all.command(cmd="dls -l -s {0}".format(DEFAULT_DATA_SET)) + results2 = hosts.all.command(cmd="dls -l -s {0}".format(default_data_set)) for result in results.contacted.values(): pprint(result) @@ -297,17 +310,20 @@ def test_space_types(ansible_zos_module, space_type, primary, secondary, expecte pprint(result) assert str(expected) in result.get("stdout", "") finally: - hosts.all.zos_data_set(name=DEFAULT_DATA_SET, state="absent") + hosts.all.zos_data_set(name=default_data_set, state="absent") @pytest.mark.parametrize( "data_set_type", ["pds", "pdse", "large", "basic", "seq"], ) -def test_data_set_types_non_vsam(ansible_zos_module, data_set_type): +def test_data_set_types_non_vsam(ansible_zos_module, data_set_type, volumes_on_systems): try: hosts = ansible_zos_module - hosts.all.zos_data_set(name=DEFAULT_DATA_SET, state="absent") + volumes = Volume_Handler(volumes_on_systems) + volume_1 = volumes.get_available_vol() + default_data_set = get_tmp_ds_name() + hosts.all.zos_data_set(name=default_data_set, state="absent") results = hosts.all.zos_mvs_raw( program_name="idcams", auth=True, @@ -315,32 +331,35 @@ def test_data_set_types_non_vsam(ansible_zos_module, data_set_type): dict( dd_data_set=dict( dd_name=SYSPRINT_DD, - data_set_name=DEFAULT_DATA_SET, + data_set_name=default_data_set, disposition="new", type=data_set_type, - volumes=[DEFAULT_VOLUME], + volumes=[volume_1], ), ), dict(dd_input=dict(dd_name=SYSIN_DD, content=IDCAMS_STDIN)), ], ) - results = hosts.all.command(cmd="dls {0}".format(DEFAULT_DATA_SET)) + results = hosts.all.command(cmd="dls {0}".format(default_data_set)) for result in results.contacted.values(): pprint(result) assert "BGYSC1103E" not in result.get("stderr", "") finally: - hosts.all.zos_data_set(name=DEFAULT_DATA_SET, state="absent") + hosts.all.zos_data_set(name=default_data_set, state="absent") @pytest.mark.parametrize( "data_set_type", ["ksds", "rrds", "lds", "esds"], ) -def test_data_set_types_vsam(ansible_zos_module, data_set_type): +def test_data_set_types_vsam(ansible_zos_module, data_set_type, volumes_on_systems): try: hosts = ansible_zos_module - hosts.all.zos_data_set(name=DEFAULT_DATA_SET, state="absent") + volumes = Volume_Handler(volumes_on_systems) + volume_1 = volumes.get_available_vol() + default_data_set = get_tmp_ds_name() + hosts.all.zos_data_set(name=default_data_set, state="absent") results = hosts.all.zos_mvs_raw( program_name="idcams", auth=True, @@ -349,22 +368,22 @@ def test_data_set_types_vsam(ansible_zos_module, data_set_type): dict( dd_data_set=dict( dd_name=SYSPRINT_DD, - data_set_name=DEFAULT_DATA_SET, + data_set_name=default_data_set, disposition="new", type=data_set_type, - volumes=[DEFAULT_VOLUME], + volumes=[volume_1], ), ) if data_set_type != "ksds" else dict( dd_data_set=dict( dd_name=SYSPRINT_DD, - data_set_name=DEFAULT_DATA_SET, + data_set_name=default_data_set, disposition="new", type=data_set_type, key_length=5, key_offset=0, - volumes=[DEFAULT_VOLUME], + volumes=[volume_1], ), ), dict(dd_input=dict(dd_name=SYSIN_DD, content=IDCAMS_STDIN)), @@ -372,22 +391,24 @@ def test_data_set_types_vsam(ansible_zos_module, data_set_type): ) # * we hope to see EDC5041I An error was detected at the system level when opening a file. # * because that means data set exists and is VSAM so we can't read it - results = hosts.all.command(cmd="head \"//'{0}'\"".format(DEFAULT_DATA_SET)) + results = hosts.all.command(cmd="head \"//'{0}'\"".format(default_data_set)) for result in results.contacted.values(): - pprint(result) assert "EDC5041I" in result.get("stderr", "") finally: - hosts.all.zos_data_set(name=DEFAULT_DATA_SET, state="absent") + hosts.all.zos_data_set(name=default_data_set, state="absent") @pytest.mark.parametrize( "record_format", ["u", "vb", "vba", "fb", "fba"], ) -def test_record_formats(ansible_zos_module, record_format): +def test_record_formats(ansible_zos_module, record_format, volumes_on_systems): try: hosts = ansible_zos_module - hosts.all.zos_data_set(name=DEFAULT_DATA_SET, state="absent") + volumes = Volume_Handler(volumes_on_systems) + volume_1 = volumes.get_available_vol() + default_data_set = get_tmp_ds_name() + hosts.all.zos_data_set(name=default_data_set, state="absent") results = hosts.all.zos_mvs_raw( program_name="idcams", auth=True, @@ -395,23 +416,23 @@ def test_record_formats(ansible_zos_module, record_format): dict( dd_data_set=dict( dd_name=SYSPRINT_DD, - data_set_name=DEFAULT_DATA_SET, + data_set_name=default_data_set, disposition="new", record_format=record_format, - volumes=[DEFAULT_VOLUME], + volumes=[volume_1], ), ), dict(dd_input=dict(dd_name=SYSIN_DD, content=IDCAMS_STDIN)), ], ) - results = hosts.all.command(cmd="dls -l {0}".format(DEFAULT_DATA_SET)) + results = hosts.all.command(cmd="dls -l {0}".format(default_data_set)) for result in results.contacted.values(): pprint(result) assert str(" {0} ".format(record_format.upper())) in result.get("stdout", "") finally: - hosts.all.zos_data_set(name=DEFAULT_DATA_SET, state="absent") + hosts.all.zos_data_set(name=default_data_set, state="absent") @pytest.mark.parametrize( @@ -424,15 +445,18 @@ def test_record_formats(ansible_zos_module, record_format): ), ], ) -def test_return_content_type(ansible_zos_module, return_content_type, expected): +def test_return_content_type(ansible_zos_module, return_content_type, expected, volumes_on_systems): try: hosts = ansible_zos_module + volumes = Volume_Handler(volumes_on_systems) + volume_1 = volumes.get_available_vol() + default_data_set = get_tmp_ds_name() results = hosts.all.zos_data_set( - name=DEFAULT_DATA_SET, + name=default_data_set, type="seq", state="present", replace=True, - volumes=[DEFAULT_VOLUME], + volumes=[volume_1], ) results = hosts.all.zos_mvs_raw( program_name="idcams", @@ -441,9 +465,9 @@ def test_return_content_type(ansible_zos_module, return_content_type, expected): dict( dd_data_set=dict( dd_name=SYSPRINT_DD, - data_set_name=DEFAULT_DATA_SET, + data_set_name=default_data_set, disposition="shr", - volumes=[DEFAULT_VOLUME], + volumes=[volume_1], return_content=dict(type=return_content_type), ), ), @@ -457,7 +481,7 @@ def test_return_content_type(ansible_zos_module, return_content_type, expected): assert len(result.get("dd_names", [])) > 0 assert expected in "\n".join(result.get("dd_names")[0].get("content", [])) finally: - hosts.all.zos_data_set(name=DEFAULT_DATA_SET, state="absent", volumes=[DEFAULT_VOLUME]) + hosts.all.zos_data_set(name=default_data_set, state="absent", volumes=[volume_1]) @pytest.mark.parametrize( @@ -472,16 +496,19 @@ def test_return_content_type(ansible_zos_module, return_content_type, expected): ], ) def test_return_text_content_encodings( - ansible_zos_module, src_encoding, response_encoding, expected + ansible_zos_module, src_encoding, response_encoding, expected, volumes_on_systems ): try: + volumes = Volume_Handler(volumes_on_systems) + volume_1 = volumes.get_available_vol() hosts = ansible_zos_module + default_data_set = get_tmp_ds_name() results = hosts.all.zos_data_set( - name=DEFAULT_DATA_SET, + name=default_data_set, type="seq", state="present", replace=True, - volumes=[DEFAULT_VOLUME], + volumes=[volume_1], ) results = hosts.all.zos_mvs_raw( program_name="idcams", @@ -490,9 +517,9 @@ def test_return_text_content_encodings( dict( dd_data_set=dict( dd_name=SYSPRINT_DD, - data_set_name=DEFAULT_DATA_SET, + data_set_name=default_data_set, disposition="shr", - volumes=[DEFAULT_VOLUME], + volumes=[volume_1], return_content=dict( type="text", src_encoding=src_encoding, @@ -509,14 +536,15 @@ def test_return_text_content_encodings( assert len(result.get("dd_names", [])) > 0 assert expected in "\n".join(result.get("dd_names")[0].get("content", [])) finally: - hosts.all.zos_data_set(name=DEFAULT_DATA_SET, state="absent", volumes=[DEFAULT_VOLUME]) + hosts.all.zos_data_set(name=default_data_set, state="absent", volumes=[volume_1]) def test_reuse_existing_data_set(ansible_zos_module): try: hosts = ansible_zos_module + default_data_set = get_tmp_ds_name() hosts.all.zos_data_set( - name=DEFAULT_DATA_SET, type="seq", state="present", replace=True + name=default_data_set, type="seq", state="present", replace=True ) results = hosts.all.zos_mvs_raw( program_name="IDCAMS", @@ -525,7 +553,7 @@ def test_reuse_existing_data_set(ansible_zos_module): dict( dd_data_set=dict( dd_name=SYSPRINT_DD, - data_set_name=DEFAULT_DATA_SET, + data_set_name=default_data_set, disposition="new", type="seq", reuse=True, @@ -541,14 +569,15 @@ def test_reuse_existing_data_set(ansible_zos_module): assert result.get("ret_code", {}).get("code", 0) == 0 assert len(result.get("dd_names", [])) > 0 finally: - hosts.all.zos_data_set(name=DEFAULT_DATA_SET, state="absent") + hosts.all.zos_data_set(name=default_data_set, state="absent") def test_replace_existing_data_set(ansible_zos_module): try: hosts = ansible_zos_module + default_data_set = get_tmp_ds_name() hosts.all.zos_data_set( - name=DEFAULT_DATA_SET, type="seq", state="present", replace=True + name=default_data_set, type="seq", state="present", replace=True ) results = hosts.all.zos_mvs_raw( program_name="IDCAMS", @@ -557,7 +586,7 @@ def test_replace_existing_data_set(ansible_zos_module): dict( dd_data_set=dict( dd_name=SYSPRINT_DD, - data_set_name=DEFAULT_DATA_SET, + data_set_name=default_data_set, disposition="new", type="seq", replace=True, @@ -573,13 +602,14 @@ def test_replace_existing_data_set(ansible_zos_module): assert result.get("ret_code", {}).get("code", 0) == 0 assert len(result.get("dd_names", [])) > 0 finally: - hosts.all.zos_data_set(name=DEFAULT_DATA_SET, state="absent") + hosts.all.zos_data_set(name=default_data_set, state="absent") def test_replace_existing_data_set_make_backup(ansible_zos_module): try: hosts = ansible_zos_module - hosts.all.zos_data_set(name=DEFAULT_DATA_SET, state="absent") + default_data_set = get_tmp_ds_name() + hosts.all.zos_data_set(name=default_data_set, state="absent") hosts.all.zos_mvs_raw( program_name="IDCAMS", auth=True, @@ -587,7 +617,7 @@ def test_replace_existing_data_set_make_backup(ansible_zos_module): dict( dd_data_set=dict( dd_name=SYSPRINT_DD, - data_set_name=DEFAULT_DATA_SET, + data_set_name=default_data_set, disposition="new", type="seq", replace=True, @@ -604,7 +634,7 @@ def test_replace_existing_data_set_make_backup(ansible_zos_module): dict( dd_data_set=dict( dd_name=SYSPRINT_DD, - data_set_name=DEFAULT_DATA_SET, + data_set_name=default_data_set, disposition="new", type="seq", replace=True, @@ -629,13 +659,13 @@ def test_replace_existing_data_set_make_backup(ansible_zos_module): ) assert ( result.get("backups")[0].get("original_name").lower() - == DEFAULT_DATA_SET.lower() + == default_data_set.lower() ) for result in results2.contacted.values(): pprint(result) assert "IDCAMS" in result.get("stdout", "") finally: - hosts.all.zos_data_set(name=DEFAULT_DATA_SET, state="absent") + hosts.all.zos_data_set(name=default_data_set, state="absent") # ---------------------------------------------------------------------------- # @@ -646,7 +676,8 @@ def test_replace_existing_data_set_make_backup(ansible_zos_module): def test_input_empty(ansible_zos_module): try: hosts = ansible_zos_module - hosts.all.zos_data_set(name=DEFAULT_DATA_SET, state="absent") + default_data_set = get_tmp_ds_name() + hosts.all.zos_data_set(name=default_data_set, state="absent") results = hosts.all.zos_mvs_raw( program_name="idcams", auth=True, @@ -654,7 +685,7 @@ def test_input_empty(ansible_zos_module): dict( dd_data_set=dict( dd_name=SYSPRINT_DD, - data_set_name=DEFAULT_DATA_SET, + data_set_name=default_data_set, disposition="new", type="seq", return_content=dict(type="text"), @@ -668,13 +699,14 @@ def test_input_empty(ansible_zos_module): assert result.get("ret_code", {}).get("code", -1) == 0 assert len(result.get("dd_names", [])) > 0 finally: - hosts.all.zos_data_set(name=DEFAULT_DATA_SET, state="absent") + hosts.all.zos_data_set(name=default_data_set, state="absent") def test_input_large(ansible_zos_module): try: hosts = ansible_zos_module - hosts.all.zos_data_set(name=DEFAULT_DATA_SET, state="absent") + default_data_set = get_tmp_ds_name() + hosts.all.zos_data_set(name=default_data_set, state="absent") contents = "" for i in range(50000): contents += "this is line {0}\n".format(i) @@ -685,7 +717,7 @@ def test_input_large(ansible_zos_module): dict( dd_data_set=dict( dd_name=SYSPRINT_DD, - data_set_name=DEFAULT_DATA_SET, + data_set_name=default_data_set, disposition="new", type="seq", return_content=dict(type="text"), @@ -700,13 +732,14 @@ def test_input_large(ansible_zos_module): assert len(result.get("dd_names", [])) > 0 assert len(result.get("dd_names", [{}])[0].get("content")) > 100000 finally: - hosts.all.zos_data_set(name=DEFAULT_DATA_SET, state="absent") + hosts.all.zos_data_set(name=default_data_set, state="absent") def test_input_provided_as_list(ansible_zos_module): try: hosts = ansible_zos_module - hosts.all.zos_data_set(name=DEFAULT_DATA_SET, state="absent") + default_data_set = get_tmp_ds_name() + hosts.all.zos_data_set(name=default_data_set, state="absent") contents = [] for i in range(10): contents.append(IDCAMS_STDIN) @@ -717,7 +750,7 @@ def test_input_provided_as_list(ansible_zos_module): dict( dd_data_set=dict( dd_name=SYSPRINT_DD, - data_set_name=DEFAULT_DATA_SET, + data_set_name=default_data_set, disposition="new", type="seq", return_content=dict(type="text"), @@ -732,7 +765,7 @@ def test_input_provided_as_list(ansible_zos_module): assert len(result.get("dd_names", [])) > 0 assert len(result.get("dd_names", [{}])[0].get("content")) > 100 finally: - hosts.all.zos_data_set(name=DEFAULT_DATA_SET, state="absent") + hosts.all.zos_data_set(name=default_data_set, state="absent") @pytest.mark.parametrize( @@ -748,7 +781,8 @@ def test_input_provided_as_list(ansible_zos_module): def test_input_return_content_types(ansible_zos_module, return_content_type, expected): try: hosts = ansible_zos_module - hosts.all.zos_data_set(name=DEFAULT_DATA_SET, state="absent") + default_data_set = get_tmp_ds_name() + hosts.all.zos_data_set(name=default_data_set, state="absent") results = hosts.all.zos_mvs_raw( program_name="idcams", auth=True, @@ -756,7 +790,7 @@ def test_input_return_content_types(ansible_zos_module, return_content_type, exp dict( dd_data_set=dict( dd_name=SYSPRINT_DD, - data_set_name=DEFAULT_DATA_SET, + data_set_name=default_data_set, disposition="new", type="seq", ), @@ -776,7 +810,7 @@ def test_input_return_content_types(ansible_zos_module, return_content_type, exp assert len(result.get("dd_names", [])) > 0 assert expected in "\n".join(result.get("dd_names", [{}])[0].get("content")) finally: - hosts.all.zos_data_set(name=DEFAULT_DATA_SET, state="absent") + hosts.all.zos_data_set(name=default_data_set, state="absent") @pytest.mark.parametrize( @@ -799,7 +833,8 @@ def test_input_return_text_content_encodings( ): try: hosts = ansible_zos_module - hosts.all.zos_data_set(name=DEFAULT_DATA_SET, state="absent") + default_data_set = get_tmp_ds_name() + hosts.all.zos_data_set(name=default_data_set, state="absent") results = hosts.all.zos_mvs_raw( program_name="idcams", auth=True, @@ -807,7 +842,7 @@ def test_input_return_text_content_encodings( dict( dd_data_set=dict( dd_name=SYSPRINT_DD, - data_set_name=DEFAULT_DATA_SET, + data_set_name=default_data_set, disposition="new", type="seq", ), @@ -831,7 +866,7 @@ def test_input_return_text_content_encodings( assert len(result.get("dd_names", [])) > 0 assert expected in "\n".join(result.get("dd_names", [{}])[0].get("content")) finally: - hosts.all.zos_data_set(name=DEFAULT_DATA_SET, state="absent") + hosts.all.zos_data_set(name=default_data_set, state="absent") # ---------------------------------------------------------------------------- # @@ -1302,7 +1337,9 @@ def test_dummy(ansible_zos_module): def test_concatenation_with_data_set_dd_and_response(ansible_zos_module): try: hosts = ansible_zos_module - hosts.all.zos_data_set(name=DEFAULT_DATA_SET, state="absent") + default_data_set = get_tmp_ds_name() + DEFAULT_DATA_SET_2 = get_tmp_ds_name() + hosts.all.zos_data_set(name=default_data_set, state="absent") hosts.all.zos_data_set(name=DEFAULT_DATA_SET_2, state="absent") results = hosts.all.zos_mvs_raw( program_name="idcams", @@ -1314,7 +1351,7 @@ def test_concatenation_with_data_set_dd_and_response(ansible_zos_module): dds=[ dict( dd_data_set=dict( - data_set_name=DEFAULT_DATA_SET, + data_set_name=default_data_set, disposition="new", type="seq", return_content=dict(type="text"), @@ -1345,14 +1382,16 @@ def test_concatenation_with_data_set_dd_and_response(ansible_zos_module): assert len(result.get("dd_names", [])) > 0 assert "IDCAMS" in "\n".join(result.get("dd_names")[0].get("content", [])) finally: - hosts.all.zos_data_set(name=DEFAULT_DATA_SET, state="absent") + hosts.all.zos_data_set(name=default_data_set, state="absent") hosts.all.zos_data_set(name=DEFAULT_DATA_SET_2, state="absent") def test_concatenation_with_data_set_dd_with_replace_and_backup(ansible_zos_module): try: hosts = ansible_zos_module - hosts.all.zos_data_set(name=DEFAULT_DATA_SET, state="present", type="seq") + default_data_set = get_tmp_ds_name() + DEFAULT_DATA_SET_2 = get_tmp_ds_name() + hosts.all.zos_data_set(name=default_data_set, state="present", type="seq") hosts.all.zos_data_set(name=DEFAULT_DATA_SET_2, state="present", type="seq") results = hosts.all.zos_mvs_raw( program_name="idcams", @@ -1364,7 +1403,7 @@ def test_concatenation_with_data_set_dd_with_replace_and_backup(ansible_zos_modu dds=[ dict( dd_data_set=dict( - data_set_name=DEFAULT_DATA_SET, + data_set_name=default_data_set, disposition="new", type="seq", replace=True, @@ -1403,7 +1442,7 @@ def test_concatenation_with_data_set_dd_with_replace_and_backup(ansible_zos_modu ) assert ( result.get("backups")[0].get("original_name").lower() - == DEFAULT_DATA_SET.lower() + == default_data_set.lower() ) assert ( result.get("backups")[1].get("original_name").lower() @@ -1413,14 +1452,17 @@ def test_concatenation_with_data_set_dd_with_replace_and_backup(ansible_zos_modu assert len(result.get("dd_names", [])) > 0 assert "IDCAMS" in "\n".join(result.get("dd_names")[0].get("content", [])) finally: - hosts.all.zos_data_set(name=DEFAULT_DATA_SET, state="absent") + hosts.all.zos_data_set(name=default_data_set, state="absent") hosts.all.zos_data_set(name=DEFAULT_DATA_SET_2, state="absent") def test_concatenation_with_data_set_member(ansible_zos_module): try: hosts = ansible_zos_module - hosts.all.zos_data_set(name=DEFAULT_DATA_SET, state="present", type="pds") + default_data_set = get_tmp_ds_name() + DEFAULT_DATA_SET_2 = get_tmp_ds_name() + DEFAULT_DATA_SET_WITH_MEMBER = default_data_set + '(MEM)' + hosts.all.zos_data_set(name=default_data_set, state="present", type="pds") hosts.all.zos_data_set(name=DEFAULT_DATA_SET_2, state="absent") results = hosts.all.zos_mvs_raw( program_name="idcams", @@ -1467,13 +1509,14 @@ def test_concatenation_with_data_set_member(ansible_zos_module): pprint(result) assert "IDCAMS" in result.get("stdout", "") finally: - hosts.all.zos_data_set(name=DEFAULT_DATA_SET, state="absent") + hosts.all.zos_data_set(name=default_data_set, state="absent") hosts.all.zos_data_set(name=DEFAULT_DATA_SET_2, state="absent") -def test_concatenation_with_unix_dd_and_response(ansible_zos_module): +def test_concatenation_with_unix_dd_and_response_datasets(ansible_zos_module): try: hosts = ansible_zos_module + DEFAULT_DATA_SET_2 = get_tmp_ds_name() hosts.all.file(path=DEFAULT_PATH, state="directory") hosts.all.file(path=DEFAULT_PATH_WITH_FILE, state="absent") hosts.all.zos_data_set(name=DEFAULT_DATA_SET_2, state="absent") @@ -1520,7 +1563,7 @@ def test_concatenation_with_unix_dd_and_response(ansible_zos_module): hosts.all.zos_data_set(name=DEFAULT_DATA_SET_2, state="absent") -def test_concatenation_with_unix_dd_and_response(ansible_zos_module): +def test_concatenation_with_unix_dd_and_response_uss(ansible_zos_module): try: hosts = ansible_zos_module hosts.all.file(path=DEFAULT_PATH, state="directory") @@ -1617,7 +1660,7 @@ def test_concatenation_fail_with_unsupported_dd_type(ansible_zos_module): ), dict( dd_data_set=dict( - data_set_name=DEFAULT_DATA_SET, + data_set_name="ANSIBLE.USER.PRIVATE.TEST", disposition="shr", return_content=dict(type="text"), ) @@ -1649,7 +1692,7 @@ def test_concatenation_fail_with_unsupported_dd_type(ansible_zos_module): dds=[ dict( dd_data_set=dict( - data_set_name=DEFAULT_DATA_SET, + data_set_name="ANSIBLE.USER.PRIVATE.TEST", disposition="shr", return_content=dict(type="text"), ) @@ -1693,7 +1736,7 @@ def test_concatenation_fail_with_unsupported_dd_type(ansible_zos_module): ), dict( dd_data_set=dict( - data_set_name=DEFAULT_DATA_SET, + data_set_name="ANSIBLE.USER.PRIVATE.TEST", disposition="shr", return_content=dict(type="text"), ) @@ -1722,12 +1765,12 @@ def test_concatenation_fail_with_unsupported_dd_type(ansible_zos_module): def test_concatenation_all_dd_types(ansible_zos_module, dds, input_pos, input_content): try: hosts = ansible_zos_module - hosts.all.zos_data_set(name=DEFAULT_DATA_SET, state="present", type="seq") + default_data_set = "ANSIBLE.USER.PRIVATE.TEST" + hosts.all.zos_data_set(name=default_data_set, state="present", type="seq") hosts.all.file(path=DEFAULT_PATH, state="directory") hosts.all.file(path=DEFAULT_PATH_WITH_FILE, state="absent") results = hosts.all.zos_mvs_raw(program_name="idcams", auth=True, dds=dds) for result in results.contacted.values(): - pprint(result) assert result.get("ret_code", {}).get("code", -1) == 0 assert len(result.get("dd_names", [])) > 2 assert "IDCAMS" in "\n".join(result.get("dd_names")[0].get("content", [])) @@ -1736,7 +1779,7 @@ def test_concatenation_all_dd_types(ansible_zos_module, dds, input_pos, input_co ) finally: hosts.all.file(name=DEFAULT_PATH, state="absent") - hosts.all.zos_data_set(name=DEFAULT_DATA_SET, state="absent") + hosts.all.zos_data_set(name=default_data_set, state="absent") # ---------------------------------------------------------------------------- # @@ -1747,7 +1790,8 @@ def test_concatenation_all_dd_types(ansible_zos_module, dds, input_pos, input_co def test_authorized_program_run_unauthorized(ansible_zos_module): try: hosts = ansible_zos_module - hosts.all.zos_data_set(name=DEFAULT_DATA_SET, state="absent") + default_data_set = get_tmp_ds_name() + hosts.all.zos_data_set(name=default_data_set, state="absent") results = hosts.all.zos_mvs_raw( program_name="idcams", auth=False, @@ -1759,13 +1803,14 @@ def test_authorized_program_run_unauthorized(ansible_zos_module): assert len(result.get("dd_names", [])) == 0 assert "BGYSC0236E" in result.get("msg", "") finally: - hosts.all.zos_data_set(name=DEFAULT_DATA_SET, state="absent") + hosts.all.zos_data_set(name=default_data_set, state="absent") def test_unauthorized_program_run_authorized(ansible_zos_module): try: hosts = ansible_zos_module - hosts.all.zos_data_set(name=DEFAULT_DATA_SET, state="absent") + default_data_set = get_tmp_ds_name() + hosts.all.zos_data_set(name=default_data_set, state="absent") results = hosts.all.zos_mvs_raw( program_name="DSPURX00", auth=True, @@ -1777,13 +1822,14 @@ def test_unauthorized_program_run_authorized(ansible_zos_module): assert len(result.get("dd_names", [])) == 0 assert "BGYSC0215E" in result.get("msg", "") finally: - hosts.all.zos_data_set(name=DEFAULT_DATA_SET, state="absent") + hosts.all.zos_data_set(name=default_data_set, state="absent") def test_authorized_program_run_authorized(ansible_zos_module): try: hosts = ansible_zos_module - hosts.all.zos_data_set(name=DEFAULT_DATA_SET, state="absent") + default_data_set = get_tmp_ds_name() + hosts.all.zos_data_set(name=default_data_set, state="absent") results = hosts.all.zos_mvs_raw( program_name="idcams", auth=True, @@ -1802,13 +1848,14 @@ def test_authorized_program_run_authorized(ansible_zos_module): assert len(result.get("dd_names", [])) == 1 assert "BGYSC0236E" not in result.get("msg", "") finally: - hosts.all.zos_data_set(name=DEFAULT_DATA_SET, state="absent") + hosts.all.zos_data_set(name=default_data_set, state="absent") def test_unauthorized_program_run_unauthorized(ansible_zos_module): try: hosts = ansible_zos_module - hosts.all.zos_data_set(name=DEFAULT_DATA_SET, state="absent") + default_data_set = get_tmp_ds_name() + hosts.all.zos_data_set(name=default_data_set, state="absent") results = hosts.all.zos_mvs_raw( program_name="IEFBR14", auth=False, @@ -1820,7 +1867,7 @@ def test_unauthorized_program_run_unauthorized(ansible_zos_module): assert len(result.get("dd_names", [])) == 0 assert "BGYSC0215E" not in result.get("msg", "") finally: - hosts.all.zos_data_set(name=DEFAULT_DATA_SET, state="absent") + hosts.all.zos_data_set(name=default_data_set, state="absent") def test_missing_program_name(ansible_zos_module): diff --git a/tests/functional/modules/test_zos_tso_command_func.py b/tests/functional/modules/test_zos_tso_command_func.py index 1a4994800..9860e6d12 100644 --- a/tests/functional/modules/test_zos_tso_command_func.py +++ b/tests/functional/modules/test_zos_tso_command_func.py @@ -18,8 +18,8 @@ import ansible.constants import ansible.errors import ansible.utils +from ibm_zos_core.tests.helpers.dataset import get_tmp_ds_name -DEFAULT_TEMP_DATASET="imstestl.ims1.temp.ps" def test_zos_tso_command_run_help(ansible_zos_module): hosts = ansible_zos_module @@ -49,8 +49,9 @@ def test_zos_tso_command_long_command_128_chars(ansible_zos_module): def test_zos_tso_command_allocate_listing_delete(ansible_zos_module): hosts = ansible_zos_module + default_temp_dataset = get_tmp_ds_name() command_string = [ - "alloc da('{0}') catalog lrecl(133) blksize(13300) recfm(f b) dsorg(po) cylinders space(5,5) dir(5)".format(DEFAULT_TEMP_DATASET) + "alloc da('{0}') catalog lrecl(133) blksize(13300) recfm(f b) dsorg(po) cylinders space(5,5) dir(5)".format(default_temp_dataset) ] results_allocate = hosts.all.zos_tso_command(commands=command_string) # Validate the correct allocation of dataset @@ -59,34 +60,34 @@ def test_zos_tso_command_allocate_listing_delete(ansible_zos_module): assert item.get("rc") == 0 assert result.get("changed") is True # Validate listds of datasets and validate LISTDS using alias param 'command' of auth command - results = hosts.all.zos_tso_command(commands=["LISTDS '{0}'".format(DEFAULT_TEMP_DATASET)]) + results = hosts.all.zos_tso_command(commands=["LISTDS '{0}'".format(default_temp_dataset)]) for result in results.contacted.values(): for item in result.get("output"): assert item.get("rc") == 0 assert result.get("changed") is True # Validate LISTDS using alias param 'command' - results = hosts.all.zos_tso_command(command="LISTDS '{0}'".format(DEFAULT_TEMP_DATASET)) + results = hosts.all.zos_tso_command(command="LISTDS '{0}'".format(default_temp_dataset)) for result in results.contacted.values(): for item in result.get("output"): assert item.get("rc") == 0 assert result.get("changed") is True # Validate LISTCAT command and an unauth command results = hosts.all.zos_tso_command( - commands=["LISTCAT ENT('{0}')".format(DEFAULT_TEMP_DATASET)] + commands=["LISTCAT ENT('{0}')".format(default_temp_dataset)] ) for result in results.contacted.values(): for item in result.get("output"): assert item.get("rc") == 0 assert result.get("changed") is True # Validate remove dataset - results = hosts.all.zos_tso_command(commands=["delete '{0}'".format(DEFAULT_TEMP_DATASET)]) + results = hosts.all.zos_tso_command(commands=["delete '{0}'".format(default_temp_dataset)]) for result in results.contacted.values(): for item in result.get("output"): assert item.get("rc") == 0 assert result.get("changed") is True # Expect the tso_command to fail here because the previous command will have already deleted the data set # Validate data set was removed by previous call - results = hosts.all.zos_tso_command(commands=["delete '{0}'".format(DEFAULT_TEMP_DATASET)]) + results = hosts.all.zos_tso_command(commands=["delete '{0}'".format(default_temp_dataset)]) for result in results.contacted.values(): for item in result.get("output"): assert item.get("rc") == 8 diff --git a/tests/functional/modules/test_zos_unarchive_func.py b/tests/functional/modules/test_zos_unarchive_func.py index 2faba0023..c0b1fe293 100644 --- a/tests/functional/modules/test_zos_unarchive_func.py +++ b/tests/functional/modules/test_zos_unarchive_func.py @@ -18,6 +18,7 @@ import pytest import tempfile from tempfile import mkstemp +from ibm_zos_core.tests.helpers.dataset import get_tmp_ds_name __metaclass__ = type @@ -27,10 +28,6 @@ f"{USS_TEMP_DIR}/bar.txt": "bar sample content", f"{USS_TEMP_DIR}/empty.txt":""} USS_EXCLUSION_FILE = f"{USS_TEMP_DIR}/foo.txt" -TEST_PS = "USER.PRIVATE.TESTDS" -TEST_PDS = "USER.PRIVATE.TESTPDS" -HLQ = "USER" -MVS_DEST_ARCHIVE = "USER.PRIVATE.ARCHIVE" USS_DEST_ARCHIVE = "testarchive.dzp" @@ -347,7 +344,7 @@ def test_uss_unarchive_copy_to_remote(ansible_zos_module): """ - +@pytest.mark.ds @pytest.mark.parametrize( "format", [ "terse", @@ -355,9 +352,9 @@ def test_uss_unarchive_copy_to_remote(ansible_zos_module): ]) @pytest.mark.parametrize( "data_set", [ - dict(name=TEST_PS, dstype="SEQ", members=[""]), - dict(name=TEST_PDS, dstype="PDS", members=["MEM1", "MEM2"]), - dict(name=TEST_PDS, dstype="PDSE", members=["MEM1", "MEM2"]), + dict(dstype="SEQ", members=[""]), + dict(dstype="PDS", members=["MEM1", "MEM2"]), + dict(dstype="PDSE", members=["MEM1", "MEM2"]), ] ) @pytest.mark.parametrize( @@ -369,12 +366,15 @@ def test_uss_unarchive_copy_to_remote(ansible_zos_module): def test_mvs_unarchive_single_data_set(ansible_zos_module, format, data_set, record_length, record_format): try: hosts = ansible_zos_module + MVS_DEST_ARCHIVE = get_tmp_ds_name() + DATASET = get_tmp_ds_name(3) + HLQ = "ANSIBLE" # Clean env - hosts.all.zos_data_set(name=data_set.get("name"), state="absent") + hosts.all.zos_data_set(name=DATASET, state="absent") hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") # Create source data set hosts.all.zos_data_set( - name=data_set.get("name"), + name=DATASET, type=data_set.get("dstype"), state="present", record_length=record_length, @@ -384,7 +384,7 @@ def test_mvs_unarchive_single_data_set(ansible_zos_module, format, data_set, rec if data_set.get("dstype") in ["PDS", "PDSE"]: for member in data_set.get("members"): hosts.all.zos_data_set( - name=f"{data_set.get('name')}({member})", + name=f"{DATASET}({member})", type="member", state="present" ) @@ -396,19 +396,19 @@ def test_mvs_unarchive_single_data_set(ansible_zos_module, format, data_set, rec test_line = "a" * record_length for member in data_set.get("members"): if member == "": - ds_to_write = f"{data_set.get('name')}" + ds_to_write = f"{DATASET}" else: - ds_to_write = f"{data_set.get('name')}({member})" + ds_to_write = f"{DATASET}({member})" hosts.all.shell(cmd=f"decho '{test_line}' \"{ds_to_write}\"") format_dict = dict(name=format) if format == "terse": format_dict["format_options"] = dict(terse_pack="SPACK") archive_result = hosts.all.zos_archive( - src=data_set.get("name"), + src=DATASET, dest=MVS_DEST_ARCHIVE, format=format_dict, - dest_data_set=dict(name=data_set.get("name"), + dest_data_set=dict(name=DATASET, type="SEQ", record_format=record_format, record_length=record_length), @@ -417,12 +417,12 @@ def test_mvs_unarchive_single_data_set(ansible_zos_module, format, data_set, rec for result in archive_result.contacted.values(): assert result.get("changed") is True assert result.get("dest") == MVS_DEST_ARCHIVE - assert data_set.get("name") in result.get("archived") - cmd_result = hosts.all.shell(cmd = "dls {0}.*".format(HLQ)) + assert DATASET in result.get("archived") + cmd_result = hosts.all.shell(cmd = """dls "{0}.*" """.format(HLQ)) for c_result in cmd_result.contacted.values(): assert MVS_DEST_ARCHIVE in c_result.get("stdout") - hosts.all.zos_data_set(name=data_set.get("name"), state="absent") + hosts.all.zos_data_set(name=DATASET, state="absent") if format == "terse": del format_dict["format_options"]["terse_pack"] @@ -431,7 +431,7 @@ def test_mvs_unarchive_single_data_set(ansible_zos_module, format, data_set, rec src=MVS_DEST_ARCHIVE, format=format_dict, remote_src=True, - dest_data_set=dict(name=data_set.get("name"), + dest_data_set=dict(name=DATASET, type=data_set.get("dstype"), record_format=record_format, record_length=record_length), @@ -442,19 +442,19 @@ def test_mvs_unarchive_single_data_set(ansible_zos_module, format, data_set, rec assert result.get("failed", False) is False # assert result.get("dest") == MVS_DEST_ARCHIVE # assert data_set.get("name") in result.get("archived") - cmd_result = hosts.all.shell(cmd = "dls {0}.*".format(HLQ)) + cmd_result = hosts.all.shell(cmd = """dls "{0}.*" """.format(HLQ)) for c_result in cmd_result.contacted.values(): - assert data_set.get("name") in c_result.get("stdout") + assert DATASET in c_result.get("stdout") # Check data integrity after unarchive cat_result = hosts.all.shell(cmd=f"dcat \"{ds_to_write}\"") for result in cat_result.contacted.values(): assert result.get("stdout") == test_line finally: - hosts.all.zos_data_set(name=data_set.get("name"), state="absent") + hosts.all.zos_data_set(name=DATASET, state="absent") hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") - +@pytest.mark.ds @pytest.mark.parametrize( "format", [ "terse", @@ -462,9 +462,9 @@ def test_mvs_unarchive_single_data_set(ansible_zos_module, format, data_set, rec ]) @pytest.mark.parametrize( "data_set", [ - dict(name=TEST_PS, dstype="SEQ", members=[""]), - dict(name=TEST_PDS, dstype="PDS", members=["MEM1", "MEM2"]), - dict(name=TEST_PDS, dstype="PDSE", members=["MEM1", "MEM2"]), + dict(dstype="SEQ", members=[""]), + dict(dstype="PDS", members=["MEM1", "MEM2"]), + dict(dstype="PDSE", members=["MEM1", "MEM2"]), ] ) @pytest.mark.parametrize( @@ -476,12 +476,15 @@ def test_mvs_unarchive_single_data_set(ansible_zos_module, format, data_set, rec def test_mvs_unarchive_single_data_set_use_adrdssu(ansible_zos_module, format, data_set, record_length, record_format): try: hosts = ansible_zos_module + MVS_DEST_ARCHIVE = get_tmp_ds_name() + DATASET = get_tmp_ds_name(3) + HLQ = "ANSIBLE" # Clean env - hosts.all.zos_data_set(name=data_set.get("name"), state="absent") + hosts.all.zos_data_set(name=DATASET, state="absent") hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") # Create source data set hosts.all.zos_data_set( - name=data_set.get("name"), + name=DATASET, type=data_set.get("dstype"), state="present", record_length=record_length, @@ -491,7 +494,7 @@ def test_mvs_unarchive_single_data_set_use_adrdssu(ansible_zos_module, format, d if data_set.get("dstype") in ["PDS", "PDSE"]: for member in data_set.get("members"): hosts.all.zos_data_set( - name=f"{data_set.get('name')}({member})", + name=f"{DATASET}({member})", type="member", state="present" ) @@ -503,9 +506,9 @@ def test_mvs_unarchive_single_data_set_use_adrdssu(ansible_zos_module, format, d test_line = "a" * record_length for member in data_set.get("members"): if member == "": - ds_to_write = f"{data_set.get('name')}" + ds_to_write = f"{DATASET}" else: - ds_to_write = f"{data_set.get('name')}({member})" + ds_to_write = f"{DATASET}({member})" hosts.all.shell(cmd=f"decho '{test_line}' \"{ds_to_write}\"") format_dict = dict(name=format) @@ -513,7 +516,7 @@ def test_mvs_unarchive_single_data_set_use_adrdssu(ansible_zos_module, format, d if format == "terse": format_dict["format_options"].update(terse_pack="SPACK") archive_result = hosts.all.zos_archive( - src=data_set.get("name"), + src=DATASET, dest=MVS_DEST_ARCHIVE, format=format_dict, ) @@ -521,12 +524,12 @@ def test_mvs_unarchive_single_data_set_use_adrdssu(ansible_zos_module, format, d for result in archive_result.contacted.values(): assert result.get("changed") is True assert result.get("dest") == MVS_DEST_ARCHIVE - assert data_set.get("name") in result.get("archived") - cmd_result = hosts.all.shell(cmd = "dls {0}.*".format(HLQ)) + assert DATASET in result.get("archived") + cmd_result = hosts.all.shell(cmd = """dls "{0}.*" """.format(HLQ)) for c_result in cmd_result.contacted.values(): assert MVS_DEST_ARCHIVE in c_result.get("stdout") - hosts.all.zos_data_set(name=data_set.get("name"), state="absent") + hosts.all.zos_data_set(name=DATASET, state="absent") if format == "terse": del format_dict["format_options"]["terse_pack"] @@ -543,14 +546,14 @@ def test_mvs_unarchive_single_data_set_use_adrdssu(ansible_zos_module, format, d assert result.get("failed", False) is False # assert result.get("dest") == MVS_DEST_ARCHIVE # assert data_set.get("name") in result.get("archived") - cmd_result = hosts.all.shell(cmd = "dls {0}.*".format(HLQ)) + cmd_result = hosts.all.shell(cmd = """dls "{0}.*" """.format(HLQ)) for c_result in cmd_result.contacted.values(): - assert data_set.get("name") in c_result.get("stdout") + assert DATASET in c_result.get("stdout") finally: - hosts.all.zos_data_set(name=data_set.get("name"), state="absent") + hosts.all.zos_data_set(name=DATASET, state="absent") hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") - +@pytest.mark.ds @pytest.mark.parametrize( "format", [ "terse", @@ -558,16 +561,19 @@ def test_mvs_unarchive_single_data_set_use_adrdssu(ansible_zos_module, format, d ]) @pytest.mark.parametrize( "data_set", [ - dict(name=TEST_PS, dstype="SEQ"), - dict(name=TEST_PDS, dstype="PDS"), - dict(name=TEST_PDS, dstype="PDSE"), + dict(dstype="SEQ"), + dict(dstype="PDS"), + dict(dstype="PDSE"), ] ) def test_mvs_unarchive_multiple_data_set_use_adrdssu(ansible_zos_module, format, data_set): try: hosts = ansible_zos_module + MVS_DEST_ARCHIVE = get_tmp_ds_name() + DATASET = get_tmp_ds_name(3,3) + HLQ ="ANSIBLE" target_ds_list = create_multiple_data_sets(ansible_zos_module=hosts, - base_name=data_set.get("name"), + base_name=DATASET, n=1, type=data_set.get("dstype")) ds_to_write = target_ds_list @@ -592,13 +598,12 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu(ansible_zos_module, format, format_dict["format_options"].update(terse_pack="SPACK") format_dict["format_options"].update(use_adrdssu=True) archive_result = hosts.all.zos_archive( - src="{0}*".format(data_set.get("name")), + src=""" "{0}*" """.format(DATASET), dest=MVS_DEST_ARCHIVE, format=format_dict, ) - # remote data_sets from host - hosts.all.shell(cmd="drm {0}*".format(data_set.get("name"))) + hosts.all.shell(cmd="drm {0}*".format(DATASET)) if format == "terse": del format_dict["format_options"]["terse_pack"] @@ -615,16 +620,16 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu(ansible_zos_module, format, assert result.get("failed", False) is False assert result.get("src") == MVS_DEST_ARCHIVE - cmd_result = hosts.all.shell(cmd="dls {0}.*".format(HLQ)) + cmd_result = hosts.all.shell(cmd="""dls "{0}.*" """.format(HLQ)) for c_result in cmd_result.contacted.values(): for target_ds in target_ds_list: assert target_ds.get("name") in result.get("targets") assert target_ds.get("name") in c_result.get("stdout") finally: - hosts.all.shell(cmd="drm {0}*".format(data_set.get("name"))) + hosts.all.shell(cmd=""" drm "{0}*" """.format(DATASET)) hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") - +@pytest.mark.ds @pytest.mark.parametrize( "format", [ "terse", @@ -632,16 +637,19 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu(ansible_zos_module, format, ]) @pytest.mark.parametrize( "data_set", [ - dict(name=TEST_PS, dstype="SEQ"), - dict(name=TEST_PDS, dstype="PDS"), - dict(name=TEST_PDS, dstype="PDSE"), + dict(dstype="SEQ"), + dict(dstype="PDS"), + dict(dstype="PDSE"), ] ) def test_mvs_unarchive_multiple_data_set_use_adrdssu_include(ansible_zos_module, format, data_set): try: hosts = ansible_zos_module + MVS_DEST_ARCHIVE = get_tmp_ds_name() + DATASET = get_tmp_ds_name(3,3) + HLQUA = "ANSIBLE" target_ds_list = create_multiple_data_sets(ansible_zos_module=hosts, - base_name=data_set.get("name"), + base_name=DATASET, n=2, type=data_set.get("dstype")) ds_to_write = target_ds_list @@ -666,7 +674,7 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_include(ansible_zos_module, format_dict["format_options"].update(terse_pack="SPACK") format_dict["format_options"].update(use_adrdssu=True) archive_result = hosts.all.zos_archive( - src="{0}*".format(data_set.get("name")), + src=""" "{0}*" """.format(DATASET), dest=MVS_DEST_ARCHIVE, format=format_dict, ) @@ -675,12 +683,12 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_include(ansible_zos_module, assert result.get("failed", False) is False # remote data_sets from host - hosts.all.shell(cmd="drm {0}*".format(data_set.get("name"))) + hosts.all.shell(cmd="drm {0}*".format(DATASET)) if format == "terse": del format_dict["format_options"]["terse_pack"] # Unarchive action - include_ds = "{0}0".format(data_set.get("name")) + include_ds = "{0}0".format(DATASET) unarchive_result = hosts.all.zos_unarchive( src=MVS_DEST_ARCHIVE, format=format_dict, @@ -694,7 +702,7 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_include(ansible_zos_module, assert result.get("failed", False) is False assert result.get("src") == MVS_DEST_ARCHIVE - cmd_result = hosts.all.shell(cmd="dls {0}.*".format(HLQ)) + cmd_result = hosts.all.shell(cmd="""dls "{0}.*" """.format(HLQUA)) for c_result in cmd_result.contacted.values(): for target_ds in target_ds_list: if target_ds.get("name") == include_ds: @@ -704,10 +712,10 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_include(ansible_zos_module, assert target_ds.get("name") not in result.get("targets") assert target_ds.get("name") not in c_result.get("stdout") finally: - hosts.all.shell(cmd="drm {0}*".format(data_set.get("name"))) + hosts.all.shell(cmd=""" drm "{0}*" """.format(DATASET)) hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") - +@pytest.mark.ds @pytest.mark.parametrize( "format", [ "terse", @@ -715,16 +723,19 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_include(ansible_zos_module, ]) @pytest.mark.parametrize( "data_set", [ - dict(name=TEST_PS, dstype="SEQ"), - dict(name=TEST_PDS, dstype="PDS"), - dict(name=TEST_PDS, dstype="PDSE"), + dict(dstype="SEQ"), + dict(dstype="PDS"), + dict(dstype="PDSE"), ] ) def test_mvs_unarchive_multiple_data_set_use_adrdssu_exclude(ansible_zos_module, format, data_set): try: hosts = ansible_zos_module + MVS_DEST_ARCHIVE = get_tmp_ds_name() + DATASET = get_tmp_ds_name(3,3) + HLQUA = "ANSIBLE" target_ds_list = create_multiple_data_sets(ansible_zos_module=hosts, - base_name=data_set.get("name"), + base_name=DATASET, n=2, type=data_set.get("dstype")) ds_to_write = target_ds_list @@ -749,18 +760,18 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_exclude(ansible_zos_module, format_dict["format_options"].update(terse_pack="SPACK") format_dict["format_options"].update(use_adrdssu=True) archive_result = hosts.all.zos_archive( - src="{0}*".format(data_set.get("name")), + src=""" "{0}*" """.format(DATASET), dest=MVS_DEST_ARCHIVE, format=format_dict, ) # remote data_sets from host - hosts.all.shell(cmd="drm {0}*".format(data_set.get("name"))) + hosts.all.shell(cmd=""" drm "{0}*" """.format(DATASET)) if format == "terse": del format_dict["format_options"]["terse_pack"] # Unarchive action - exclude_ds = "{0}0".format(data_set.get("name")) + exclude_ds = "{0}0".format(DATASET) unarchive_result = hosts.all.zos_unarchive( src=MVS_DEST_ARCHIVE, format=format_dict, @@ -773,7 +784,7 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_exclude(ansible_zos_module, assert result.get("failed", False) is False assert result.get("src") == MVS_DEST_ARCHIVE - cmd_result = hosts.all.shell(cmd="dls {0}.*".format(HLQ)) + cmd_result = hosts.all.shell(cmd=""" dls "{0}.*" """.format(HLQUA)) for c_result in cmd_result.contacted.values(): for target_ds in target_ds_list: if target_ds.get("name") == exclude_ds: @@ -783,10 +794,10 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_exclude(ansible_zos_module, assert target_ds.get("name") in result.get("targets") assert target_ds.get("name") in c_result.get("stdout") finally: - hosts.all.shell(cmd="drm {0}*".format(data_set.get("name"))) + hosts.all.shell(cmd=""" drm "{0}*" """.format(DATASET)) hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") - +@pytest.mark.ds @pytest.mark.parametrize( "format", [ "terse", @@ -794,16 +805,19 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_exclude(ansible_zos_module, ]) @pytest.mark.parametrize( "data_set", [ - dict(name=TEST_PS, dstype="SEQ"), - dict(name=TEST_PDS, dstype="PDS"), - dict(name=TEST_PDS, dstype="PDSE"), + dict(dstype="SEQ"), + dict(dstype="PDS"), + dict(dstype="PDSE"), ] ) def test_mvs_unarchive_multiple_data_set_list(ansible_zos_module, format, data_set): try: hosts = ansible_zos_module + MVS_DEST_ARCHIVE = get_tmp_ds_name() + DATASET = get_tmp_ds_name(3,3) + HLQ = "ANSIBLE" target_ds_list = create_multiple_data_sets(ansible_zos_module=hosts, - base_name=data_set.get("name"), + base_name=DATASET, n=2, type=data_set.get("dstype")) ds_to_write = target_ds_list @@ -828,13 +842,13 @@ def test_mvs_unarchive_multiple_data_set_list(ansible_zos_module, format, data_s format_dict["format_options"].update(terse_pack="SPACK") format_dict["format_options"].update(use_adrdssu=True) archive_result = hosts.all.zos_archive( - src="{0}*".format(data_set.get("name")), + src=""" "{0}*" """.format(DATASET), dest=MVS_DEST_ARCHIVE, format=format_dict, ) # remote data_sets from host - hosts.all.shell(cmd="drm {0}*".format(data_set.get("name"))) + hosts.all.shell(cmd="drm {0}*".format(DATASET)) if format == "terse": del format_dict["format_options"]["terse_pack"] @@ -851,16 +865,16 @@ def test_mvs_unarchive_multiple_data_set_list(ansible_zos_module, format, data_s assert result.get("failed", False) is False assert result.get("src") == MVS_DEST_ARCHIVE - cmd_result = hosts.all.shell(cmd="dls {0}.*".format(HLQ)) + cmd_result = hosts.all.shell(cmd="""dls "{0}.*" """.format(HLQ)) for c_result in cmd_result.contacted.values(): for target_ds in target_ds_list: assert target_ds.get("name") in result.get("targets") assert target_ds.get("name") not in c_result.get("stdout") finally: - hosts.all.shell(cmd="drm {0}*".format(data_set.get("name"))) + hosts.all.shell(cmd="""drm "{0}*" """.format(DATASET)) hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") - +@pytest.mark.ds @pytest.mark.parametrize( "format", [ "terse", @@ -868,9 +882,9 @@ def test_mvs_unarchive_multiple_data_set_list(ansible_zos_module, format, data_s ]) @pytest.mark.parametrize( "data_set", [ - dict(name=TEST_PS, dstype="SEQ"), - dict(name=TEST_PDS, dstype="PDS"), - dict(name=TEST_PDS, dstype="PDSE"), + dict(dstype="SEQ"), + dict(dstype="PDS"), + dict(dstype="PDSE"), ] ) @pytest.mark.parametrize( @@ -886,8 +900,11 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_force(ansible_zos_module, f """ try: hosts = ansible_zos_module + MVS_DEST_ARCHIVE = get_tmp_ds_name() + DATASET = get_tmp_ds_name(3,3) + HLQUA = "ANSIBLE" target_ds_list = create_multiple_data_sets(ansible_zos_module=hosts, - base_name=data_set.get("name"), + base_name=DATASET, n=1, type=data_set.get("dstype")) ds_to_write = target_ds_list @@ -912,7 +929,7 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_force(ansible_zos_module, f format_dict["format_options"].update(terse_pack="SPACK") format_dict["format_options"].update(use_adrdssu=True) hosts.all.zos_archive( - src="{0}*".format(data_set.get("name")), + src=""" "{0}*" """.format(DATASET), dest=MVS_DEST_ARCHIVE, format=format_dict, ) @@ -933,7 +950,7 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_force(ansible_zos_module, f assert result.get("failed", False) is False assert result.get("src") == MVS_DEST_ARCHIVE - cmd_result = hosts.all.shell(cmd="dls {0}.*".format(HLQ)) + cmd_result = hosts.all.shell(cmd="""dls "{0}.*" """.format(HLQUA)) for c_result in cmd_result.contacted.values(): for target_ds in target_ds_list: assert target_ds.get("name") in result.get("targets") @@ -942,10 +959,10 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_force(ansible_zos_module, f assert result.get("changed") is False assert result.get("failed", False) is True finally: - hosts.all.shell(cmd="drm {0}*".format(data_set.get("name"))) + hosts.all.shell(cmd=""" drm "{0}*" """.format(DATASET)) hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") - +@pytest.mark.ds @pytest.mark.parametrize( "format", [ "terse", @@ -953,9 +970,9 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_force(ansible_zos_module, f ]) @pytest.mark.parametrize( "data_set", [ - dict(name=TEST_PS, dstype="SEQ", members=[""]), - dict(name=TEST_PDS, dstype="PDS", members=["MEM1", "MEM2"]), - dict(name=TEST_PDS, dstype="PDSE", members=["MEM1", "MEM2"]), + dict(dstype="SEQ", members=[""]), + dict(dstype="PDS", members=["MEM1", "MEM2"]), + dict(dstype="PDSE", members=["MEM1", "MEM2"]), ] ) @pytest.mark.parametrize( @@ -967,13 +984,16 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_force(ansible_zos_module, f def test_mvs_unarchive_single_data_set_remote_src(ansible_zos_module, format, data_set, record_length, record_format): try: hosts = ansible_zos_module + MVS_DEST_ARCHIVE = get_tmp_ds_name() + DATASET = get_tmp_ds_name(3) + HLQ = "ANSIBLE" tmp_folder = tempfile.TemporaryDirectory(prefix="tmpfetch") # Clean env - hosts.all.zos_data_set(name=data_set.get("name"), state="absent") + hosts.all.zos_data_set(name=DATASET, state="absent") hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") # Create source data set hosts.all.zos_data_set( - name=data_set.get("name"), + name=DATASET, type=data_set.get("dstype"), state="present", record_length=record_length, @@ -983,7 +1003,7 @@ def test_mvs_unarchive_single_data_set_remote_src(ansible_zos_module, format, da if data_set.get("dstype") in ["PDS", "PDSE"]: for member in data_set.get("members"): hosts.all.zos_data_set( - name=f"{data_set.get('name')}({member})", + name=f"{DATASET}({member})", type="member", state="present" ) @@ -995,9 +1015,9 @@ def test_mvs_unarchive_single_data_set_remote_src(ansible_zos_module, format, da test_line = "a" * record_length for member in data_set.get("members"): if member == "": - ds_to_write = f"{data_set.get('name')}" + ds_to_write = f"{DATASET}" else: - ds_to_write = f"{data_set.get('name')}({member})" + ds_to_write = f"{DATASET}({member})" hosts.all.shell(cmd=f"decho '{test_line}' \"{ds_to_write}\"") format_dict = dict(name=format) @@ -1005,19 +1025,19 @@ def test_mvs_unarchive_single_data_set_remote_src(ansible_zos_module, format, da if format == "terse": format_dict["format_options"].update(terse_pack="SPACK") archive_result = hosts.all.zos_archive( - src=data_set.get("name"), + src=DATASET, dest=MVS_DEST_ARCHIVE, format=format_dict, ) for result in archive_result.contacted.values(): assert result.get("changed") is True assert result.get("dest") == MVS_DEST_ARCHIVE - assert data_set.get("name") in result.get("archived") - cmd_result = hosts.all.shell(cmd = "dls {0}.*".format(HLQ)) + assert DATASET in result.get("archived") + cmd_result = hosts.all.shell(cmd = """dls "{0}.*" """.format(HLQ)) for c_result in cmd_result.contacted.values(): assert MVS_DEST_ARCHIVE in c_result.get("stdout") - hosts.all.zos_data_set(name=data_set.get("name"), state="absent") + hosts.all.zos_data_set(name=DATASET, state="absent") # fetch archive data set into tmp folder fetch_result = hosts.all.zos_fetch(src=MVS_DEST_ARCHIVE, dest=tmp_folder.name, is_binary=True) @@ -1039,9 +1059,9 @@ def test_mvs_unarchive_single_data_set_remote_src(ansible_zos_module, format, da assert result.get("failed", False) is False # assert result.get("dest") == MVS_DEST_ARCHIVE # assert data_set.get("name") in result.get("archived") - cmd_result = hosts.all.shell(cmd = "dls {0}.*".format(HLQ)) + cmd_result = hosts.all.shell(cmd = """dls "{0}.*" """.format(HLQ)) for c_result in cmd_result.contacted.values(): - assert data_set.get("name") in c_result.get("stdout") + assert DATASET in c_result.get("stdout") # Check data integrity after unarchive cat_result = hosts.all.shell(cmd=f"dcat \"{ds_to_write}\"") @@ -1050,7 +1070,7 @@ def test_mvs_unarchive_single_data_set_remote_src(ansible_zos_module, format, da finally: - hosts.all.shell(cmd="drm {0}*".format(data_set.get("name"))) + hosts.all.shell(cmd="drm {0}*".format(DATASET)) hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") tmp_folder.cleanup() @@ -1075,6 +1095,5 @@ def test_mvs_unarchive_fail_copy_remote_src(ansible_zos_module): for result in unarchive_result.contacted.values(): assert result.get("changed") is False assert result.get("failed", False) is True - print(result) finally: tmp_folder.cleanup() diff --git a/tests/helpers/dataset.py b/tests/helpers/dataset.py new file mode 100644 index 000000000..c8050516a --- /dev/null +++ b/tests/helpers/dataset.py @@ -0,0 +1,48 @@ +# -*- coding: utf-8 -*- + +# Copyright (c) IBM Corporation 2024 +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type +import pytest +import string +import random +import time +import re + +def get_tmp_ds_name(mlq_size=7, llq_size=7): + """ Function or test to ensure random names of datasets + the values of middle and last qualifier can change size by parameter, + but by default includes one letter.""" + ds = "ANSIBLE" + "." + ds += "P" + get_random_q(mlq_size).upper() + "." + ds += "T" + str(int(time.time()*1000))[-7:] + "." + ds += "C" + get_random_q(llq_size).upper() + return ds + + +def get_random_q(size=7): + """ Function or test to ensure random hlq of datasets""" + # Generate the first random hlq of size pass as parameter + letters = string.ascii_uppercase + string.digits + random_q = ''.join(random.choice(letters)for iteration in range(size)) + count = 0 + # Generate a random HLQ and verify if is valid, if not, repeat the process + while count < 5 and not re.fullmatch( + r"^(?:[A-Z$#@]{1}[A-Z0-9$#@-]{0,7})", + random_q, + re.IGNORECASE, + ): + random_q = ''.join(random.choice(letters)for iteration in range(size)) + count += 1 + return random_q \ No newline at end of file diff --git a/tests/helpers/volumes.py b/tests/helpers/volumes.py new file mode 100644 index 000000000..b0ed97d30 --- /dev/null +++ b/tests/helpers/volumes.py @@ -0,0 +1,121 @@ +# -*- coding: utf-8 -*- + +# Copyright (c) IBM Corporation 2024 +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +import pytest +import time +import yaml + +class Volume: + """ Volume class represents a volume on the z system, it tracks if the volume name + and status of the volume with respect to the current test session.""" + def __init__(self, name): + self.name = name + self.in_use = False + + def __str__(self): + return f'The volume {self.name} is in {self.in_use} in use' + + def use(self): + self.in_use = True + + def free(self): + self.in_use = False + +class Volume_Handler: + """ Class to manage use of the volumes generated by a session.""" + def __init__(self, list_volumes): + self.volumes = list_volumes + def init_volumes(list_volumes): + list_volumes = [] + for volume in self.volumes: + list_volumes.append(Volume(volume)) + return list_volumes + self.volumes =init_volumes(list_volumes) + + def get_available_vol(self): + """ Check in the list of volumes one on use or not, also send a default + volume 0 as is the one with more tracks available.""" + for volume in self.volumes: + if not (volume.in_use): + volume.use() + return volume.name + print("Not more volumes in disposal return volume 000000") + return "000000" + + def free_vol(self, vol): + """ Check from the array the volume is already free for other test to use.""" + for volume in self.volumes: + if volume.name == vol: + volume.free() + + def init_volumes(self): + list_volumes = [] + for volume in self.volumes: + list_volumes.append(Volume(volume)) + self.volumes =list_volumes + + +def get_volumes(ansible_zos_module, path): + """Get an array of available volumes""" + # Using the command d u,dasd,online to fill an array of available volumes with the priority + # of of actives (A) and storage (STRG) first then online (O) and storage and if is needed, the + # private ones but actives then to get a flag if is available or not every volumes + # is a instance of a class to manage the use. + hosts = ansible_zos_module + list_volumes = [] + storage_online = [] + flag = False + iteration = 5 + prefer_vols = read_test_config(path) + # The first run of the command d u,dasd,online,,n in the system can conclude with empty data + # to ensure get volumes is why require not more 5 runs and lastly one second of wait. + while not flag and iteration > 0: + all_volumes = hosts.all.zos_operator(cmd="d u,dasd,online,,65536") + time.sleep(1) + if all_volumes is not None: + for volume in all_volumes.contacted.values(): + all_volumes = volume.get('content') + flag = True if len(all_volumes) > 5 else False + iteration -= 1 + # Check if the volume is of storage and is active on prefer but also online as a correct option + for info in all_volumes: + if "ACTIVATED" in info or "-D U," in info or "UNIT" in info: + continue + vol_w_info = info.split() + if vol_w_info[2] == 'O' and vol_w_info[4] == "STRG/RSDNT": + storage_online.append(vol_w_info[3]) + # Insert a volumes for the class ls_Volumes to give flag of in_use and correct manage + for vol in storage_online: + list_volumes.append(vol) + if prefer_vols is not None: + list(map(str, prefer_vols)) + prefer_vols.extend(list_volumes) + prefer_vols = list(filter(lambda item: item is not None, prefer_vols)) + return prefer_vols + else: + return list_volumes + + +def read_test_config(path): + p = path + with open(p, 'r') as file: + config = yaml.safe_load(file) + if "VOLUMES" in config.keys(): + if len(config["VOLUMES"]) > 0: + return config["VOLUMES"] + else: + return None \ No newline at end of file From 4772f7862198b6648209b4ae0d42f324056e1de2 Mon Sep 17 00:00:00 2001 From: Demetri <dimatos@gmail.com> Date: Tue, 6 Feb 2024 12:53:21 -0800 Subject: [PATCH 296/495] Cherry picked 1.9.0 beta.1 into dev (#1207) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Merge Staging release v1.9.0 beta.1 into main (#1205) * Add max_rc support for module zos_tso_command (#666) * rebuilding 565 * fixing pep8 issue * corrected not in code, added functional test to use maxrc * changing maxrc return codes as indicated * updated copy_rexx to incorporate max_rc * pep8 issue on an if statement * changed test to dump intermediate values temporarily * added max_rc to arg params to get it to pass through * removed extra output on test * Added changelog fragment. * Update 565-zos_tsocommand_maxrc.yml * PARTIAL correction based on reviewer's feedback. Will complete later today. * Partial push: change origrc to orig_rc and added documentation. Added experimental loop for multiline results. Changed error case to continue through the commands even after a failure (?!) * Changes based on review feedback: - rc is actual returned value - origrc/orig_rc removed - in a list of commands, first error (considering max_rc) stops the chain - while processing commands, a list of success/failures are kept for error state info - changed listds to a longer, more complete string in example * updated test case for max_rc to use longer, more complete listds command * Minor pep8 and yamllint changes * Updated string builder to use append and join, for efficiency. * Update changelog fragment to correspond to pull request number Signed-off-by: ddimatos <dimatos@gmail.com> * removed old fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Changed output of command-by-command to use max_rc instead of Max RC, for consistency. * clarifying failure message when user hasn't set a max_rc. * corrected date on copyright notice to include current year. * corrected copyright date on test case --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: Demetri <dimatos@gmail.com> * added changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * zos_copy forward port a test case for symbols on a volume #739 (#740) * added test case for symbols on a volume Signed-off-by: ddimatos <dimatos@gmail.com> * Changelog fragment added Signed-off-by: ddimatos <dimatos@gmail.com> * Update test case with added comments Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Volume Initialization Module (#654) * Adding ZOS_INIT plugins The module and action for ZOS_Init command, under zos_ickdsf_command. Designed for version 1.3.1, and example given references removed zos_ssh.py. * Create zos_ickdsf_command.rst Tentative documentation. Does not quite list every return, and program may change to work with overall zos core collection. * Update zos_ickdsf_command.py fixed to pass pep8 sanity test. * Update zos_ickdsf_command.py Edited zos_ickdsf_command.py in action folder to pass pep8 style check. * bugfix - undo hard-coded storagegroup param from command passed to mvs_raw Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update encoding and licensing info Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * edit descriptions, leaving in old to verify messaging with dfsms team Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove references to buildix Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * cleanup examples docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add return docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add some unit tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update dict names, some clean up Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update interface and docs with cleaner descriptions and define default values and required true/false where applicable Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove init param and bring out sub-options one level Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename module from zos_ickdsf_command to zos_ickdsf_init Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove files referring to zos_ickdsf_command; i forgot to untrack these in the previous commit Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove params from intitial definition of results dict which are not always present, remove unused return params, rename message to msg to match ibm_zos_core and ansible style Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add ignore missing gplv3 license warning from sanity testings to existing ignore files Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * style edits to pass sanity tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add in-depth task name to first example docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add minimal params functional test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * convert action plugin to module_util, remove IckdsfCommand and CommandInit class definitions and custom error classes, move convert method to module_util Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename rc param, update func test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add note about disabled verify_offline check Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * removed volume_address checks which can be handled by ansiblemodule parsing validation or ickdsf itself; changed function signature of convert method to include results dicts so it can pass it to fail_json method in the case of module failure Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add invalid volume_address func tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * change param names, organize mvscmd return output Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * consolidate negative func tests, add negative tests for vtoc_tracks param, start list for postive tests to implement Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add func tests for "verify_existing_volid" and "verify_no_data_sets_exist" Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add volid positive and negative func tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add mutual exclusion to no index/sms_managedw with func test, minor edits to comments * add func tests for index param Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add func test for vtoc_tracks param Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove 'addr_range', 'volid_prefix' and 'output_html' params; add example with ansible loop to init multiple serial volumes Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * convert 'content' return param to list for prettier playbook output Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add specific examples to docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add guard rail test with volser check set to ensure target volume is as expected Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * switch target test vol to user02, remove existing data sets in initial test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * start adding author names to docstring, edits of yaml for sanity test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename module to zos_volume_init Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove unit test suite which was not kept up to date with code restructure Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove old/outdated .rst file Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * bring up to speed with v1.5.0-beta.1 which is currently more recent than dev Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add sanity test 2.15 file Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * clean up trailing empty lines/space chars Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * more pep8 style Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * pull in more changes from latest dev branch Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * tweak tests/sanity/ignore-2.10.txt file Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename volume_address to address, rename verify_no_data_sets_exist to verify_volume_empty, rename verify_existing_volid to verify_volid, rename vtoc_tracks to vtoc_size, rename return param command to cmd, make suggested doc changes, switch off check_mode Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * resolve pep8 line too long issue Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add year to copyright, add gh user name to author, restyle true/false with code rather than italics, remove backslashes in docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add changelog fragment Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update zos_fetch test case to create data set instead of using one that gets wiped away by zos_volume_init test cases Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * address PR comments including pep8 styling, docstring changes including add seealso section, add details around authorized usage of ickdsf, some light wordsmithing, and remove cmd from return structure Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add backticks to other special chars in docs for consistency Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * on more formatting tweak to national chars in docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add new param to support tmp_hlq Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * extend tmphlq binding to sysprint(*), disable verbose mode for mvscmd call Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove references to betterargparser - it cannot be used in its current state as it is too restrictive on volume serial and qualifier args Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove soft language ie beware, please; reduce line lengths, fix a spelling error Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * reorganize test case code for more logical grouping and un-nesting from a for loop, also update copyright year that was missed Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update display command check to only check particular lines instead of entire output in test_volid_address_assigned_correctly which would always be positive when tested against the entire output Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: trevor-glassey <Trevor.Glassey@ibm.com> * Ported bugfixes for #664 and #678 from 1.5.0 (#743) * Ported bugfixes for #664 and #678 * Updated file normalization for sequential datasets * Fixed encoding when a remote source is used * Added encoding normalization for record length (#664) This is a port and tweak from the normalization found in the L2 support branch. Now the normalization applies for both local and remote source files. There is still work left to validate how this behaves when copying to partitioned datasets, and see if the normalization needs to handle directories as well. * Refactored normalization for USS files * Updated line endings normalization in PDSEs * Updated create_temp_with_lf_endings * Added tests for line endings normalization * Added changelog fragment * Updated changelog fragment * Updated copyright information * Added comments to record length tests * Added more context to the record length normalization * Add force (disp=shr) option to zos_lineinfile and update zos_blockinfile with the same locking test case. (#731) * Add DISP=SHR by function force to the user * Add helper function for force cases only for EOF inserts * Test cases added * Just to check in the change case * All test cases for force in zos_lineinfile working properly * Test cases for force blockinfile * Fixes for style of code * One identation error * One identation error * One identation error * Documentation error * All test cases for line infile and block infile * Comments * Change the stdout to assert with expected of ds * Error of test case * Fragment added * Solve typo errors * Update to remove uncessary comment Signed-off-by: ddimatos <dimatos@gmail.com> * Update changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update module with force comments Signed-off-by: ddimatos <dimatos@gmail.com> * Solve some test problems * Copyright year * Copyright year * Add cases for other ecodes * Add test for all encodings * Cover all encodes test cases with force * Blockinfile force added for other encodes and DS types --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@mbp-de-andre-2.gdl.mex.ibm.com> Co-authored-by: ddimatos <dimatos@gmail.com> * remove duplicate function (#753) * Update branch production branch Main with release v1.5.0 content (#756) (#758) * Bug fix to correct job log returining and other edge cases (#688) * Update zos_job_submit to handle some edge cases and return the job log nearly always * Linting correction * Add changelog fragement for pr 683 * Clean up comments in code * Update restructured text for modules and changelog fragment * Add support to check for security exception to job submit module * Add additonal logic for jobs that use typerun=scan * Update chnagelog fragment * Correct run on sentence --------- * Added additional `D SYMBOLS` example to zos_operator documentation (#729) * Update zos_operator with an additional example * Added change fragment --------- * Ported bugfixes for #664 and #678 * Updated file normalization for sequential datasets * Fixed encoding when a remote source is used * Added encoding normalization for record length (#664) This is a port and tweak from the normalization found in the L2 support branch. Now the normalization applies for both local and remote source files. There is still work left to validate how this behaves when copying to partitioned datasets, and see if the normalization needs to handle directories as well. * Refactored normalization for USS files * Updated line endings normalization in PDSEs * Updated create_temp_with_lf_endings * Added tests for line endings normalization * Added changelog fragment * Add copy lib member test case * Corrected changelog fragment * Add conflict resolution * Update 641-copy-loadlib-member.yml * zos_copy forward port a test case for symbols on a volume (#739) * Add a test case for symbols using an existing volume on our test system * Add changelog fragment * Update 739-zos_copy-volume-symbol-test.yml Update description to use symbols overs special chars. * Update test case with added comments --------- * bring in updates to zos_gather_facts RETURN docstring (#744) * Solve problem 619 about set mode on dest file (#746) * Solve problem 619 about set mode on dest file * Add fragment of 746 PR --------- * Merge release tasks for v1.5.0 into release branch (#752) * Updated metadata * Updated copyright information * Updated copyright in README * Generated changelog * Added previous versions to changelog * Changed a minor_change fragment to bugfix * Updated release notes * Update dependency finder to allow for all tests minus skip tests * Update release notes and changelog with full date formats --------- * remove changelog fragments no longer needed (#754) --------- Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: Ivan Moreno <iamorenosoto@gmail.com> Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> Co-authored-by: Rich Parker <richp405@gmail.com> Co-authored-by: ketankelkar <ktnklkr@gmail.com> Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> * Merge master to dev for 1.6.0 beta.1 (#763) * Update branch production branch Main with release v1.5.0 content (#756) * Bug fix to correct job log returining and other edge cases (#688) * Update zos_job_submit to handle some edge cases and return the job log nearly always Signed-off-by: ddimatos <dimatos@gmail.com> * Linting correction Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragement for pr 683 Signed-off-by: ddimatos <dimatos@gmail.com> * Clean up comments in code Signed-off-by: ddimatos <dimatos@gmail.com> * Update restructured text for modules and changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Add support to check for security exception to job submit module Signed-off-by: ddimatos <dimatos@gmail.com> * Add additonal logic for jobs that use typerun=scan Signed-off-by: ddimatos <dimatos@gmail.com> * Update chnagelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Correct run on sentence Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Added additional `D SYMBOLS` example to zos_operator documentation (#729) * Update zos_operator with an additional example Signed-off-by: ddimatos <dimatos@gmail.com> * Added change fragment Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Ported bugfixes for #664 and #678 * Updated file normalization for sequential datasets * Fixed encoding when a remote source is used * Added encoding normalization for record length (#664) This is a port and tweak from the normalization found in the L2 support branch. Now the normalization applies for both local and remote source files. There is still work left to validate how this behaves when copying to partitioned datasets, and see if the normalization needs to handle directories as well. * Refactored normalization for USS files * Updated line endings normalization in PDSEs * Updated create_temp_with_lf_endings * Added tests for line endings normalization * Added changelog fragment * Add copy lib member test case * Corrected changelog fragment * Add conflict resolution * Update 641-copy-loadlib-member.yml * zos_copy forward port a test case for symbols on a volume (#739) * Add a test case for symbols using an existing volume on our test system Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update 739-zos_copy-volume-symbol-test.yml Update description to use symbols overs special chars. * Update test case with added comments Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * bring in updates to zos_gather_facts RETURN docstring (#744) Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * Solve problem 619 about set mode on dest file (#746) * Solve problem 619 about set mode on dest file * Add fragment of 746 PR --------- Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> * Merge release tasks for v1.5.0 into release branch (#752) * Updated metadata * Updated copyright information * Updated copyright in README * Generated changelog * Added previous versions to changelog * Changed a minor_change fragment to bugfix * Updated release notes * Update dependency finder to allow for all tests minus skip tests Signed-off-by: ddimatos <dimatos@gmail.com> * Update release notes and changelog with full date formats Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: ddimatos <dimatos@gmail.com> * remove changelog fragments no longer needed (#754) Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: Ivan Moreno <iamorenosoto@gmail.com> Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> Co-authored-by: Rich Parker <richp405@gmail.com> Co-authored-by: ketankelkar <ktnklkr@gmail.com> Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> * v1.6.0-beta.1 release (#759) * Added tagging of converted files * Updated tests for zos_encode * Added restoration of PDS/PDSE members when module fails zos_copy can now track which members got overwritten or newly added to a partitioned data set, and restore them accordingly. This commit includes a refactorization of the copy_to_pdse method to simplify the restoration work. * Fixed cleanup of dest when module fails * Removed exception used for debugging * Added pytest markers * solved pep8 issue * Added more information to error when restoration fails * Update make file with new features to manage the nodes (#563) * Update make file with new features to manage the nodes Signed-off-by: ddimatos <dimatos@gmail.com> * Correct typo Signed-off-by: ddimatos <dimatos@gmail.com> * updated makefile to allow for no password in the clear and added a new host Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Fix a bug that failed to copy the cert to the managed node (#577) Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Added apropiate error message in zos_lineinfile when src is not found * Better error message * Added fragment for changelog * Update git issue slection field (#593) * Update git issue slection field * Added some additional future versions * Added rule to ignore python 2.7 compile not supporting f strings * Corrected rule in ignore file * Update 584-zos_lineinfile-error-message.yml * Added missing fragments for issues 309 and 408 * update make file and related artifacts with recent zVM changes (#598) Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Expand what is ingored for venv to all venv's (#613) Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Ansible-core versions with updated ignore file pass (#615) * Validated that ansible-core versions with updated ignore file pass locally Signed-off-by: ddimatos <dimatos@gmail.com> * remove redundant entry on last line Signed-off-by: ddimatos <dimatos@gmail.com> * remove redundant entry on last line Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Update make file to read exported var VENV else default to 'venv' (#614) * Update make file to read exported var VENV else default to 'venv' Signed-off-by: ddimatos <dimatos@gmail.com> * Correct the default from defaultValue to 'venv' Signed-off-by: ddimatos <dimatos@gmail.com> * Bugfix/558/zos copy backup dev (#609) * Create emergency backup only if force is set to False and dest exists * Added Changelog fragment * Updated imports ansible.module_utils._text to ansible.module_utils.common.text.converters (#602) * Updated import in zos_fetch * Updated import in zos_job_submit * Updated import in module_utils/data_set * Updated import in module_utils/System * Updated import in zos_copy * Updated import in zos_fetch * Added changelog fragment * Update enhancement-518-text-converter-import.yml * Updates the makefile and related scripts for the latest mounts (#628) * Update makefile coments and use --ignore for pytest Signed-off-by: ddimatos <dimatos@gmail.com> * lexicographical order targets Signed-off-by: ddimatos <dimatos@gmail.com> * Update makefile and artifacts to support new mounts Signed-off-by: ddimatos <dimatos@gmail.com> * Removing encrypted versions of the scripts, no longer senstive content Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * update profile created for mount points Signed-off-by: ddimatos <dimatos@gmail.com> * Correct f-string usage to remain 2.7 compatible (#659) * Correct f-string usage to remain 2.7 compatible Signed-off-by: ddimatos <dimatos@gmail.com> * Update changelog fragments Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Updated shell scripts for development tooling Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Update collab template Signed-off-by: ddimatos <dimatos@gmail.com> * Update collab template Signed-off-by: ddimatos <dimatos@gmail.com> * Update collab template Signed-off-by: ddimatos <dimatos@gmail.com> * Template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Template updates Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * changed zos_job_query to allow multi/embedded wildcard job_values. job.py/_get_job_status now uses fnmatch to test for wildcarded job_names. added test looking for a wildcard job name. added basic changelog text as 'enhancement'. * expanded wildcard support to job_id as well as job_name corrected error-handling issue in query routine playbook tests complete * cleaned up pep8 issues * Bug fix to correct job log returining and other edge cases (#683) * Update zos_job_submit to handle some edge cases and return the job log nearly always Signed-off-by: ddimatos <dimatos@gmail.com> * Linting correction Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragement for pr 683 Signed-off-by: ddimatos <dimatos@gmail.com> * Clean up comments in code Signed-off-by: ddimatos <dimatos@gmail.com> * Update restructured text for modules and changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Add support to check for security exception to job submit module Signed-off-by: ddimatos <dimatos@gmail.com> * Add additonal logic for jobs that use typerun=scan Signed-off-by: ddimatos <dimatos@gmail.com> * Update chnagelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update zos_job_submit.py Grammar change and correction. --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Added uss_tag_encoding function * Fixing linter issues * removed extraneous comment on query, eliminated unused variable * responding to reviewer comments * Expanded query tests to run a hello jcl file to make sure we are searching for a job id that exists. * Expanded documentation in code to highlight asterisk availability. Added examples showing multi-asterisk search. * Corrected 2 documentation errors * Change to documentation text (indent on multi line string?) * Still trying to get documentation to pass * Looks like '---' was killing documentation block. * Update zos_blockinfile with complex examples (#727) * Update zos_blockinfile with complex examples Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragement Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Added additional D SYMBOLS example to zos_operator documentation (#730) * Update zos_operator with an additional example Signed-off-by: ddimatos <dimatos@gmail.com> * Added changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * zos_gather_facts - add sample output to RETURN docstring (#722) * zos_gather_facts - add sample output to RETURN docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add changelog fragment Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * zos_gather_facts RETURN docstring - add sample sysplex_name and lpar_name values Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * Update generated module doc Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: ddimatos <dimatos@gmail.com> * 574find (#668) * change to allow find loop to continue when one particular data set element is not found This has been validated with a zos-check-find.yaml playbook * added functional test that includes non-existant pds entry, which should be skipped over * added changelog fragment. * zos_blockinfile , can quotes in content can be supported (#680) * Verify coomand of ZOAU support the doble quotes and get better output of message * Verify coomand of ZOAU support the doble quotes and get better output of message * Restore to the one tyme function solving TypeError * Test about cases with quotes supported * Solve comments * Comments in the changelog * Adjust test for working accord the text * A needed space * All in structured * Comments solved * Better order --------- Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> Co-authored-by: Demetri <dimatos@gmail.com> * zos_copy mode is applied to the destination directory, a deviation from the communtiy module behavior. (#723) * Verify coomand of ZOAU support the doble quotes and get better output of message * Verify coomand of ZOAU support the doble quotes and get better output of message * Restore to the one tyme function solving TypeError * Test about cases with quotes supported * Solve comments * Comments in the changelog * Adjust test for working accord the text * Solve the dest functional mode set for the applied to destination directory * Identation and spaces * To work well * To work well * To work well * To work well * Changelogs added * Solved the fragment test and separte the cases --------- Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> * corrected job test case that wanted to extract job id. * changed call to zos_job_query in the functional test. * zos_data_set - add force parameter to enable member delete via disp shr (#718) * add force parameter to enable member delete via disp shr Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update link to PR instead of issue Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * fix minor mistake in func test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update DOCstring for missed items highlighted in PR review Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update copyright years in data set module util Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * Update changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update to doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> * Update to doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> * Update to doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> * update doc, rst Signed-off-by: ddimatos <dimatos@gmail.com> * Update doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> * Added handler for job not found edge cases (None not iterable errors) * corrected pep8 issue (bad indent) * removed tracking text from error/not found messages. * Update zos_job_query.py * Add and automate a load lib test case for module zos_copy (#640) * Modified if statement * Added changelog fragment * Corrected statements Corrected PR in changelog fix lint issue * Adding test case on the work * Added assertion to loadlib test Changed cobol src Added newlines * Correcting cobol src * Removed unnecessary comments * Removed deprint statements for debug * Update 601-copy-loadlib-member.yml * Reduce space primary * Make sure cobol is executable * corrected name * Corrected names * Adding debug statements * modified name in link step * Changing copy name * Removed print statements for debug * Removed previous changelog * Removed unused fragment * Removed test case * Add max_rc support for module zos_tso_command (#666) * rebuilding 565 * fixing pep8 issue * corrected not in code, added functional test to use maxrc * changing maxrc return codes as indicated * updated copy_rexx to incorporate max_rc * pep8 issue on an if statement * changed test to dump intermediate values temporarily * added max_rc to arg params to get it to pass through * removed extra output on test * Added changelog fragment. * Update 565-zos_tsocommand_maxrc.yml * PARTIAL correction based on reviewer's feedback. Will complete later today. * Partial push: change origrc to orig_rc and added documentation. Added experimental loop for multiline results. Changed error case to continue through the commands even after a failure (?!) * Changes based on review feedback: - rc is actual returned value - origrc/orig_rc removed - in a list of commands, first error (considering max_rc) stops the chain - while processing commands, a list of success/failures are kept for error state info - changed listds to a longer, more complete string in example * updated test case for max_rc to use longer, more complete listds command * Minor pep8 and yamllint changes * Updated string builder to use append and join, for efficiency. * Update changelog fragment to correspond to pull request number Signed-off-by: ddimatos <dimatos@gmail.com> * removed old fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Changed output of command-by-command to use max_rc instead of Max RC, for consistency. * clarifying failure message when user hasn't set a max_rc. * corrected date on copyright notice to include current year. * corrected copyright date on test case --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: Demetri <dimatos@gmail.com> * added changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * zos_copy forward port a test case for symbols on a volume #739 (#740) * added test case for symbols on a volume Signed-off-by: ddimatos <dimatos@gmail.com> * Changelog fragment added Signed-off-by: ddimatos <dimatos@gmail.com> * Update test case with added comments Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Volume Initialization Module (#654) * Adding ZOS_INIT plugins The module and action for ZOS_Init command, under zos_ickdsf_command. Designed for version 1.3.1, and example given references removed zos_ssh.py. * Create zos_ickdsf_command.rst Tentative documentation. Does not quite list every return, and program may change to work with overall zos core collection. * Update zos_ickdsf_command.py fixed to pass pep8 sanity test. * Update zos_ickdsf_command.py Edited zos_ickdsf_command.py in action folder to pass pep8 style check. * bugfix - undo hard-coded storagegroup param from command passed to mvs_raw Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update encoding and licensing info Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * edit descriptions, leaving in old to verify messaging with dfsms team Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove references to buildix Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * cleanup examples docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add return docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add some unit tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update dict names, some clean up Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update interface and docs with cleaner descriptions and define default values and required true/false where applicable Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove init param and bring out sub-options one level Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename module from zos_ickdsf_command to zos_ickdsf_init Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove files referring to zos_ickdsf_command; i forgot to untrack these in the previous commit Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove params from intitial definition of results dict which are not always present, remove unused return params, rename message to msg to match ibm_zos_core and ansible style Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add ignore missing gplv3 license warning from sanity testings to existing ignore files Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * style edits to pass sanity tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add in-depth task name to first example docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add minimal params functional test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * convert action plugin to module_util, remove IckdsfCommand and CommandInit class definitions and custom error classes, move convert method to module_util Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename rc param, update func test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add note about disabled verify_offline check Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * removed volume_address checks which can be handled by ansiblemodule parsing validation or ickdsf itself; changed function signature of convert method to include results dicts so it can pass it to fail_json method in the case of module failure Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add invalid volume_address func tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * change param names, organize mvscmd return output Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * consolidate negative func tests, add negative tests for vtoc_tracks param, start list for postive tests to implement Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add func tests for "verify_existing_volid" and "verify_no_data_sets_exist" Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add volid positive and negative func tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add mutual exclusion to no index/sms_managedw with func test, minor edits to comments * add func tests for index param Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add func test for vtoc_tracks param Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove 'addr_range', 'volid_prefix' and 'output_html' params; add example with ansible loop to init multiple serial volumes Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * convert 'content' return param to list for prettier playbook output Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add specific examples to docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add guard rail test with volser check set to ensure target volume is as expected Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * switch target test vol to user02, remove existing data sets in initial test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * start adding author names to docstring, edits of yaml for sanity test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename module to zos_volume_init Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove unit test suite which was not kept up to date with code restructure Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove old/outdated .rst file Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * bring up to speed with v1.5.0-beta.1 which is currently more recent than dev Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add sanity test 2.15 file Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * clean up trailing empty lines/space chars Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * more pep8 style Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * pull in more changes from latest dev branch Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * tweak tests/sanity/ignore-2.10.txt file Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename volume_address to address, rename verify_no_data_sets_exist to verify_volume_empty, rename verify_existing_volid to verify_volid, rename vtoc_tracks to vtoc_size, rename return param command to cmd, make suggested doc changes, switch off check_mode Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * resolve pep8 line too long issue Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add year to copyright, add gh user name to author, restyle true/false with code rather than italics, remove backslashes in docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add changelog fragment Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update zos_fetch test case to create data set instead of using one that gets wiped away by zos_volume_init test cases Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * address PR comments including pep8 styling, docstring changes including add seealso section, add details around authorized usage of ickdsf, some light wordsmithing, and remove cmd from return structure Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add backticks to other special chars in docs for consistency Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * on more formatting tweak to national chars in docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add new param to support tmp_hlq Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * extend tmphlq binding to sysprint(*), disable verbose mode for mvscmd call Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove references to betterargparser - it cannot be used in its current state as it is too restrictive on volume serial and qualifier args Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove soft language ie beware, please; reduce line lengths, fix a spelling error Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * reorganize test case code for more logical grouping and un-nesting from a for loop, also update copyright year that was missed Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update display command check to only check particular lines instead of entire output in test_volid_address_assigned_correctly which would always be positive when tested against the entire output Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: trevor-glassey <Trevor.Glassey@ibm.com> * Ported bugfixes for #664 and #678 from 1.5.0 (#743) * Ported bugfixes for #664 and #678 * Updated file normalization for sequential datasets * Fixed encoding when a remote source is used * Added encoding normalization for record length (#664) This is a port and tweak from the normalization found in the L2 support branch. Now the normalization applies for both local and remote source files. There is still work left to validate how this behaves when copying to partitioned datasets, and see if the normalization needs to handle directories as well. * Refactored normalization for USS files * Updated line endings normalization in PDSEs * Updated create_temp_with_lf_endings * Added tests for line endings normalization * Added changelog fragment * Updated changelog fragment * Updated copyright information * Added comments to record length tests * Added more context to the record length normalization * Add force (disp=shr) option to zos_lineinfile and update zos_blockinfile with the same locking test case. (#731) * Add DISP=SHR by function force to the user * Add helper function for force cases only for EOF inserts * Test cases added * Just to check in the change case * All test cases for force in zos_lineinfile working properly * Test cases for force blockinfile * Fixes for style of code * One identation error * One identation error * One identation error * Documentation error * All test cases for line infile and block infile * Comments * Change the stdout to assert with expected of ds * Error of test case * Fragment added * Solve typo errors * Update to remove uncessary comment Signed-off-by: ddimatos <dimatos@gmail.com> * Update changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update module with force comments Signed-off-by: ddimatos <dimatos@gmail.com> * Solve some test problems * Copyright year * Copyright year * Add cases for other ecodes * Add test for all encodings * Cover all encodes test cases with force * Blockinfile force added for other encodes and DS types --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@mbp-de-andre-2.gdl.mex.ibm.com> Co-authored-by: ddimatos <dimatos@gmail.com> * remove duplicate function (#753) * Update branch production branch Main with release v1.5.0 content (#756) (#758) * Bug fix to correct job log returining and other edge cases (#688) * Update zos_job_submit to handle some edge cases and return the job log nearly always * Linting correction * Add changelog fragement for pr 683 * Clean up comments in code * Update restructured text for modules and changelog fragment * Add support to check for security exception to job submit module * Add additonal logic for jobs that use typerun=scan * Update chnagelog fragment * Correct run on sentence --------- * Added additional `D SYMBOLS` example to zos_operator documentation (#729) * Update zos_operator with an additional example * Added change fragment --------- * Ported bugfixes for #664 and #678 * Updated file normalization for sequential datasets * Fixed encoding when a remote source is used * Added encoding normalization for record length (#664) This is a port and tweak from the normalization found in the L2 support branch. Now the normalization applies for both local and remote source files. There is still work left to validate how this behaves when copying to partitioned datasets, and see if the normalization needs to handle directories as well. * Refactored normalization for USS files * Updated line endings normalization in PDSEs * Updated create_temp_with_lf_endings * Added tests for line endings normalization * Added changelog fragment * Add copy lib member test case * Corrected changelog fragment * Add conflict resolution * Update 641-copy-loadlib-member.yml * zos_copy forward port a test case for symbols on a volume (#739) * Add a test case for symbols using an existing volume on our test system * Add changelog fragment * Update 739-zos_copy-volume-symbol-test.yml Update description to use symbols overs special chars. * Update test case with added comments --------- * bring in updates to zos_gather_facts RETURN docstring (#744) * Solve problem 619 about set mode on dest file (#746) * Solve problem 619 about set mode on dest file * Add fragment of 746 PR --------- * Merge release tasks for v1.5.0 into release branch (#752) * Updated metadata * Updated copyright information * Updated copyright in README * Generated changelog * Added previous versions to changelog * Changed a minor_change fragment to bugfix * Updated release notes * Update dependency finder to allow for all tests minus skip tests * Update release notes and changelog with full date formats --------- * remove changelog fragments no longer needed (#754) --------- Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: Ivan Moreno <iamorenosoto@gmail.com> Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> Co-authored-by: Rich Parker <richp405@gmail.com> Co-authored-by: ketankelkar <ktnklkr@gmail.com> Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> * update galaxy and meta/ files Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update copyright years Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update rst doc files Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add 1.6.0-beta.1 release summary fragment Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * generate changelog Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add release notes Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add additional stylizing Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * Delete fragements once changelog is generated Signed-off-by: ddimatos <dimatos@gmail.com> * update stylizing around *none* in zos_volume_init docs Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: Ivan Moreno <iamorenosoto@gmail.com> Co-authored-by: Oscar Fernando Flores Garcia <fernandofloresdev@gmail.com> Co-authored-by: Demetri <dimatos@gmail.com> Co-authored-by: Rich Parker <richp405@gmail.com> Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> Co-authored-by: trevor-glassey <Trevor.Glassey@ibm.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@mbp-de-andre-2.gdl.mex.ibm.com> * Update ibm_zos_core_meta.yml --------- Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: Ivan Moreno <iamorenosoto@gmail.com> Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> Co-authored-by: Rich Parker <richp405@gmail.com> Co-authored-by: ketankelkar <ktnklkr@gmail.com> Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> Co-authored-by: trevor-glassey <Trevor.Glassey@ibm.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@mbp-de-andre-2.gdl.mex.ibm.com> * Bugfix/619/mode set for files applied test case (#757) * Add test case for copy dest file * Add comments * Add test for folders * Adjust spaces * Changes for ensure consistency for all tests * Changes of name and clean creations --------- Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> * Bugfix/381/failed when the job name was null or not found (#747) * Add the verbose for failed when job name was null or not found * Adjust message for what we can get * Whitespaces move * Add code from dev * Ecode utility as is in dev * Year for copyright * Case for having both the jod_id and job_name * Ecode utils functions not in my branch * Add final line ecode * Add fragment * Delete encode function two times, adjust job message and change the fragment * Change variable name for one more descriptive * Restore encode and change one word * Encode * bugfixes * Set up as dev * Better fragment --------- Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> * Bugfix/660/zos operator reported failure caused by unrelated error response messages (#762) * Add options * Add transparency on the response and test cases * Solve spaces * Add validation to append * Fragment Added * Adjust fail_json on non_zero response * Identation mistakes solved * Solve last idenation problem * Replace prior tooling (makefile) that aidded the development workflow with a new 'ac' command. (#766) * Make file mount script helper Signed-off-by: ddimatos <dimatos@gmail.com> * Comments to mount script Signed-off-by: ddimatos <dimatos@gmail.com> * Staged updated scripts for makefile usage Signed-off-by: ddimatos <dimatos@gmail.com> * Update mount scripts for use with makefile Signed-off-by: ddimatos <dimatos@gmail.com> * updates to correct mounts and add function to mounts-datasets Signed-off-by: ddimatos <dimatos@gmail.com> * adding completed new ac command files for development Signed-off-by: ddimatos <dimatos@gmail.com> * update ignore to more specific with venv Signed-off-by: ddimatos <dimatos@gmail.com> * Correcting ignore to allow for venv.sh Signed-off-by: ddimatos <dimatos@gmail.com> * moved logic that checks for info.env to venv.sh Signed-off-by: ddimatos <dimatos@gmail.com> * Adding changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Fix a path issue when calling venv.sh Signed-off-by: ddimatos <dimatos@gmail.com> * Fixes issue not being able to run all tests, fixes issue with content being written to collections folder Signed-off-by: ddimatos <dimatos@gmail.com> * Support zSH and update scp to fall back to legacy scp protocal Signed-off-by: ddimatos <dimatos@gmail.com> * Update ac with password usage Signed-off-by: ddimatos <dimatos@gmail.com> * Update ac with password usage Signed-off-by: ddimatos <dimatos@gmail.com> * Fix incorrect message and remove the cd's before and after ac-test Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * 347 new query fields (#778) * changing job.py to return 7 more fields, and for zos_job_query to pass them through * corrected testing to pull all new values through this assumes zoau 1.2.3 and z/OS at least 2.4 need to test older zoau to make sure this will still work * Added zoau version testing import to job.py so it won't reach for non-existent members. * pep8 and lint required changes * changed test to see if it will pass unit testing * Modified test_zos_data_set_func to skip HFS test if zOS > 02.04 * changed OS test for hfs usage * corrected usage of 'hosts'... removed the definition in prior edit. * changing OS version checker * corrected string extraction for OS version checker * added delete shell to 196/197 (finally of cat/uncat test) removed success message from 830 (version test logic) * removed the mvscmdauth call, as it coincides with some new test failures. * added changed=false back into testing of job_query * correction of zos->zoau name in comments. * Missing fragment in PR 778 New query fields (#780) * added fragment for pr 778 * Added changelog fragment query new fields Added changelog fragment query new fields * Update 778-query-new-fields.yml * Update docs with ansible/ansible-core version, AAP and fix the dated git issue templates (#771) * Doc vesion updates Signed-off-by: ddimatos <dimatos@gmail.com> * Repository template updates and future proofing Signed-off-by: ddimatos <dimatos@gmail.com> * Changelog fragment added Signed-off-by: ddimatos <dimatos@gmail.com> * Formatting corrections for release notes Signed-off-by: ddimatos <dimatos@gmail.com> * Upate issue templates with newer version of software Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Update ac command supporting files (#789) * Update ac command supporting files Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Update zos_data_set module member description Signed-off-by: ddimatos <dimatos@gmail.com> * Add recently changed module doc from prior commits Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Adding fix for uncataloged vsam and non-vsam data sets Signed-off-by: ddimatos <dimatos@gmail.com> * Encode files recursively and test case for keep behavior. (#772) * Bring the jinja2 solution to dev and add test case * Add fragment * Solve problem z/OS 2.5 HFS * Declaration error solve * Need to check the validation with HFS * Ensure validating z/OS work with HFS * Change inecesary changes and fragments q * Return all test cases to normal * Return all test cases to normal * Create the local test case * Add local test case and change test case to be acurate * Get better cleanup of test-case * Update test_zos_data_set_func.py Equalize test mount func * Update ac to support a single test (#793) * Update ac to support a single test Signed-off-by: ddimatos <dimatos@gmail.com> * Changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update test description Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Return the dynamically created destination attributes (#773) * First iteration to get dynamic values * Spaces and lines rectified * Add validation and extra variable to ensure consistency * Whitespaces * Change imports in test_zos_mount_func * Update test_zos_fetch_func imports * Update all imports for pipelines runs * Revert "Update all imports for pipelines runs" This reverts commit 1b370a2ba3c0001c316e0121ddab82ae7cc6d75d. Return one commit * Update data_set.py imports * Revert "Update data_set.py imports" This reverts commit 37561b0a12e04faaee8307a5541b71469dbe721d. * Update data_set imports * Update data_set imports * Update data_set imports * Restore import * Restore the imports * Add fragment * Solve a typo * Solve z/OS 2.5 HFS * Solve declaration error * Solve HFS and solution by now * Ensure HFS working with HFS * Better working on HFS testing problems * Change to cover many cases and add test * Modified changelog, corrected typos and shortemed file name * Delete 773-Return-the-dynamically-created-destintation-attributres.yaml * Update test_zos_data_set_func.py * Add documentation * Adjust spaces * Solve spaces in documentation * Solve problems on spaces in documentation * Adjust fragment and add validation for vsams * Better redaction to documentation * Solve spaces * Change documentation of code and collection * Change words in documentation --------- Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> * Updated ac command to clean up the collections directory Signed-off-by: ddimatos <dimatos@gmail.com> * Fixes the issue of parts of a vsam cluster remaining behind and allows user to correctly delete DS not in cat Signed-off-by: ddimatos <dimatos@gmail.com> * Update module doc to explain data set deltion for given volume Signed-off-by: ddimatos <dimatos@gmail.com> * Update module doc to explain data set deltion for given volumegit Signed-off-by: ddimatos <dimatos@gmail.com> * Unbound local var fix Signed-off-by: ddimatos <dimatos@gmail.com> * added changelog fragments Signed-off-by: ddimatos <dimatos@gmail.com> * Lint corrections Signed-off-by: ddimatos <dimatos@gmail.com> * Update changelog based on PR feedback Signed-off-by: ddimatos <dimatos@gmail.com> * Increase ansible supported version to 2.15 Signed-off-by: ddimatos <dimatos@gmail.com> * remove unused imports Signed-off-by: ddimatos <dimatos@gmail.com> * Added 2.16 ignore since our pipeline supports devel which is at this time 2.16 Signed-off-by: ddimatos <dimatos@gmail.com> * Change the line for the functional one (#805) * Add ansible-lint tooling added (#812) * Add ansible-lint tooling Signed-off-by: ddimatos <dimatos@gmail.com> * add changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * missing hyphen from command doc Signed-off-by: ddimatos <dimatos@gmail.com> * Update ac command with doc corrections Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * 439 addf (#821) * initial changes to support F format * adding F option, added basic test looking for failure during ensure-present * added print_results to a failing uncatalog test. * adding more preint_result statements to track down cataloging issue * removed other print statements, added one back (cataloging is just plain finicky) * corrected volume name on new test * removed extra print statement from test code. Added Changelog fragment. * Expanded test case to try 1 of each record format creation. Added mention of 'F' into the documentation of record_format in dataset.py * Bugfix/769/mode option does not behave the same way that it does in the community module (#795) * First suggestion * Add files to be overwriten to the files to be changed * Add functionality to test case to ensure behaivour * Add test case for keep behaivour * Delete test repetition * Delete test case from other branch * Change test cases to ensure works as ansible module * Add fragment and change variable names for clarity * Get better test case and comments * Restore test --------- Co-authored-by: Demetri <dimatos@gmail.com> * bugfix/823/Return destination attributes had hardcoded type and record format (#824) * Add solution * Add fragment * Bufix: Fixes zos_copy and zos_fetch deprecation msg for using _play_context.verbosity (#806) * Add new test case for verbosity check Signed-off-by: ddimatos <dimatos@gmail.com> * Update zos_copy tests to support display.verbosity and nested encoding Signed-off-by: ddimatos <dimatos@gmail.com> * Update test framewor to provide support for adhoc module calls Signed-off-by: ddimatos <dimatos@gmail.com> * Update zos_fetch plugin to use the display.verbosity Signed-off-by: ddimatos <dimatos@gmail.com> * Lint correction Signed-off-by: ddimatos <dimatos@gmail.com> * Changlog fragments Signed-off-by: ddimatos <dimatos@gmail.com> * Update test with verbosity Signed-off-by: ddimatos <dimatos@gmail.com> * Change from shell to raw module usage Signed-off-by: ddimatos <dimatos@gmail.com> * remove verbosity from test Signed-off-by: ddimatos <dimatos@gmail.com> * correct indentation Signed-off-by: ddimatos <dimatos@gmail.com> * update changelog fragement Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> * Stagging v1.6.0 merge into dev (#832) * Update branch production branch Main with release v1.5.0 content (#756) * Bug fix to correct job log returining and other edge cases (#688) * Update zos_job_submit to handle some edge cases and return the job log nearly always Signed-off-by: ddimatos <dimatos@gmail.com> * Linting correction Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragement for pr 683 Signed-off-by: ddimatos <dimatos@gmail.com> * Clean up comments in code Signed-off-by: ddimatos <dimatos@gmail.com> * Update restructured text for modules and changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Add support to check for security exception to job submit module Signed-off-by: ddimatos <dimatos@gmail.com> * Add additonal logic for jobs that use typerun=scan Signed-off-by: ddimatos <dimatos@gmail.com> * Update chnagelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Correct run on sentence Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Added additional `D SYMBOLS` example to zos_operator documentation (#729) * Update zos_operator with an additional example Signed-off-by: ddimatos <dimatos@gmail.com> * Added change fragment Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Ported bugfixes for #664 and #678 * Updated file normalization for sequential datasets * Fixed encoding when a remote source is used * Added encoding normalization for record length (#664) This is a port and tweak from the normalization found in the L2 support branch. Now the normalization applies for both local and remote source files. There is still work left to validate how this behaves when copying to partitioned datasets, and see if the normalization needs to handle directories as well. * Refactored normalization for USS files * Updated line endings normalization in PDSEs * Updated create_temp_with_lf_endings * Added tests for line endings normalization * Added changelog fragment * Add copy lib member test case * Corrected changelog fragment * Add conflict resolution * Update 641-copy-loadlib-member.yml * zos_copy forward port a test case for symbols on a volume (#739) * Add a test case for symbols using an existing volume on our test system Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update 739-zos_copy-volume-symbol-test.yml Update description to use symbols overs special chars. * Update test case with added comments Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * bring in updates to zos_gather_facts RETURN docstrin… * Remove changelog fragments not needed left in main Signed-off-by: ddimatos <dimatos@gmail.com> Conflicts: changelogs/fragments/1078-short_job_name_sends_back_a_value_error.yaml You are currently cherry-picking commit d20097b. Changes to be committed: deleted: changelogs/fragments/1016-remove-randint.yml deleted: changelogs/fragments/1036-apf-try-except.yml deleted: changelogs/fragments/1041-bug-zos-submit-job-honor-return-output-literally.yml deleted: changelogs/fragments/1042-missing-zoau-imports.yml deleted: changelogs/fragments/1043-bug-title-zos_operator-is-passing-wrong-value-to-zoauopercmd.yml deleted: changelogs/fragments/1045-local-uss-unarchive.yml deleted: changelogs/fragments/1048-Update_sanity_tests_ignore.yml deleted: changelogs/fragments/1048-update-ac-tool-pyyaml-version.yml deleted: changelogs/fragments/1049-xmit-temporary-data-sets.yml deleted: changelogs/fragments/1051-try-except-pass-zos_mvs_raw.yml deleted: changelogs/fragments/1052-try-except-pass-dd-statement.yml deleted: changelogs/fragments/1053-Enabler_1002_test_collections_on_ansible_core_2_16.yml deleted: changelogs/fragments/1055-remove-subprocess-encode.yml deleted: changelogs/fragments/1056-Update_sanity_ignore_2_16.yml deleted: changelogs/fragments/1057-module_zos_mvs_raw_errors_with_long_multi_line_quoted_string_in_content_field.yml deleted: changelogs/fragments/1060-remote_tmp_zos_script.yml deleted: changelogs/fragments/1064-corruped-second-copy.yml deleted: changelogs/fragments/1065-rexx-exec-tso_command.yml deleted: changelogs/fragments/1066-Mvs_to_non_existent_mvs_copy_destination_attrs_match_up.yml deleted: changelogs/fragments/1073-action_plugin_does_not_clean_up_remote_temporary_files_after_completion.yml deleted: changelogs/fragments/1074-improve-job-submit-error-msgs.yml deleted: changelogs/fragments/1077-modify-uss-extraction.yml deleted: changelogs/fragments/1089-update-managed_node_doc.yml deleted: changelogs/fragments/1091-Update_undocumented_argument_and_import_exception.yml deleted: changelogs/fragments/1101-fix-undefined-var.yml deleted: changelogs/fragments/1158-Test-zos-fetch-find-remove-hardcoded-datasets.yml deleted: changelogs/fragments/1163-Refactor_calls_to_use_new_alias_and_execute_options.yml deleted: changelogs/fragments/1176-copy-members.yml deleted: changelogs/fragments/1195-Add_prefer_volumes_user.yml deleted: changelogs/fragments/1200-zos_backup_restore-sanity-issues.yml deleted: changelogs/fragments/977-remove-hard-coded-vols-and-datasets.yml deleted: changelogs/fragments/v1.9.0-beta.1_summary.yml --------- Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Signed-off-by: Fernando Flores <fernandofloresdev@gmail.com> Co-authored-by: Rich Parker <richp405@gmail.com> Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> Co-authored-by: ketankelkar <ktnklkr@gmail.com> Co-authored-by: trevor-glassey <Trevor.Glassey@ibm.com> Co-authored-by: Ivan Moreno <iamorenosoto@gmail.com> Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@mbp-de-andre-2.gdl.mex.ibm.com> Co-authored-by: André Marcel Gutiérrez Benítez <amgutierrezbenitez@hotmail.com> --- .ansible-lint | 8 ++ .gitignore | 2 +- CHANGELOG.rst | 44 ++++++++- README.md | 12 ++- ac | 6 +- changelogs/.plugin-cache.yaml | 7 +- changelogs/changelog.yaml | 99 ++++++++++++++++++- changelogs/fragments/1016-remove-randint.yml | 5 - changelogs/fragments/1036-apf-try-except.yml | 4 - ...bmit-job-honor-return-output-literally.yml | 4 - .../fragments/1042-missing-zoau-imports.yml | 10 -- ...-is-passing-wrong-value-to-zoauopercmd.yml | 8 -- .../fragments/1045-local-uss-unarchive.yml | 5 - .../1048-Update_sanity_tests_ignore.yml | 8 -- .../1048-update-ac-tool-pyyaml-version.yml | 8 -- .../1049-xmit-temporary-data-sets.yml | 4 - .../1051-try-except-pass-zos_mvs_raw.yml | 4 - .../1052-try-except-pass-dd-statement.yml | 4 - ..._test_collections_on_ansible_core_2_16.yml | 4 - .../1055-remove-subprocess-encode.yml | 4 - .../1056-Update_sanity_ignore_2_16.yml | 4 - ...ti_line_quoted_string_in_content_field.yml | 12 --- .../fragments/1060-remote_tmp_zos_script.yml | 5 - .../fragments/1064-corruped-second-copy.yml | 5 - .../fragments/1065-rexx-exec-tso_command.yml | 4 - ...nt_mvs_copy_destination_attrs_match_up.yml | 5 - ...emote_temporary_files_after_completion.yml | 4 - .../1074-improve-job-submit-error-msgs.yml | 3 - .../fragments/1077-modify-uss-extraction.yml | 3 - .../1089-update-managed_node_doc.yml | 3 - ...cumented_argument_and_import_exception.yml | 10 -- .../fragments/1101-fix-undefined-var.yml | 3 - ...s-fetch-find-remove-hardcoded-datasets.yml | 4 - ...s_to_use_new_alias_and_execute_options.yml | 3 - .../1200-zos_backup_restore-sanity-issues.yml | 4 - docs/source/conf.py | 2 +- docs/source/modules/zos_apf.rst | 4 + docs/source/modules/zos_archive.rst | 3 + docs/source/modules/zos_backup_restore.rst | 3 + docs/source/modules/zos_blockinfile.rst | 3 + docs/source/modules/zos_copy.rst | 16 +++ docs/source/modules/zos_data_set.rst | 4 + docs/source/modules/zos_encode.rst | 2 + docs/source/modules/zos_fetch.rst | 1 + docs/source/modules/zos_gather_facts.rst | 1 + docs/source/modules/zos_job_submit.rst | 5 + docs/source/modules/zos_lineinfile.rst | 4 + docs/source/modules/zos_mount.rst | 1 + docs/source/modules/zos_mvs_raw.rst | 8 ++ docs/source/modules/zos_operator.rst | 1 + .../modules/zos_operator_action_query.rst | 1 + docs/source/modules/zos_script.rst | 4 + docs/source/modules/zos_tso_command.rst | 1 + docs/source/modules/zos_unarchive.rst | 4 + docs/source/release_notes.rst | 86 +++++++++++++++- docs/templates/module.rst.j2 | 2 +- galaxy.yml | 2 +- meta/ibm_zos_core_meta.yml | 4 +- plugins/action/zos_job_submit.py | 2 +- plugins/modules/zos_operator.py | 16 +-- plugins/modules/zos_operator_action_query.py | 16 +-- scripts/venv.sh | 2 +- tests/config.yml | 34 +++++++ .../modules/test_zos_tso_command_func.py | 2 +- 64 files changed, 366 insertions(+), 190 deletions(-) delete mode 100644 changelogs/fragments/1016-remove-randint.yml delete mode 100644 changelogs/fragments/1036-apf-try-except.yml delete mode 100644 changelogs/fragments/1041-bug-zos-submit-job-honor-return-output-literally.yml delete mode 100644 changelogs/fragments/1042-missing-zoau-imports.yml delete mode 100644 changelogs/fragments/1043-bug-title-zos_operator-is-passing-wrong-value-to-zoauopercmd.yml delete mode 100644 changelogs/fragments/1045-local-uss-unarchive.yml delete mode 100644 changelogs/fragments/1048-Update_sanity_tests_ignore.yml delete mode 100644 changelogs/fragments/1048-update-ac-tool-pyyaml-version.yml delete mode 100644 changelogs/fragments/1049-xmit-temporary-data-sets.yml delete mode 100644 changelogs/fragments/1051-try-except-pass-zos_mvs_raw.yml delete mode 100644 changelogs/fragments/1052-try-except-pass-dd-statement.yml delete mode 100644 changelogs/fragments/1053-Enabler_1002_test_collections_on_ansible_core_2_16.yml delete mode 100644 changelogs/fragments/1055-remove-subprocess-encode.yml delete mode 100644 changelogs/fragments/1056-Update_sanity_ignore_2_16.yml delete mode 100644 changelogs/fragments/1057-module_zos_mvs_raw_errors_with_long_multi_line_quoted_string_in_content_field.yml delete mode 100644 changelogs/fragments/1060-remote_tmp_zos_script.yml delete mode 100644 changelogs/fragments/1064-corruped-second-copy.yml delete mode 100644 changelogs/fragments/1065-rexx-exec-tso_command.yml delete mode 100644 changelogs/fragments/1066-Mvs_to_non_existent_mvs_copy_destination_attrs_match_up.yml delete mode 100644 changelogs/fragments/1073-action_plugin_does_not_clean_up_remote_temporary_files_after_completion.yml delete mode 100644 changelogs/fragments/1074-improve-job-submit-error-msgs.yml delete mode 100644 changelogs/fragments/1077-modify-uss-extraction.yml delete mode 100644 changelogs/fragments/1089-update-managed_node_doc.yml delete mode 100644 changelogs/fragments/1091-Update_undocumented_argument_and_import_exception.yml delete mode 100644 changelogs/fragments/1101-fix-undefined-var.yml delete mode 100644 changelogs/fragments/1158-Test-zos-fetch-find-remove-hardcoded-datasets.yml delete mode 100644 changelogs/fragments/1163-Refactor_calls_to_use_new_alias_and_execute_options.yml delete mode 100644 changelogs/fragments/1200-zos_backup_restore-sanity-issues.yml create mode 100644 tests/config.yml diff --git a/.ansible-lint b/.ansible-lint index 7325803a2..821806e3a 100644 --- a/.ansible-lint +++ b/.ansible-lint @@ -1,6 +1,11 @@ +################################################################################ +# Copyright (c) IBM Corporation 2024 +################################################################################ +# For additonal doc, see https://ansible.readthedocs.io/projects/lint/configuring/ exclude_paths: - .tar.gz - __pycache__/ + - .ansible-lint - .cache/ - .DS_Store - .git/ @@ -35,3 +40,6 @@ parseable: true quiet: false use_default_rules: true verbosity: 1 +# Offline mode disables installation of requirements.yml and schema refreshing often +# found in project_root/collections/requirements.yml. +offline: true \ No newline at end of file diff --git a/.gitignore b/.gitignore index 9c4301951..77064aff1 100644 --- a/.gitignore +++ b/.gitignore @@ -245,7 +245,6 @@ venv/ ENV/ env.bak/ venv.bak/ - ################################### # Ansible z/OS Core Development # ################################### @@ -256,6 +255,7 @@ venv.bak/ .pytest_cache info.env shell_exploits.txt +importer_result.json ################################################################################ # Debugging .ignore, if you want to know why a particular file is being ignored diff --git a/CHANGELOG.rst b/CHANGELOG.rst index a5883246e..505a98474 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -5,6 +5,42 @@ ibm.ibm_zos_core Release Notes .. contents:: Topics +v1.9.0-beta.1 +============= + +Release Summary +--------------- + +Release Date: '2024-01-31' +This changelog describes all changes made to the modules and plugins included +in this collection. The release date is the date the changelog is created. +For additional details such as required dependencies and availability review +the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__ + +Minor Changes +------------- + +- zos_apf - Improves exception handling if there is a failure parsing the command response when operation selected is list. (https://github.com/ansible-collections/ibm_zos_core/pull/1036). +- zos_copy - Improve zos_copy performance when copying multiple members from one PDS/E to another. (https://github.com/ansible-collections/ibm_zos_core/pull/1176). +- zos_job_output - When passing a job ID and owner the module take as mutually exclusive. Change now allows the use of a job ID and owner at the same time. (https://github.com/ansible-collections/ibm_zos_core/pull/1078). +- zos_job_submit - Improve error messages in zos_job_submit to be clearer. (https://github.com/ansible-collections/ibm_zos_core/pull/1074). +- zos_job_submit - The module had undocumented parameter and uses as temporary file when the location of the file is LOCAL. Change now uses the same name as the src for the temporary file removing the addition of tmp_file to the arguments. (https://github.com/ansible-collections/ibm_zos_core/pull/1091). +- zos_job_submit - The module handling ZOAU import errors obscured the original traceback when an import error ocurred. Fix now passes correctly the context to the user. (https://github.com/ansible-collections/ibm_zos_core/pull/1091). +- zos_mvs_raw - when using the dd_input content option for instream-data, if the content was not properly indented according to the program which is generally a blank in columns 1 & 2, those columns would be truncated. Now, when setting instream-data, the module will ensure that all lines contain a blank in columns 1 and 2 and add blanks when not present while retaining a maximum length of 80 columns for any line. This is true for all content types; string, list of strings and when using a YAML block indicator. (https://github.com/ansible-collections/ibm_zos_core/pull/1057). - zos_mvs_raw - no examples were included with the module that demonstrated using a YAML block indicator, this now includes examples using a YAML block indicator. +- zos_tso_command - add example for executing explicitly a REXX script from a data set. (https://github.com/ansible-collections/ibm_zos_core/pull/1065). + +Bugfixes +-------- + +- zos_copy - When copying an executable data set with aliases and destination did not exist, destination data set was created with wrong attributes. Fix now creates destination data set with the same attributes as the source. (https://github.com/ansible-collections/ibm_zos_core/pull/1066). +- zos_copy - When performing a copy operation to an existing file, the copied file resulted in having corrupted contents. Fix now implements a workaround to not use the specific copy routine that corrupts the file contents. (https://github.com/ansible-collections/ibm_zos_core/pull/1064). +- zos_job_output - When passing a job ID or name less than 8 characters long, the module sent the full stack trace as the module's message. Change now allows the use of a shorter job ID or name, as well as wildcards. (https://github.com/ansible-collections/ibm_zos_core/pull/1078). +- zos_job_query - The module handling ZOAU import errors obscured the original traceback when an import error ocurred. Fix now passes correctly the context to the user. (https://github.com/ansible-collections/ibm_zos_core/pull/1042). +- zos_job_query - When passing a job ID or name less than 8 characters long, the module sent the full stack trace as the module's message. Change now allows the use of a shorter job ID or name, as well as wildcards. (https://github.com/ansible-collections/ibm_zos_core/pull/1078). +- zos_operator - The module handling ZOAU import errors obscured the original traceback when an import error ocurred. Fix now passes correctly the context to the user. (https://github.com/ansible-collections/ibm_zos_core/pull/1042). +- zos_unarchive - Using a local file with a USS format option failed when sending to remote because dest_data_set option had an empty dictionary. Fix now leaves dest_data_set as None when using a USS format option. (https://github.com/ansible-collections/ibm_zos_core/pull/1045). +- zos_unarchive - When unarchiving USS files, the module left temporary files on the remote. Change now removes temporary files. (https://github.com/ansible-collections/ibm_zos_core/pull/1073). + v1.8.0 ====== @@ -29,18 +65,18 @@ Minor Changes - zos_copy - introduces a new option 'aliases' to enable preservation of member aliases when copying data to partitioned data sets (PDS) destinations from USS or other PDS sources. Copying aliases of text based members to/from USS is not supported. (https://github.com/ansible-collections/ibm_zos_core/pull/1014) - zos_fetch - Add validation into path joins to detect unauthorized path traversals. (https://github.com/ansible-collections/ibm_zos_core/pull/962) - zos_job_submit - Change action plugin call from copy to zos_copy. (https://github.com/ansible-collections/ibm_zos_core/pull/951) +- zos_job_submit - Previous code did not return output, but still requested job data from the target system. This changes to honor return_output=false by not querying the job dd segments at all. (https://github.com/ansible-collections/ibm_zos_core/pull/1063). - zos_operator - Changed system to call 'wait=true' parameter to zoau call. Requires zoau 1.2.5 or later. (https://github.com/ansible-collections/ibm_zos_core/pull/976) - zos_operator_action_query - Add a max delay of 5 seconds on each part of the operator_action_query. Requires zoau 1.2.5 or later. (https://github.com/ansible-collections/ibm_zos_core/pull/976) -- zos_script - add support for remote_tmp from the Ansible configuration to setup where temporary files will be created, replacing the module option tmp_path. (https://github.com/ansible-collections/ibm_zos_core/pull/1068). -- zos_submit_job - Previous code did not return output, but still requested job data from the target system. This changes to honor return_output=false by not querying the job dd segments at all. (https://github.com/ansible-collections/ibm_zos_core/pull/1063). -- zos_tso_command - add example for executing explicitly a REXX script from a data set. (https://github.com/ansible-collections/ibm_zos_core/pull/1072). +- zos_script - Add support for remote_tmp from the Ansible configuration to setup where temporary files will be created, replacing the module option tmp_path. (https://github.com/ansible-collections/ibm_zos_core/pull/1068). +- zos_tso_command - Add example for executing explicitly a REXX script from a data set. (https://github.com/ansible-collections/ibm_zos_core/pull/1072). - zos_unarchive - Add validation into path joins to detect unauthorized path traversals. (https://github.com/ansible-collections/ibm_zos_core/pull/1029) - zos_unarchive - Enhanced test cases to use test lines the same length of the record length. (https://github.com/ansible-collections/ibm_zos_core/pull/965) Deprecated Features ------------------- -- zos_blockinfile - debug is deprecated in favor of 'as_json' (https://github.com/ansible-collections/ibm_zos_core/pull/904). +- zos_blockinfile debug - is deprecated in favor of 'as_json' (https://github.com/ansible-collections/ibm_zos_core/pull/904). Bugfixes -------- diff --git a/README.md b/README.md index 947740ad5..da3b114d4 100644 --- a/README.md +++ b/README.md @@ -31,7 +31,7 @@ The **IBM z/OS core collection** is following the **Red Hat® Ansible Certified Content for IBM Z®** method of distributing content. Collections will be developed in the open, and when content is ready for use, it is released to -[Ansible Galaxy](https://galaxy.ansible.com/search?keywords=zos_&order_by=-relevance&deprecated=false&type=collection&page=1) +[Ansible Galaxy](https://galaxy.ansible.com/ui/) for community adoption. Once contributors review community usage, feedback, and are satisfied with the content published, the collection will then be released to [Ansible Automation Hub](https://www.ansible.com/products/automation-hub) @@ -62,9 +62,17 @@ For **Ansible Automation Platform** (AAP) users, review the and [AAP Life Cycle](https://access.redhat.com/support/policy/updates/ansible-automation-platform) for more more information on supported versions of Ansible. +Other Dependencies +================== +This release of the **IBM z/OS core collection** requires the z/OS managed node have: +- [z/OS](https://www.ibm.com/docs/en/zos) V2R4 or later. +- [z/OS shell](https://www.ibm.com/support/knowledgecenter/en/SSLTBW_2.4.0/com.ibm.zos.v2r4.bpxa400/part1.htm). +- [IBM Open Enterprise SDK for Python](https://www.ibm.com/products/open-enterprise-python-zos) 3.9 - 3.11. +- [IBM Z Open Automation Utilities](https://www.ibm.com/docs/en/zoau/1.2.x) 1.2.5 (or later) but prior to version 1.3. + Copyright ========= -© Copyright IBM Corporation 2020-2023. +© Copyright IBM Corporation 2020-2024. License ======= diff --git a/ac b/ac index 1d06757f5..b5febedbb 100755 --- a/ac +++ b/ac @@ -70,7 +70,7 @@ MAG=$'\e[1;35m' CYN=$'\e[1;36m' ENDC=$'\e[0m' # 0 Docker is up, 1 docker is not up -DOCKER_INFO=`docker info> /dev/null 2>&1;echo $?` +DOCKER_INFO=`podman info> /dev/null 2>&1;echo $?` # ============================================================================== # Arg parsing helpers @@ -298,12 +298,12 @@ ac_sanity(){ if [ "${DOCKER_INFO}" == "0" ]; then if [ "${option_version}" ]; then message "Running ansible-test with docker container and python version ${option_version}." - . $VENV_BIN/activate && cd ~/.ansible/collections/ansible_collections/ibm/ibm_zos_core && \ + . $VENV_BIN/activate && export ANSIBLE_TEST_PREFER_PODMAN=1 && cd ~/.ansible/collections/ansible_collections/ibm/ibm_zos_core && \ ${VENV_BIN}/ansible-test sanity --python ${option_version} --requirements --docker default && \ cd ${CURR_DIR}; else message "Running ansible-test with docker container and all python versions." - . $VENV_BIN/activate && cd ~/.ansible/collections/ansible_collections/ibm/ibm_zos_core && \ + . $VENV_BIN/activate && export ANSIBLE_TEST_PREFER_PODMAN=1 && cd ~/.ansible/collections/ansible_collections/ibm/ibm_zos_core && \ ${VENV_BIN}/ansible-test sanity --requirements --docker default && \ cd ${CURR_DIR}; fi diff --git a/changelogs/.plugin-cache.yaml b/changelogs/.plugin-cache.yaml index 810d65965..899014cd9 100644 --- a/changelogs/.plugin-cache.yaml +++ b/changelogs/.plugin-cache.yaml @@ -106,6 +106,11 @@ plugins: name: zos_ping namespace: '' version_added: 1.1.0 + zos_script: + description: Run scripts in z/OS + name: zos_script + namespace: '' + version_added: 1.8.0 zos_tso_command: description: Execute TSO commands name: zos_tso_command @@ -126,4 +131,4 @@ plugins: strategy: {} test: {} vars: {} -version: 1.7.0 +version: 1.9.0-beta.1 diff --git a/changelogs/changelog.yaml b/changelogs/changelog.yaml index 35eeaebb0..c05af6436 100644 --- a/changelogs/changelog.yaml +++ b/changelogs/changelog.yaml @@ -1043,12 +1043,12 @@ releases: terse it does not clean the temporary data sets created. Fix now removes the temporary data sets. (https://github.com/ansible-collections/ibm_zos_core/pull/1054). minor_changes: - - zos_script - Add support for remote_tmp from the Ansible configuration to - setup where temporary files will be created, replacing the module option tmp_path. - (https://github.com/ansible-collections/ibm_zos_core/pull/1068). - zos_job_submit - Previous code did not return output, but still requested job data from the target system. This changes to honor return_output=false by not querying the job dd segments at all. (https://github.com/ansible-collections/ibm_zos_core/pull/1063). + - zos_script - Add support for remote_tmp from the Ansible configuration to + setup where temporary files will be created, replacing the module option tmp_path. + (https://github.com/ansible-collections/ibm_zos_core/pull/1068). - zos_tso_command - Add example for executing explicitly a REXX script from a data set. (https://github.com/ansible-collections/ibm_zos_core/pull/1072). release_summary: 'Release Date: ''2023-12-08'' @@ -1176,3 +1176,96 @@ releases: name: zos_script namespace: '' release_date: '2023-10-24' + 1.9.0-beta.1: + changes: + bugfixes: + - zos_copy - When copying an executable data set with aliases and destination + did not exist, destination data set was created with wrong attributes. Fix + now creates destination data set with the same attributes as the source. (https://github.com/ansible-collections/ibm_zos_core/pull/1066). + - zos_copy - When performing a copy operation to an existing file, the copied + file resulted in having corrupted contents. Fix now implements a workaround + to not use the specific copy routine that corrupts the file contents. (https://github.com/ansible-collections/ibm_zos_core/pull/1064). + - zos_job_output - When passing a job ID or name less than 8 characters long, + the module sent the full stack trace as the module's message. Change now allows + the use of a shorter job ID or name, as well as wildcards. (https://github.com/ansible-collections/ibm_zos_core/pull/1078). + - zos_job_query - The module handling ZOAU import errors obscured the original + traceback when an import error ocurred. Fix now passes correctly the context + to the user. (https://github.com/ansible-collections/ibm_zos_core/pull/1042). + - zos_job_query - When passing a job ID or name less than 8 characters long, + the module sent the full stack trace as the module's message. Change now allows + the use of a shorter job ID or name, as well as wildcards. (https://github.com/ansible-collections/ibm_zos_core/pull/1078). + - zos_operator - The module handling ZOAU import errors obscured the original + traceback when an import error ocurred. Fix now passes correctly the context + to the user. (https://github.com/ansible-collections/ibm_zos_core/pull/1042). + - zos_unarchive - Using a local file with a USS format option failed when sending + to remote because dest_data_set option had an empty dictionary. Fix now leaves + dest_data_set as None when using a USS format option. (https://github.com/ansible-collections/ibm_zos_core/pull/1045). + - zos_unarchive - When unarchiving USS files, the module left temporary files + on the remote. Change now removes temporary files. (https://github.com/ansible-collections/ibm_zos_core/pull/1073). + minor_changes: + - zos_apf - Improves exception handling if there is a failure parsing the command + response when operation selected is list. (https://github.com/ansible-collections/ibm_zos_core/pull/1036). + - zos_copy - Improve zos_copy performance when copying multiple members from + one PDS/E to another. (https://github.com/ansible-collections/ibm_zos_core/pull/1176). + - zos_job_output - When passing a job ID and owner the module take as mutually + exclusive. Change now allows the use of a job ID and owner at the same time. + (https://github.com/ansible-collections/ibm_zos_core/pull/1078). + - zos_job_submit - Improve error messages in zos_job_submit to be clearer. (https://github.com/ansible-collections/ibm_zos_core/pull/1074). + - zos_job_submit - The module had undocumented parameter and uses as temporary + file when the location of the file is LOCAL. Change now uses the same name + as the src for the temporary file removing the addition of tmp_file to the + arguments. (https://github.com/ansible-collections/ibm_zos_core/pull/1091). + - zos_job_submit - The module handling ZOAU import errors obscured the original + traceback when an import error ocurred. Fix now passes correctly the context + to the user. (https://github.com/ansible-collections/ibm_zos_core/pull/1091). + - zos_mvs_raw - when using the dd_input content option for instream-data, if + the content was not properly indented according to the program which is generally + a blank in columns 1 & 2, those columns would be truncated. Now, when setting + instream-data, the module will ensure that all lines contain a blank in columns + 1 and 2 and add blanks when not present while retaining a maximum length of + 80 columns for any line. This is true for all content types; string, list + of strings and when using a YAML block indicator. (https://github.com/ansible-collections/ibm_zos_core/pull/1057). + - zos_mvs_raw - no examples were included with the module that demonstrated + using a YAML block indicator, this now includes examples using a YAML block + indicator. + - zos_tso_command - add example for executing explicitly a REXX script from + a data set. (https://github.com/ansible-collections/ibm_zos_core/pull/1065). + release_summary: 'Release Date: ''2024-01-31'' + + This changelog describes all changes made to the modules and plugins included + + in this collection. The release date is the date the changelog is created. + + For additional details such as required dependencies and availability review + + the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__' + fragments: + - 1016-remove-randint.yml + - 1036-apf-try-except.yml + - 1042-missing-zoau-imports.yml + - 1045-local-uss-unarchive.yml + - 1048-Update_sanity_tests_ignore.yml + - 1048-update-ac-tool-pyyaml-version.yml + - 1051-try-except-pass-zos_mvs_raw.yml + - 1052-try-except-pass-dd-statement.yml + - 1053-Enabler_1002_test_collections_on_ansible_core_2_16.yml + - 1055-remove-subprocess-encode.yml + - 1056-Update_sanity_ignore_2_16.yml + - 1057-module_zos_mvs_raw_errors_with_long_multi_line_quoted_string_in_content_field.yml + - 1064-corruped-second-copy.yml + - 1065-rexx-exec-tso_command.yml + - 1066-Mvs_to_non_existent_mvs_copy_destination_attrs_match_up.yml + - 1073-action_plugin_does_not_clean_up_remote_temporary_files_after_completion.yml + - 1074-improve-job-submit-error-msgs.yml + - 1077-modify-uss-extraction.yml + - 1078-short_job_name_sends_back_a_value_error.yaml + - 1091-Update_undocumented_argument_and_import_exception.yml + - 1101-fix-undefined-var.yml + - 1158-Test-zos-fetch-find-remove-hardcoded-datasets.yml + - 1163-Refactor_calls_to_use_new_alias_and_execute_options.yml + - 1176-copy-members.yml + - 1195-Add_prefer_volumes_user.yml + - 1200-zos_backup_restore-sanity-issues.yml + - 977-remove-hard-coded-vols-and-datasets.yml + - v1.9.0-beta.1_summary.yml + release_date: '2024-02-01' diff --git a/changelogs/fragments/1016-remove-randint.yml b/changelogs/fragments/1016-remove-randint.yml deleted file mode 100644 index baac7fff9..000000000 --- a/changelogs/fragments/1016-remove-randint.yml +++ /dev/null @@ -1,5 +0,0 @@ -trivial: - - module_utils/data_set - Replace the use of random.randint to random.sample - to generate random member names, random.randint raised a warning while - scanning with bandit. - (https://github.com/ansible-collections/ibm_zos_core/pull/1016) \ No newline at end of file diff --git a/changelogs/fragments/1036-apf-try-except.yml b/changelogs/fragments/1036-apf-try-except.yml deleted file mode 100644 index 16e8ab6c7..000000000 --- a/changelogs/fragments/1036-apf-try-except.yml +++ /dev/null @@ -1,4 +0,0 @@ -minor_changes: - - zos_apf - Improves exception handling if there is a failure - parsing the command response when operation selected is list. - (https://github.com/ansible-collections/ibm_zos_core/pull/1036). diff --git a/changelogs/fragments/1041-bug-zos-submit-job-honor-return-output-literally.yml b/changelogs/fragments/1041-bug-zos-submit-job-honor-return-output-literally.yml deleted file mode 100644 index 726397d2d..000000000 --- a/changelogs/fragments/1041-bug-zos-submit-job-honor-return-output-literally.yml +++ /dev/null @@ -1,4 +0,0 @@ -minor_changes: - - zos_submit_job - Previous code did not return output, but still requested job data from the target system. - This changes to honor return_output=false by not querying the job dd segments at all. - (https://github.com/ansible-collections/ibm_zos_core/pull/1058). \ No newline at end of file diff --git a/changelogs/fragments/1042-missing-zoau-imports.yml b/changelogs/fragments/1042-missing-zoau-imports.yml deleted file mode 100644 index a91f6de48..000000000 --- a/changelogs/fragments/1042-missing-zoau-imports.yml +++ /dev/null @@ -1,10 +0,0 @@ -bugfixes: - - zos_job_query - The module handling ZOAU import errors obscured the - original traceback when an import error ocurred. Fix now passes correctly - the context to the user. - (https://github.com/ansible-collections/ibm_zos_core/pull/1042). - - - zos_operator - The module handling ZOAU import errors obscured the - original traceback when an import error ocurred. Fix now passes correctly - the context to the user. - (https://github.com/ansible-collections/ibm_zos_core/pull/1042). \ No newline at end of file diff --git a/changelogs/fragments/1043-bug-title-zos_operator-is-passing-wrong-value-to-zoauopercmd.yml b/changelogs/fragments/1043-bug-title-zos_operator-is-passing-wrong-value-to-zoauopercmd.yml deleted file mode 100644 index 06f9a264a..000000000 --- a/changelogs/fragments/1043-bug-title-zos_operator-is-passing-wrong-value-to-zoauopercmd.yml +++ /dev/null @@ -1,8 +0,0 @@ -bugfixes: - - zos_operator - The module was ignoring the wait time argument. - The module now passes the wait time argument to ZOAU. - (https://github.com/ansible-collections/ibm_zos_core/pull/1044). - - - zos_operator_action_query - The module was ignoring the wait time argument. - The module now passes the wait time argument to ZOAU. - (https://github.com/ansible-collections/ibm_zos_core/pull/1044). \ No newline at end of file diff --git a/changelogs/fragments/1045-local-uss-unarchive.yml b/changelogs/fragments/1045-local-uss-unarchive.yml deleted file mode 100644 index 84bc5508c..000000000 --- a/changelogs/fragments/1045-local-uss-unarchive.yml +++ /dev/null @@ -1,5 +0,0 @@ -bugfixes: - - zos_unarchive - Using a local file with a USS format option failed when sending to - remote because dest_data_set option had an empty dictionary. Fix now leaves dest_data_set - as None when using a USS format option. - (https://github.com/ansible-collections/ibm_zos_core/pull/1045). \ No newline at end of file diff --git a/changelogs/fragments/1048-Update_sanity_tests_ignore.yml b/changelogs/fragments/1048-Update_sanity_tests_ignore.yml deleted file mode 100644 index 5d2960d28..000000000 --- a/changelogs/fragments/1048-Update_sanity_tests_ignore.yml +++ /dev/null @@ -1,8 +0,0 @@ -trivial: - - zos_copy - change data type of parameter src from path to str inside AnsibleModule util. - - zos_copy - deprecate add_file_common_args argument. - - zos_copy - add owner and group to parameters inside AnsibleModule util. - - zos_copy - remove copy_member of AnsibleModule util as parameter and add to code logic. - - zos_copy - remove doc-default-does-not-match-spec ignore to ignore 2.14. - - zos_copy - remove doc-type-does-not-match-spec ignore to ignore 2.14. - (https://github.com/ansible-collections/ibm_zos_core/pull/1048). diff --git a/changelogs/fragments/1048-update-ac-tool-pyyaml-version.yml b/changelogs/fragments/1048-update-ac-tool-pyyaml-version.yml deleted file mode 100644 index 309862cfb..000000000 --- a/changelogs/fragments/1048-update-ac-tool-pyyaml-version.yml +++ /dev/null @@ -1,8 +0,0 @@ -trivial: - - ac - PyYaml version 5.4.1 was being installed and not having a wheel to go - with the python versions 11 and 12. This fixes the issue by freezing the - the version to 6.0.1. - - ac - would not discover python installations not in PATH. This change - extends the search path to include common python installation locations - not in path. - (https://github.com/ansible-collections/ibm_zos_core/pull/1083). diff --git a/changelogs/fragments/1049-xmit-temporary-data-sets.yml b/changelogs/fragments/1049-xmit-temporary-data-sets.yml deleted file mode 100644 index 5ef0f2078..000000000 --- a/changelogs/fragments/1049-xmit-temporary-data-sets.yml +++ /dev/null @@ -1,4 +0,0 @@ -bugfixes: - - zos_unarchive - When zos_unarchive fails during unpack either with xmit or terse it does not clean the - temporary data sets created. Fix now removes the temporary data sets. - (https://github.com/ansible-collections/ibm_zos_core/pull/1049). \ No newline at end of file diff --git a/changelogs/fragments/1051-try-except-pass-zos_mvs_raw.yml b/changelogs/fragments/1051-try-except-pass-zos_mvs_raw.yml deleted file mode 100644 index 59b33d02c..000000000 --- a/changelogs/fragments/1051-try-except-pass-zos_mvs_raw.yml +++ /dev/null @@ -1,4 +0,0 @@ -trivial: - - zos_mvs_raw - Removed Try, Except, Pass from the code, try block is in place to ignore any errors, - pass statement was changed to a variable assignment. This does not change any behavior. - (https://github.com/ansible-collections/ibm_zos_core/pull/1051). \ No newline at end of file diff --git a/changelogs/fragments/1052-try-except-pass-dd-statement.yml b/changelogs/fragments/1052-try-except-pass-dd-statement.yml deleted file mode 100644 index 42315337c..000000000 --- a/changelogs/fragments/1052-try-except-pass-dd-statement.yml +++ /dev/null @@ -1,4 +0,0 @@ -trivial: - - zos_mvs_raw - Removed Try, Except, Pass from the code, instead catching DatasetDeleteError - and pass only in that case, any other exception will be raised. - (https://github.com/ansible-collections/ibm_zos_core/pull/1052). diff --git a/changelogs/fragments/1053-Enabler_1002_test_collections_on_ansible_core_2_16.yml b/changelogs/fragments/1053-Enabler_1002_test_collections_on_ansible_core_2_16.yml deleted file mode 100644 index ac3c24bb5..000000000 --- a/changelogs/fragments/1053-Enabler_1002_test_collections_on_ansible_core_2_16.yml +++ /dev/null @@ -1,4 +0,0 @@ -trivial: - - zos_archive - add missing-gplv3-license ignore to ignore 2.16. - - zos_unarchive - add missing-gplv3-license ignore to ignore 2.16. - (https://github.com/ansible-collections/ibm_zos_core/pull/1053). diff --git a/changelogs/fragments/1055-remove-subprocess-encode.yml b/changelogs/fragments/1055-remove-subprocess-encode.yml deleted file mode 100644 index 7e458dc09..000000000 --- a/changelogs/fragments/1055-remove-subprocess-encode.yml +++ /dev/null @@ -1,4 +0,0 @@ -trivial: - - encode_utils - Removed use of subprocess from system utils, since the only - use of it could be replaced for AnsibleModule runcommand method. - (https://github.com/ansible-collections/ibm_zos_core/pull/1055). \ No newline at end of file diff --git a/changelogs/fragments/1056-Update_sanity_ignore_2_16.yml b/changelogs/fragments/1056-Update_sanity_ignore_2_16.yml deleted file mode 100644 index a5b192519..000000000 --- a/changelogs/fragments/1056-Update_sanity_ignore_2_16.yml +++ /dev/null @@ -1,4 +0,0 @@ -trivial: - - zos_copy - remove doc-default-does-not-match-spec 2.16 ignore file. - - zos_copy - remove doc-type-does-not-match-spec 2.16 ignore file. - (https://github.com/ansible-collections/ibm_zos_core/pull/1056). diff --git a/changelogs/fragments/1057-module_zos_mvs_raw_errors_with_long_multi_line_quoted_string_in_content_field.yml b/changelogs/fragments/1057-module_zos_mvs_raw_errors_with_long_multi_line_quoted_string_in_content_field.yml deleted file mode 100644 index 49a3a3516..000000000 --- a/changelogs/fragments/1057-module_zos_mvs_raw_errors_with_long_multi_line_quoted_string_in_content_field.yml +++ /dev/null @@ -1,12 +0,0 @@ -minor_changes: - - zos_mvs_raw - when using the dd_input content option for instream-data, if - the content was not properly indented according to the program which is - generally a blank in columns 1 & 2, those columns would be truncated. Now, - when setting instream-data, the module will ensure that all lines contain - a blank in columns 1 and 2 and add blanks when not present while retaining - a maximum length of 80 columns for any line. This is true for all content - types; string, list of strings and when using a YAML block indicator. - (https://github.com/ansible-collections/ibm_zos_core/pull/1057). - - zos_mvs_raw - no examples were included with the module that demonstrated - using a YAML block indicator, this now includes examples using a YAML - block indicator. diff --git a/changelogs/fragments/1060-remote_tmp_zos_script.yml b/changelogs/fragments/1060-remote_tmp_zos_script.yml deleted file mode 100644 index 1185f3a1b..000000000 --- a/changelogs/fragments/1060-remote_tmp_zos_script.yml +++ /dev/null @@ -1,5 +0,0 @@ -minor_changes: - - zos_script - add support for remote_tmp from the Ansible - configuration to setup where temporary files will be created, - replacing the module option tmp_path. - (https://github.com/ansible-collections/ibm_zos_core/pull/1060). \ No newline at end of file diff --git a/changelogs/fragments/1064-corruped-second-copy.yml b/changelogs/fragments/1064-corruped-second-copy.yml deleted file mode 100644 index 82a04426e..000000000 --- a/changelogs/fragments/1064-corruped-second-copy.yml +++ /dev/null @@ -1,5 +0,0 @@ -bugfixes: - - zos_copy - When performing a copy operation to an existing file, the copied - file resulted in having corrupted contents. Fix now implements a workaround - to not use the specific copy routine that corrupts the file contents. - (https://github.com/ansible-collections/ibm_zos_core/pull/1064). diff --git a/changelogs/fragments/1065-rexx-exec-tso_command.yml b/changelogs/fragments/1065-rexx-exec-tso_command.yml deleted file mode 100644 index 5d20ccfd6..000000000 --- a/changelogs/fragments/1065-rexx-exec-tso_command.yml +++ /dev/null @@ -1,4 +0,0 @@ -minor_changes: - - zos_tso_command - add example for executing explicitly a REXX script from - a data set. - (https://github.com/ansible-collections/ibm_zos_core/pull/1065). diff --git a/changelogs/fragments/1066-Mvs_to_non_existent_mvs_copy_destination_attrs_match_up.yml b/changelogs/fragments/1066-Mvs_to_non_existent_mvs_copy_destination_attrs_match_up.yml deleted file mode 100644 index 05e1c9ce4..000000000 --- a/changelogs/fragments/1066-Mvs_to_non_existent_mvs_copy_destination_attrs_match_up.yml +++ /dev/null @@ -1,5 +0,0 @@ -bugfixes: - - zos_copy - When copying an executable data set with aliases and destination did not exist, - destination data set was created with wrong attributes. Fix now creates destination data set - with the same attributes as the source. - (https://github.com/ansible-collections/ibm_zos_core/pull/1066). \ No newline at end of file diff --git a/changelogs/fragments/1073-action_plugin_does_not_clean_up_remote_temporary_files_after_completion.yml b/changelogs/fragments/1073-action_plugin_does_not_clean_up_remote_temporary_files_after_completion.yml deleted file mode 100644 index 6532e60ae..000000000 --- a/changelogs/fragments/1073-action_plugin_does_not_clean_up_remote_temporary_files_after_completion.yml +++ /dev/null @@ -1,4 +0,0 @@ -bugfixes: - - zos_unarchive - When unarchiving USS files, the module left temporary files on the remote. - Change now removes temporary files. - (https://github.com/ansible-collections/ibm_zos_core/pull/1073). \ No newline at end of file diff --git a/changelogs/fragments/1074-improve-job-submit-error-msgs.yml b/changelogs/fragments/1074-improve-job-submit-error-msgs.yml deleted file mode 100644 index 769131a2b..000000000 --- a/changelogs/fragments/1074-improve-job-submit-error-msgs.yml +++ /dev/null @@ -1,3 +0,0 @@ -minor_changes: - - zos_job_submit - Improve error messages in zos_job_submit to be clearer. - (https://github.com/ansible-collections/ibm_zos_core/pull/1074). diff --git a/changelogs/fragments/1077-modify-uss-extraction.yml b/changelogs/fragments/1077-modify-uss-extraction.yml deleted file mode 100644 index 0886dfab1..000000000 --- a/changelogs/fragments/1077-modify-uss-extraction.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_unarchive - Change the USS file extraction method from extractall to a custom function to extract filtered members. - (https://github.com/ansible-collections/ibm_zos_core/pull/1077). \ No newline at end of file diff --git a/changelogs/fragments/1089-update-managed_node_doc.yml b/changelogs/fragments/1089-update-managed_node_doc.yml deleted file mode 100644 index e0c7ff18b..000000000 --- a/changelogs/fragments/1089-update-managed_node_doc.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - managed node doc - updated the managed node documentation links and content. - (https://github.com/ansible-collections/ibm_zos_core/pull/1089). diff --git a/changelogs/fragments/1091-Update_undocumented_argument_and_import_exception.yml b/changelogs/fragments/1091-Update_undocumented_argument_and_import_exception.yml deleted file mode 100644 index d1d1560f8..000000000 --- a/changelogs/fragments/1091-Update_undocumented_argument_and_import_exception.yml +++ /dev/null @@ -1,10 +0,0 @@ -minor_changes: - - zos_job_submit - The module handling ZOAU import errors obscured the - original traceback when an import error ocurred. Fix now passes correctly - the context to the user. - (https://github.com/ansible-collections/ibm_zos_core/pull/1091). - - - zos_job_submit - The module had undocumented parameter and uses as temporary file - when the location of the file is LOCAL. Change now uses the same name as the src - for the temporary file removing the addition of tmp_file to the arguments. - (https://github.com/ansible-collections/ibm_zos_core/pull/1091). diff --git a/changelogs/fragments/1101-fix-undefined-var.yml b/changelogs/fragments/1101-fix-undefined-var.yml deleted file mode 100644 index 1d9eeba3c..000000000 --- a/changelogs/fragments/1101-fix-undefined-var.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_job_submit - Fix undefined variable that got deleted during a conflicting merge. - (https://github.com/ansible-collections/ibm_zos_core/pull/1101). \ No newline at end of file diff --git a/changelogs/fragments/1158-Test-zos-fetch-find-remove-hardcoded-datasets.yml b/changelogs/fragments/1158-Test-zos-fetch-find-remove-hardcoded-datasets.yml deleted file mode 100644 index 7a470d57c..000000000 --- a/changelogs/fragments/1158-Test-zos-fetch-find-remove-hardcoded-datasets.yml +++ /dev/null @@ -1,4 +0,0 @@ -trivial: - - zos_fetch - remove hardcoded datasets and dependencies from test cases. - - zos_find - remove hardcoded datasets and dependencies from test cases. - (https://github.com/ansible-collections/ibm_zos_core/pull/1158). \ No newline at end of file diff --git a/changelogs/fragments/1163-Refactor_calls_to_use_new_alias_and_execute_options.yml b/changelogs/fragments/1163-Refactor_calls_to_use_new_alias_and_execute_options.yml deleted file mode 100644 index 6cd512427..000000000 --- a/changelogs/fragments/1163-Refactor_calls_to_use_new_alias_and_execute_options.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_copy - Change call to ZOAU python API by using a dictionary to arguments. - (https://github.com/ansible-collections/ibm_zos_core/pull/1163). \ No newline at end of file diff --git a/changelogs/fragments/1200-zos_backup_restore-sanity-issues.yml b/changelogs/fragments/1200-zos_backup_restore-sanity-issues.yml deleted file mode 100644 index 27d40f560..000000000 --- a/changelogs/fragments/1200-zos_backup_restore-sanity-issues.yml +++ /dev/null @@ -1,4 +0,0 @@ -trivial: - - zos_backup_restore - Added space type choices to argument spec to remove - validate-modules:doc-choices-do-not-match-spec. - (https://github.com/ansible-collections/ibm_zos_core/pull/1200). diff --git a/docs/source/conf.py b/docs/source/conf.py index 9c7a6994e..f8dd69685 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -16,7 +16,7 @@ ############################################################################## project = 'IBM z/OS core collection' -copyright = '2020, 2021, IBM' +copyright = '2024, IBM' author = 'IBM' # The full version, including alpha/beta/rc tags diff --git a/docs/source/modules/zos_apf.rst b/docs/source/modules/zos_apf.rst index 195b34611..e9a55c007 100644 --- a/docs/source/modules/zos_apf.rst +++ b/docs/source/modules/zos_apf.rst @@ -54,6 +54,7 @@ force_dynamic | **required**: False | **type**: bool + | **default**: False volume @@ -78,6 +79,7 @@ sms | **required**: False | **type**: bool + | **default**: False operation @@ -144,6 +146,7 @@ persistent | **required**: False | **type**: bool + | **default**: False backup_name @@ -203,6 +206,7 @@ batch | **required**: False | **type**: bool + | **default**: False diff --git a/docs/source/modules/zos_archive.rst b/docs/source/modules/zos_archive.rst index 03eaafbae..525c7c0be 100644 --- a/docs/source/modules/zos_archive.rst +++ b/docs/source/modules/zos_archive.rst @@ -99,6 +99,7 @@ format | **required**: False | **type**: bool + | **default**: False @@ -180,6 +181,7 @@ remove | **required**: False | **type**: bool + | **default**: False dest_data_set @@ -326,6 +328,7 @@ force | **required**: False | **type**: bool + | **default**: False diff --git a/docs/source/modules/zos_backup_restore.rst b/docs/source/modules/zos_backup_restore.rst index 7c9a8a876..cc6c60d66 100644 --- a/docs/source/modules/zos_backup_restore.rst +++ b/docs/source/modules/zos_backup_restore.rst @@ -103,6 +103,7 @@ full_volume | **required**: False | **type**: bool + | **default**: False temp_volume @@ -130,6 +131,7 @@ recover | **required**: False | **type**: bool + | **default**: False overwrite @@ -139,6 +141,7 @@ overwrite | **required**: False | **type**: bool + | **default**: False sms_storage_class diff --git a/docs/source/modules/zos_blockinfile.rst b/docs/source/modules/zos_blockinfile.rst index e1e11486c..f3eef5967 100644 --- a/docs/source/modules/zos_blockinfile.rst +++ b/docs/source/modules/zos_blockinfile.rst @@ -124,6 +124,7 @@ backup | **required**: False | **type**: bool + | **default**: False backup_name @@ -171,6 +172,7 @@ force | **required**: False | **type**: bool + | **default**: False indentation @@ -178,6 +180,7 @@ indentation | **required**: False | **type**: int + | **default**: 0 diff --git a/docs/source/modules/zos_copy.rst b/docs/source/modules/zos_copy.rst index 004671ebc..86a3a9463 100644 --- a/docs/source/modules/zos_copy.rst +++ b/docs/source/modules/zos_copy.rst @@ -41,6 +41,7 @@ asa_text | **required**: False | **type**: bool + | **default**: False backup @@ -52,6 +53,7 @@ backup | **required**: False | **type**: bool + | **default**: False backup_name @@ -155,6 +157,7 @@ force | **required**: False | **type**: bool + | **default**: False force_lock @@ -168,6 +171,7 @@ force_lock | **required**: False | **type**: bool + | **default**: False ignore_sftp_stderr @@ -177,6 +181,7 @@ ignore_sftp_stderr | **required**: False | **type**: bool + | **default**: False is_binary @@ -188,6 +193,7 @@ is_binary | **required**: False | **type**: bool + | **default**: False executable @@ -203,6 +209,7 @@ executable | **required**: False | **type**: bool + | **default**: False aliases @@ -214,6 +221,7 @@ aliases | **required**: False | **type**: bool + | **default**: False local_follow @@ -246,6 +254,7 @@ remote_src | **required**: False | **type**: bool + | **default**: False src @@ -261,6 +270,8 @@ src If ``src`` is a directory and ends with "/", the contents of it will be copied into the root of ``dest``. If it doesn't end with "/", the directory itself will be copied. + If ``src`` is a directory or a file, file names will be truncated and/or modified to ensure a valid name for a data set or member. + If ``src`` is a VSAM data set, ``dest`` must also be a VSAM. Wildcards can be used to copy multiple PDS/PDSE members to another PDS/PDSE. @@ -278,6 +289,7 @@ validate | **required**: False | **type**: bool + | **default**: False volume @@ -446,6 +458,7 @@ use_template | **required**: False | **type**: bool + | **default**: False template_parameters @@ -526,6 +539,7 @@ template_parameters | **required**: False | **type**: bool + | **default**: False trim_blocks @@ -543,6 +557,7 @@ template_parameters | **required**: False | **type**: bool + | **default**: False newline_sequence @@ -559,6 +574,7 @@ template_parameters | **required**: False | **type**: bool + | **default**: False diff --git a/docs/source/modules/zos_data_set.rst b/docs/source/modules/zos_data_set.rst index ddcc97a8b..70e798a08 100644 --- a/docs/source/modules/zos_data_set.rst +++ b/docs/source/modules/zos_data_set.rst @@ -266,6 +266,7 @@ replace | **required**: False | **type**: bool + | **default**: False tmp_hlq @@ -288,6 +289,7 @@ force | **required**: False | **type**: bool + | **default**: False batch @@ -537,6 +539,7 @@ batch | **required**: False | **type**: bool + | **default**: False force @@ -550,6 +553,7 @@ batch | **required**: False | **type**: bool + | **default**: False diff --git a/docs/source/modules/zos_encode.rst b/docs/source/modules/zos_encode.rst index 089208c5f..4c2294e24 100644 --- a/docs/source/modules/zos_encode.rst +++ b/docs/source/modules/zos_encode.rst @@ -86,6 +86,7 @@ backup | **required**: False | **type**: bool + | **default**: False backup_name @@ -110,6 +111,7 @@ backup_compress | **required**: False | **type**: bool + | **default**: False tmp_hlq diff --git a/docs/source/modules/zos_fetch.rst b/docs/source/modules/zos_fetch.rst index 66792fa87..21b573a2a 100644 --- a/docs/source/modules/zos_fetch.rst +++ b/docs/source/modules/zos_fetch.rst @@ -130,6 +130,7 @@ ignore_sftp_stderr | **required**: False | **type**: bool + | **default**: False diff --git a/docs/source/modules/zos_gather_facts.rst b/docs/source/modules/zos_gather_facts.rst index 63bd22701..0247ffd96 100644 --- a/docs/source/modules/zos_gather_facts.rst +++ b/docs/source/modules/zos_gather_facts.rst @@ -52,6 +52,7 @@ filter | **required**: False | **type**: list | **elements**: str + | **default**: [] diff --git a/docs/source/modules/zos_job_submit.rst b/docs/source/modules/zos_job_submit.rst index e0fd8e2d1..6cff37a6a 100644 --- a/docs/source/modules/zos_job_submit.rst +++ b/docs/source/modules/zos_job_submit.rst @@ -66,6 +66,7 @@ wait | **required**: False | **type**: bool + | **default**: False wait_time_s @@ -151,6 +152,7 @@ use_template | **required**: False | **type**: bool + | **default**: False template_parameters @@ -231,6 +233,7 @@ template_parameters | **required**: False | **type**: bool + | **default**: False trim_blocks @@ -248,6 +251,7 @@ template_parameters | **required**: False | **type**: bool + | **default**: False newline_sequence @@ -264,6 +268,7 @@ template_parameters | **required**: False | **type**: bool + | **default**: False diff --git a/docs/source/modules/zos_lineinfile.rst b/docs/source/modules/zos_lineinfile.rst index e352007df..f7005017e 100644 --- a/docs/source/modules/zos_lineinfile.rst +++ b/docs/source/modules/zos_lineinfile.rst @@ -83,6 +83,7 @@ backrefs | **required**: False | **type**: bool + | **default**: False insertafter @@ -140,6 +141,7 @@ backup | **required**: False | **type**: bool + | **default**: False backup_name @@ -173,6 +175,7 @@ firstmatch | **required**: False | **type**: bool + | **default**: False encoding @@ -194,6 +197,7 @@ force | **required**: False | **type**: bool + | **default**: False diff --git a/docs/source/modules/zos_mount.rst b/docs/source/modules/zos_mount.rst index 9ff7ba106..42e8a8ea6 100644 --- a/docs/source/modules/zos_mount.rst +++ b/docs/source/modules/zos_mount.rst @@ -113,6 +113,7 @@ persistent | **required**: False | **type**: bool + | **default**: False backup_name diff --git a/docs/source/modules/zos_mvs_raw.rst b/docs/source/modules/zos_mvs_raw.rst index fb6a1a726..3ebedadd5 100644 --- a/docs/source/modules/zos_mvs_raw.rst +++ b/docs/source/modules/zos_mvs_raw.rst @@ -51,6 +51,7 @@ auth | **required**: False | **type**: bool + | **default**: False verbose @@ -60,6 +61,7 @@ verbose | **required**: False | **type**: bool + | **default**: False dds @@ -156,6 +158,7 @@ dds | **required**: False | **type**: bool + | **default**: False replace @@ -173,6 +176,7 @@ dds | **required**: False | **type**: bool + | **default**: False backup @@ -182,6 +186,7 @@ dds | **required**: False | **type**: bool + | **default**: False space_type @@ -888,6 +893,7 @@ dds | **required**: False | **type**: bool + | **default**: False replace @@ -905,6 +911,7 @@ dds | **required**: False | **type**: bool + | **default**: False backup @@ -914,6 +921,7 @@ dds | **required**: False | **type**: bool + | **default**: False space_type diff --git a/docs/source/modules/zos_operator.rst b/docs/source/modules/zos_operator.rst index e0f65414f..9ad26d64c 100644 --- a/docs/source/modules/zos_operator.rst +++ b/docs/source/modules/zos_operator.rst @@ -42,6 +42,7 @@ verbose | **required**: False | **type**: bool + | **default**: False wait_time_s diff --git a/docs/source/modules/zos_operator_action_query.rst b/docs/source/modules/zos_operator_action_query.rst index acb06be50..b2e99d399 100644 --- a/docs/source/modules/zos_operator_action_query.rst +++ b/docs/source/modules/zos_operator_action_query.rst @@ -90,6 +90,7 @@ message_filter | **required**: False | **type**: bool + | **default**: False diff --git a/docs/source/modules/zos_script.rst b/docs/source/modules/zos_script.rst index bc8dff3c0..f51096361 100644 --- a/docs/source/modules/zos_script.rst +++ b/docs/source/modules/zos_script.rst @@ -113,6 +113,7 @@ use_template | **required**: False | **type**: bool + | **default**: False template_parameters @@ -193,6 +194,7 @@ template_parameters | **required**: False | **type**: bool + | **default**: False trim_blocks @@ -210,6 +212,7 @@ template_parameters | **required**: False | **type**: bool + | **default**: False newline_sequence @@ -226,6 +229,7 @@ template_parameters | **required**: False | **type**: bool + | **default**: False diff --git a/docs/source/modules/zos_tso_command.rst b/docs/source/modules/zos_tso_command.rst index 846cb93d8..816a859e7 100644 --- a/docs/source/modules/zos_tso_command.rst +++ b/docs/source/modules/zos_tso_command.rst @@ -44,6 +44,7 @@ max_rc | **required**: False | **type**: int + | **default**: 0 diff --git a/docs/source/modules/zos_unarchive.rst b/docs/source/modules/zos_unarchive.rst index ae3b92516..da80bd31a 100644 --- a/docs/source/modules/zos_unarchive.rst +++ b/docs/source/modules/zos_unarchive.rst @@ -83,6 +83,7 @@ format | **required**: False | **type**: bool + | **default**: False dest_volumes @@ -172,6 +173,7 @@ list | **required**: False | **type**: bool + | **default**: False dest_data_set @@ -336,6 +338,7 @@ force | **required**: False | **type**: bool + | **default**: False remote_src @@ -345,6 +348,7 @@ remote_src | **required**: False | **type**: bool + | **default**: False diff --git a/docs/source/release_notes.rst b/docs/source/release_notes.rst index b198d74de..6770aa879 100644 --- a/docs/source/release_notes.rst +++ b/docs/source/release_notes.rst @@ -6,6 +6,78 @@ Releases ======== +Version 1.9.0-beta.1 +==================== + +Minor Changes +------------- +- ``zos_apf`` - Improved exception handling when the module is unable to process a response originating as a batch update. +- ``zos_copy`` - Improved performance when copying multiple members from one PDS/E to another PDS/E. +- ``zos_job_output`` - Has been enhanced to allow for both a job ID and owner to be selected when obtaining job output, removing the prior mutual exclusivity. +- ``zos_operator`` - Improved the modules handling of ZOAU import errors allowing for the traceback to flow back to the source. +- ``zos_job_query`` - Improved the modules handling of ZOAU import errors allowing for the traceback to flow back to the source. +- ``zos_job_submit`` + + - Improved messages in the action plugin. + - Improved the action plugin performance, flow and use of undocumented variables. + - Improved the modules handling of ZOAU import errors allowing for the traceback to flow back to the source. +- ``zos_tso_command`` - Has been updated with a new example demonstrating how to explicitly execute a REXX script in a data set. +- ``zos_mvs_raw`` + + - Has been enhanced to ensure that **instream-data** for option **dd_input** contain blanks in columns 1 and 2 while retaining a maximum length + of 80 columns for strings and a list of strings. This is generally the requirement for most z/OS programs. + - Has been updated with new examples demonstrating a YAML block indicator, often helpful when wanting to control the + **instream-data** formatting. + + +Bugfixes +-------- + +- ``zos_copy`` + + - Fixed an issue when copying an aliased executable from a data set to a non-existent data set, the destination data sets primary + and secondary extents would not match the source data set extent sizes. + - Fixed an issue when performing a copy operation to an existing file, the copied file resulted in having corrupted contents. + +- ``zos_job_output`` - Fixed an issue that when using a job ID with less than 8 characters would result in a traceback. The fix + supports shorter job IDs as well as the use of wildcards. + +- ``zos_job_query`` - Fixed an issue that when using a job ID with less than 8 characters would result in a traceback. The fix + supports shorter job IDs as well as the use of wildcards. + +- ``zos_unarchive`` + + - Fixed an issue when using a local file with the USS format option that would fail sending it to the managed node. + - Fixed an issue that occurred when unarchiving USS files that would leave temporary files behind on the managed node. + +Known Issues +------------ + +Several modules have reported UTF-8 decoding errors when interacting with results that contain non-printable UTF-8 characters in the response. + +This occurs when a module receives content that does not correspond to a UTF-8 value. These include modules ``zos_job_submit``, ``zos_job_output``, +``zos_operator_action_query``` but are not limited to this list. This will be addressed in **ibm_zos_core** version 1.10.0-beta.1. Each case is +unique, some options to work around the error are below. + +- Specify that the ASA assembler option be enabled to instruct the assembler to use ANSI control characters instead of machine code control characters. +- Add **ignore_errors:true** to the playbook task so the task error will not fail the playbook. +- If the error is resulting from a batch job, add **ignore_errors:true** to the task and capture the output into a variable and extract the job ID with + a regular expression and then use ``zos_job_output`` to display the DD without the non-printable character such as the DD **JESMSGLG**. + +Availability +------------ + +* `Galaxy`_ +* `GitHub`_ + +Reference +--------- + +* Supported by `z/OS®`_ V2R4 or later +* Supported by the `z/OS® shell`_ +* Supported by `IBM Open Enterprise SDK for Python`_ `3.9`_ - `3.11`_ +* Supported by IBM `Z Open Automation Utilities 1.2.5`_ (or later) but prior to version 1.3. + Version 1.8.0 ============= @@ -66,8 +138,16 @@ Bugfixes Known Issues ------------ -- Several modules have reported UTF8 decoding errors when interacting with results that contain non-printable UTF8 characters in the response. This occurs when a module receives content that does not correspond to a UTF-8 value. These include modules `zos_job_submit`, `zos_job_output`, `zos_operator_action_query` but are not limited to this list. This will be addressed in `ibm_zos_core` version 1.10.0-beta.1. Each case is unique, some options to work around the error are below. - Specify that the ASA assembler option be enabled to instruct the assembler to use ANSI control characters instead of machine code control characters. - Add `ignore_errors:true` to the playbook task so the task error will not fail the playbook. - If the error is resulting from a batch job, add `ignore_errors:true` to the task and capture the output into a variable and extract the job ID with a regular expression and then use `zos_job_output` to display the DD without the non-printable character such as the DD `JESMSGLG`. -- With later versions of `ansible-core` used with `ibm_zos_core` collection a warning has started to appear "Module "ansible.builtin.command" returned non UTF-8 data in the JSON response" that is currently being reviewed. There are no recommendations at this point. +Several modules have reported UTF-8 decoding errors when interacting with results that contain non-printable UTF-8 characters in the response. + +This occurs when a module receives content that does not correspond to a UTF-8 value. These include modules ``zos_job_submit``, ``zos_job_output``, +``zos_operator_action_query``` but are not limited to this list. This will be addressed in **ibm_zos_core** version 1.10.0-beta.1. Each case is +unique, some options to work around the error are below. + +- Specify that the ASA assembler option be enabled to instruct the assembler to use ANSI control characters instead of machine code control characters. +- Add **ignore_errors:true** to the playbook task so the task error will not fail the playbook. +- If the error is resulting from a batch job, add **ignore_errors:true** to the task and capture the output into a variable and extract the job ID with + a regular expression and then use ``zos_job_output`` to display the DD without the non-printable character such as the DD **JESMSGLG**. Availability ------------ @@ -908,6 +988,8 @@ Known issues https://www.ibm.com/docs/en/zoau/1.2.x .. _Z Open Automation Utilities 1.2.4: https://www.ibm.com/docs/en/zoau/1.2.x +.. _Z Open Automation Utilities 1.2.5: + https://www.ibm.com/docs/en/zoau/1.2.x .. _z/OS® shell: https://www.ibm.com/support/knowledgecenter/en/SSLTBW_2.4.0/com.ibm.zos.v2r4.bpxa400/part1.htm .. _z/OS®: diff --git a/docs/templates/module.rst.j2 b/docs/templates/module.rst.j2 index 7f31b536d..ec1c9bc1a 100644 --- a/docs/templates/module.rst.j2 +++ b/docs/templates/module.rst.j2 @@ -62,7 +62,7 @@ Synopsis {% if spec.elements %} {{ " " * level }}| **elements**: {{ spec.elements }} {% endif %} -{% if spec.default %} +{% if 'default' in spec and spec.default is not none and spec.default != ''%} {{ " " * level }}| **default**: {{ spec.default }} {% endif %} {% if spec.choices %} diff --git a/galaxy.yml b/galaxy.yml index f7be530c7..93af5d038 100644 --- a/galaxy.yml +++ b/galaxy.yml @@ -6,7 +6,7 @@ namespace: ibm name: ibm_zos_core # The collection version -version: 1.8.0 +version: 1.9.0-beta.1 # Collection README file readme: README.md diff --git a/meta/ibm_zos_core_meta.yml b/meta/ibm_zos_core_meta.yml index e1ee28246..abab47f9c 100644 --- a/meta/ibm_zos_core_meta.yml +++ b/meta/ibm_zos_core_meta.yml @@ -1,5 +1,5 @@ name: ibm_zos_core -version: "1.8.0" +version: "1.9.0-beta.1" managed_requirements: - name: "IBM Open Enterprise SDK for Python" @@ -7,4 +7,4 @@ managed_requirements: - name: "Z Open Automation Utilities" version: - - "1.2.4" + - "1.2.5" diff --git a/plugins/action/zos_job_submit.py b/plugins/action/zos_job_submit.py index fa65f7318..4b0245b37 100644 --- a/plugins/action/zos_job_submit.py +++ b/plugins/action/zos_job_submit.py @@ -103,7 +103,7 @@ def run(self, tmp=None, task_vars=None): # if self._connection._shell.path_has_trailing_slash(dest): # dest_file = self._connection._shell.join_path(dest, source_rel) # else: - dest_file = self._connection._shell.join_path(dest_path) + self._connection._shell.join_path(dest_path) tmp_src = self._connection._shell.join_path(tmp, "source") diff --git a/plugins/modules/zos_operator.py b/plugins/modules/zos_operator.py index 6281c5cd6..012a46c0c 100644 --- a/plugins/modules/zos_operator.py +++ b/plugins/modules/zos_operator.py @@ -161,16 +161,15 @@ BetterArgParser, ) +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import ( + zoau_version_checker +) + try: from zoautil_py import opercmd except Exception: opercmd = ZOAUImportError(traceback.format_exc()) -try: - from zoautil_py import ZOAU_API_VERSION -except Exception: - ZOAU_API_VERSION = "1.2.0" - def execute_command(operator_cmd, timeout_s=1, *args, **kwargs): @@ -284,13 +283,8 @@ def run_operator_command(params): wait_s = params.get("wait_time_s") cmdtxt = params.get("cmd") - zv = ZOAU_API_VERSION.split(".") use_wait_arg = False - if zv[0] > "1": - use_wait_arg = True - elif zv[0] == "1" and zv[1] > "2": - use_wait_arg = True - elif zv[0] == "1" and zv[1] == "2" and zv[2] > "4": + if zoau_version_checker.is_zoau_version_higher_than("1.2.4"): use_wait_arg = True if use_wait_arg: diff --git a/plugins/modules/zos_operator_action_query.py b/plugins/modules/zos_operator_action_query.py index a035cad33..022708692 100644 --- a/plugins/modules/zos_operator_action_query.py +++ b/plugins/modules/zos_operator_action_query.py @@ -226,16 +226,15 @@ MissingZOAUImport, ) +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import ( + zoau_version_checker +) + try: from zoautil_py import opercmd except Exception: opercmd = MissingZOAUImport() -try: - from zoautil_py import ZOAU_API_VERSION -except Exception: - ZOAU_API_VERSION = "1.2.0" - def run_module(): module_args = dict( @@ -262,13 +261,8 @@ def run_module(): wait_s = 5 - zv = ZOAU_API_VERSION.split(".") use_wait_arg = False - if zv[0] > "1": - use_wait_arg = True - elif zv[0] == "1" and zv[1] > "2": - use_wait_arg = True - elif zv[0] == "1" and zv[1] == "2" and zv[2] > "4": + if zoau_version_checker.is_zoau_version_higher_than("1.2.4"): use_wait_arg = True if use_wait_arg: diff --git a/scripts/venv.sh b/scripts/venv.sh index 315e7a854..51426a055 100755 --- a/scripts/venv.sh +++ b/scripts/venv.sh @@ -297,7 +297,7 @@ find_in_path() { discover_python(){ # Don't use which, it only will find first in path within script # for python_found in `which python3 | cut -d" " -f3`; do - pys=("python3" "python3.8" "python3.9" "python3.10" "python3.11" "python3.12" "python3.13" "python3.14") + pys=("python3" "python3.8" "python3.9" "python3.10" "python3.11") # "python3.12" "python3.13" "python3.14") #pys=("python3.8" "python3.9") for py in "${pys[@]}"; do for python_found in `find_in_path $py`; do diff --git a/tests/config.yml b/tests/config.yml new file mode 100644 index 000000000..ce73e796d --- /dev/null +++ b/tests/config.yml @@ -0,0 +1,34 @@ +################################################################################ +# Copyright (c) IBM Corporation 2024 +################################################################################ + +# ############################################################################# +# Description +# Support for this feature was first added in ansible-core 2.12 so that +# ansible-test configured with desirable changes. This is an optional +# configuration, but when used, must be placed in "tests/config.yml" +# relative to the base of the collection. This configuration only +# applies to modules and module_utils. +# +# See additional example - +# https://github.com/ansible/ansible/blob/devel/test/lib/ansible_test/config/config.yml +# +# Options +# modules - required +# python_requires - required +# - 'default' - All Python versions supported by Ansible. +# This is the default value if no configuration is provided. +# - 'controller' - All Python versions supported by the Ansible controller. +# This indicates the modules/module_utils can only run on the controller. +# Intended for use only with modules/module_utils that depend on +# ansible-connection, which only runs on the controller. +# Unit tests for modules/module_utils will be permitted to import any +# Ansible code, instead of only module_utils. +# - SpecifierSet - A PEP 440 specifier set indicating the supported Python versions. +# This is only needed when modules/module_utils do not support all +# Python versions supported by Ansible. It is not necessary to exclude +# versions which Ansible does not support, as this will be done automatically. +# ############################################################################# + +modules: + python_requires: '>=3.9' diff --git a/tests/functional/modules/test_zos_tso_command_func.py b/tests/functional/modules/test_zos_tso_command_func.py index 9860e6d12..6f76ceb3f 100644 --- a/tests/functional/modules/test_zos_tso_command_func.py +++ b/tests/functional/modules/test_zos_tso_command_func.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019, 2020, 2023 +# Copyright (c) IBM Corporation 2019, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at From 34b21237567a5014c97aa927f6093a8af335b90d Mon Sep 17 00:00:00 2001 From: Ivan Moreno <iamorenosoto@gmail.com> Date: Fri, 9 Feb 2024 09:44:27 -0700 Subject: [PATCH 297/495] [v1.10.0] [zos_job_submit] Migrate zos_job_submit to ZOAU v1.3.0 (#1209) * Change jobs calls * Update exception handling * Remove deprecated wait option * Remove unneeded TODO * Update fetch calls to include the program name * Remove compatibility with previous versions of ZOAU * Disable tests that depend on zos_copy * Remove `wait` option from tests * Add note to test * Fix sanity issues * Add changelog fragment --- .../1209-zoau-migration-zos_job_submit.yml | 3 + plugins/module_utils/job.py | 51 +- plugins/modules/zos_job_submit.py | 105 ++--- .../modules/test_zos_job_submit_func.py | 439 ++++++++++-------- 4 files changed, 313 insertions(+), 285 deletions(-) create mode 100644 changelogs/fragments/1209-zoau-migration-zos_job_submit.yml diff --git a/changelogs/fragments/1209-zoau-migration-zos_job_submit.yml b/changelogs/fragments/1209-zoau-migration-zos_job_submit.yml new file mode 100644 index 000000000..6f58e2713 --- /dev/null +++ b/changelogs/fragments/1209-zoau-migration-zos_job_submit.yml @@ -0,0 +1,3 @@ +trivial: + - zos_job_submit - Migrated the module to use ZOAU v1.3.0. + (https://github.com/ansible-collections/ibm_zos_core/pull/1209). \ No newline at end of file diff --git a/plugins/module_utils/job.py b/plugins/module_utils/job.py index 3d7d80d68..a854a0a1e 100644 --- a/plugins/module_utils/job.py +++ b/plugins/module_utils/job.py @@ -22,7 +22,6 @@ BetterArgParser, ) from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import ( - # MissingZOAUImport, ZOAUImportError ) @@ -35,15 +34,8 @@ # from zoautil_py.jobs import read_output, list_dds, listing from zoautil_py import jobs except Exception: - # read_output = MissingZOAUImport() - # list_dds = MissingZOAUImport() - # listing = MissingZOAUImport() jobs = ZOAUImportError(traceback.format_exc()) -from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import ( - zoau_version_checker -) - def job_output(job_id=None, owner=None, job_name=None, dd_name=None, dd_scan=True, duration=0, timeout=0, start_time=timer()): """Get the output from a z/OS job based on various search criteria. @@ -248,19 +240,18 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=T # jls output has expanded in zoau 1.2.3 and later: jls -l -v shows headers # jobclass=job[5] serviceclass=job[6] priority=job[7] asid=job[8] # creationdatetime=job[9] queueposition=job[10] - # starting in zoau 1.2.4, program_name[11] was added. - + # starting in zoau 1.2.4, program_name[11] was added. In 1.3.0, include_extended + # has to be set to true so we get the program name for a job. # Testing has shown that the program_name impact is minor, so we're removing that option - # This will also help maintain compatibility with 1.2.3 final_entries = [] - entries = jobs.fetch_multiple(job_id=job_id_temp) + entries = jobs.fetch_multiple(job_id=job_id_temp, include_extended=True) while ((entries is None or len(entries) == 0) and duration <= timeout): current_time = timer() duration = round(current_time - start_time) sleep(1) - entries = jobs.fetch_multiple(job_id=job_id_temp) + entries = jobs.fetch_multiple(job_id=job_id_temp, include_extended=True) if entries: for entry in entries: @@ -281,7 +272,7 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=T job["system"] = "" job["owner"] = entry.owner - job["ret_code"] = {} + job["ret_code"] = dict() job["ret_code"]["msg"] = "{0} {1}".format(entry.status, entry.return_code) job["ret_code"]["msg_code"] = entry.return_code job["ret_code"]["code"] = None @@ -290,26 +281,16 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=T job["ret_code"]["code"] = int(entry.return_code) job["ret_code"]["msg_text"] = entry.status - # this section only works on zoau 1.2.3/+ vvv - # Beginning in ZOAU v1.3.0, the Job class changes svc_class to # service_class. - if zoau_version_checker.is_zoau_version_higher_than("1.2.5"): - job["service_class"] = entry.service_class - elif zoau_version_checker.is_zoau_version_higher_than("1.2.2"): - job["svc_class"] = entry.svc_class - if zoau_version_checker.is_zoau_version_higher_than("1.2.2"): - job["job_class"] = entry.job_class - job["priority"] = entry.priority - job["asid"] = entry.asid - job["creation_date"] = str(entry.creation_datetime)[0:10] - job["creation_time"] = str(entry.creation_datetime)[12:] - job["queue_position"] = entry.queue_position - if zoau_version_checker.is_zoau_version_higher_than("1.2.3"): - job["program_name"] = entry.program_name - - # this section only works on zoau 1.2.3/+ ^^^ - + job["svc_class"] = entry.service_class + job["job_class"] = entry.job_class + job["priority"] = entry.priority + job["asid"] = entry.asid + job["creation_date"] = str(entry.creation_datetime)[0:10] + job["creation_time"] = str(entry.creation_datetime)[12:] + job["queue_position"] = entry.queue_position + job["program_name"] = entry.program_name job["class"] = "" job["content_type"] = "" job["ret_code"]["steps"] = [] @@ -329,6 +310,9 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=T for single_dd in list_of_dds: dd = {} + if "dataset" not in single_dd: + continue + # If dd_name not None, only that specific dd_name should be returned if dd_name is not None: if dd_name not in single_dd["dataset"]: @@ -336,9 +320,6 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=T else: dd["ddname"] = single_dd["dataset"] - if "dataset" not in single_dd: - continue - if "recnum" in single_dd: dd["record_count"] = single_dd["recnum"] else: diff --git a/plugins/modules/zos_job_submit.py b/plugins/modules/zos_job_submit.py index aa0026069..1fd5030b5 100644 --- a/plugins/modules/zos_job_submit.py +++ b/plugins/modules/zos_job_submit.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019 - 2023 +# Copyright (c) IBM Corporation 2019 - 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -54,17 +54,6 @@ - DATA_SET can be a PDS, PDSE, or sequential data set. - USS means the JCL location is located in UNIX System Services (USS). - LOCAL means locally to the ansible control node. - wait: - required: false - default: false - type: bool - description: - - Setting this option will yield no change, it is deprecated. There is no - no need to set I(wait); setting I(wait_times_s) is the correct way to - configure the amount of tme to wait for a job to execute. - - Configuring wait used by the L(zos_job_submit,./zos_job_submit.html) module has been - deprecated and will be removed in ibm.ibm_zos_core collection. - - See option I(wait_time_s). wait_time_s: required: false default: 10 @@ -620,6 +609,7 @@ DataSet, ) from ansible.module_utils.basic import AnsibleModule +from ansible.module_utils._text import to_text from timeit import default_timer as timer from os import remove import traceback @@ -627,9 +617,9 @@ import re try: - from zoautil_py import exceptions + from zoautil_py import exceptions as zoau_exceptions except ImportError: - exceptions = ZOAUImportError(traceback.format_exc()) + zoau_exceptions = ZOAUImportError(traceback.format_exc()) try: from zoautil_py import jobs @@ -642,7 +632,7 @@ MAX_WAIT_TIME_S = 86400 -def submit_src_jcl(module, src, src_name=None, timeout=0, hfs=True, volume=None, start_time=timer()): +def submit_src_jcl(module, src, src_name=None, timeout=0, is_unix=True, volume=None, start_time=timer()): """ Submit src JCL whether JCL is local (Ansible Controller), USS or in a data set. Arguments: @@ -652,7 +642,7 @@ def submit_src_jcl(module, src, src_name=None, timeout=0, hfs=True, volume=None, src_name (str) - the src name that was provided in the module because through the runtime src could be replace with a temporary file name timeout (int) - how long to wait in seconds for a job to complete - hfs (boolean) - True if JCL is a file in USS, otherwise False; Note that all + is_unix (bool) - True if JCL is a file in USS, otherwise False; Note that all JCL local to a controller is transfered to USS thus would be True volume (str) - volume the data set JCL is located on that will be cataloged before @@ -666,11 +656,11 @@ def submit_src_jcl(module, src, src_name=None, timeout=0, hfs=True, volume=None, """ kwargs = { - "timeout": timeout, - "hfs": hfs, + # Since every fetch retry waits for a second before continuing, + # we can just pass the timeout (also in seconds) to this arg. + "fetch_max_retries": timeout, } - wait = True # Wait is always true because the module requires wait_time_s > 0 present = False duration = 0 job_submitted = None @@ -691,9 +681,9 @@ def submit_src_jcl(module, src, src_name=None, timeout=0, hfs=True, volume=None, "not be cataloged on the volume {1}.".format(src, volume)) module.fail_json(**result) - job_submitted = jobs.submit(src, wait, None, **kwargs) + job_submitted = jobs.submit(src, is_unix=is_unix, **kwargs) - # Introducing a sleep to ensure we have the result of job sumbit carrying the job id + # Introducing a sleep to ensure we have the result of job submit carrying the job id. while (job_submitted is None and duration <= timeout): current_time = timer() duration = round(current_time - start_time) @@ -704,68 +694,69 @@ def submit_src_jcl(module, src, src_name=None, timeout=0, hfs=True, volume=None, # that is sent back as `AC` when the job is not complete but the problem # with monitoring 'AC' is that STARTED tasks never exit the AC status. if job_submitted: - job_listing_rc = jobs.listing(job_submitted.id)[0].rc - job_listing_status = jobs.listing(job_submitted.id)[0].status + job_fetch_rc = jobs.fetch_multiple(job_submitted.job_id)[0].return_code + job_fetch_status = jobs.fetch_multiple(job_submitted.job_id)[0].status # Before moving forward lets ensure our job has completed but if we see # status that matches one in JOB_ERROR_MESSAGES, don't wait, let the code - # drop through and get analyzed in the main as it will scan the job ouput - # Any match to JOB_ERROR_MESSAGES ends our processing and wait times - while (job_listing_status not in JOB_ERROR_MESSAGES and - job_listing_status == 'AC' and - ((job_listing_rc is None or len(job_listing_rc) == 0 or - job_listing_rc == '?') and duration < timeout)): + # drop through and get analyzed in the main as it will scan the job ouput. + # Any match to JOB_ERROR_MESSAGES ends our processing and wait times. + while (job_fetch_status not in JOB_ERROR_MESSAGES and + job_fetch_status == 'AC' and + ((job_fetch_rc is None or len(job_fetch_rc) == 0 or + job_fetch_rc == '?') and duration < timeout)): current_time = timer() duration = round(current_time - start_time) sleep(1) - job_listing_rc = jobs.listing(job_submitted.id)[0].rc - job_listing_status = jobs.listing(job_submitted.id)[0].status + job_fetch_rc = jobs.fetch_multiple(job_submitted.job_id)[0].return_code + job_fetch_status = jobs.fetch_multiple(job_submitted.job_id)[0].status - # ZOAU throws a ZOAUException when the job sumbission fails thus there is no + # ZOAU throws a JobSubmitException when the job sumbission fails thus there is no # JCL RC to share with the user, if there is a RC, that will be processed # in the job_output parser. - except exceptions.ZOAUException as err: + except zoau_exceptions.JobSubmitException as err: result["changed"] = False result["failed"] = True - result["stderr"] = str(err) + result["stderr"] = to_text(err) result["duration"] = duration - result["job_id"] = job_submitted.id if job_submitted else None + result["job_id"] = job_submitted.job_id if job_submitted else None result["msg"] = ("Unable to submit job {0}, the job submission has failed. " "Without the job id, the error can not be determined. " "Consider using module `zos_job_query` to poll for the " "job by name or review the system log for purged jobs " - "resulting from an abend.".format(src_name)) + "resulting from an abend. Standard error may have " + "additional information.".format(src_name)) module.fail_json(**result) - # ZOAU throws a JobSubmitException when timeout has execeeded in that no job_id - # has been returned within the allocated time. - except exceptions.JobSubmitException as err: + # ZOAU throws a JobFetchException when it is unable to fetch a job. + # This could happen while trying to fetch a job still running. + except zoau_exceptions.JobFetchException as err: result["changed"] = False result["failed"] = False - result["stderr"] = str(err) + result["stderr"] = to_text(err) result["duration"] = duration - result["job_id"] = job_submitted.id if job_submitted else None - result["msg"] = ("The JCL has been submitted {0} and no job id was returned " - "within the allocated time of {1} seconds. Consider using " - " module zos_job_query to poll for a long running " - "jobs or increasing the value for " - "`wait_times_s`.".format(src_name, str(timeout))) + result["job_id"] = job_submitted.job_id + result["msg"] = ("The JCL has been submitted {0} with ID {1} but there was an " + "error while fetching its status within the allocated time of {2} " + "seconds. Consider using module zos_job_query to poll for the " + "job for more information. Standard error may have additional " + "information.".format(src_name, job_submitted.job_id, str(timeout))) module.fail_json(**result) - # Between getting a job_submitted and the jobs.listing(job_submitted.id)[0].rc + # Between getting a job_submitted and the jobs.fetch_multiple(job_submitted.job_id)[0].return_code # is enough time for the system to purge an invalid job, so catch it and let # it fall through to the catchall. except IndexError: job_submitted = None # There appears to be a small fraction of time when ZOAU has a handle on the - # job and and suddenly its purged, this check is to ensure the job is there + # job and suddenly its purged, this check is to ensure the job is there # long after the purge else we throw an error here if its been purged. if job_submitted is None: result["changed"] = False result["failed"] = True result["duration"] = duration - result["job_id"] = job_submitted.id if job_submitted else None + result["job_id"] = job_submitted.job_id if job_submitted else None result["msg"] = ("The job {0} has been submitted and no job id was returned " "within the allocated time of {1} seconds. Without the " "job id, the error can not be determined, consider using " @@ -774,15 +765,12 @@ def submit_src_jcl(module, src, src_name=None, timeout=0, hfs=True, volume=None, "abend.".format(src_name, str(timeout))) module.fail_json(**result) - return job_submitted.id if job_submitted else None, duration + return job_submitted.job_id if job_submitted else None, duration def run_module(): module_args = dict( src=dict(type="str", required=True), - wait=dict(type="bool", required=False, default=False, - removed_at_date='2022-11-30', - removed_from_collection='ibm.ibm_zos_core'), location=dict( type="str", default="DATA_SET", @@ -853,8 +841,6 @@ def run_module(): arg_defs = dict( src=dict(arg_type="data_set_or_path", required=True), - wait=dict(arg_type="bool", required=False, removed_at_date='2022-11-30', - removed_from_collection='ibm.ibm_zos_core'), location=dict( arg_type="str", default="DATA_SET", @@ -885,7 +871,6 @@ def run_module(): # Extract values from set module options location = parsed_args.get("location") volume = parsed_args.get("volume") - parsed_args.get("wait") src = parsed_args.get("src") return_output = parsed_args.get("return_output") wait_time_s = parsed_args.get("wait_time_s") @@ -906,13 +891,13 @@ def run_module(): start_time = timer() if location == "DATA_SET": job_submitted_id, duration = submit_src_jcl( - module, src, src_name=src, timeout=wait_time_s, hfs=False, volume=volume, start_time=start_time) + module, src, src_name=src, timeout=wait_time_s, is_unix=False, volume=volume, start_time=start_time) elif location == "USS": job_submitted_id, duration = submit_src_jcl( - module, src, src_name=src, timeout=wait_time_s, hfs=True) + module, src, src_name=src, timeout=wait_time_s, is_unix=True) elif location == "LOCAL": job_submitted_id, duration = submit_src_jcl( - module, src, src_name=src, timeout=wait_time_s, hfs=True) + module, src, src_name=src, timeout=wait_time_s, is_unix=True) try: # Explictly pass None for the unused args else a default of '*' will be @@ -997,7 +982,7 @@ def run_module(): result["msg"] = ("The JCL submitted with job id {0} but " "there was an error, please review " "the error for further details: {1}".format - (str(job_submitted_id), str(err))) + (str(job_submitted_id), to_text(err))) module.exit_json(**result) finally: diff --git a/tests/functional/modules/test_zos_job_submit_func.py b/tests/functional/modules/test_zos_job_submit_func.py index 0fe6a59b9..cdd7175d2 100644 --- a/tests/functional/modules/test_zos_job_submit_func.py +++ b/tests/functional/modules/test_zos_job_submit_func.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019, 2020, 2023 +# Copyright (c) IBM Corporation 2019 - 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -277,7 +277,7 @@ def test_job_submit_PDS(ansible_zos_module): cmd="cp {0}/SAMPLE \"//'{1}(SAMPLE)'\"".format(TEMP_PATH, data_set_name) ) results = hosts.all.zos_job_submit( - src="{0}(SAMPLE)".format(data_set_name), location="DATA_SET", wait=True + src="{0}(SAMPLE)".format(data_set_name), location="DATA_SET" ) for result in results.contacted.values(): assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" @@ -306,7 +306,6 @@ def test_job_submit_PDS_special_characters(ansible_zos_module): results = hosts.all.zos_job_submit( src="{0}(SAMPLE)".format(DATA_SET_NAME_SPECIAL_CHARS), location="DATA_SET", - wait=True, ) for result in results.contacted.values(): assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" @@ -325,7 +324,7 @@ def test_job_submit_USS(ansible_zos_module): cmd="echo {0} > {1}/SAMPLE".format(quote(JCL_FILE_CONTENTS), TEMP_PATH) ) results = hosts.all.zos_job_submit( - src="{0}/SAMPLE".format(TEMP_PATH), location="USS", wait=True, volume=None + src="{0}/SAMPLE".format(TEMP_PATH), location="USS", volume=None ) for result in results.contacted.values(): assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" @@ -334,44 +333,64 @@ def test_job_submit_USS(ansible_zos_module): finally: hosts.all.file(path=TEMP_PATH, state="absent") +""" +keyword: ENABLE-FOR-1-3 +Test commented because it depends on zos_copy, which has not yet been +migrated to ZOAU v1.3.0. Whoever works in issue +https://github.com/ansible-collections/ibm_zos_core/issues/1106 +should uncomment this test as part of the validation process. +""" +# def test_job_submit_LOCAL(ansible_zos_module): +# tmp_file = tempfile.NamedTemporaryFile(delete=True) +# with open(tmp_file.name, "w") as f: +# f.write(JCL_FILE_CONTENTS) +# hosts = ansible_zos_module +# results = hosts.all.zos_job_submit(src=tmp_file.name, location="LOCAL", wait=True) -def test_job_submit_LOCAL(ansible_zos_module): - tmp_file = tempfile.NamedTemporaryFile(delete=True) - with open(tmp_file.name, "w") as f: - f.write(JCL_FILE_CONTENTS) - hosts = ansible_zos_module - results = hosts.all.zos_job_submit(src=tmp_file.name, location="LOCAL", wait=True) - - for result in results.contacted.values(): - assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" - assert result.get("jobs")[0].get("ret_code").get("code") == 0 - assert result.get("changed") is True +# for result in results.contacted.values(): +# assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" +# assert result.get("jobs")[0].get("ret_code").get("code") == 0 +# assert result.get("changed") is True -def test_job_submit_LOCAL_extraR(ansible_zos_module): - tmp_file = tempfile.NamedTemporaryFile(delete=True) - with open(tmp_file.name, "w") as f: - f.write(JCL_FILE_CONTENTS_BACKSLASH_R) - hosts = ansible_zos_module - results = hosts.all.zos_job_submit(src=tmp_file.name, location="LOCAL", wait=True) +""" +keyword: ENABLE-FOR-1-3 +Test commented because it depends on zos_copy, which has not yet been +migrated to ZOAU v1.3.0. Whoever works in issue +https://github.com/ansible-collections/ibm_zos_core/issues/1106 +should uncomment this test as part of the validation process. +""" +# def test_job_submit_LOCAL_extraR(ansible_zos_module): +# tmp_file = tempfile.NamedTemporaryFile(delete=True) +# with open(tmp_file.name, "w") as f: +# f.write(JCL_FILE_CONTENTS_BACKSLASH_R) +# hosts = ansible_zos_module +# results = hosts.all.zos_job_submit(src=tmp_file.name, location="LOCAL", wait=True) - for result in results.contacted.values(): - assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" - assert result.get("jobs")[0].get("ret_code").get("code") == 0 - assert result.get("changed") is True +# for result in results.contacted.values(): +# assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" +# assert result.get("jobs")[0].get("ret_code").get("code") == 0 +# assert result.get("changed") is True -def test_job_submit_LOCAL_BADJCL(ansible_zos_module): - tmp_file = tempfile.NamedTemporaryFile(delete=True) - with open(tmp_file.name, "w") as f: - f.write(JCL_FILE_CONTENTS_BAD) - hosts = ansible_zos_module - results = hosts.all.zos_job_submit(src=tmp_file.name, location="LOCAL", wait=True) +""" +keyword: ENABLE-FOR-1-3 +Test commented because it depends on zos_copy, which has not yet been +migrated to ZOAU v1.3.0. Whoever works in issue +https://github.com/ansible-collections/ibm_zos_core/issues/1106 +should uncomment this test as part of the validation process. +""" +# def test_job_submit_LOCAL_BADJCL(ansible_zos_module): +# tmp_file = tempfile.NamedTemporaryFile(delete=True) +# with open(tmp_file.name, "w") as f: +# f.write(JCL_FILE_CONTENTS_BAD) +# hosts = ansible_zos_module +# results = hosts.all.zos_job_submit(src=tmp_file.name, location="LOCAL", wait=True) - for result in results.contacted.values(): - # Expecting: The job completion code (CC) was not in the job log....." - assert result.get("changed") is False - assert re.search(r'completion code', repr(result.get("msg"))) +# for result in results.contacted.values(): +# # Expecting: The job completion code (CC) was not in the job log....." +# assert result.get("changed") is False +# assert re.search(r'completion code', repr(result.get("msg"))) def test_job_submit_PDS_volume(ansible_zos_module, volumes_on_systems): @@ -508,123 +527,137 @@ def test_job_submit_PDS_30_SEC_JOB_WAIT_10_negative(ansible_zos_module): hosts.all.zos_data_set(name=data_set_name, state="absent") -@pytest.mark.parametrize("args", [ - dict(max_rc=None, wait_time_s=10), - dict(max_rc=4, wait_time_s=10), - dict(max_rc=12, wait_time_s=20) -]) -def test_job_submit_max_rc(ansible_zos_module, args): - """This""" - try: - hosts = ansible_zos_module - tmp_file = tempfile.NamedTemporaryFile(delete=True) - with open(tmp_file.name, "w") as f: - f.write(JCL_FILE_CONTENTS_RC_8) - - results = hosts.all.zos_job_submit( - src=tmp_file.name, location="LOCAL", max_rc=args["max_rc"], wait_time_s=args["wait_time_s"] - ) - - for result in results.contacted.values(): - # Should fail normally as a non-zero RC will result in job submit failure - if args["max_rc"] is None: - assert result.get("msg") is not None - assert result.get('changed') is False - # On busy systems, it is possible that the duration even for a job with a non-zero return code - # will take considerable time to obtain the job log and thus you could see either error msg below - #Expecting: - "The job return code 8 was non-zero in the job output, this job has failed" - # - Consider using module zos_job_query to poll for a long running job or - # increase option \\'wait_times_s` to a value greater than 10.", - if result.get('duration'): - duration = result.get('duration') - else: - duration = 0 - - if duration >= args["wait_time_s"]: - re.search(r'long running job', repr(result.get("msg"))) - else: - assert re.search(r'non-zero', repr(result.get("msg"))) - - # Should fail with normally as well, job fails with an RC 8 yet max is set to 4 - elif args["max_rc"] == 4: - assert result.get("msg") is not None - assert result.get('changed') is False - # Expecting "The job return code, 'ret_code[code]' 8 for the submitted job is greater - # than the value set for option 'max_rc' 4. Increase the value for 'max_rc' otherwise - # this job submission has failed. - assert re.search(r'the submitted job is greater than the value set for option', repr(result.get("msg"))) - - elif args["max_rc"] == 12: - # Will not fail but changed will be false for the non-zero RC, there - # are other possibilities like an ABEND or JCL ERROR will fail this even - # with a MAX RC - assert result.get("msg") is None - assert result.get('changed') is False - assert result.get("jobs")[0].get("ret_code").get("code") < 12 - finally: - hosts.all.file(path=tmp_file.name, state="absent") - - -@pytest.mark.template -@pytest.mark.parametrize("args", [ - dict( - template="Default", - options=dict( - keep_trailing_newline=False - ) - ), - dict( - template="Custom", - options=dict( - keep_trailing_newline=False, - variable_start_string="((", - variable_end_string="))", - comment_start_string="(#", - comment_end_string="#)" - ) - ), - dict( - template="Loop", - options=dict( - keep_trailing_newline=False - ) - ) -]) -def test_job_submit_jinja_template(ansible_zos_module, args): - try: - hosts = ansible_zos_module - - tmp_file = tempfile.NamedTemporaryFile(delete=False) - with open(tmp_file.name, "w") as f: - f.write(JCL_TEMPLATES[args["template"]]) - - template_vars = dict( - pgm_name="HELLO", - input_dataset="DUMMY", - message="Hello, world", - steps=[ - dict(step_name="IN", dd="DUMMY"), - dict(step_name="PRINT", dd="SYSOUT=*"), - dict(step_name="UT1", dd="*") - ] - ) - for host in hosts["options"]["inventory_manager"]._inventory.hosts.values(): - host.vars.update(template_vars) - - results = hosts.all.zos_job_submit( - src=tmp_file.name, - location="LOCAL", - use_template=True, - template_parameters=args["options"] - ) +""" +keyword: ENABLE-FOR-1-3 +Test commented because it depends on zos_copy, which has not yet been +migrated to ZOAU v1.3.0. Whoever works in issue +https://github.com/ansible-collections/ibm_zos_core/issues/1106 +should uncomment this test as part of the validation process. +""" +# @pytest.mark.parametrize("args", [ +# dict(max_rc=None, wait_time_s=10), +# dict(max_rc=4, wait_time_s=10), +# dict(max_rc=12, wait_time_s=20) +# ]) +# def test_job_submit_max_rc(ansible_zos_module, args): +# """This""" +# try: +# hosts = ansible_zos_module +# tmp_file = tempfile.NamedTemporaryFile(delete=True) +# with open(tmp_file.name, "w") as f: +# f.write(JCL_FILE_CONTENTS_RC_8) + +# results = hosts.all.zos_job_submit( +# src=tmp_file.name, location="LOCAL", max_rc=args["max_rc"], wait_time_s=args["wait_time_s"] +# ) + +# for result in results.contacted.values(): +# # Should fail normally as a non-zero RC will result in job submit failure +# if args["max_rc"] is None: +# assert result.get("msg") is not None +# assert result.get('changed') is False +# # On busy systems, it is possible that the duration even for a job with a non-zero return code +# # will take considerable time to obtain the job log and thus you could see either error msg below +# #Expecting: - "The job return code 8 was non-zero in the job output, this job has failed" +# # - Consider using module zos_job_query to poll for a long running job or +# # increase option \\'wait_times_s` to a value greater than 10.", +# if result.get('duration'): +# duration = result.get('duration') +# else: +# duration = 0 + +# if duration >= args["wait_time_s"]: +# re.search(r'long running job', repr(result.get("msg"))) +# else: +# assert re.search(r'non-zero', repr(result.get("msg"))) + +# # Should fail with normally as well, job fails with an RC 8 yet max is set to 4 +# elif args["max_rc"] == 4: +# assert result.get("msg") is not None +# assert result.get('changed') is False +# # Expecting "The job return code, 'ret_code[code]' 8 for the submitted job is greater +# # than the value set for option 'max_rc' 4. Increase the value for 'max_rc' otherwise +# # this job submission has failed. +# assert re.search(r'the submitted job is greater than the value set for option', repr(result.get("msg"))) + +# elif args["max_rc"] == 12: +# # Will not fail but changed will be false for the non-zero RC, there +# # are other possibilities like an ABEND or JCL ERROR will fail this even +# # with a MAX RC +# assert result.get("msg") is None +# assert result.get('changed') is False +# assert result.get("jobs")[0].get("ret_code").get("code") < 12 +# finally: +# hosts.all.file(path=tmp_file.name, state="absent") - for result in results.contacted.values(): - assert result.get('changed') is True - assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" - assert result.get("jobs")[0].get("ret_code").get("code") == 0 - finally: - os.remove(tmp_file.name) +""" +keyword: ENABLE-FOR-1-3 +Test commented because it depends on zos_copy, which has not yet been +migrated to ZOAU v1.3.0. Whoever works in issue +https://github.com/ansible-collections/ibm_zos_core/issues/1106 +should uncomment this test as part of the validation process. +""" +# @pytest.mark.template +# @pytest.mark.parametrize("args", [ +# dict( +# template="Default", +# options=dict( +# keep_trailing_newline=False +# ) +# ), +# dict( +# template="Custom", +# options=dict( +# keep_trailing_newline=False, +# variable_start_string="((", +# variable_end_string="))", +# comment_start_string="(#", +# comment_end_string="#)" +# ) +# ), +# dict( +# template="Loop", +# options=dict( +# keep_trailing_newline=False +# ) +# ) +# ]) +# def test_job_submit_jinja_template(ansible_zos_module, args): +# try: +# hosts = ansible_zos_module + +# tmp_file = tempfile.NamedTemporaryFile(delete=False) +# with open(tmp_file.name, "w") as f: +# f.write(JCL_TEMPLATES[args["template"]]) + +# template_vars = dict( +# pgm_name="HELLO", +# input_dataset="DUMMY", +# message="Hello, world", +# steps=[ +# dict(step_name="IN", dd="DUMMY"), +# dict(step_name="PRINT", dd="SYSOUT=*"), +# dict(step_name="UT1", dd="*") +# ] +# ) +# for host in hosts["options"]["inventory_manager"]._inventory.hosts.values(): +# host.vars.update(template_vars) + +# results = hosts.all.zos_job_submit( +# src=tmp_file.name, +# location="LOCAL", +# use_template=True, +# template_parameters=args["options"] +# ) + +# for result in results.contacted.values(): +# assert result.get('changed') is True +# assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" +# assert result.get("jobs")[0].get("ret_code").get("code") == 0 + +# finally: +# os.remove(tmp_file.name) def test_job_submit_full_input(ansible_zos_module): @@ -635,7 +668,12 @@ def test_job_submit_full_input(ansible_zos_module): cmd="echo {0} > {1}/SAMPLE".format(quote(JCL_FULL_INPUT), TEMP_PATH) ) results = hosts.all.zos_job_submit( - src="{0}/SAMPLE".format(TEMP_PATH), location="USS", wait=True, volume=None + src="{0}/SAMPLE".format(TEMP_PATH), + location="USS", + volume=None, + # This job used to set wait=True, but since it has been deprecated + # and removed, it now waits up to 30 seconds. + wait_time_s=30 ) for result in results.contacted.values(): print(result) @@ -645,45 +683,66 @@ def test_job_submit_full_input(ansible_zos_module): finally: hosts.all.file(path=TEMP_PATH, state="absent") -def test_negative_job_submit_local_jcl_no_dsn(ansible_zos_module): - tmp_file = tempfile.NamedTemporaryFile(delete=True) - with open(tmp_file.name, "w") as f: - f.write(JCL_FILE_CONTENTS_NO_DSN) - hosts = ansible_zos_module - results = hosts.all.zos_job_submit(src=tmp_file.name, location="LOCAL") - for result in results.contacted.values(): - # Expecting: The job completion code (CC) was not in the job log....." - assert result.get("changed") is False - assert re.search(r'completion code', repr(result.get("msg"))) - assert result.get("jobs")[0].get("job_id") is not None +""" +keyword: ENABLE-FOR-1-3 +Test commented because it depends on zos_copy, which has not yet been +migrated to ZOAU v1.3.0. Whoever works in issue +https://github.com/ansible-collections/ibm_zos_core/issues/1106 +should uncomment this test as part of the validation process. +""" +# def test_negative_job_submit_local_jcl_no_dsn(ansible_zos_module): +# tmp_file = tempfile.NamedTemporaryFile(delete=True) +# with open(tmp_file.name, "w") as f: +# f.write(JCL_FILE_CONTENTS_NO_DSN) +# hosts = ansible_zos_module +# results = hosts.all.zos_job_submit(src=tmp_file.name, location="LOCAL") +# for result in results.contacted.values(): +# # Expecting: The job completion code (CC) was not in the job log....." +# assert result.get("changed") is False +# assert re.search(r'completion code', repr(result.get("msg"))) +# assert result.get("jobs")[0].get("job_id") is not None +""" +keyword: ENABLE-FOR-1-3 +Test commented because it depends on zos_copy, which has not yet been +migrated to ZOAU v1.3.0. Whoever works in issue +https://github.com/ansible-collections/ibm_zos_core/issues/1106 +should uncomment this test as part of the validation process. +""" # Should have a JCL ERROR <int> -def test_negative_job_submit_local_jcl_invalid_user(ansible_zos_module): - tmp_file = tempfile.NamedTemporaryFile(delete=True) - with open(tmp_file.name, "w") as f: - f.write(JCL_FILE_CONTENTS_INVALID_USER) - hosts = ansible_zos_module - results = hosts.all.zos_job_submit(src=tmp_file.name, location="LOCAL") - for result in results.contacted.values(): - # Expecting: The job completion code (CC) was not in the job log....." - assert result.get("changed") is False - assert re.search(r'return code was not available', repr(result.get("msg"))) - assert re.search(r'error SEC', repr(result.get("msg"))) - assert result.get("jobs")[0].get("job_id") is not None - assert re.search(r'SEC', repr(result.get("jobs")[0].get("ret_code").get("msg_text"))) - - -def test_negative_job_submit_local_jcl_typrun_scan(ansible_zos_module): - tmp_file = tempfile.NamedTemporaryFile(delete=True) - with open(tmp_file.name, "w") as f: - f.write(JCL_FILE_CONTENTS_TYPRUN_SCAN) - hosts = ansible_zos_module - results = hosts.all.zos_job_submit(src=tmp_file.name, location="LOCAL") - for result in results.contacted.values(): - # Expecting: The job completion code (CC) was not in the job log....." - assert result.get("changed") is False - assert re.search(r'return code was not available', repr(result.get("msg"))) - assert re.search(r'error ? ?', repr(result.get("msg"))) - assert result.get("jobs")[0].get("job_id") is not None - assert result.get("jobs")[0].get("ret_code").get("msg_text") == "?" +# def test_negative_job_submit_local_jcl_invalid_user(ansible_zos_module): +# tmp_file = tempfile.NamedTemporaryFile(delete=True) +# with open(tmp_file.name, "w") as f: +# f.write(JCL_FILE_CONTENTS_INVALID_USER) +# hosts = ansible_zos_module +# results = hosts.all.zos_job_submit(src=tmp_file.name, location="LOCAL") +# for result in results.contacted.values(): +# # Expecting: The job completion code (CC) was not in the job log....." +# assert result.get("changed") is False +# assert re.search(r'return code was not available', repr(result.get("msg"))) +# assert re.search(r'error SEC', repr(result.get("msg"))) +# assert result.get("jobs")[0].get("job_id") is not None +# assert re.search(r'SEC', repr(result.get("jobs")[0].get("ret_code").get("msg_text"))) + + +""" +keyword: ENABLE-FOR-1-3 +Test commented because it depends on zos_copy, which has not yet been +migrated to ZOAU v1.3.0. Whoever works in issue +https://github.com/ansible-collections/ibm_zos_core/issues/1106 +should uncomment this test as part of the validation process. +""" +# def test_negative_job_submit_local_jcl_typrun_scan(ansible_zos_module): +# tmp_file = tempfile.NamedTemporaryFile(delete=True) +# with open(tmp_file.name, "w") as f: +# f.write(JCL_FILE_CONTENTS_TYPRUN_SCAN) +# hosts = ansible_zos_module +# results = hosts.all.zos_job_submit(src=tmp_file.name, location="LOCAL") +# for result in results.contacted.values(): +# # Expecting: The job completion code (CC) was not in the job log....." +# assert result.get("changed") is False +# assert re.search(r'return code was not available', repr(result.get("msg"))) +# assert re.search(r'error ? ?', repr(result.get("msg"))) +# assert result.get("jobs")[0].get("job_id") is not None +# assert result.get("jobs")[0].get("ret_code").get("msg_text") == "?" From 6bc8161e1c9309dfbddd84728649ec93252cfa1b Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Mon, 12 Feb 2024 17:06:11 -0600 Subject: [PATCH 298/495] [v1.10.0][zos_job_query] Removed wait argument from test cases (#1217) * Removed wait arg from test cases * Added changelog --- changelogs/fragments/1217-validate-job-query.yml | 3 +++ tests/functional/modules/test_zos_job_query_func.py | 4 ++-- 2 files changed, 5 insertions(+), 2 deletions(-) create mode 100644 changelogs/fragments/1217-validate-job-query.yml diff --git a/changelogs/fragments/1217-validate-job-query.yml b/changelogs/fragments/1217-validate-job-query.yml new file mode 100644 index 000000000..df97c3ca6 --- /dev/null +++ b/changelogs/fragments/1217-validate-job-query.yml @@ -0,0 +1,3 @@ +trivial: + - zos_job_query - Removed zos_job_submit wait argument from tests. + (https://github.com/ansible-collections/ibm_zos_core/pull/1217). \ No newline at end of file diff --git a/tests/functional/modules/test_zos_job_query_func.py b/tests/functional/modules/test_zos_job_query_func.py index c0dc5bdca..8c1f170ed 100644 --- a/tests/functional/modules/test_zos_job_query_func.py +++ b/tests/functional/modules/test_zos_job_query_func.py @@ -63,7 +63,7 @@ def test_zos_job_id_query_multi_wildcards_func(ansible_zos_module): cmd="cp {0}/SAMPLE \"//'{1}(SAMPLE)'\"".format(TEMP_PATH, JDATA_SET_NAME) ) results = hosts.all.zos_job_submit( - src="{0}(SAMPLE)".format(JDATA_SET_NAME), location="DATA_SET", wait=True + src="{0}(SAMPLE)".format(JDATA_SET_NAME), location="DATA_SET", wait_time_s=10 ) for result in results.contacted.values(): assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" @@ -96,7 +96,7 @@ def test_zos_job_name_query_multi_wildcards_func(ansible_zos_module): cmd="cp {0}/SAMPLE \"//'{1}(SAMPLE)'\"".format(TEMP_PATH, NDATA_SET_NAME) ) results = hosts.all.zos_job_submit( - src="{0}(SAMPLE)".format(NDATA_SET_NAME), location="DATA_SET", wait=True + src="{0}(SAMPLE)".format(NDATA_SET_NAME), location="DATA_SET", wait_time_s=10 ) for result in results.contacted.values(): assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" From 658cefd54098df1f892a08a71ac09dc6a8111350 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Tue, 13 Feb 2024 14:47:09 -0600 Subject: [PATCH 299/495] [v1.10.0] [ zos_job_submit] Bugfix issue for zos_job_submit when location is not set (#1220) * Pulled v1.9.0 changes * Removed changelog * Updated changelog * Removed summary * Removed wait option --- ...ort_job_name_sends_back_a_value_error.yaml | 11 -------- ...20-bugfix-zos_job_submit-default_value.yml | 4 +++ plugins/action/zos_job_submit.py | 2 +- .../modules/test_zos_job_submit_func.py | 27 ++++++++++++++++--- 4 files changed, 28 insertions(+), 16 deletions(-) delete mode 100644 changelogs/fragments/1078-short_job_name_sends_back_a_value_error.yaml create mode 100644 changelogs/fragments/1220-bugfix-zos_job_submit-default_value.yml diff --git a/changelogs/fragments/1078-short_job_name_sends_back_a_value_error.yaml b/changelogs/fragments/1078-short_job_name_sends_back_a_value_error.yaml deleted file mode 100644 index dd9dc98a5..000000000 --- a/changelogs/fragments/1078-short_job_name_sends_back_a_value_error.yaml +++ /dev/null @@ -1,11 +0,0 @@ -bugfixes: - - zos_job_output - When passing a job ID or name less than 8 characters long, the module sent the full stack trace as the module's message. - Change now allows the use of a shorter job ID or name, as well as wildcards. - (https://github.com/ansible-collections/ibm_zos_core/pull/1078). - - zos_job_query - When passing a job ID or name less than 8 characters long, the module sent the full stack trace as the module's message. - Change now allows the use of a shorter job ID or name, as well as wildcards. - (https://github.com/ansible-collections/ibm_zos_core/pull/1078). - -minor_change: - - zos_job_output - When passing a job ID and owner the module take as mutually exclusive. Change now allows the use of a job ID and owner at the same time. - (https://github.com/ansible-collections/ibm_zos_core/pull/1078). \ No newline at end of file diff --git a/changelogs/fragments/1220-bugfix-zos_job_submit-default_value.yml b/changelogs/fragments/1220-bugfix-zos_job_submit-default_value.yml new file mode 100644 index 000000000..83d2391ba --- /dev/null +++ b/changelogs/fragments/1220-bugfix-zos_job_submit-default_value.yml @@ -0,0 +1,4 @@ +bugfixes: + - zos_job_submit - Was ignoring the default value for location=DATA_SET, now + when location is not specified it will default to DATA_SET. + (https://github.com/ansible-collections/ibm_zos_core/pull/1220). \ No newline at end of file diff --git a/plugins/action/zos_job_submit.py b/plugins/action/zos_job_submit.py index 4b0245b37..6bbd0f9d9 100644 --- a/plugins/action/zos_job_submit.py +++ b/plugins/action/zos_job_submit.py @@ -52,7 +52,7 @@ def run(self, tmp=None, task_vars=None): )) return result - if module_args["location"] == "LOCAL": + if location == "LOCAL": source = self._task.args.get("src", None) diff --git a/tests/functional/modules/test_zos_job_submit_func.py b/tests/functional/modules/test_zos_job_submit_func.py index cdd7175d2..451f79c83 100644 --- a/tests/functional/modules/test_zos_job_submit_func.py +++ b/tests/functional/modules/test_zos_job_submit_func.py @@ -262,8 +262,21 @@ TEMP_PATH = "/tmp/jcl" DATA_SET_NAME_SPECIAL_CHARS = "imstestl.im@1.xxx05" -def test_job_submit_PDS(ansible_zos_module): +@pytest.mark.parametrize( + "location", [ + dict(default_location=True), + dict(default_location=False), + ] +) +def test_job_submit_PDS(ansible_zos_module, location): + """ + Test zos_job_submit with a PDS(MEMBER), also test the default + value for 'location', ensure it works with and without the + value "DATA_SET". If default_location is True, then don't + pass a 'location:DATA_SET' allow its default to come through. + """ try: + results = None hosts = ansible_zos_module data_set_name = get_tmp_ds_name() hosts.all.file(path=TEMP_PATH, state="directory") @@ -276,9 +289,15 @@ def test_job_submit_PDS(ansible_zos_module): hosts.all.shell( cmd="cp {0}/SAMPLE \"//'{1}(SAMPLE)'\"".format(TEMP_PATH, data_set_name) ) - results = hosts.all.zos_job_submit( - src="{0}(SAMPLE)".format(data_set_name), location="DATA_SET" - ) + if bool(location.get("default_location")): + results = hosts.all.zos_job_submit( + src="{0}(SAMPLE)".format(data_set_name), wait_time_s=30 + ) + else: + results = hosts.all.zos_job_submit( + src="{0}(SAMPLE)".format(data_set_name), location="DATA_SET", wait_time_s=30 + ) + for result in results.contacted.values(): assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" assert result.get("jobs")[0].get("ret_code").get("code") == 0 From 05b53c4f2a3d87f71c2055c74e80ce8102c1b174 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Tue, 13 Feb 2024 14:47:50 -0600 Subject: [PATCH 300/495] [v1.10.0][zos_encode] Migrate zos_encode to use ZOAU 1.3 (#1218) * Modified zos_encode and remvoved wwait option for zos_job_submit * Updated parameter * fixed dsorg * Fixed copyright years * Added changelog --- changelogs/fragments/1218-migrate-zos_encode.yml | 3 +++ plugins/module_utils/data_set.py | 4 ++-- plugins/modules/zos_encode.py | 7 ++++--- tests/functional/modules/test_zos_encode_func.py | 10 +++++----- 4 files changed, 14 insertions(+), 10 deletions(-) create mode 100644 changelogs/fragments/1218-migrate-zos_encode.yml diff --git a/changelogs/fragments/1218-migrate-zos_encode.yml b/changelogs/fragments/1218-migrate-zos_encode.yml new file mode 100644 index 000000000..3d712b749 --- /dev/null +++ b/changelogs/fragments/1218-migrate-zos_encode.yml @@ -0,0 +1,3 @@ +trivial: + - zos_encode - migrate code to use ZOAU v1.3.0. + (https://github.com/ansible-collections/ibm_zos_core/pull/1218). diff --git a/plugins/module_utils/data_set.py b/plugins/module_utils/data_set.py index 8b02d77f4..33b1958b4 100644 --- a/plugins/module_utils/data_set.py +++ b/plugins/module_utils/data_set.py @@ -543,10 +543,10 @@ def data_set_type(name, volume=None): data_sets_found = datasets.list_datasets(name) - # Using the DSORG property when it's a sequential or partitioned + # Using the organization property when it's a sequential or partitioned # dataset. VSAMs are not found by datasets.list_datasets. if len(data_sets_found) > 0: - return data_sets_found[0].dsorg + return data_sets_found[0].organization # Next, trying to get the DATA information of a VSAM through # LISTCAT. diff --git a/plugins/modules/zos_encode.py b/plugins/modules/zos_encode.py index 2628ab174..1adc08c01 100644 --- a/plugins/modules/zos_encode.py +++ b/plugins/modules/zos_encode.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019, 2020, 2022, 2023 +# Copyright (c) IBM Corporation 2019 - 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -273,7 +273,7 @@ sample: /path/file_name.2020-04-23-08-32-29-bak.tar """ from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import ( - MissingZOAUImport, + ZOAUImportError, ) from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import ( better_arg_parser, @@ -286,11 +286,12 @@ from os import makedirs from os import listdir import re +import traceback try: from zoautil_py import datasets except Exception: - datasets = MissingZOAUImport() + datasets = ZOAUImportError(traceback.format_exc()) def check_pds_member(ds, mem): diff --git a/tests/functional/modules/test_zos_encode_func.py b/tests/functional/modules/test_zos_encode_func.py index 5d58f2435..e017450ff 100644 --- a/tests/functional/modules/test_zos_encode_func.py +++ b/tests/functional/modules/test_zos_encode_func.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019, 2020, 2022 +# Copyright (c) IBM Corporation 2019 - 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -545,7 +545,7 @@ def test_uss_encoding_conversion_uss_file_to_mvs_vsam(ansible_zos_module): cmd="echo {0} > {1}/SAMPLE".format(quote(KSDS_CREATE_JCL.format(MVS_VS)), TEMP_JCL_PATH) ) results = hosts.all.zos_job_submit( - src="{0}/SAMPLE".format(TEMP_JCL_PATH), location="USS", wait=True + src="{0}/SAMPLE".format(TEMP_JCL_PATH), location="USS", wait_time_s=30 ) for result in results.contacted.values(): @@ -671,7 +671,7 @@ def test_uss_encoding_conversion_mvs_ps_to_mvs_vsam(ansible_zos_module): cmd="echo {0} > {1}/SAMPLE".format(quote(KSDS_CREATE_JCL.format(MVS_VS)), TEMP_JCL_PATH) ) results = hosts.all.zos_job_submit( - src="{0}/SAMPLE".format(TEMP_JCL_PATH), location="USS", wait=True + src="{0}/SAMPLE".format(TEMP_JCL_PATH), location="USS", wait_time_s=30 ) for result in results.contacted.values(): assert result.get("jobs") is not None @@ -803,7 +803,7 @@ def test_vsam_backup(ansible_zos_module): cmd="echo {0} > {1}/SAMPLE".format(quote(KSDS_CREATE_JCL.format(MVS_VS)), TEMP_JCL_PATH) ) hosts.all.zos_job_submit( - src="{0}/SAMPLE".format(TEMP_JCL_PATH), location="USS", wait=True + src="{0}/SAMPLE".format(TEMP_JCL_PATH), location="USS", wait_time_s=30 ) hosts.all.file(path=TEMP_JCL_PATH, state="absent") # submit JCL to populate KSDS @@ -814,7 +814,7 @@ def test_vsam_backup(ansible_zos_module): ) ) hosts.all.zos_job_submit( - src="{0}/SAMPLE".format(TEMP_JCL_PATH), location="USS", wait=True + src="{0}/SAMPLE".format(TEMP_JCL_PATH), location="USS", wait_time_s=30 ) hosts.all.zos_encode( From 3849a210f0614fbad30a424107055c198345b3c5 Mon Sep 17 00:00:00 2001 From: Demetri <dimatos@gmail.com> Date: Thu, 15 Feb 2024 08:28:54 -0800 Subject: [PATCH 301/495] Enabler updates the AC tool with operator support for python and changes python search order. (#1224) * Update requirement files to use new operator for python versions Signed-off-by: ddimatos <dimatos@gmail.com> * Update ac to correct typo Signed-off-by: ddimatos <dimatos@gmail.com> * Update script to support requirements operator for python and change python search order Signed-off-by: ddimatos <dimatos@gmail.com> * Update copyright year Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> --- ac | 3 +- scripts/requirements-2.11.env | 2 +- scripts/requirements-2.12.env | 2 +- scripts/requirements-2.13.env | 2 +- scripts/requirements-2.14.env | 2 +- scripts/requirements-2.15.env | 2 +- scripts/requirements-2.16.env | 32 ++++++++++++++ scripts/requirements-2.9.env | 2 +- scripts/venv.sh | 83 +++++++++++++++++++++++++++-------- 9 files changed, 104 insertions(+), 26 deletions(-) create mode 100644 scripts/requirements-2.16.env diff --git a/ac b/ac index b5febedbb..dad00194c 100755 --- a/ac +++ b/ac @@ -584,7 +584,7 @@ host_nodes(){ ## the 'password' option should only an option when the utility can not decrypt. ## Usage: ac [--venv-setup] [--password 123456] ## Example: -## $ ac --venv-setup --passsword 123456 +## $ ac --venv-setup --password 123456 ## $ ac --venv-setup venv_setup(){ option_pass=$1 @@ -636,7 +636,6 @@ while true; do exit 1 fi fi - case $1 in -h|-\?|--help) if [ "$1" = "-h" ] || [ "$1" = "-?" ]; then diff --git a/scripts/requirements-2.11.env b/scripts/requirements-2.11.env index e7defb9fc..3b4a18d0c 100644 --- a/scripts/requirements-2.11.env +++ b/scripts/requirements-2.11.env @@ -31,5 +31,5 @@ requirements=( ) python=( -"python:3.8" +"<=:python:3.9" ) \ No newline at end of file diff --git a/scripts/requirements-2.12.env b/scripts/requirements-2.12.env index 5052447da..4f6add957 100644 --- a/scripts/requirements-2.12.env +++ b/scripts/requirements-2.12.env @@ -28,5 +28,5 @@ requirements=( ) python=( -"python:3.8" +"<=:python:3.10" ) \ No newline at end of file diff --git a/scripts/requirements-2.13.env b/scripts/requirements-2.13.env index c08a7c7e9..cfce646d0 100644 --- a/scripts/requirements-2.13.env +++ b/scripts/requirements-2.13.env @@ -28,5 +28,5 @@ requirements=( ) python=( -"python:3.8" +"<=:python:3.10" ) \ No newline at end of file diff --git a/scripts/requirements-2.14.env b/scripts/requirements-2.14.env index 9d15b3dab..f1c423f8b 100644 --- a/scripts/requirements-2.14.env +++ b/scripts/requirements-2.14.env @@ -28,5 +28,5 @@ requirements=( ) python=( -"python:3.9" +"<=:python:3.11" ) \ No newline at end of file diff --git a/scripts/requirements-2.15.env b/scripts/requirements-2.15.env index 5f8b36260..3d94e55af 100644 --- a/scripts/requirements-2.15.env +++ b/scripts/requirements-2.15.env @@ -28,5 +28,5 @@ requirements=( ) python=( -"python:3.9" +"<=:python:3.11" ) diff --git a/scripts/requirements-2.16.env b/scripts/requirements-2.16.env new file mode 100644 index 000000000..2d0ed42a1 --- /dev/null +++ b/scripts/requirements-2.16.env @@ -0,0 +1,32 @@ +#!/bin/sh +# ============================================================================== +# Copyright (c) IBM Corporation 2024 +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== + +# ============================================================================== +# File name must adhere to reqs-<ansible-major-minor>.sh, supporting concurrent +# patches could come and to do that you would want to not use unique files for +# each ansible version but instead have this file provide the meta-data such +# the sourcing scrips know to create additional ansible venvs and maybe even +# clean up if they are removed from this file. +# eg venvs = [requirements, requirements2, requirments3] +# ============================================================================== + +requirements=( +"ansible-core:2.16.3" +"pylint" +"rstcheck" +) + +python=( +"<=:python:3.12" +) diff --git a/scripts/requirements-2.9.env b/scripts/requirements-2.9.env index 2d7d9e11b..b962483f9 100644 --- a/scripts/requirements-2.9.env +++ b/scripts/requirements-2.9.env @@ -30,6 +30,6 @@ requirements=( ) python=( -"python:3.8" +"==:python:3.8" ) diff --git a/scripts/venv.sh b/scripts/venv.sh index 51426a055..56756d16e 100755 --- a/scripts/venv.sh +++ b/scripts/venv.sh @@ -27,6 +27,13 @@ VENV_HOME_MANAGED=${PWD%/*}/venv # Array where each entry is: "<index>:<version>:<mount>:<data_set>" HOSTS_ALL="" +OPER_EQ="==" +OPER_NE="!=" +OPER_LT="<" +OPER_LE="<=" +OPER_GT=">" +OPER_GE=">=" + # hosts_env="hosts.env" # if [ -f "$hosts_env" ]; then @@ -128,9 +135,9 @@ echo_requirements(){ py_req="0" for ver in "${python[@]}" ; do - key=${ver%%:*} - value=${ver#*:} - py_req="${value}" + py_op=`echo "${ver}" | cut -d ":" -f 1` + py_name=`echo "${ver}" | cut -d ":" -f 2` + py_req=`echo "${ver}" | cut -d ":" -f 3` done echo "${py_req}" done @@ -222,13 +229,29 @@ write_requirements(){ py_req="0" for ver in "${python[@]}" ; do - key=${ver%%:*} - value=${ver#*:} - py_req="${value}" + py_op=`echo "${ver}" | cut -d ":" -f 1` + py_name=`echo "${ver}" | cut -d ":" -f 2` + py_req=`echo "${ver}" | cut -d ":" -f 3` done + if [ "$OPER_EQ" == "$py_op" ];then + py_op="-eq" + elif [ "$OPER_NE" == "$py_op" ];then + py_op="-ne" + elif [ "$OPER_LT" == "$py_op" ];then + py_op="-lt" + elif [ "$OPER_LE" == "$py_op" ];then + py_op="-le" + elif [ "$OPER_GT" == "$py_op" ];then + py_op="-gt" + elif [ "$OPER_GE" == "$py_op" ];then + py_op="-ge" + fi + + discover_python $py_op $py_req + # Is the discoverd python >= what the requirements.txt requires? - if [ $(normalize_version $VERSION_PYTHON) -ge $(normalize_version $py_req) ]; then + if [ $(normalize_version $VERSION_PYTHON) "$py_op" $(normalize_version $py_req) ]; then echo "${REQ}${REQ_COMMON}">"${VENV_HOME_MANAGED}"/"${venv_name}"/requirements.txt cp mounts.env "${VENV_HOME_MANAGED}"/"${venv_name}"/ #cp info.env "${VENV_HOME_MANAGED}"/"${venv_name}"/ @@ -245,6 +268,16 @@ write_requirements(){ chmod 700 "${VENV_HOME_MANAGED}"/"${venv_name}"/info.env #echo "${option_pass}" | openssl bf -d -a -in info.env.axx -out "${VENV_HOME_MANAGED}"/"${venv_name}"/info.env -pass stdin echo "${option_pass}" | openssl enc -d -aes-256-cbc -a -in info.env.axx -out "${VENV_HOME_MANAGED}"/"${venv_name}"/info.env -pass stdin + else + # echo a stub so the user can later choose to rename and configure + touch "${VENV_HOME_MANAGED}"/"${venv_name}"/info.env.changeme + echo "# This configuration file is used by the tool to avoid exporting enviroment variables">>"${VENV_HOME_MANAGED}"/"${venv_name}"/info.env.changeme + echo "# To use this, update all the variables with a value and rename the file to 'info.env'.">>"${VENV_HOME_MANAGED}"/"${venv_name}"/info.env.changeme + echo "USER=\"\"">>"${VENV_HOME_MANAGED}"/"${venv_name}"/info.env.changeme + echo "PASS=\"\"">>"${VENV_HOME_MANAGED}"/"${venv_name}"/info.env.changeme + echo "HOST_SUFFIX=\"\"">>"${VENV_HOME_MANAGED}"/"${venv_name}"/info.env.changeme + echo "SSH_KEY_PIPELINE=\"\"">>"${VENV_HOME_MANAGED}"/"${venv_name}"/info.env.changeme + echo "No password was provided, a temporary 'info.env.changeme' file has been created for your convenience." fi else echo "Not able to create managed venv path: ${VENV_HOME_MANAGED}/${venv_name} , min python required is ${py_req}, found version $VERSION_PYTHON" @@ -282,36 +315,50 @@ find_in_path() { result="" OTHER_PYTHON_PATHS="/Library/Frameworks/Python.framework/Versions/Current/bin:/opt/homebrew/bin:" PATH="${OTHER_PYTHON_PATHS}${PATH}" + OLDIFS=$IFS IFS=: for x in $PATH; do if [ -x "$x/$1" ]; then result=${result}" $x/$1" fi done + IFS=$OLDIFS echo $result } - # Find the most recent python in a users path discover_python(){ - # Don't use which, it only will find first in path within script + operator=$1 + required_python=$2 + if [ ! "$operator" ]; then + operator="-ge" + fi + + if [ "$required_python" ]; then + VERSION_PYTHON=$required_python + fi + + # Don't use which, it only will find first in path within the script # for python_found in `which python3 | cut -d" " -f3`; do - pys=("python3" "python3.8" "python3.9" "python3.10" "python3.11") # "python3.12" "python3.13" "python3.14") - #pys=("python3.8" "python3.9") + pys=("python3.14" "python3.13" "python3.12" "python3.11" "python3.10" "python3.9" "python3.8") + rc=1 for py in "${pys[@]}"; do for python_found in `find_in_path $py`; do ver=`${python_found} --version | cut -d" " -f2` + rc=$? + ver=`echo $ver |cut -d"." -f1,2` ver_path="$python_found" echo "Found $ver_path" done - - if [ $(normalize_version $ver) -ge $(normalize_version $VERSION_PYTHON) ]; then - VERSION_PYTHON="$ver" - VERSION_PYTHON_PATH="$ver_path" + if [ $rc -eq 0 ];then + if [ $(normalize_version $ver) "$operator" $(normalize_version $VERSION_PYTHON) ]; then + VERSION_PYTHON="$ver" + VERSION_PYTHON_PATH="$ver_path" + break + fi fi - done echo ${DIVIDER} @@ -406,7 +453,7 @@ set_hosts_to_array(){ else # check if the env varas instead have been exported if [ -z "$USER" ] || [ -z "$PASS" ] || [ -z "$HOST_SUFFIX" ]; then echo "This configuration requires either 'info.env' exist or environment vars for the z/OS host exist and be exported." - echo "Export and set vars: 'USER', 'PASS' and'HOST_SUFFIX', or place them in a file named info.env." + echo "Export and set vars: 'USER', 'PASS','HOST_SUFFIX' and optionally 'SSH_KEY_PIPELINE', or place them in a file named info.env." exit 1 fi fi @@ -566,7 +613,7 @@ case "$1" in discover_python ;; --vsetup) - discover_python + #discover_python make_venv_dirs #echo_requirements write_requirements $3 From a8d2fc227c66020cf47cd13b3edc862f20741453 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Thu, 15 Feb 2024 12:03:13 -0600 Subject: [PATCH 302/495] [v1.10.0][zos_apf] Migrate zos_apf to ZOAU 1.3 (#1204) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Modified test case and apf code * Updated exceptions * Updated changelog * Changed zos_apf exit call * Uncommented test case * Added try - finally statements to make sure we clean up the environment upon failure * Removed validation & fixed test case * Removed zos_data_set dependency * updated test cacse * Fixed typo * Added validation for volume * Updated changelog * Modified tests to only fetch volumes with VVDS once per session * Updated zos_apf tests --------- Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> --- changelogs/fragments/1204-migrate-zos_apf.yml | 12 + plugins/modules/zos_apf.py | 29 +- tests/conftest.py | 14 +- tests/functional/modules/test_zos_apf_func.py | 811 +++++++++--------- tests/helpers/volumes.py | 44 +- 5 files changed, 517 insertions(+), 393 deletions(-) create mode 100644 changelogs/fragments/1204-migrate-zos_apf.yml diff --git a/changelogs/fragments/1204-migrate-zos_apf.yml b/changelogs/fragments/1204-migrate-zos_apf.yml new file mode 100644 index 000000000..89db1abd2 --- /dev/null +++ b/changelogs/fragments/1204-migrate-zos_apf.yml @@ -0,0 +1,12 @@ +bugfixes: + - zos_apf - List option only returned one data set. Fix now returns + the list of retrieved data sets. + (https://github.com/ansible-collections/ibm_zos_core/pull/1204). + +minor_changes: + - zos_apf - Enhanced error messages when an exception is caught. + (https://github.com/ansible-collections/ibm_zos_core/pull/1204). + +trivial: + - zos_apf - Migrated the module to use ZOAU v1.3.0 json schema. + (https://github.com/ansible-collections/ibm_zos_core/pull/1204). \ No newline at end of file diff --git a/plugins/modules/zos_apf.py b/plugins/modules/zos_apf.py index dee6094fc..bba3beb19 100644 --- a/plugins/modules/zos_apf.py +++ b/plugins/modules/zos_apf.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2020, 2022, 2023 +# Copyright (c) IBM Corporation 2020 - 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -292,6 +292,7 @@ import re import json +from ansible.module_utils._text import to_text from ansible.module_utils.basic import AnsibleModule from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import ( better_arg_parser, data_set, backup as Backup) @@ -522,6 +523,12 @@ def main(): result['rc'] = operRc result['stdout'] = operOut if operation == 'list': + try: + data = json.loads(operOut) + data_sets = data["data"]["datasets"] + except Exception as e: + err_msg = "An exception occurred. See stderr for more details." + module.fail_json(msg=err_msg, stderr=to_text(e), rc=operErr) if not library: library = "" if not volume: @@ -529,18 +536,26 @@ def main(): if sms: sms = "*SMS*" if library or volume or sms: - try: - data = json.loads(operOut) - except json.JSONDecodeError: - module.exit_json(**result) - for d in data[2:]: + ds_list = "" + for d in data_sets: ds = d.get('ds') vol = d.get('vol') try: if (library and re.match(library, ds)) or (volume and re.match(volume, vol)) or (sms and sms == vol): - result['stdout'] = "{0} {1}\n".format(vol, ds) + ds_list = ds_list + "{0} {1}\n".format(vol, ds) except re.error: module.exit_json(**result) + result['stdout'] = ds_list + else: + """ + ZOAU 1.3 changed the output from apf, having the data set list inside a new "data" tag. + To keep consistency with previous ZOAU versions now we have to filter the json response. + """ + try: + result['stdout'] = json.dumps(data.get("data")) + except Exception as e: + err_msg = "An exception occurred. See stderr for more details." + module.fail_json(msg=err_msg, stderr=to_text(e), rc=operErr) module.exit_json(**result) diff --git a/tests/conftest.py b/tests/conftest.py index c8513ad37..7fea5ac0d 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -14,7 +14,7 @@ __metaclass__ = type import pytest from ibm_zos_core.tests.helpers.ztest import ZTestHelper -from ibm_zos_core.tests.helpers.volumes import get_volumes +from ibm_zos_core.tests.helpers.volumes import get_volumes, get_volumes_with_vvds import sys from mock import MagicMock import importlib @@ -93,6 +93,18 @@ def volumes_on_systems(ansible_zos_module, request): list_Volumes = get_volumes(ansible_zos_module, path) yield list_Volumes + +@pytest.fixture(scope="session") +def volumes_with_vvds(ansible_zos_module, request): + """ Return a list of volumes that have a VVDS. If no volume has a VVDS + then it will try to create one for each volume found and return volumes only + if a VVDS was successfully created for it.""" + path = request.config.getoption("--zinventory") + volumes = get_volumes(ansible_zos_module, path) + volumes_with_vvds = get_volumes_with_vvds(ansible_zos_module, volumes) + yield volumes_with_vvds + + # * We no longer edit sys.modules directly to add zoautil_py mock # * because automatic teardown is not performed, leading to mock pollution # * across test files. diff --git a/tests/functional/modules/test_zos_apf_func.py b/tests/functional/modules/test_zos_apf_func.py index 3c3d96ab2..9722b92fa 100644 --- a/tests/functional/modules/test_zos_apf_func.py +++ b/tests/functional/modules/test_zos_apf_func.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2020, 2022 +# Copyright (c) IBM Corporation 2020 - 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -16,9 +16,6 @@ from ibm_zos_core.tests.helpers.volumes import Volume_Handler from shellescape import quote from pprint import pprint -import os -import sys -import pytest __metaclass__ = type @@ -53,103 +50,116 @@ def clean_test_env(hosts, test_info): hosts.all.shell(cmd=cmdStr) -def test_add_del(ansible_zos_module): - hosts = ansible_zos_module - test_info = dict(library="", state="present", force_dynamic=True) - ds = get_tmp_ds_name(3,2) - hosts.all.shell(cmd="dtouch -tseq {0}".format(ds)) - test_info['library'] = ds - if test_info.get('volume') is not None: - cmdStr = "dls -l " + ds + " | awk '{print $5}' " - results = hosts.all.shell(cmd=cmdStr) +def test_add_del(ansible_zos_module, volumes_with_vvds): + try: + hosts = ansible_zos_module + VolumeHandler = Volume_Handler(volumes_with_vvds) + volume = VolumeHandler.get_available_vol() + test_info = dict(library="", state="present", force_dynamic=True) + ds = get_tmp_ds_name(3,2) + hosts.all.shell(f"dtouch -tseq -V{volume} {ds} ") + test_info['library'] = ds + if test_info.get('volume') is not None: + cmdStr = "dls -l " + ds + " | awk '{print $5}' " + results = hosts.all.shell(cmd=cmdStr) + for result in results.contacted.values(): + vol = result.get("stdout") + test_info['volume'] = vol + if test_info.get('persistent'): + cmdStr = "mvstmp APFTEST.PRST" + results = hosts.all.shell(cmd=cmdStr) + for result in results.contacted.values(): + prstds = result.get("stdout") + prstds = prstds[:30] + cmdStr = "dtouch -tseq {0}".format(prstds) + hosts.all.shell(cmd=cmdStr) + test_info['persistent']['data_set_name'] = prstds + results = hosts.all.zos_apf(**test_info) for result in results.contacted.values(): - vol = result.get("stdout") - test_info['volume'] = vol - if test_info.get('persistent'): - cmdStr = "mvstmp APFTEST.PRST" - results = hosts.all.shell(cmd=cmdStr) + assert result.get("rc") == 0 + test_info['state'] = 'absent' + results = hosts.all.zos_apf(**test_info) for result in results.contacted.values(): - prstds = result.get("stdout") - prstds = prstds[:30] - cmdStr = "dtouch -tseq {0}".format(prstds) - hosts.all.shell(cmd=cmdStr) - test_info['persistent']['data_set_name'] = prstds - results = hosts.all.zos_apf(**test_info) - for result in results.contacted.values(): - assert result.get("rc") == 0 - test_info['state'] = 'absent' - results = hosts.all.zos_apf(**test_info) - for result in results.contacted.values(): - assert result.get("rc") == 0 - clean_test_env(hosts, test_info) - - -def test_add_del_with_tmp_hlq_option(ansible_zos_module): - hosts = ansible_zos_module - tmphlq = "TMPHLQ" - test_info = dict(library="", state="present", force_dynamic=True, tmp_hlq="", persistent=dict(data_set_name="", backup=True)) - test_info['tmp_hlq'] = tmphlq - ds = get_tmp_ds_name(3,2) - hosts.all.shell(cmd="dtouch -tseq {0}".format(ds)) - test_info['library'] = ds - if test_info.get('volume') is not None: - cmdStr = "dls -l " + ds + " | awk '{print $5}' " - results = hosts.all.shell(cmd=cmdStr) + assert result.get("rc") == 0 + finally: + clean_test_env(hosts, test_info) + + +def test_add_del_with_tmp_hlq_option(ansible_zos_module, volumes_with_vvds): + try: + hosts = ansible_zos_module + VolumeHandler = Volume_Handler(volumes_with_vvds) + volume = VolumeHandler.get_available_vol() + tmphlq = "TMPHLQ" + test_info = dict(library="", state="present", force_dynamic=True, tmp_hlq="", persistent=dict(data_set_name="", backup=True)) + test_info['tmp_hlq'] = tmphlq + ds = get_tmp_ds_name(3,2) + hosts.all.shell(cmd=f"dtouch -tseq -V{volume} {ds} ") + test_info['library'] = ds + if test_info.get('volume') is not None: + cmdStr = "dls -l " + ds + " | awk '{print $5}' " + results = hosts.all.shell(cmd=cmdStr) + for result in results.contacted.values(): + vol = result.get("stdout") + test_info['volume'] = vol + if test_info.get('persistent'): + cmdStr = "mvstmp APFTEST.PRST" + results = hosts.all.shell(cmd=cmdStr) + for result in results.contacted.values(): + prstds = result.get("stdout") + prstds = prstds[:30] + cmdStr = "dtouch -tseq {0}".format(prstds) + hosts.all.shell(cmd=cmdStr) + test_info['persistent']['data_set_name'] = prstds + results = hosts.all.zos_apf(**test_info) for result in results.contacted.values(): - vol = result.get("stdout") - test_info['volume'] = vol - if test_info.get('persistent'): - cmdStr = "mvstmp APFTEST.PRST" - results = hosts.all.shell(cmd=cmdStr) + assert result.get("rc") == 0 + assert result.get("backup_name")[:6] == tmphlq + test_info['state'] = 'absent' + results = hosts.all.zos_apf(**test_info) for result in results.contacted.values(): - prstds = result.get("stdout") - prstds = prstds[:30] - cmdStr = "dtouch -tseq {0}".format(prstds) - hosts.all.shell(cmd=cmdStr) - test_info['persistent']['data_set_name'] = prstds - results = hosts.all.zos_apf(**test_info) - for result in results.contacted.values(): - assert result.get("rc") == 0 - assert result.get("backup_name")[:6] == tmphlq - test_info['state'] = 'absent' - results = hosts.all.zos_apf(**test_info) - for result in results.contacted.values(): - assert result.get("rc") == 0 - clean_test_env(hosts, test_info) - - -def test_add_del_volume(ansible_zos_module): - hosts = ansible_zos_module - test_info = dict(library="", volume="", state="present", force_dynamic=True) - ds = get_tmp_ds_name(1,1) - hosts.all.shell(cmd="dtouch -tseq {0}".format(ds)) - test_info['library'] = ds - if test_info.get('volume') is not None: - cmdStr = "dls -l " + ds + " | awk '{print $5}' " - results = hosts.all.shell(cmd=cmdStr) + assert result.get("rc") == 0 + finally: + clean_test_env(hosts, test_info) + + +def test_add_del_volume(ansible_zos_module, volumes_with_vvds): + try: + hosts = ansible_zos_module + VolumeHandler = Volume_Handler(volumes_with_vvds) + volume = VolumeHandler.get_available_vol() + test_info = dict(library="", volume="", state="present", force_dynamic=True) + ds = get_tmp_ds_name(1,1) + hosts.all.shell(cmd=f"dtouch -tseq -V{volume} {ds} ") + test_info['library'] = ds + if test_info.get('volume') is not None: + cmdStr = "dls -l " + ds + " | awk '{print $5}' " + results = hosts.all.shell(cmd=cmdStr) + for result in results.contacted.values(): + vol = result.get("stdout") + test_info['volume'] = vol + if test_info.get('persistent'): + cmdStr = "mvstmp APFTEST.PRST" + results = hosts.all.shell(cmd=cmdStr) + for result in results.contacted.values(): + prstds = result.get("stdout") + prstds = prstds[:30] + cmdStr = "dtouch -tseq {0}".format(prstds) + hosts.all.shell(cmd=cmdStr) + test_info['persistent']['data_set_name'] = prstds + results = hosts.all.zos_apf(**test_info) for result in results.contacted.values(): - vol = result.get("stdout") - test_info['volume'] = vol - if test_info.get('persistent'): - cmdStr = "mvstmp APFTEST.PRST" - results = hosts.all.shell(cmd=cmdStr) + assert result.get("rc") == 0 + test_info['state'] = 'absent' + results = hosts.all.zos_apf(**test_info) for result in results.contacted.values(): - prstds = result.get("stdout") - prstds = prstds[:30] - cmdStr = "dtouch -tseq {0}".format(prstds) - hosts.all.shell(cmd=cmdStr) - test_info['persistent']['data_set_name'] = prstds - results = hosts.all.zos_apf(**test_info) - for result in results.contacted.values(): - assert result.get("rc") == 0 - test_info['state'] = 'absent' - results = hosts.all.zos_apf(**test_info) - for result in results.contacted.values(): - assert result.get("rc") == 0 - clean_test_env(hosts, test_info) + assert result.get("rc") == 0 + finally: + clean_test_env(hosts, test_info) """ +This test case was removed 3 years ago in the following PR : https://github.com/ansible-collections/ibm_zos_core/pull/197 def test_add_del_persist(ansible_zos_module): hosts = ansible_zos_module test_info = TEST_INFO['test_add_del_persist'] @@ -176,50 +186,54 @@ def test_add_del_persist(ansible_zos_module): """ -def test_add_del_volume_persist(ansible_zos_module): - hosts = ansible_zos_module - test_info = dict(library="", volume="", persistent=dict(data_set_name="", marker="/* {mark} BLOCK */"), state="present", force_dynamic=True) - ds = get_tmp_ds_name(1,1) - hosts.all.shell(cmd="dtouch -tseq {0}".format(ds)) - test_info['library'] = ds - if test_info.get('volume') is not None: - cmdStr = "dls -l " + ds + " | awk '{print $5}' " +def test_add_del_volume_persist(ansible_zos_module, volumes_with_vvds): + try: + hosts = ansible_zos_module + VolumeHandler = Volume_Handler(volumes_with_vvds) + volume = VolumeHandler.get_available_vol() + test_info = dict(library="", volume="", persistent=dict(data_set_name="", marker="/* {mark} BLOCK */"), state="present", force_dynamic=True) + ds = get_tmp_ds_name(1,1) + hosts.all.shell(cmd=f"dtouch -tseq -V{volume} {ds} ") + test_info['library'] = ds + if test_info.get('volume') is not None: + cmdStr = "dls -l " + ds + " | awk '{print $5}' " + results = hosts.all.shell(cmd=cmdStr) + for result in results.contacted.values(): + vol = result.get("stdout") + test_info['volume'] = vol + if test_info.get('persistent'): + cmdStr = "mvstmp APFTEST.PRST" + results = hosts.all.shell(cmd=cmdStr) + for result in results.contacted.values(): + prstds = result.get("stdout") + prstds = prstds[:30] + cmdStr = "dtouch -tseq {0}".format(prstds) + hosts.all.shell(cmd=cmdStr) + test_info['persistent']['data_set_name'] = prstds + results = hosts.all.zos_apf(**test_info) + for result in results.contacted.values(): + assert result.get("rc") == 0 + add_exptd = add_expected.format(test_info['library'], test_info['volume']) + add_exptd = add_exptd.replace(" ", "") + cmdStr = "cat \"//'{0}'\" ".format(test_info['persistent']['data_set_name']) results = hosts.all.shell(cmd=cmdStr) for result in results.contacted.values(): - vol = result.get("stdout") - test_info['volume'] = vol - if test_info.get('persistent'): - cmdStr = "mvstmp APFTEST.PRST" + actual = result.get("stdout") + actual = actual.replace(" ", "") + assert actual == add_exptd + test_info['state'] = 'absent' + results = hosts.all.zos_apf(**test_info) + for result in results.contacted.values(): + assert result.get("rc") == 0 + del_exptd = del_expected.replace(" ", "") + cmdStr = "cat \"//'{0}'\" ".format(test_info['persistent']['data_set_name']) results = hosts.all.shell(cmd=cmdStr) for result in results.contacted.values(): - prstds = result.get("stdout") - prstds = prstds[:30] - cmdStr = "dtouch -tseq {0}".format(prstds) - hosts.all.shell(cmd=cmdStr) - test_info['persistent']['data_set_name'] = prstds - results = hosts.all.zos_apf(**test_info) - for result in results.contacted.values(): - assert result.get("rc") == 0 - add_exptd = add_expected.format(test_info['library'], test_info['volume']) - add_exptd = add_exptd.replace(" ", "") - cmdStr = "cat \"//'{0}'\" ".format(test_info['persistent']['data_set_name']) - results = hosts.all.shell(cmd=cmdStr) - for result in results.contacted.values(): - actual = result.get("stdout") - actual = actual.replace(" ", "") - assert actual == add_exptd - test_info['state'] = 'absent' - results = hosts.all.zos_apf(**test_info) - for result in results.contacted.values(): - assert result.get("rc") == 0 - del_exptd = del_expected.replace(" ", "") - cmdStr = "cat \"//'{0}'\" ".format(test_info['persistent']['data_set_name']) - results = hosts.all.shell(cmd=cmdStr) - for result in results.contacted.values(): - actual = result.get("stdout") - actual = actual.replace(" ", "") - assert actual == del_exptd - clean_test_env(hosts, test_info) + actual = result.get("stdout") + actual = actual.replace(" ", "") + assert actual == del_exptd + finally: + clean_test_env(hosts, test_info) """ keyword: ENABLE-FOR-1-3 @@ -227,56 +241,58 @@ def test_add_del_volume_persist(ansible_zos_module): whoever works in issue https://github.com/ansible-collections/ibm_zos_core/issues/726 should uncomment this test as part of the validation process. """ -#def test_batch_add_del(ansible_zos_module): -# hosts = ansible_zos_module -# test_info = dict( -# batch=[dict(library="", volume=" "), dict(library="", volume=" "), dict(library="", volume=" ")], -# persistent=dict(data_set_name="", marker="/* {mark} BLOCK */"), state="present", force_dynamic=True -# ) -# for item in test_info['batch']: -# ds = get_tmp_ds_name(1,1) -# hosts.all.shell(cmd="dtouch {0}".format(ds)) -# item['library'] = ds -# cmdStr = "dls -l " + ds + " | awk '{print $5}' " -# results = hosts.all.shell(cmd=cmdStr) -# for result in results.contacted.values(): -# vol = result.get("stdout") -# item['volume'] = vol -# prstds = get_tmp_ds_name(5,5) -# cmdStr = "dtouch {0}".format(prstds) -# hosts.all.shell(cmd=cmdStr) -# test_info['persistent']['data_set_name'] = prstds -# hosts.all.shell(cmd="echo \"{0}\" > {1}".format("Hello World, Here's Jhonny", prstds)) -# results = hosts.all.zos_apf(**test_info) -# pprint(vars(results)) -# for result in results.contacted.values(): -# assert result.get("rc") == 0 -# add_exptd = add_batch_expected.format(test_info['batch'][0]['library'], test_info['batch'][0]['volume'], -# test_info['batch'][1]['library'], test_info['batch'][1]['volume'], -# test_info['batch'][2]['library'], test_info['batch'][2]['volume']) -# add_exptd = add_exptd.replace(" ", "") -# cmdStr = "cat \"//'{0}'\" ".format(test_info['persistent']['data_set_name']) -# results = hosts.all.shell(cmd=cmdStr) -# for result in results.contacted.values(): -# actual = result.get("stdout") -# actual = actual.replace(" ", "") -# assert actual == add_exptd -# test_info['state'] = 'absent' -# results = hosts.all.zos_apf(**test_info) -# pprint(vars(results)) -# for result in results.contacted.values(): -# assert result.get("rc") == 0 -# del_exptd = del_expected.replace(" ", "") -# cmdStr = "cat \"//'{0}'\" ".format(test_info['persistent']['data_set_name']) -# results = hosts.all.shell(cmd=cmdStr) -# for result in results.contacted.values(): -# actual = result.get("stdout") -# actual = actual.replace(" ", "") -# assert actual == del_exptd -# for item in test_info['batch']: -# clean_test_env(hosts, item) -# cmdStr = "drm {0}".format(test_info['persistent']['data_set_name']) -# hosts.all.shell(cmd=cmdStr) +def test_batch_add_del(ansible_zos_module, volumes_with_vvds): + try: + hosts = ansible_zos_module + VolumeHandler = Volume_Handler(volumes_with_vvds) + volume = VolumeHandler.get_available_vol() + test_info = dict( + batch=[dict(library="", volume=" "), dict(library="", volume=" "), dict(library="", volume=" ")], + persistent=dict(data_set_name="", marker="/* {mark} BLOCK */"), state="present", force_dynamic=True + ) + for item in test_info['batch']: + ds = get_tmp_ds_name(1,1) + hosts.all.shell(cmd=f"dtouch -tseq -V{volume} {ds} ") + item['library'] = ds + cmdStr = "dls -l " + ds + " | awk '{print $5}' " + results = hosts.all.shell(cmd=cmdStr) + for result in results.contacted.values(): + vol = result.get("stdout") + item['volume'] = vol + prstds = get_tmp_ds_name(5,5) + cmdStr = "dtouch -tseq {0}".format(prstds) + hosts.all.shell(cmd=cmdStr) + test_info['persistent']['data_set_name'] = prstds + results = hosts.all.zos_apf(**test_info) + pprint(vars(results)) + for result in results.contacted.values(): + assert result.get("rc") == 0 + add_exptd = add_batch_expected.format(test_info['batch'][0]['library'], test_info['batch'][0]['volume'], + test_info['batch'][1]['library'], test_info['batch'][1]['volume'], + test_info['batch'][2]['library'], test_info['batch'][2]['volume']) + add_exptd = add_exptd.replace(" ", "") + cmdStr = "cat \"//'{0}'\" ".format(test_info['persistent']['data_set_name']) + results = hosts.all.shell(cmd=cmdStr) + for result in results.contacted.values(): + actual = result.get("stdout") + actual = actual.replace(" ", "") + assert actual == add_exptd + test_info['state'] = 'absent' + results = hosts.all.zos_apf(**test_info) + pprint(vars(results)) + for result in results.contacted.values(): + assert result.get("rc") == 0 + del_exptd = del_expected.replace(" ", "") + cmdStr = "cat \"//'{0}'\" ".format(test_info['persistent']['data_set_name']) + results = hosts.all.shell(cmd=cmdStr) + for result in results.contacted.values(): + actual = result.get("stdout") + actual = actual.replace(" ", "") + assert actual == del_exptd + finally: + for item in test_info['batch']: + clean_test_env(hosts, item) + hosts.all.shell(cmd="drm {0}".format(test_info['persistent']['data_set_name'])) def test_operation_list(ansible_zos_module): @@ -285,111 +301,124 @@ def test_operation_list(ansible_zos_module): results = hosts.all.zos_apf(**test_info) for result in results.contacted.values(): listJson = result.get("stdout") + print(listJson) import json data = json.loads(listJson) - assert data[0]['format'] in ['DYNAMIC', 'STATIC'] + assert data['format'] in ['DYNAMIC', 'STATIC'] del json -def test_operation_list_with_filter(ansible_zos_module): - hosts = ansible_zos_module - test_info = dict(library="", state="present", force_dynamic=True) - test_info['state'] = 'present' - ds = get_tmp_ds_name(3,2) - hosts.all.shell(cmd="dtouch -tseq {0}".format(ds)) - test_info['library'] = ds - if test_info.get('volume') is not None: - cmdStr = "dls -l " + ds + " | awk '{print $5}' " - results = hosts.all.shell(cmd=cmdStr) +def test_operation_list_with_filter(ansible_zos_module, volumes_with_vvds): + try: + hosts = ansible_zos_module + VolumeHandler = Volume_Handler(volumes_with_vvds) + volume = VolumeHandler.get_available_vol() + test_info = dict(library="", state="present", force_dynamic=True) + test_info['state'] = 'present' + ds = get_tmp_ds_name(3,2) + hosts.all.shell(cmd=f"dtouch -tseq -V{volume} {ds} ") + test_info['library'] = ds + if test_info.get('volume') is not None: + cmdStr = "dls -l " + ds + " | awk '{print $5}' " + results = hosts.all.shell(cmd=cmdStr) + for result in results.contacted.values(): + vol = result.get("stdout") + test_info['volume'] = vol + if test_info.get('persistent'): + cmdStr = "mvstmp APFTEST.PRST" + results = hosts.all.shell(cmd=cmdStr) + for result in results.contacted.values(): + prstds = result.get("stdout") + prstds = prstds[:30] + cmdStr = "dtouch -tseq {0}".format(prstds) + hosts.all.shell(cmd=cmdStr) + test_info['persistent']['data_set_name'] = prstds + hosts.all.zos_apf(**test_info) + ti = dict(operation="list", library="") + ti['library'] = "ANSIBLE.*" + results = hosts.all.zos_apf(**ti) for result in results.contacted.values(): - vol = result.get("stdout") - test_info['volume'] = vol - if test_info.get('persistent'): - cmdStr = "mvstmp APFTEST.PRST" - results = hosts.all.shell(cmd=cmdStr) - for result in results.contacted.values(): - prstds = result.get("stdout") - prstds = prstds[:30] - cmdStr = "dtouch -tseq {0}".format(prstds) - hosts.all.shell(cmd=cmdStr) - test_info['persistent']['data_set_name'] = prstds - hosts.all.zos_apf(**test_info) - ti = dict(operation="list", library="") - ti['library'] = "APFTEST.*" - results = hosts.all.zos_apf(**ti) - for result in results.contacted.values(): - listFiltered = result.get("stdout") - assert test_info['library'] in listFiltered - test_info['state'] = 'absent' - hosts.all.zos_apf(**test_info) - clean_test_env(hosts, test_info) + listFiltered = result.get("stdout") + assert test_info['library'] in listFiltered + test_info['state'] = 'absent' + hosts.all.zos_apf(**test_info) + finally: + clean_test_env(hosts, test_info) # # Negative tests # -def test_add_already_present(ansible_zos_module): - hosts = ansible_zos_module - test_info = dict(library="", state="present", force_dynamic=True) - test_info['state'] = 'present' - ds = get_tmp_ds_name(3,2) - hosts.all.shell(cmd="dtouch -tseq {0}".format(ds)) - test_info['library'] = ds - if test_info.get('volume') is not None: - cmdStr = "dls -l " + ds + " | awk '{print $5}' " - results = hosts.all.shell(cmd=cmdStr) +def test_add_already_present(ansible_zos_module, volumes_with_vvds): + try: + hosts = ansible_zos_module + VolumeHandler = Volume_Handler(volumes_with_vvds) + volume = VolumeHandler.get_available_vol() + test_info = dict(library="", state="present", force_dynamic=True) + test_info['state'] = 'present' + ds = get_tmp_ds_name(3,2) + hosts.all.shell(cmd=f"dtouch -tseq -V{volume} {ds} ") + test_info['library'] = ds + if test_info.get('volume') is not None: + cmdStr = "dls -l " + ds + " | awk '{print $5}' " + results = hosts.all.shell(cmd=cmdStr) + for result in results.contacted.values(): + vol = result.get("stdout") + test_info['volume'] = vol + if test_info.get('persistent'): + cmdStr = "mvstmp APFTEST.PRST" + results = hosts.all.shell(cmd=cmdStr) + for result in results.contacted.values(): + prstds = result.get("stdout") + prstds = prstds[:30] + cmdStr = "dtouch -tseq {0}".format(prstds) + hosts.all.shell(cmd=cmdStr) + test_info['persistent']['data_set_name'] = prstds + results = hosts.all.zos_apf(**test_info) for result in results.contacted.values(): - vol = result.get("stdout") - test_info['volume'] = vol - if test_info.get('persistent'): - cmdStr = "mvstmp APFTEST.PRST" - results = hosts.all.shell(cmd=cmdStr) + assert result.get("rc") == 0 + results = hosts.all.zos_apf(**test_info) for result in results.contacted.values(): - prstds = result.get("stdout") - prstds = prstds[:30] - cmdStr = "dtouch -tseq {0}".format(prstds) - hosts.all.shell(cmd=cmdStr) - test_info['persistent']['data_set_name'] = prstds - results = hosts.all.zos_apf(**test_info) - for result in results.contacted.values(): - assert result.get("rc") == 0 - results = hosts.all.zos_apf(**test_info) - for result in results.contacted.values(): - # Return code 16 if ZOAU < 1.2.0 and RC is 8 if ZOAU >= 1.2.0 - assert result.get("rc") == 16 or result.get("rc") == 8 - test_info['state'] = 'absent' - hosts.all.zos_apf(**test_info) - clean_test_env(hosts, test_info) - - -def test_del_not_present(ansible_zos_module): - hosts = ansible_zos_module - test_info = dict(library="", state="present", force_dynamic=True) - ds = get_tmp_ds_name(1,1) - hosts.all.shell(cmd="dtouch -tseq {0}".format(ds)) - test_info['library'] = ds - if test_info.get('volume') is not None: - cmdStr = "dls -l " + ds + " | awk '{print $5}' " - results = hosts.all.shell(cmd=cmdStr) - for result in results.contacted.values(): - vol = result.get("stdout") - test_info['volume'] = vol - if test_info.get('persistent'): - cmdStr = "mvstmp APFTEST.PRST" - results = hosts.all.shell(cmd=cmdStr) + # Return code 16 if ZOAU < 1.2.0 and RC is 8 if ZOAU >= 1.2.0 + assert result.get("rc") == 16 or result.get("rc") == 8 + test_info['state'] = 'absent' + hosts.all.zos_apf(**test_info) + finally: + clean_test_env(hosts, test_info) + + +def test_del_not_present(ansible_zos_module, volumes_with_vvds): + try: + hosts = ansible_zos_module + VolumeHandler = Volume_Handler(volumes_with_vvds) + volume = VolumeHandler.get_available_vol() + test_info = dict(library="", state="present", force_dynamic=True) + ds = get_tmp_ds_name(1,1) + hosts.all.shell(cmd=f"dtouch -tseq -V{volume} {ds} ") + test_info['library'] = ds + if test_info.get('volume') is not None: + cmdStr = "dls -l " + ds + " | awk '{print $5}' " + results = hosts.all.shell(cmd=cmdStr) + for result in results.contacted.values(): + vol = result.get("stdout") + test_info['volume'] = vol + if test_info.get('persistent'): + cmdStr = "mvstmp APFTEST.PRST" + results = hosts.all.shell(cmd=cmdStr) + for result in results.contacted.values(): + prstds = result.get("stdout") + prstds = prstds[:30] + cmdStr = "dtouch -tseq {0}".format(prstds) + hosts.all.shell(cmd=cmdStr) + test_info['persistent']['data_set_name'] = prstds + test_info['state'] = 'absent' + results = hosts.all.zos_apf(**test_info) for result in results.contacted.values(): - prstds = result.get("stdout") - prstds = prstds[:30] - cmdStr = "dtouch -tseq {0}".format(prstds) - hosts.all.shell(cmd=cmdStr) - test_info['persistent']['data_set_name'] = prstds - test_info['state'] = 'absent' - results = hosts.all.zos_apf(**test_info) - for result in results.contacted.values(): - # Return code 16 if ZOAU < 1.2.0 and RC is 8 if ZOAU >= 1.2.0 - assert result.get("rc") == 16 or result.get("rc") == 8 - clean_test_env(hosts, test_info) + # Return code 16 if ZOAU < 1.2.0 and RC is 8 if ZOAU >= 1.2.0 + assert result.get("rc") == 16 or result.get("rc") == 8 + finally: + clean_test_env(hosts, test_info) def test_add_not_found(ansible_zos_module): @@ -402,119 +431,135 @@ def test_add_not_found(ansible_zos_module): assert result.get("rc") == 16 or result.get("rc") == 8 -def test_add_with_wrong_volume(ansible_zos_module): - hosts = ansible_zos_module - test_info = dict(library="", volume="", state="present", force_dynamic=True) - test_info['state'] = 'present' - ds = get_tmp_ds_name(3,2) - hosts.all.shell(cmd="dtouch -tseq {0}".format(ds)) - test_info['library'] = ds - if test_info.get('volume') is not None: - cmdStr = "dls -l " + ds + " | awk '{print $5}' " - results = hosts.all.shell(cmd=cmdStr) +def test_add_with_wrong_volume(ansible_zos_module, volumes_with_vvds): + try: + hosts = ansible_zos_module + VolumeHandler = Volume_Handler(volumes_with_vvds) + volume = VolumeHandler.get_available_vol() + test_info = dict(library="", volume="", state="present", force_dynamic=True) + test_info['state'] = 'present' + ds = get_tmp_ds_name(3,2) + hosts.all.shell(cmd=f"dtouch -tseq -V{volume} {ds} ") + test_info['library'] = ds + if test_info.get('volume') is not None: + cmdStr = "dls -l " + ds + " | awk '{print $5}' " + results = hosts.all.shell(cmd=cmdStr) + for result in results.contacted.values(): + vol = result.get("stdout") + test_info['volume'] = vol + if test_info.get('persistent'): + cmdStr = "mvstmp APFTEST.PRST" + results = hosts.all.shell(cmd=cmdStr) + for result in results.contacted.values(): + prstds = result.get("stdout") + prstds = prstds[:30] + cmdStr = "dtouch -tseq {0}".format(prstds) + hosts.all.shell(cmd=cmdStr) + test_info['persistent']['data_set_name'] = prstds + test_info['volume'] = 'T12345' + results = hosts.all.zos_apf(**test_info) for result in results.contacted.values(): - vol = result.get("stdout") - test_info['volume'] = vol - if test_info.get('persistent'): - cmdStr = "mvstmp APFTEST.PRST" - results = hosts.all.shell(cmd=cmdStr) - for result in results.contacted.values(): - prstds = result.get("stdout") - prstds = prstds[:30] - cmdStr = "dtouch -tseq {0}".format(prstds) + # Return code 16 if ZOAU < 1.2.0 and RC is 8 if ZOAU >= 1.2.0 + assert result.get("rc") == 16 or result.get("rc") == 8 + finally: + clean_test_env(hosts, test_info) + + +def test_persist_invalid_ds_format(ansible_zos_module, volumes_with_vvds): + try: + hosts = ansible_zos_module + VolumeHandler = Volume_Handler(volumes_with_vvds) + volume = VolumeHandler.get_available_vol() + test_info = dict(library="", persistent=dict(data_set_name="", marker="/* {mark} BLOCK */"), state="present", force_dynamic=True) + test_info['state'] = 'present' + ds = get_tmp_ds_name(3,2) + hosts.all.shell(cmd=f"dtouch -tseq -V{volume} {ds} ") + test_info['library'] = ds + if test_info.get('volume') is not None: + cmdStr = "dls -l " + ds + " | awk '{print $5}' " + results = hosts.all.shell(cmd=cmdStr) + for result in results.contacted.values(): + vol = result.get("stdout") + test_info['volume'] = vol + if test_info.get('persistent'): + cmdStr = "mvstmp APFTEST.PRST" + results = hosts.all.shell(cmd=cmdStr) + for result in results.contacted.values(): + prstds = result.get("stdout") + prstds = prstds[:30] + cmdStr = "dtouch -tseq {0}".format(prstds) + hosts.all.shell(cmd=cmdStr) + test_info['persistent']['data_set_name'] = prstds + cmdStr = "decho \"some text to test persistent data_set format validattion.\" \"{0}\"".format(test_info['persistent']['data_set_name']) hosts.all.shell(cmd=cmdStr) - test_info['persistent']['data_set_name'] = prstds - test_info['volume'] = 'T12345' - results = hosts.all.zos_apf(**test_info) - for result in results.contacted.values(): - # Return code 16 if ZOAU < 1.2.0 and RC is 8 if ZOAU >= 1.2.0 - assert result.get("rc") == 16 or result.get("rc") == 8 - clean_test_env(hosts, test_info) - - -def test_persist_invalid_ds_format(ansible_zos_module): - hosts = ansible_zos_module - test_info = dict(library="", persistent=dict(data_set_name="", marker="/* {mark} BLOCK */"), state="present", force_dynamic=True) - test_info['state'] = 'present' - ds = get_tmp_ds_name(3,2) - hosts.all.shell(cmd="dtouch -tseq {0}".format(ds)) - test_info['library'] = ds - if test_info.get('volume') is not None: - cmdStr = "dls -l " + ds + " | awk '{print $5}' " - results = hosts.all.shell(cmd=cmdStr) - for result in results.contacted.values(): - vol = result.get("stdout") - test_info['volume'] = vol - if test_info.get('persistent'): - cmdStr = "mvstmp APFTEST.PRST" - results = hosts.all.shell(cmd=cmdStr) - for result in results.contacted.values(): - prstds = result.get("stdout") - prstds = prstds[:30] - cmdStr = "dtouch -tseq {0}".format(prstds) - hosts.all.shell(cmd=cmdStr) - test_info['persistent']['data_set_name'] = prstds - cmdStr = "decho \"some text to test persistent data_set format validattion.\" \"{0}\"".format(test_info['persistent']['data_set_name']) - hosts.all.shell(cmd=cmdStr) - results = hosts.all.zos_apf(**test_info) - for result in results.contacted.values(): - assert result.get("rc") == 8 - clean_test_env(hosts, test_info) - - -def test_persist_invalid_marker(ansible_zos_module): - hosts = ansible_zos_module - test_info = dict(library="", persistent=dict(data_set_name="", marker="/* {mark} BLOCK */"), state="present", force_dynamic=True) - test_info['state'] = 'present' - ds = get_tmp_ds_name(3,2) - hosts.all.shell(cmd="dtouch -tseq {0}".format(ds)) - test_info['library'] = ds - if test_info.get('volume') is not None: - cmdStr = "dls -l " + ds + " | awk '{print $5}' " - results = hosts.all.shell(cmd=cmdStr) - for result in results.contacted.values(): - vol = result.get("stdout") - test_info['volume'] = vol - if test_info.get('persistent'): - cmdStr = "mvstmp APFTEST.PRST" - results = hosts.all.shell(cmd=cmdStr) + results = hosts.all.zos_apf(**test_info) for result in results.contacted.values(): - prstds = result.get("stdout") - prstds = prstds[:30] - cmdStr = "dtouch -tseq {0}".format(prstds) - hosts.all.shell(cmd=cmdStr) - test_info['persistent']['data_set_name'] = prstds - test_info['persistent']['marker'] = "# Invalid marker format" - results = hosts.all.zos_apf(**test_info) - for result in results.contacted.values(): - assert result.get("rc") == 4 - clean_test_env(hosts, test_info) - - -def test_persist_invalid_marker_len(ansible_zos_module): - hosts = ansible_zos_module - test_info = dict(library="", persistent=dict(data_set_name="", marker="/* {mark} BLOCK */"), state="present", force_dynamic=True) - test_info['state'] = 'present' - ds = get_tmp_ds_name(3,2) - hosts.all.shell(cmd="dtouch -tseq {0}".format(ds)) - test_info['library'] = ds - if test_info.get('volume') is not None: - cmdStr = "dls -l " + ds + " | awk '{print $5}' " - results = hosts.all.shell(cmd=cmdStr) + assert result.get("rc") == 8 + finally: + clean_test_env(hosts, test_info) + + +def test_persist_invalid_marker(ansible_zos_module, volumes_with_vvds): + try: + hosts = ansible_zos_module + VolumeHandler = Volume_Handler(volumes_with_vvds) + volume = VolumeHandler.get_available_vol() + test_info = dict(library="", persistent=dict(data_set_name="", marker="/* {mark} BLOCK */"), state="present", force_dynamic=True) + test_info['state'] = 'present' + ds = get_tmp_ds_name(3,2) + hosts.all.shell(cmd=f"dtouch -tseq -V{volume} {ds} ") + test_info['library'] = ds + if test_info.get('volume') is not None: + cmdStr = "dls -l " + ds + " | awk '{print $5}' " + results = hosts.all.shell(cmd=cmdStr) + for result in results.contacted.values(): + vol = result.get("stdout") + test_info['volume'] = vol + if test_info.get('persistent'): + cmdStr = "mvstmp APFTEST.PRST" + results = hosts.all.shell(cmd=cmdStr) + for result in results.contacted.values(): + prstds = result.get("stdout") + prstds = prstds[:30] + cmdStr = "dtouch -tseq {0}".format(prstds) + hosts.all.shell(cmd=cmdStr) + test_info['persistent']['data_set_name'] = prstds + test_info['persistent']['marker'] = "# Invalid marker format" + results = hosts.all.zos_apf(**test_info) for result in results.contacted.values(): - vol = result.get("stdout") - test_info['volume'] = vol - if test_info.get('persistent'): - cmdStr = "mvstmp APFTEST.PRST" - results = hosts.all.shell(cmd=cmdStr) + assert result.get("rc") == 4 + finally: + clean_test_env(hosts, test_info) + + +def test_persist_invalid_marker_len(ansible_zos_module, volumes_with_vvds): + try: + hosts = ansible_zos_module + VolumeHandler = Volume_Handler(volumes_with_vvds) + volume = VolumeHandler.get_available_vol() + test_info = dict(library="", persistent=dict(data_set_name="", marker="/* {mark} BLOCK */"), state="present", force_dynamic=True) + test_info['state'] = 'present' + ds = get_tmp_ds_name(3,2) + hosts.all.shell(cmd=f"dtouch -tseq -V{volume} {ds} ") + test_info['library'] = ds + if test_info.get('volume') is not None: + cmdStr = "dls -l " + ds + " | awk '{print $5}' " + results = hosts.all.shell(cmd=cmdStr) + for result in results.contacted.values(): + vol = result.get("stdout") + test_info['volume'] = vol + if test_info.get('persistent'): + cmdStr = "mvstmp APFTEST.PRST" + results = hosts.all.shell(cmd=cmdStr) + for result in results.contacted.values(): + prstds = result.get("stdout") + prstds = prstds[:30] + cmdStr = "dtouch -tseq {0}".format(prstds) + hosts.all.shell(cmd=cmdStr) + test_info['persistent']['data_set_name'] = prstds + test_info['persistent']['marker'] = "/* {mark} This is a awfully lo%70sng marker */" % ("o") + results = hosts.all.zos_apf(**test_info) for result in results.contacted.values(): - prstds = result.get("stdout") - prstds = prstds[:30] - cmdStr = "dtouch -tseq {0}".format(prstds) - hosts.all.shell(cmd=cmdStr) - test_info['persistent']['data_set_name'] = prstds - test_info['persistent']['marker'] = "/* {mark} This is a awfully lo%70sng marker */" % ("o") - results = hosts.all.zos_apf(**test_info) - for result in results.contacted.values(): - assert result.get("msg") == 'marker length may not exceed 72 characters' - clean_test_env(hosts, test_info) \ No newline at end of file + assert result.get("msg") == 'marker length may not exceed 72 characters' + finally: + clean_test_env(hosts, test_info) \ No newline at end of file diff --git a/tests/helpers/volumes.py b/tests/helpers/volumes.py index b0ed97d30..952cbb1e7 100644 --- a/tests/helpers/volumes.py +++ b/tests/helpers/volumes.py @@ -18,7 +18,7 @@ import pytest import time import yaml - +from ibm_zos_core.tests.helpers.dataset import get_tmp_ds_name class Volume: """ Volume class represents a volume on the z system, it tracks if the volume name and status of the volume with respect to the current test session.""" @@ -118,4 +118,44 @@ def read_test_config(path): if len(config["VOLUMES"]) > 0: return config["VOLUMES"] else: - return None \ No newline at end of file + return None + +def get_volumes_with_vvds( ansible_zos_module, volumes_on_system): + """ + Get a list of volumes that contain a VVDS, if no volume has a VVDS then + creates one on any volume. + """ + volumes_with_vvds = find_volumes_with_vvds(ansible_zos_module, volumes_on_system) + if len(volumes_with_vvds) == 0 and len(volumes_on_system) > 0: + volumes_with_vvds = list() + for volume in volumes_on_system: + if create_vvds_on_volume(ansible_zos_module, volume): + volumes_with_vvds.append(volume) + return volumes_with_vvds + +def find_volumes_with_vvds( ansible_zos_module, volumes_on_system): + """ + Fetches all VVDS in the system and returns a list of volumes for + which there are VVDS. + """ + hosts = ansible_zos_module + vls_result = hosts.all.shell(cmd="vls SYS1.VVDS.*") + for vls_res in vls_result.contacted.values(): + vvds_list = vls_res.get("stdout") + return [volume for volume in volumes_on_system if volume in vvds_list] + +def create_vvds_on_volume( ansible_zos_module, volume): + """ + Creates a vvds on a volume by allocating a small VSAM and then deleting it. + """ + hosts = ansible_zos_module + data_set_name = get_tmp_ds_name(mlq_size=7, llq_size=7) + hosts.all.shell(cmd=f"dtouch -tesds -s10K -V{volume} {data_set_name}") + # Remove that dataset + hosts.all.shell(cmd=f"drm {data_set_name}") + # Verify that the VVDS is in place + vls_result = hosts.all.shell(cmd=f"vls SYS1.VVDS.V{volume} ") + for vls_res in vls_result.contacted.values(): + if vls_res.get("rc") == 0: + return True + return False \ No newline at end of file From dc4fdc3b139f88b568c2866c1792a9052557c177 Mon Sep 17 00:00:00 2001 From: Ivan Moreno <iamorenosoto@gmail.com> Date: Tue, 20 Feb 2024 13:23:38 -0700 Subject: [PATCH 303/495] [v1.10.0] [zos_copy] zos_copy migration to ZOAU v1.3.0 (#1222) * Update calls to datasets API * Update tests * Update zos_job_submit tests that depend on zos_copy * Add changelog fragment * Enable tests that depend on zos_encode * Remove calls to datasets._copy * Fixed pep8 issue * Fixed bug when copying from MVS to USS --- .../1222-zoau-migration-zos_copy.yml | 3 + plugins/module_utils/job.py | 12 +- plugins/modules/zos_copy.py | 123 +++-- .../functional/modules/test_zos_copy_func.py | 8 +- .../modules/test_zos_job_submit_func.py | 427 ++++++++---------- 5 files changed, 286 insertions(+), 287 deletions(-) create mode 100644 changelogs/fragments/1222-zoau-migration-zos_copy.yml diff --git a/changelogs/fragments/1222-zoau-migration-zos_copy.yml b/changelogs/fragments/1222-zoau-migration-zos_copy.yml new file mode 100644 index 000000000..edc6eec06 --- /dev/null +++ b/changelogs/fragments/1222-zoau-migration-zos_copy.yml @@ -0,0 +1,3 @@ +trivial: + - zos_copy - Migrated the module to use ZOAU v1.3.0. + (https://github.com/ansible-collections/ibm_zos_core/pull/1222). \ No newline at end of file diff --git a/plugins/module_utils/job.py b/plugins/module_utils/job.py index a854a0a1e..4a432d764 100644 --- a/plugins/module_utils/job.py +++ b/plugins/module_utils/job.py @@ -273,13 +273,21 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=T job["owner"] = entry.owner job["ret_code"] = dict() - job["ret_code"]["msg"] = "{0} {1}".format(entry.status, entry.return_code) + + # From v1.3.0, ZOAU sets unavailable job fields as None, instead of '?'. + # This new way of constructing msg allows for a better empty message. + # "" instead of "None None". + job["ret_code"]["msg"] = "{0} {1}".format( + entry.status if entry.status else "", + entry.return_code if entry.return_code else "" + ).strip() + job["ret_code"]["msg_code"] = entry.return_code job["ret_code"]["code"] = None if entry.return_code and len(entry.return_code) > 0: if entry.return_code.isdigit(): job["ret_code"]["code"] = int(entry.return_code) - job["ret_code"]["msg_text"] = entry.status + job["ret_code"]["msg_text"] = entry.status if entry.status else "?" # Beginning in ZOAU v1.3.0, the Job class changes svc_class to # service_class. diff --git a/plugins/modules/zos_copy.py b/plugins/modules/zos_copy.py index e07b44a97..a854d1cae 100644 --- a/plugins/modules/zos_copy.py +++ b/plugins/modules/zos_copy.py @@ -804,7 +804,7 @@ from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import ( - MissingZOAUImport, + ZOAUImportError, ) from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.mvs_cmd import ( idcams @@ -829,6 +829,7 @@ import math import tempfile import os +import traceback if PY3: from re import fullmatch @@ -839,7 +840,13 @@ try: from zoautil_py import datasets, opercmd except Exception: - datasets = MissingZOAUImport() + datasets = ZOAUImportError(traceback.format_exc()) + opercmd = ZOAUImportError(traceback.format_exc()) + +try: + from zoautil_py import exceptions as zoau_exceptions +except ImportError: + zoau_exceptions = ZOAUImportError(traceback.format_exc()) class CopyHandler(object): @@ -909,6 +916,14 @@ def copy_to_seq( if src_type == 'USS' and self.asa_text: response = copy.copy_asa_uss2mvs(new_src, dest) + + if response.rc != 0: + raise CopyOperationError( + msg="Unable to copy source {0} to {1}".format(new_src, dest), + rc=response.rc, + stdout=response.stdout_response, + stderr=response.stderr_response + ) else: # While ASA files are just text files, we do a binary copy # so dcp doesn't introduce any additional blanks or newlines. @@ -918,14 +933,15 @@ def copy_to_seq( if self.force_lock: copy_args["options"] += " -f" - response = datasets._copy(new_src, dest, None, **copy_args) - if response.rc != 0: - raise CopyOperationError( - msg="Unable to copy source {0} to {1}".format(new_src, dest), - rc=response.rc, - stdout=response.stdout_response, - stderr=response.stderr_response - ) + try: + datasets.copy(new_src, dest, **copy_args) + except zoau_exceptions.ZOAUException as copy_exception: + raise CopyOperationError( + msg="Unable to copy source {0} to {1}".format(new_src, dest), + rc=copy_exception.response.rc, + stdout=copy_exception.response.stdout_response, + stderr=copy_exception.response.stderr_response + ) def copy_to_vsam(self, src, dest): """Copy source VSAM to destination VSAM. @@ -988,9 +1004,11 @@ def _copy_tree(self, entries, src, dest, dirs_exist_ok=False): else: opts = dict() opts["options"] = "" - response = datasets._copy(src_name, dest_name, None, **opts) - if response.rc > 0: - raise Exception(response.stderr_response) + + try: + datasets.copy(src_name, dest_name, **opts) + except zoau_exceptions.ZOAUException as copy_exception: + raise Exception(copy_exception.response.stderr_response) shutil.copystat(src_name, dest_name, follow_symlinks=True) except Exception as err: raise err @@ -1356,14 +1374,17 @@ def _copy_to_file(self, src, dest, conv_path, temp_path): else: opts = dict() opts["options"] = "" - response = datasets._copy(new_src, dest, None, **opts) - if response.rc > 0: - raise Exception(response.stderr_response) + datasets.copy(new_src, dest, **opts) shutil.copystat(new_src, dest, follow_symlinks=True) # shutil.copy(new_src, dest) if self.executable: status = os.stat(dest) os.chmod(dest, status.st_mode | stat.S_IEXEC) + except zoau_exceptions.ZOAUException as err: + raise CopyOperationError( + msg="Unable to copy file {0} to {1}".format(new_src, dest), + stderr=err.response.stderr_response, + ) except OSError as err: raise CopyOperationError( msg="Destination {0} is not writable".format(dest), @@ -1549,12 +1570,21 @@ def _mvs_copy_to_uss( if src_member or src_ds_type in data_set.DataSet.MVS_SEQ: if self.asa_text: response = copy.copy_asa_mvs2uss(src, dest) + rc = response.rc elif self.executable: - response = datasets._copy(src, dest, alias=True, executable=True) + try: + rc = datasets.copy(src, dest, alias=True, executable=True) + except zoau_exceptions.ZOAUException as copy_exception: + response = copy_exception.response + rc = response.rc else: - response = datasets._copy(src, dest) + try: + rc = datasets.copy(src, dest) + except zoau_exceptions.ZOAUException as copy_exception: + response = copy_exception.response + rc = response.rc - if response.rc != 0: + if rc != 0: raise CopyOperationError( msg="Error while copying source {0} to {1}".format(src, dest), rc=response.rc, @@ -1563,14 +1593,14 @@ def _mvs_copy_to_uss( ) else: if self.executable: - response = datasets._copy(src, dest, None, alias=True, executable=True) - - if response.rc != 0: + try: + datasets.copy(src, dest, alias=True, executable=True) + except zoau_exceptions.ZOAUException as copy_exception: raise CopyOperationError( msg="Error while copying source {0} to {1}".format(src, dest), - rc=response.rc, - stdout=response.stdout_response, - stderr=response.stderr_response + rc=copy_exception.response.rc, + stdout=copy_exception.response.stdout_response, + stderr=copy_exception.response.stderr_response ) elif self.asa_text: response = copy.copy_asa_pds2uss(src, dest) @@ -1785,6 +1815,7 @@ def copy_to_member( if src_type == 'USS' and self.asa_text: response = copy.copy_asa_uss2mvs(src, dest) + rc, out, err = response.rc, response.stdout_response, response.stderr_response else: # While ASA files are just text files, we do a binary copy # so dcp doesn't introduce any additional blanks or newlines. @@ -1794,8 +1825,14 @@ def copy_to_member( if self.force_lock: opts["options"] += " -f" - response = datasets._copy(src, dest, alias=self.aliases, executable=self.executable, **opts) - rc, out, err = response.rc, response.stdout_response, response.stderr_response + try: + rc = datasets.copy(src, dest, alias=self.aliases, executable=self.executable, **opts) + out = "" + err = "" + except zoau_exceptions.ZOAUException as copy_exception: + rc = copy_exception.response.rc + out = copy_exception.response.stdout_response + err = copy_exception.response.stderr_response return dict( rc=rc, @@ -1852,8 +1889,8 @@ def dump_data_set_member_to_file(data_set_member, is_binary): if is_binary: copy_args["options"] = "-B" - response = datasets._copy(data_set_member, temp_path, None, **copy_args) - if response.rc != 0 or response.stderr_response: + response = datasets.copy(data_set_member, temp_path, **copy_args) + if response != 0: raise DataSetMemberAttributeError(data_set_member) return temp_path @@ -2315,7 +2352,7 @@ def get_attributes_of_any_dataset_created( volume=volume ) else: - src_attributes = datasets.listing(src_name)[0] + src_attributes = datasets.list_datasets(src_name)[0] size = int(src_attributes.total_space) params = get_data_set_attributes( dest, @@ -2397,8 +2434,8 @@ def allocate_destination_data_set( try: # Dumping the member into a file in USS to compute the record length and # size for the new data set. - src_attributes = datasets.listing(src_name)[0] - record_length = int(src_attributes.lrecl) + src_attributes = datasets.list_datasets(src_name)[0] + record_length = int(src_attributes.record_length) temp_dump = dump_data_set_member_to_file(src, is_binary) create_seq_dataset_from_file( temp_dump, @@ -2417,11 +2454,11 @@ def allocate_destination_data_set( if src_ds_type in data_set.DataSet.MVS_PARTITIONED: data_set.DataSet.allocate_model_data_set(ds_name=dest, model=src_name, executable=executable, asa_text=asa_text, vol=volume) elif src_ds_type in data_set.DataSet.MVS_SEQ: - src_attributes = datasets.listing(src_name)[0] + src_attributes = datasets.list_datasets(src_name)[0] # The size returned by listing is in bytes. size = int(src_attributes.total_space) - record_format = src_attributes.recfm - record_length = int(src_attributes.lrecl) + record_format = src_attributes.record_format + record_length = int(src_attributes.record_length) dest_params = get_data_set_attributes( dest, size, @@ -2507,8 +2544,8 @@ def allocate_destination_data_set( asa_text, volume ) - dest_attributes = datasets.listing(dest)[0] - record_format = dest_attributes.recfm + dest_attributes = datasets.list_datasets(dest)[0] + record_format = dest_attributes.record_format dest_params["type"] = dest_ds_type dest_params["record_format"] = record_format return True, dest_params @@ -2730,8 +2767,8 @@ def run_module(module, arg_def): src_ds_type = data_set.DataSet.data_set_type(src_name) if src_ds_type not in data_set.DataSet.MVS_VSAM: - src_attributes = datasets.listing(src_name)[0] - if src_attributes.recfm == 'FBA' or src_attributes.recfm == 'VBA': + src_attributes = datasets.list_datasets(src_name)[0] + if src_attributes.record_format == 'FBA' or src_attributes.record_format == 'VBA': src_has_asa_chars = True else: raise NonExistentSourceError(src) @@ -2785,8 +2822,8 @@ def run_module(module, arg_def): elif not dest_exists and asa_text: dest_has_asa_chars = True elif dest_exists and dest_ds_type not in data_set.DataSet.MVS_VSAM: - dest_attributes = datasets.listing(dest_name)[0] - if dest_attributes.recfm == 'FBA' or dest_attributes.recfm == 'VBA': + dest_attributes = datasets.list_datasets(dest_name)[0] + if dest_attributes.record_format == 'FBA' or dest_attributes.record_format == 'VBA': dest_has_asa_chars = True if dest_data_set and (dest_data_set.get('record_format', '') == 'FBA' or dest_data_set.get('record_format', '') == 'VBA'): @@ -2794,8 +2831,8 @@ def run_module(module, arg_def): elif not dest_exists and asa_text: dest_has_asa_chars = True elif dest_exists and dest_ds_type not in data_set.DataSet.MVS_VSAM: - dest_attributes = datasets.listing(dest_name)[0] - if dest_attributes.recfm == 'FBA' or dest_attributes.recfm == 'VBA': + dest_attributes = datasets.list_datasets(dest_name)[0] + if dest_attributes.record_format == 'FBA' or dest_attributes.record_format == 'VBA': dest_has_asa_chars = True if dest_ds_type in data_set.DataSet.MVS_PARTITIONED: diff --git a/tests/functional/modules/test_zos_copy_func.py b/tests/functional/modules/test_zos_copy_func.py index 1cb3cb7cb..cf7f1494b 100644 --- a/tests/functional/modules/test_zos_copy_func.py +++ b/tests/functional/modules/test_zos_copy_func.py @@ -4347,16 +4347,21 @@ def test_backup_pds(ansible_zos_module, args): def test_copy_data_set_to_volume(ansible_zos_module, volumes_on_systems, src_type): hosts = ansible_zos_module source = get_tmp_ds_name() + source_member = f"{source}(MEM)" dest = get_tmp_ds_name() volumes = Volume_Handler(volumes_on_systems) volume_1 = volumes.get_available_vol() + if volume_1 == "SCR03": volume = volumes.get_available_vol() volumes.free_vol(volume_1) volume_1 = volume + try: hosts.all.zos_data_set(name=source, type=src_type, state='present') - hosts.all.zos_data_set(name=source_member, type="member", state='present') + if src_type != "seq": + hosts.all.zos_data_set(name=source_member, type="member", state='present') + copy_res = hosts.all.zos_copy( src=source, dest=dest, @@ -4406,7 +4411,6 @@ def test_copy_ksds_to_non_existing_ksds(ansible_zos_module): finally: hosts.all.zos_data_set(name=dest_ds, state="absent") - @pytest.mark.vsam @pytest.mark.parametrize("force", [False, True]) def test_copy_ksds_to_existing_ksds(ansible_zos_module, force): diff --git a/tests/functional/modules/test_zos_job_submit_func.py b/tests/functional/modules/test_zos_job_submit_func.py index 451f79c83..9de3e992a 100644 --- a/tests/functional/modules/test_zos_job_submit_func.py +++ b/tests/functional/modules/test_zos_job_submit_func.py @@ -352,64 +352,45 @@ def test_job_submit_USS(ansible_zos_module): finally: hosts.all.file(path=TEMP_PATH, state="absent") -""" -keyword: ENABLE-FOR-1-3 -Test commented because it depends on zos_copy, which has not yet been -migrated to ZOAU v1.3.0. Whoever works in issue -https://github.com/ansible-collections/ibm_zos_core/issues/1106 -should uncomment this test as part of the validation process. -""" -# def test_job_submit_LOCAL(ansible_zos_module): -# tmp_file = tempfile.NamedTemporaryFile(delete=True) -# with open(tmp_file.name, "w") as f: -# f.write(JCL_FILE_CONTENTS) -# hosts = ansible_zos_module -# results = hosts.all.zos_job_submit(src=tmp_file.name, location="LOCAL", wait=True) -# for result in results.contacted.values(): -# assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" -# assert result.get("jobs")[0].get("ret_code").get("code") == 0 -# assert result.get("changed") is True +def test_job_submit_LOCAL(ansible_zos_module): + tmp_file = tempfile.NamedTemporaryFile(delete=True) + with open(tmp_file.name, "w") as f: + f.write(JCL_FILE_CONTENTS) + hosts = ansible_zos_module + results = hosts.all.zos_job_submit(src=tmp_file.name, location="LOCAL", wait_time_s=10) + for result in results.contacted.values(): + print(result) + assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" + assert result.get("jobs")[0].get("ret_code").get("code") == 0 + assert result.get("changed") is True -""" -keyword: ENABLE-FOR-1-3 -Test commented because it depends on zos_copy, which has not yet been -migrated to ZOAU v1.3.0. Whoever works in issue -https://github.com/ansible-collections/ibm_zos_core/issues/1106 -should uncomment this test as part of the validation process. -""" -# def test_job_submit_LOCAL_extraR(ansible_zos_module): -# tmp_file = tempfile.NamedTemporaryFile(delete=True) -# with open(tmp_file.name, "w") as f: -# f.write(JCL_FILE_CONTENTS_BACKSLASH_R) -# hosts = ansible_zos_module -# results = hosts.all.zos_job_submit(src=tmp_file.name, location="LOCAL", wait=True) -# for result in results.contacted.values(): -# assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" -# assert result.get("jobs")[0].get("ret_code").get("code") == 0 -# assert result.get("changed") is True +def test_job_submit_LOCAL_extraR(ansible_zos_module): + tmp_file = tempfile.NamedTemporaryFile(delete=True) + with open(tmp_file.name, "w") as f: + f.write(JCL_FILE_CONTENTS_BACKSLASH_R) + hosts = ansible_zos_module + results = hosts.all.zos_job_submit(src=tmp_file.name, location="LOCAL", wait_time_s=10) + for result in results.contacted.values(): + assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" + assert result.get("jobs")[0].get("ret_code").get("code") == 0 + assert result.get("changed") is True -""" -keyword: ENABLE-FOR-1-3 -Test commented because it depends on zos_copy, which has not yet been -migrated to ZOAU v1.3.0. Whoever works in issue -https://github.com/ansible-collections/ibm_zos_core/issues/1106 -should uncomment this test as part of the validation process. -""" -# def test_job_submit_LOCAL_BADJCL(ansible_zos_module): -# tmp_file = tempfile.NamedTemporaryFile(delete=True) -# with open(tmp_file.name, "w") as f: -# f.write(JCL_FILE_CONTENTS_BAD) -# hosts = ansible_zos_module -# results = hosts.all.zos_job_submit(src=tmp_file.name, location="LOCAL", wait=True) -# for result in results.contacted.values(): -# # Expecting: The job completion code (CC) was not in the job log....." -# assert result.get("changed") is False -# assert re.search(r'completion code', repr(result.get("msg"))) +def test_job_submit_LOCAL_BADJCL(ansible_zos_module): + tmp_file = tempfile.NamedTemporaryFile(delete=True) + with open(tmp_file.name, "w") as f: + f.write(JCL_FILE_CONTENTS_BAD) + hosts = ansible_zos_module + results = hosts.all.zos_job_submit(src=tmp_file.name, location="LOCAL", wait_time_s=10) + + for result in results.contacted.values(): + # Expecting: The job completion code (CC) was not in the job log....." + assert result.get("changed") is False + assert re.search(r'completion code', repr(result.get("msg"))) def test_job_submit_PDS_volume(ansible_zos_module, volumes_on_systems): @@ -546,137 +527,123 @@ def test_job_submit_PDS_30_SEC_JOB_WAIT_10_negative(ansible_zos_module): hosts.all.zos_data_set(name=data_set_name, state="absent") -""" -keyword: ENABLE-FOR-1-3 -Test commented because it depends on zos_copy, which has not yet been -migrated to ZOAU v1.3.0. Whoever works in issue -https://github.com/ansible-collections/ibm_zos_core/issues/1106 -should uncomment this test as part of the validation process. -""" -# @pytest.mark.parametrize("args", [ -# dict(max_rc=None, wait_time_s=10), -# dict(max_rc=4, wait_time_s=10), -# dict(max_rc=12, wait_time_s=20) -# ]) -# def test_job_submit_max_rc(ansible_zos_module, args): -# """This""" -# try: -# hosts = ansible_zos_module -# tmp_file = tempfile.NamedTemporaryFile(delete=True) -# with open(tmp_file.name, "w") as f: -# f.write(JCL_FILE_CONTENTS_RC_8) - -# results = hosts.all.zos_job_submit( -# src=tmp_file.name, location="LOCAL", max_rc=args["max_rc"], wait_time_s=args["wait_time_s"] -# ) - -# for result in results.contacted.values(): -# # Should fail normally as a non-zero RC will result in job submit failure -# if args["max_rc"] is None: -# assert result.get("msg") is not None -# assert result.get('changed') is False -# # On busy systems, it is possible that the duration even for a job with a non-zero return code -# # will take considerable time to obtain the job log and thus you could see either error msg below -# #Expecting: - "The job return code 8 was non-zero in the job output, this job has failed" -# # - Consider using module zos_job_query to poll for a long running job or -# # increase option \\'wait_times_s` to a value greater than 10.", -# if result.get('duration'): -# duration = result.get('duration') -# else: -# duration = 0 - -# if duration >= args["wait_time_s"]: -# re.search(r'long running job', repr(result.get("msg"))) -# else: -# assert re.search(r'non-zero', repr(result.get("msg"))) - -# # Should fail with normally as well, job fails with an RC 8 yet max is set to 4 -# elif args["max_rc"] == 4: -# assert result.get("msg") is not None -# assert result.get('changed') is False -# # Expecting "The job return code, 'ret_code[code]' 8 for the submitted job is greater -# # than the value set for option 'max_rc' 4. Increase the value for 'max_rc' otherwise -# # this job submission has failed. -# assert re.search(r'the submitted job is greater than the value set for option', repr(result.get("msg"))) - -# elif args["max_rc"] == 12: -# # Will not fail but changed will be false for the non-zero RC, there -# # are other possibilities like an ABEND or JCL ERROR will fail this even -# # with a MAX RC -# assert result.get("msg") is None -# assert result.get('changed') is False -# assert result.get("jobs")[0].get("ret_code").get("code") < 12 -# finally: -# hosts.all.file(path=tmp_file.name, state="absent") +@pytest.mark.parametrize("args", [ + dict(max_rc=None, wait_time_s=10), + dict(max_rc=4, wait_time_s=10), + dict(max_rc=12, wait_time_s=20) +]) +def test_job_submit_max_rc(ansible_zos_module, args): + """This""" + try: + hosts = ansible_zos_module + tmp_file = tempfile.NamedTemporaryFile(delete=True) + with open(tmp_file.name, "w") as f: + f.write(JCL_FILE_CONTENTS_RC_8) + results = hosts.all.zos_job_submit( + src=tmp_file.name, location="LOCAL", max_rc=args["max_rc"], wait_time_s=args["wait_time_s"] + ) + + for result in results.contacted.values(): + # Should fail normally as a non-zero RC will result in job submit failure + if args["max_rc"] is None: + assert result.get("msg") is not None + assert result.get('changed') is False + # On busy systems, it is possible that the duration even for a job with a non-zero return code + # will take considerable time to obtain the job log and thus you could see either error msg below + #Expecting: - "The job return code 8 was non-zero in the job output, this job has failed" + # - Consider using module zos_job_query to poll for a long running job or + # increase option \\'wait_times_s` to a value greater than 10.", + if result.get('duration'): + duration = result.get('duration') + else: + duration = 0 + + if duration >= args["wait_time_s"]: + re.search(r'long running job', repr(result.get("msg"))) + else: + assert re.search(r'non-zero', repr(result.get("msg"))) + + # Should fail with normally as well, job fails with an RC 8 yet max is set to 4 + elif args["max_rc"] == 4: + assert result.get("msg") is not None + assert result.get('changed') is False + # Expecting "The job return code, 'ret_code[code]' 8 for the submitted job is greater + # than the value set for option 'max_rc' 4. Increase the value for 'max_rc' otherwise + # this job submission has failed. + assert re.search(r'the submitted job is greater than the value set for option', repr(result.get("msg"))) + + elif args["max_rc"] == 12: + # Will not fail but changed will be false for the non-zero RC, there + # are other possibilities like an ABEND or JCL ERROR will fail this even + # with a MAX RC + assert result.get("msg") is None + assert result.get('changed') is False + assert result.get("jobs")[0].get("ret_code").get("code") < 12 + finally: + hosts.all.file(path=tmp_file.name, state="absent") -""" -keyword: ENABLE-FOR-1-3 -Test commented because it depends on zos_copy, which has not yet been -migrated to ZOAU v1.3.0. Whoever works in issue -https://github.com/ansible-collections/ibm_zos_core/issues/1106 -should uncomment this test as part of the validation process. -""" -# @pytest.mark.template -# @pytest.mark.parametrize("args", [ -# dict( -# template="Default", -# options=dict( -# keep_trailing_newline=False -# ) -# ), -# dict( -# template="Custom", -# options=dict( -# keep_trailing_newline=False, -# variable_start_string="((", -# variable_end_string="))", -# comment_start_string="(#", -# comment_end_string="#)" -# ) -# ), -# dict( -# template="Loop", -# options=dict( -# keep_trailing_newline=False -# ) -# ) -# ]) -# def test_job_submit_jinja_template(ansible_zos_module, args): -# try: -# hosts = ansible_zos_module - -# tmp_file = tempfile.NamedTemporaryFile(delete=False) -# with open(tmp_file.name, "w") as f: -# f.write(JCL_TEMPLATES[args["template"]]) - -# template_vars = dict( -# pgm_name="HELLO", -# input_dataset="DUMMY", -# message="Hello, world", -# steps=[ -# dict(step_name="IN", dd="DUMMY"), -# dict(step_name="PRINT", dd="SYSOUT=*"), -# dict(step_name="UT1", dd="*") -# ] -# ) -# for host in hosts["options"]["inventory_manager"]._inventory.hosts.values(): -# host.vars.update(template_vars) - -# results = hosts.all.zos_job_submit( -# src=tmp_file.name, -# location="LOCAL", -# use_template=True, -# template_parameters=args["options"] -# ) - -# for result in results.contacted.values(): -# assert result.get('changed') is True -# assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" -# assert result.get("jobs")[0].get("ret_code").get("code") == 0 - -# finally: -# os.remove(tmp_file.name) + +@pytest.mark.template +@pytest.mark.parametrize("args", [ + dict( + template="Default", + options=dict( + keep_trailing_newline=False + ) + ), + dict( + template="Custom", + options=dict( + keep_trailing_newline=False, + variable_start_string="((", + variable_end_string="))", + comment_start_string="(#", + comment_end_string="#)" + ) + ), + dict( + template="Loop", + options=dict( + keep_trailing_newline=False + ) + ) +]) +def test_job_submit_jinja_template(ansible_zos_module, args): + try: + hosts = ansible_zos_module + + tmp_file = tempfile.NamedTemporaryFile(delete=False) + with open(tmp_file.name, "w") as f: + f.write(JCL_TEMPLATES[args["template"]]) + + template_vars = dict( + pgm_name="HELLO", + input_dataset="DUMMY", + message="Hello, world", + steps=[ + dict(step_name="IN", dd="DUMMY"), + dict(step_name="PRINT", dd="SYSOUT=*"), + dict(step_name="UT1", dd="*") + ] + ) + for host in hosts["options"]["inventory_manager"]._inventory.hosts.values(): + host.vars.update(template_vars) + + results = hosts.all.zos_job_submit( + src=tmp_file.name, + location="LOCAL", + use_template=True, + template_parameters=args["options"] + ) + + for result in results.contacted.values(): + assert result.get('changed') is True + assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" + assert result.get("jobs")[0].get("ret_code").get("code") == 0 + + finally: + os.remove(tmp_file.name) def test_job_submit_full_input(ansible_zos_module): @@ -702,66 +669,46 @@ def test_job_submit_full_input(ansible_zos_module): finally: hosts.all.file(path=TEMP_PATH, state="absent") -""" -keyword: ENABLE-FOR-1-3 -Test commented because it depends on zos_copy, which has not yet been -migrated to ZOAU v1.3.0. Whoever works in issue -https://github.com/ansible-collections/ibm_zos_core/issues/1106 -should uncomment this test as part of the validation process. -""" -# def test_negative_job_submit_local_jcl_no_dsn(ansible_zos_module): -# tmp_file = tempfile.NamedTemporaryFile(delete=True) -# with open(tmp_file.name, "w") as f: -# f.write(JCL_FILE_CONTENTS_NO_DSN) -# hosts = ansible_zos_module -# results = hosts.all.zos_job_submit(src=tmp_file.name, location="LOCAL") -# for result in results.contacted.values(): -# # Expecting: The job completion code (CC) was not in the job log....." -# assert result.get("changed") is False -# assert re.search(r'completion code', repr(result.get("msg"))) -# assert result.get("jobs")[0].get("job_id") is not None - -""" -keyword: ENABLE-FOR-1-3 -Test commented because it depends on zos_copy, which has not yet been -migrated to ZOAU v1.3.0. Whoever works in issue -https://github.com/ansible-collections/ibm_zos_core/issues/1106 -should uncomment this test as part of the validation process. -""" -# Should have a JCL ERROR <int> -# def test_negative_job_submit_local_jcl_invalid_user(ansible_zos_module): -# tmp_file = tempfile.NamedTemporaryFile(delete=True) -# with open(tmp_file.name, "w") as f: -# f.write(JCL_FILE_CONTENTS_INVALID_USER) -# hosts = ansible_zos_module -# results = hosts.all.zos_job_submit(src=tmp_file.name, location="LOCAL") -# for result in results.contacted.values(): -# # Expecting: The job completion code (CC) was not in the job log....." -# assert result.get("changed") is False -# assert re.search(r'return code was not available', repr(result.get("msg"))) -# assert re.search(r'error SEC', repr(result.get("msg"))) -# assert result.get("jobs")[0].get("job_id") is not None -# assert re.search(r'SEC', repr(result.get("jobs")[0].get("ret_code").get("msg_text"))) +def test_negative_job_submit_local_jcl_no_dsn(ansible_zos_module): + tmp_file = tempfile.NamedTemporaryFile(delete=True) + with open(tmp_file.name, "w") as f: + f.write(JCL_FILE_CONTENTS_NO_DSN) + hosts = ansible_zos_module + results = hosts.all.zos_job_submit(src=tmp_file.name, location="LOCAL") + for result in results.contacted.values(): + # Expecting: The job completion code (CC) was not in the job log....." + assert result.get("changed") is False + assert re.search(r'completion code', repr(result.get("msg"))) + assert result.get("jobs")[0].get("job_id") is not None -""" -keyword: ENABLE-FOR-1-3 -Test commented because it depends on zos_copy, which has not yet been -migrated to ZOAU v1.3.0. Whoever works in issue -https://github.com/ansible-collections/ibm_zos_core/issues/1106 -should uncomment this test as part of the validation process. -""" -# def test_negative_job_submit_local_jcl_typrun_scan(ansible_zos_module): -# tmp_file = tempfile.NamedTemporaryFile(delete=True) -# with open(tmp_file.name, "w") as f: -# f.write(JCL_FILE_CONTENTS_TYPRUN_SCAN) -# hosts = ansible_zos_module -# results = hosts.all.zos_job_submit(src=tmp_file.name, location="LOCAL") -# for result in results.contacted.values(): -# # Expecting: The job completion code (CC) was not in the job log....." -# assert result.get("changed") is False -# assert re.search(r'return code was not available', repr(result.get("msg"))) -# assert re.search(r'error ? ?', repr(result.get("msg"))) -# assert result.get("jobs")[0].get("job_id") is not None -# assert result.get("jobs")[0].get("ret_code").get("msg_text") == "?" +# Should have a JCL ERROR <int> +def test_negative_job_submit_local_jcl_invalid_user(ansible_zos_module): + tmp_file = tempfile.NamedTemporaryFile(delete=True) + with open(tmp_file.name, "w") as f: + f.write(JCL_FILE_CONTENTS_INVALID_USER) + hosts = ansible_zos_module + results = hosts.all.zos_job_submit(src=tmp_file.name, location="LOCAL") + for result in results.contacted.values(): + # Expecting: The job completion code (CC) was not in the job log....." + assert result.get("changed") is False + assert re.search(r'return code was not available', repr(result.get("msg"))) + assert re.search(r'error SEC', repr(result.get("msg"))) + assert result.get("jobs")[0].get("job_id") is not None + assert re.search(r'SEC', repr(result.get("jobs")[0].get("ret_code").get("msg_text"))) + + +def test_negative_job_submit_local_jcl_typrun_scan(ansible_zos_module): + tmp_file = tempfile.NamedTemporaryFile(delete=True) + with open(tmp_file.name, "w") as f: + f.write(JCL_FILE_CONTENTS_TYPRUN_SCAN) + hosts = ansible_zos_module + results = hosts.all.zos_job_submit(src=tmp_file.name, location="LOCAL") + for result in results.contacted.values(): + # Expecting: The job completion code (CC) was not in the job log....." + assert result.get("changed") is False + assert re.search(r'return code was not available', repr(result.get("msg"))) + assert re.search(r'error ? ?', repr(result.get("msg"))) + assert result.get("jobs")[0].get("job_id") is not None + assert result.get("jobs")[0].get("ret_code").get("msg_text") == "?" From 731f39e15604334195b84ce93ca47162214e42a8 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Tue, 20 Feb 2024 14:32:04 -0600 Subject: [PATCH 304/495] [v1.10.0][zos_archive] Migrate zos_archive to use ZOAU 1.3 (#1227) * Removed helpers * Updated code to use 1.3 * Removed to_dict * Added changelog * Added changlog * Update zos_unarchive.py * Removed command calls --- .../fragments/1227-migrate-zos_archive.yml | 3 +++ plugins/modules/zos_archive.py | 25 +++++++++---------- 2 files changed, 15 insertions(+), 13 deletions(-) create mode 100644 changelogs/fragments/1227-migrate-zos_archive.yml diff --git a/changelogs/fragments/1227-migrate-zos_archive.yml b/changelogs/fragments/1227-migrate-zos_archive.yml new file mode 100644 index 000000000..820593c95 --- /dev/null +++ b/changelogs/fragments/1227-migrate-zos_archive.yml @@ -0,0 +1,3 @@ +trivial: + - zos_archive - migrate code to use ZOAU v1.3.0. + (https://github.com/ansible-collections/ibm_zos_core/pull/1227). diff --git a/plugins/modules/zos_archive.py b/plugins/modules/zos_archive.py index f5306bb25..959d263d9 100644 --- a/plugins/modules/zos_archive.py +++ b/plugins/modules/zos_archive.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2023 +# Copyright (c) IBM Corporation 2023 - 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -424,7 +424,7 @@ mvs_cmd, ) from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import ( - MissingZOAUImport, + ZOAUImportError, ) import os import tarfile @@ -433,13 +433,14 @@ import glob import re import math +import traceback from hashlib import sha256 try: from zoautil_py import datasets except Exception: - Datasets = MissingZOAUImport() + datasets = ZOAUImportError(traceback.format_exc()) XMIT_RECORD_LENGTH = 80 AMATERSE_RECORD_LENGTH = 1024 @@ -789,11 +790,9 @@ def _create_dest_data_set( if tmp_hlq: hlq = tmp_hlq else: - rc, hlq, err = self.module.run_command("hlq") - hlq = hlq.replace('\n', '') - cmd = "mvstmphelper {0}.DZIP".format(hlq) - rc, temp_ds, err = self.module.run_command(cmd) - arguments.update(name=temp_ds.replace('\n', '')) + hlq = datasets.get_hlq() + temp_ds = datasets.tmp_name(high_level_qualifier=hlq) + arguments.update(name=temp_ds) if record_format is None: arguments.update(record_format="FB") @@ -902,8 +901,8 @@ def expand_mvs_paths(self, paths): expanded_path = [] for path in paths: if '*' in path: - e_paths = datasets.listing(path) - e_paths = [path.name for path in e_paths] + # list_dataset_names returns a list of data set names or empty. + e_paths = datasets.list_dataset_names(path) else: e_paths = [path] expanded_path.extend(e_paths) @@ -946,11 +945,11 @@ def compute_dest_size(self): {int} - Destination computed space in kilobytes. """ if self.dest_data_set.get("space_primary") is None: - dest_space = 0 + dest_space = 1 for target in self.targets: - data_sets = datasets.listing(target) + data_sets = datasets.list_datasets(target) for ds in data_sets: - dest_space += int(ds.to_dict().get("total_space")) + dest_space += int(ds.total_space) # space unit returned from listings is bytes dest_space = math.ceil(dest_space / 1024) self.dest_data_set.update(space_primary=dest_space, space_type="K") From 066864e09829890f79ebc8d619d5162cc676de15 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Tue, 20 Feb 2024 15:26:18 -0600 Subject: [PATCH 305/495] [v1.10.0][zos_find] Removed lineinfile dependency and test with ZOAU 1.3 (#1228) * Removed lineinfile dependency * Added changelog * Updated copyright years * Updated copyright years --- .../1228-zos_find-remove-zos_lineinfile_dep.yml | 3 +++ tests/functional/modules/test_zos_find_func.py | 10 +++++----- 2 files changed, 8 insertions(+), 5 deletions(-) create mode 100644 changelogs/fragments/1228-zos_find-remove-zos_lineinfile_dep.yml diff --git a/changelogs/fragments/1228-zos_find-remove-zos_lineinfile_dep.yml b/changelogs/fragments/1228-zos_find-remove-zos_lineinfile_dep.yml new file mode 100644 index 000000000..67642d563 --- /dev/null +++ b/changelogs/fragments/1228-zos_find-remove-zos_lineinfile_dep.yml @@ -0,0 +1,3 @@ +trivial: + - zos_find - Removed zos_lineinfile dependency from test cases. + (https://github.com/ansible-collections/ibm_zos_core/pull/1228). diff --git a/tests/functional/modules/test_zos_find_func.py b/tests/functional/modules/test_zos_find_func.py index 50782be0b..3a30d9510 100644 --- a/tests/functional/modules/test_zos_find_func.py +++ b/tests/functional/modules/test_zos_find_func.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2020, 2023 +# Copyright (c) IBM Corporation 2020 - 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -63,7 +63,7 @@ def test_find_sequential_data_sets_containing_single_string(ansible_zos_module): batch=[dict(name=i, type='seq', state='present') for i in SEQ_NAMES] ) for ds in SEQ_NAMES: - hosts.all.zos_lineinfile(src=ds, line=search_string) + hosts.all.shell(cmd=f"decho '{search_string}' \"{ds}\" ") find_res = hosts.all.zos_find( patterns=['TEST.FIND.SEQ.*.*'], @@ -91,9 +91,9 @@ def test_find_sequential_data_sets_multiple_patterns(ansible_zos_module): batch=[dict(name=i, type='seq', state='present') for i in SEQ_NAMES] ) hosts.all.zos_data_set(name=new_ds, type='seq', state='present') - hosts.all.zos_lineinfile(src=new_ds, line="incorrect string") + hosts.all.shell(cmd=f"decho 'incorrect string' \"{new_ds}\" ") for ds in SEQ_NAMES: - hosts.all.zos_lineinfile(src=ds, line=search_string) + hosts.all.shell(cmd=f"decho '{search_string}' \"{ds}\" ") find_res = hosts.all.zos_find( patterns=['TEST.FIND.SEQ.*.*', 'TEST.INVALID.*'], @@ -131,7 +131,7 @@ def test_find_pds_members_containing_string(ansible_zos_module): ] ) for ds in PDS_NAMES: - hosts.all.zos_lineinfile(src=ds + "(MEMBER)", line=search_string) + hosts.all.shell(cmd=f"decho '{search_string}' \"{ds}(MEMBER)\" ") find_res = hosts.all.zos_find( pds_paths=['TEST.FIND.PDS.FUNCTEST.*'], From 1de0bc1aa35a3bcfe85facf864b698f990930bb2 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Tue, 20 Feb 2024 15:49:42 -0600 Subject: [PATCH 306/495] [v1.10.0][zos_fetch] Migrate zos_fetch to ZOAU 1.3 (#1229) * Modified code to use ZOAU 1.3 * Updated job_submit call * Add fragment * Added copyright year --- .../fragments/1229-migrate-zos_fetch.yml | 3 ++ plugins/modules/zos_fetch.py | 32 +++++++++---------- .../functional/modules/test_zos_fetch_func.py | 2 +- 3 files changed, 20 insertions(+), 17 deletions(-) create mode 100644 changelogs/fragments/1229-migrate-zos_fetch.yml diff --git a/changelogs/fragments/1229-migrate-zos_fetch.yml b/changelogs/fragments/1229-migrate-zos_fetch.yml new file mode 100644 index 000000000..07f9a26b4 --- /dev/null +++ b/changelogs/fragments/1229-migrate-zos_fetch.yml @@ -0,0 +1,3 @@ +trivial: + - zos_fetch - migrate code to use ZOAU v1.3.0. + (https://github.com/ansible-collections/ibm_zos_core/pull/1229). diff --git a/plugins/modules/zos_fetch.py b/plugins/modules/zos_fetch.py index d8b15c0d9..2b32f0760 100644 --- a/plugins/modules/zos_fetch.py +++ b/plugins/modules/zos_fetch.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019 - 2023 +# Copyright (c) IBM Corporation 2019 - 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -271,7 +271,7 @@ import tempfile import re import os - +import traceback from math import ceil from shutil import rmtree from ansible.module_utils.basic import AnsibleModule @@ -285,16 +285,16 @@ validation, ) from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import ( - MissingZOAUImport, + ZOAUImportError, ) try: - from zoautil_py import datasets, mvscmd, types + from zoautil_py import datasets, mvscmd, ztypes except Exception: - datasets = MissingZOAUImport() - mvscmd = MissingZOAUImport() - types = MissingZOAUImport() + datasets = ZOAUImportError(traceback.format_exc()) + mvscmd = ZOAUImportError(traceback.format_exc()) + ztypes = ZOAUImportError(traceback.format_exc()) class FetchHandler: @@ -373,23 +373,23 @@ def _copy_vsam_to_temp_data_set(self, ds_name): dd_statements = [] dd_statements.append( - types.DDStatement( - name="sysin", definition=types.DatasetDefinition(sysin) + ztypes.DDStatement( + name="sysin", definition=ztypes.DatasetDefinition(sysin) ) ) dd_statements.append( - types.DDStatement( - name="input", definition=types.DatasetDefinition(ds_name) + ztypes.DDStatement( + name="input", definition=ztypes.DatasetDefinition(ds_name) ) ) dd_statements.append( - types.DDStatement( - name="output", definition=types.DatasetDefinition(out_ds_name) + ztypes.DDStatement( + name="output", definition=ztypes.DatasetDefinition(out_ds_name) ) ) dd_statements.append( - types.DDStatement( - name="sysprint", definition=types.FileDefinition(sysprint) + ztypes.DDStatement( + name="sysprint", definition=ztypes.FileDefinition(sysprint) ) ) @@ -591,7 +591,7 @@ def run_module(): src = module.params.get("src") if module.params.get("use_qualifier"): - module.params["src"] = datasets.hlq() + "." + src + module.params["src"] = datasets.get_hlq() + "." + src # ********************************************************** # # Verify paramater validity # diff --git a/tests/functional/modules/test_zos_fetch_func.py b/tests/functional/modules/test_zos_fetch_func.py index 357540876..b239bbbd9 100644 --- a/tests/functional/modules/test_zos_fetch_func.py +++ b/tests/functional/modules/test_zos_fetch_func.py @@ -264,7 +264,7 @@ def test_fetch_vsam_data_set(ansible_zos_module, volumes_on_systems): cmd="echo {0} > {1}/SAMPLE".format(quote(KSDS_CREATE_JCL.format(volume_1, test_vsam)), temp_jcl_path) ) hosts.all.zos_job_submit( - src="{0}/SAMPLE".format(temp_jcl_path), location="USS", wait=True + src="{0}/SAMPLE".format(temp_jcl_path), location="USS", wait_time_s=30 ) hosts.all.shell(cmd="echo \"{0}\c\" > {1}".format(TEST_DATA, USS_FILE)) hosts.all.zos_encode( From d739905aa9d4ce13f34e696a4dd1c3638d45130e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Wed, 21 Feb 2024 12:10:10 -0600 Subject: [PATCH 307/495] [v1.10.0][Zos Operator Action Query]Migrate zos operator action query (#1215) * Add timeout x100 * Add error messages for false positives and migrate module * Fix ansible-lint * Fix test case new iteration * Return previous behaviour * Return behaviour * Fix ansible lint * Fix ansible lint * Fix ansible lint * Add fragment * Revert "Add fragment" This reverts commit a434c410cb8746ed65c69eba905137bcc7307708. * Add fragment * Fix commends on variable names and coments * Change to timeout_S --- ...1215-Migrate_zos_operator_action_query.yml | 4 ++ plugins/modules/zos_operator_action_query.py | 37 ++++++++++--------- .../test_zos_operator_action_query_func.py | 2 +- 3 files changed, 24 insertions(+), 19 deletions(-) create mode 100644 changelogs/fragments/1215-Migrate_zos_operator_action_query.yml diff --git a/changelogs/fragments/1215-Migrate_zos_operator_action_query.yml b/changelogs/fragments/1215-Migrate_zos_operator_action_query.yml new file mode 100644 index 000000000..be18056b3 --- /dev/null +++ b/changelogs/fragments/1215-Migrate_zos_operator_action_query.yml @@ -0,0 +1,4 @@ +trivial: + - zos_operator_action_query - Update internal functions to account for the change to the + unit of measurement of `timeout` now in centiseconds. + (https://github.com/ansible-collections/ibm_zos_core/pull/1215). \ No newline at end of file diff --git a/plugins/modules/zos_operator_action_query.py b/plugins/modules/zos_operator_action_query.py index 022708692..55cd7cd00 100644 --- a/plugins/modules/zos_operator_action_query.py +++ b/plugins/modules/zos_operator_action_query.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019 - 2023 +# Copyright (c) IBM Corporation 2019 - 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -219,11 +219,12 @@ from ansible.module_utils.basic import AnsibleModule import re +import traceback from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.better_arg_parser import ( BetterArgParser, ) from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import ( - MissingZOAUImport, + ZOAUImportError, ) from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import ( @@ -233,7 +234,7 @@ try: from zoautil_py import opercmd except Exception: - opercmd = MissingZOAUImport() + opercmd = ZOAUImportError(traceback.format_exc()) def run_module(): @@ -272,7 +273,7 @@ def run_module(): cmdtxt = "d r,a,s" - cmd_result_a = execute_command(cmdtxt, timeout=wait_s, *args, **kwargs) + cmd_result_a = execute_command(cmdtxt, timeout_s=wait_s, *args, **kwargs) if cmd_result_a.rc > 0: module.fail_json( @@ -287,7 +288,7 @@ def run_module(): cmdtxt = "d r,a,jn" - cmd_result_b = execute_command(cmdtxt, timeout=wait_s, *args, **kwargs) + cmd_result_b = execute_command(cmdtxt, timeout_s=wait_s, *args, **kwargs) if cmd_result_b.rc > 0: module.fail_json( @@ -395,35 +396,35 @@ def filter_requests(merged_list, params): message_id = params.get("message_id") job_name = params.get("job_name") newlist = merged_list - if system: newlist = handle_conditions(newlist, "system", system) if job_name: newlist = handle_conditions(newlist, "job_name", job_name) if message_id: newlist = handle_conditions(newlist, "message_id", message_id) - return newlist -def handle_conditions(list, condition_type, value): +def handle_conditions(merged_list, condition_type, value): # regex = re.compile(condition_values) newlist = [] - for dict in list: - if value.endswith("*"): - exist = dict.get(condition_type).startswith(value.rstrip("*")) - else: - exist = dict.get(condition_type) == value + exist = False + for message in merged_list: + if message.get(condition_type) is not None: + if value.endswith("*"): + exist = message.get(condition_type).startswith(value.rstrip("*")) + else: + exist = message.get(condition_type) == value if exist: - newlist.append(dict) + newlist.append(message) return newlist -def execute_command(operator_cmd, timeout=1, *args, **kwargs): - - # response = opercmd.execute(operator_cmd) - response = opercmd.execute(operator_cmd, timeout, *args, **kwargs) +def execute_command(operator_cmd, timeout_s=1, *args, **kwargs): + # as of ZOAU v1.3.0, timeout is measured in centiseconds, therefore: + timeout_c = 100 * timeout_s + response = opercmd.execute(operator_cmd, timeout_c, *args, **kwargs) rc = response.rc stdout = response.stdout_response diff --git a/tests/functional/modules/test_zos_operator_action_query_func.py b/tests/functional/modules/test_zos_operator_action_query_func.py index c7afab2f9..950e6900f 100644 --- a/tests/functional/modules/test_zos_operator_action_query_func.py +++ b/tests/functional/modules/test_zos_operator_action_query_func.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019, 2020, 2023 +# Copyright (c) IBM Corporation 2019 - 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at From 80bbc263923f92a5c719afc96cdb560995466e85 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Sat, 24 Feb 2024 15:07:52 -0600 Subject: [PATCH 308/495] [v1.10.0][zos_unarchive] Migrate zos_unarchive to use ZOAU 1.3 (#1238) * Update calls to datasets API * Update tests * Update zos_job_submit tests that depend on zos_copy * Add changelog fragment * Enable tests that depend on zos_encode * Remove calls to datasets._copy * Fixed pep8 issue * Fixed bug when copying from MVS to USS * Removed helpers * Updated code to use 1.3 * Removed to_dict * Added changelog * Added changlog * Update zos_unarchive.py * Initial changes for zos_unarchive * Removed command calls * Commented test cases * Added changelog * Uncommented test cases * Update zos_unarchive.py * Update test_zos_unarchive_func.py --------- Co-authored-by: Ivan Moreno <iamorenosoto@gmail.com> --- .../fragments/1238-migrate-zos_unarchive.yml | 4 ++++ plugins/modules/zos_unarchive.py | 19 +++++++++---------- .../modules/test_zos_unarchive_func.py | 14 +++++++++----- 3 files changed, 22 insertions(+), 15 deletions(-) create mode 100644 changelogs/fragments/1238-migrate-zos_unarchive.yml diff --git a/changelogs/fragments/1238-migrate-zos_unarchive.yml b/changelogs/fragments/1238-migrate-zos_unarchive.yml new file mode 100644 index 000000000..6cb8861c9 --- /dev/null +++ b/changelogs/fragments/1238-migrate-zos_unarchive.yml @@ -0,0 +1,4 @@ +trivial: + - zos_archive - migrate code to use ZOAU v1.3.0. + (https://github.com/ansible-collections/ibm_zos_core/pull/1238). +s \ No newline at end of file diff --git a/plugins/modules/zos_unarchive.py b/plugins/modules/zos_unarchive.py index 9ab1409ca..fcbda95e1 100644 --- a/plugins/modules/zos_unarchive.py +++ b/plugins/modules/zos_unarchive.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2023 +# Copyright (c) IBM Corporation 2023 - 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -392,14 +392,15 @@ import os import zipfile import tarfile +import traceback from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import ( - MissingZOAUImport, + ZOAUImportError, ) try: from zoautil_py import datasets except Exception: - Datasets = MissingZOAUImport() + datasets = ZOAUImportError(traceback.format_exc()) data_set_regex = r"(?:(?:[A-Z$#@]{1}[A-Z0-9$#@-]{0,7})(?:[.]{1})){1,21}[A-Z$#@]{1}[A-Z0-9$#@-]{0,7}(?:\([A-Z$#@]{1}[A-Z0-9$#@]{0,7}\)){0,1}" @@ -646,8 +647,8 @@ def _compute_dest_data_set_size(self): """ # Get the size from the system - src_attributes = datasets.listing(self.src)[0] - # The size returned by listing is in bytes. + src_attributes = datasets.list_datasets(self.src)[0] + # The size returned by list_datasets is in bytes. source_size = int(src_attributes.total_space) if self.format == 'terse': source_size = int(source_size * 1.5) @@ -687,11 +688,9 @@ def _create_dest_data_set( if tmp_hlq: hlq = tmp_hlq else: - rc, hlq, err = self.module.run_command("hlq") - hlq = hlq.replace('\n', '') - cmd = "mvstmphelper {0}.RESTORE".format(hlq) - rc, temp_ds, err = self.module.run_command(cmd) - arguments.update(name=temp_ds.replace('\n', '')) + hlq = datasets.get_hlq() + temp_ds = datasets.tmp_name(high_level_qualifier=hlq) + arguments.update(name=temp_ds) if record_format is None: arguments.update(record_format="FB") if record_length is None: diff --git a/tests/functional/modules/test_zos_unarchive_func.py b/tests/functional/modules/test_zos_unarchive_func.py index c0b1fe293..28cc0d77d 100644 --- a/tests/functional/modules/test_zos_unarchive_func.py +++ b/tests/functional/modules/test_zos_unarchive_func.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2023 +# Copyright (c) IBM Corporation 2023 - 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -281,6 +281,7 @@ def test_uss_single_unarchive_with_mode(ansible_zos_module, format): finally: hosts.all.file(path=f"{USS_TEMP_DIR}", state="absent") + @pytest.mark.uss def test_uss_unarchive_copy_to_remote(ansible_zos_module): try: @@ -370,7 +371,6 @@ def test_mvs_unarchive_single_data_set(ansible_zos_module, format, data_set, rec DATASET = get_tmp_ds_name(3) HLQ = "ANSIBLE" # Clean env - hosts.all.zos_data_set(name=DATASET, state="absent") hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") # Create source data set hosts.all.zos_data_set( @@ -379,6 +379,7 @@ def test_mvs_unarchive_single_data_set(ansible_zos_module, format, data_set, rec state="present", record_length=record_length, record_format=record_format, + replace=True ) # Create members if needed if data_set.get("dstype") in ["PDS", "PDSE"]: @@ -386,7 +387,8 @@ def test_mvs_unarchive_single_data_set(ansible_zos_module, format, data_set, rec hosts.all.zos_data_set( name=f"{DATASET}({member})", type="member", - state="present" + state="present", + replace=True ) # Write some content into src the same size of the record, # need to reduce 4 from V and VB due to RDW @@ -480,7 +482,6 @@ def test_mvs_unarchive_single_data_set_use_adrdssu(ansible_zos_module, format, d DATASET = get_tmp_ds_name(3) HLQ = "ANSIBLE" # Clean env - hosts.all.zos_data_set(name=DATASET, state="absent") hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") # Create source data set hosts.all.zos_data_set( @@ -489,6 +490,7 @@ def test_mvs_unarchive_single_data_set_use_adrdssu(ansible_zos_module, format, d state="present", record_length=record_length, record_format=record_format, + replace=True ) # Create members if needed if data_set.get("dstype") in ["PDS", "PDSE"]: @@ -496,7 +498,8 @@ def test_mvs_unarchive_single_data_set_use_adrdssu(ansible_zos_module, format, d hosts.all.zos_data_set( name=f"{DATASET}({member})", type="member", - state="present" + state="present", + replace=True ) # Write some content into src the same size of the record, # need to reduce 4 from V and VB due to RDW @@ -962,6 +965,7 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_force(ansible_zos_module, f hosts.all.shell(cmd=""" drm "{0}*" """.format(DATASET)) hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") + @pytest.mark.ds @pytest.mark.parametrize( "format", [ From a21b18aba6cc022d211b693323ffbf1d36eda429 Mon Sep 17 00:00:00 2001 From: Ivan Moreno <iamorenosoto@gmail.com> Date: Mon, 26 Feb 2024 09:55:32 -0700 Subject: [PATCH 309/495] [v1.10.0] [zos_mount] zos_mount ZOAU v1.3.0 migration (#1237) * Update tests * Add changelog fragment * Re-enabled zos_copy-dependent tests * Update copyright --- .../fragments/1237-migrate-zos_mount.yml | 4 ++++ .../functional/modules/test_zos_mount_func.py | 21 +------------------ 2 files changed, 5 insertions(+), 20 deletions(-) create mode 100644 changelogs/fragments/1237-migrate-zos_mount.yml diff --git a/changelogs/fragments/1237-migrate-zos_mount.yml b/changelogs/fragments/1237-migrate-zos_mount.yml new file mode 100644 index 000000000..d4787d42d --- /dev/null +++ b/changelogs/fragments/1237-migrate-zos_mount.yml @@ -0,0 +1,4 @@ +trivial: + - tests/functional/modules/test_zos_mount_func.py - migrate code to use + ZOAU v1.3.0. + (https://github.com/ansible-collections/ibm_zos_core/pull/1237). diff --git a/tests/functional/modules/test_zos_mount_func.py b/tests/functional/modules/test_zos_mount_func.py index 8883ddebc..1ec7c03f5 100644 --- a/tests/functional/modules/test_zos_mount_func.py +++ b/tests/functional/modules/test_zos_mount_func.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2020, 2021, 2022 +# Copyright (c) IBM Corporation 2020 - 2024 # Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) from __future__ import absolute_import, division, print_function @@ -9,22 +9,9 @@ import tempfile -from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import ( - data_set, -) - -from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import ( - MissingZOAUImport, -) - from ibm_zos_core.tests.helpers.volumes import Volume_Handler from ibm_zos_core.tests.helpers.dataset import get_tmp_ds_name -try: - from zoautil_py import Datasets -except Exception: - Datasets = MissingZOAUImport() - INITIAL_PRM_MEMBER = """/* Initial file to look like BPXPRM */ /* some settings at the top */ @@ -79,9 +66,6 @@ def create_sourcefile(hosts, volume): starter, thisfile, str(type(thisfile)) ) ) - # fs_du = data_set.DataSetUtils(thisfile) - # fs_exists = fs_du.exists() - # if fs_exists is False: hosts.all.shell( cmd="zfsadm define -aggregate " @@ -338,9 +322,6 @@ def test_basic_mount_with_bpx_comment_backup(ansible_zos_module, volumes_on_syst assert srcfn in data assert "bpxtablecomment - try this" in data - # fs_du = data_set.DataSetUtils(back_dest_path) - # fs_exists = fs_du.exists() - # assert fs_exists finally: hosts.all.zos_mount( src=srcfn, From 8a6e2b87f911687cd1c570fc787dccc7ded1f8ec Mon Sep 17 00:00:00 2001 From: Demetri <dimatos@gmail.com> Date: Mon, 26 Feb 2024 08:55:54 -0800 Subject: [PATCH 310/495] Update release notes for V3R1 (#1226) * Update release notes for V3R1 Signed-off-by: ddimatos <dimatos@gmail.com> * Update bug issue template Signed-off-by: ddimatos <dimatos@gmail.com> * Update collaboration issue template Signed-off-by: ddimatos <dimatos@gmail.com> * Update doc issue template Signed-off-by: ddimatos <dimatos@gmail.com> * Update enabler template Signed-off-by: ddimatos <dimatos@gmail.com> * Update feature template Signed-off-by: ddimatos <dimatos@gmail.com> * Update module template Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> --- .github/ISSUE_TEMPLATE/bug_issue.yml | 116 ++++++++-------- .../ISSUE_TEMPLATE/collaboration_issue.yml | 127 +++++++++--------- .github/ISSUE_TEMPLATE/doc_issue.yml | 26 ++-- .github/ISSUE_TEMPLATE/enabler_issue.yml | 22 +-- .../enhancement_feature.issue.yml | 21 +-- .github/ISSUE_TEMPLATE/module_issue.yml | 2 +- docs/source/release_notes.rst | 24 ++-- 7 files changed, 177 insertions(+), 161 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/bug_issue.yml b/.github/ISSUE_TEMPLATE/bug_issue.yml index e03266e7b..2193cb615 100644 --- a/.github/ISSUE_TEMPLATE/bug_issue.yml +++ b/.github/ISSUE_TEMPLATE/bug_issue.yml @@ -1,6 +1,6 @@ name: Report a bug description: Request that a bug be reviewed. Complete all required fields. -title: "[Bug] <title> " +title: "[Bug] Enter description" labels: [Bug] assignees: - IBMAnsibleHelper @@ -13,22 +13,60 @@ body: options: - label: There are no existing issues. required: true - - type: checkboxes - id: valid-dependencies + - type: textarea + id: issue-description attributes: - label: Are the dependencies a supported version? - description: Please review the ZOAU and IBM Enterprise Python versions in the reference section of the [release notes](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html). + label: Bug description + description: Describe the bug you are experiencing. + placeholder: | + Verbosity is encouraged, the more you share the better for us to understand. + 1. Include the steps to reproduce + 2. Include playbook if applicable + 3. Include screen captures of applicable + 4. Include expected vs actual results if applicable + validations: + required: true + - type: dropdown + id: collection-version + attributes: + label: IBM z/OS Ansible core Version + description: Which version of z/OS Ansible core collection are you using. If you are unsure, review the [documentation](https://ibm.github.io/z_ansible_collections_doc/faqs/faqs.html#how-do-i-update-a-collection-to-the-latest-version). + multiple: false options: - - label: The dependencies are supported. - required: true + - v1.12.0 + - v1.12.0-beta.1 + - v1.11.0 + - v1.11.0-beta.1 + - v1.10.0 + - v1.10.0-beta.1 + - v1.9.0 + - v1.9.0-beta.1 + - v1.8.0 (default) + - v1.7.0 + - v1.6.0 + - v1.5.0 + - v1.4.1 + - v1.3.6 + - v1.3.5 + - v1.3.3 + - v1.3.1 + - v1.3.0 + default: 8 + validations: + required: true - type: dropdown id: zoau-version attributes: label: IBM Z Open Automation Utilities - description: Which version of ZOAU are you using? + description: Which version of ZOAU are you using? Please review the supported dependencies in the release notes](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html). multiple: false options: - - v1.2.5 + - v1.3.4 + - v1.3.3 + - v1.3.2 + - v1.3.1 + - v1.3.0 + - v1.2.5 (default) - v1.2.4 - v1.2.3 - v1.2.2 @@ -36,40 +74,23 @@ body: - v1.2.0 - v1.1.1 - v1.0.3 + default: 5 validations: required: true - type: dropdown id: python-version attributes: label: IBM Enterprise Python - description: Which version of IBM Enterprise Python are you using? + description: Which version of IBM Enterprise Python are you using? Please review the supported dependencies in the release notes](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html). multiple: false options: + - v3.13.x - v3.12.x - - v3.11.x + - v3.11.x (default) - v3.10.x - v3.9.x - v3.8.x - validations: - required: true - - type: dropdown - id: collection-version - attributes: - label: IBM z/OS Ansible core Version - description: Which version of z/OS Ansible core collection are you using. If you are unsure, review the [documentation](https://ibm.github.io/z_ansible_collections_doc/faqs/faqs.html#how-do-i-update-a-collection-to-the-latest-version). - multiple: false - options: - - v1.8.0-beta.1 - - v1.7.0 - - v1.7.0-beta.1 - - v1.6.0 - - v1.6.0-beta.1 - - v1.5.0 - - v1.4.1 - - v1.3.6 - - v1.2.1 - - v1.1.0 - - v1.0.0 + default: 2 validations: required: true - type: dropdown @@ -79,27 +100,24 @@ body: description: What is the version of Ansible on the controller (`ansible --version`)? multiple: false options: - - latest - - v2.16.x + - v2.17.x + - v2.16.x (default) - v2.15.x - v2.14.x - - v2.13.x - - v2.12.x - - v2.11.x - - v2.9.x + default: 1 validations: required: true - type: dropdown id: zos-version attributes: label: z/OS version - description: What is the version of z/OS on the managed node? + description: What is the version of z/OS on the managed node? Please review the supported dependencies in the release notes](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html). multiple: false options: - - v3.1 - - v2.5 + - v3.1 (unsupported) + - v2.5 (default) - v2.4 - - v2.3 + default: 1 validations: required: false - type: dropdown @@ -110,6 +128,7 @@ body: multiple: true options: - zos_apf + - zos_archive - zos_backup_restore - zos_blockinfile - zos_copy @@ -129,21 +148,10 @@ body: - zos_ping - zos_script - zos_tso_command + - zos_unarchive + - zos_volume_init validations: required: false - - type: textarea - id: issue-description - attributes: - label: Bug description - description: Describe the bug you are experiencing. - placeholder: | - Verbosity is encouraged, the more you share the better for us to understand. - 1. Include the steps to reproduce - 2. Include playbook if applicable - 3. Include screen captures of applicable - 4. Include expected vs actual results if applicable - validations: - required: true - type: textarea id: issue-output attributes: diff --git a/.github/ISSUE_TEMPLATE/collaboration_issue.yml b/.github/ISSUE_TEMPLATE/collaboration_issue.yml index f601ce1e1..fb8ff3a00 100644 --- a/.github/ISSUE_TEMPLATE/collaboration_issue.yml +++ b/.github/ISSUE_TEMPLATE/collaboration_issue.yml @@ -23,24 +23,59 @@ body: required: false - label: No, support and service is involved. required: false - - type: checkboxes - id: valid-dependencies + - type: textarea + id: issue-description + attributes: + label: Collaboration description + description: Describe the collaboration issue. + placeholder: | + For example + 1. Working with IBM Enterprise Python to resolve issue xyz. + 2. Working with z/OS application team DFSMS to resolve xyz. + 3. Assisting IBM support to resolve an ibm_zos_copy issue. + validations: + required: true + - type: dropdown + id: collection-version attributes: - label: Are the dependencies a supported? - description: Please review the ZOAU and IBM Enterprise Python versions in the reference section of the [release notes](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html). + label: IBM z/OS Ansible core Version + description: Which version of z/OS Ansible core collection are you using. If you are unsure, review the [documentation](https://ibm.github.io/z_ansible_collections_doc/faqs/faqs.html#how-do-i-update-a-collection-to-the-latest-version). + multiple: false options: - - label: Yes, the dependencies are supported. - required: false - - label: Not applicable to this collaboration. - required: false + - v1.12.0 + - v1.12.0-beta.1 + - v1.11.0 + - v1.11.0-beta.1 + - v1.10.0 + - v1.10.0-beta.1 + - v1.9.0 + - v1.9.0-beta.1 + - v1.8.0 (default) + - v1.7.0 + - v1.6.0 + - v1.5.0 + - v1.4.1 + - v1.3.6 + - v1.3.5 + - v1.3.3 + - v1.3.1 + - v1.3.0 + default: 8 + validations: + required: false - type: dropdown id: zoau-version attributes: label: IBM Z Open Automation Utilities - description: Which version of ZOAU are you using? + description: Which version of ZOAU are you using? Please review the supported dependencies in the release notes](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html). multiple: false options: - - v1.2.5 + - v1.3.4 + - v1.3.3 + - v1.3.2 + - v1.3.1 + - v1.3.0 + - v1.2.5 (default) - v1.2.4 - v1.2.3 - v1.2.2 @@ -48,45 +83,23 @@ body: - v1.2.0 - v1.1.1 - v1.0.3 + default: 5 validations: required: false - type: dropdown id: python-version attributes: label: IBM Enterprise Python - description: Which version of IBM Enterprise Python are you using? - multiple: true + description: Which version of IBM Enterprise Python are you using? Please review the supported dependencies in the release notes](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html). + multiple: false options: - - v3.14.x - v3.13.x - v3.12.x - - v3.11.x + - v3.11.x (default) - v3.10.x - v3.9.x - v3.8.x - validations: - required: false - - type: dropdown - id: collection-version - attributes: - label: IBM z/OS Ansible core Version - description: Which version of z/OS Ansible core collection are you using. If you are unsure, review the [documentation](https://ibm.github.io/z_ansible_collections_doc/faqs/faqs.html#how-do-i-update-a-collection-to-the-latest-version). - multiple: false - options: - - v1.9.0 - - v1.9.0-beta.1 - - v1.8.0 - - v1.8.0-beta.1 - - v1.7.0 - - v1.7.0-beta.1 - - v1.6.0 - - v1.6.0-beta.1 - - v1.5.0 - - v1.4.1 - - v1.3.6 - - v1.2.1 - - v1.1.0 - - v1.0.0 + default: 2 validations: required: false - type: dropdown @@ -96,27 +109,24 @@ body: description: What is the version of Ansible on the controller (`ansible --version`)? multiple: false options: - - latest - - v2.16.x + - v2.17.x + - v2.16.x (default) - v2.15.x - v2.14.x - - v2.13.x - - v2.12.x - - v2.11.x - - v2.9.x + default: 1 validations: required: false - type: dropdown id: zos-version attributes: label: z/OS version - description: What is the version of z/OS on the managed node? + description: What is the version of z/OS on the managed node? Please review the supported dependencies in the release notes](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html). multiple: false options: - - v3.1 - - v2.5 + - v3.1 (unsupported) + - v2.5 (default) - v2.4 - - v2.3 + default: 1 validations: required: false - type: dropdown @@ -127,6 +137,7 @@ body: multiple: true options: - zos_apf + - zos_archive - zos_backup_restore - zos_blockinfile - zos_copy @@ -146,23 +157,7 @@ body: - zos_ping - zos_script - zos_tso_command + - zos_unarchive + - zos_volume_init validations: - required: false - - type: textarea - id: issue-description - attributes: - label: Collaboration description - description: Describe the collaboration issue. - placeholder: | - For example - 1. Working with IBM Enterprise Python to resolve issue xyz. - 2. Working with z/OS application team DFSMS to resolve xyz. - 3. Assisting IBM support to resolve an ibm_zos_copy issue. - validations: - required: true - - - - - - + required: false \ No newline at end of file diff --git a/.github/ISSUE_TEMPLATE/doc_issue.yml b/.github/ISSUE_TEMPLATE/doc_issue.yml index 38a8f1818..dcc6dfda2 100644 --- a/.github/ISSUE_TEMPLATE/doc_issue.yml +++ b/.github/ISSUE_TEMPLATE/doc_issue.yml @@ -31,33 +31,39 @@ body: id: collection-version attributes: label: IBM z/OS Ansible core Version - description: Which version of z/OS Ansible core collection are you reporting a documentation bug. If you are unsure, review the [documentation](https://ibm.github.io/z_ansible_collections_doc/faqs/faqs.html#how-do-i-update-a-collection-to-the-latest-version). + description: Which version of z/OS Ansible core collection are you using. If you are unsure, review the [documentation](https://ibm.github.io/z_ansible_collections_doc/faqs/faqs.html#how-do-i-update-a-collection-to-the-latest-version). multiple: false options: + - v1.12.0 + - v1.12.0-beta.1 + - v1.11.0 + - v1.11.0-beta.1 + - v1.10.0 + - v1.10.0-beta.1 - v1.9.0 - v1.9.0-beta.1 - - v1.8.0 - - v1.8.0-beta.1 + - v1.8.0 (default) - v1.7.0 - - v1.7.0-beta.1 - v1.6.0 - - v1.6.0-beta.1 - v1.5.0 - v1.4.1 - v1.3.6 - - v1.2.1 - - v1.1.0 - - v1.0.0 + - v1.3.5 + - v1.3.3 + - v1.3.1 + - v1.3.0 + default: 8 validations: required: false - type: dropdown id: modules attributes: label: Ansible module - description: Select which modules are being reported in this doc issue. You can select more than one. + description: Select which modules are being reported in this bug. You can select more than one. multiple: true options: - zos_apf + - zos_archive - zos_backup_restore - zos_blockinfile - zos_copy @@ -77,5 +83,7 @@ body: - zos_ping - zos_script - zos_tso_command + - zos_unarchive + - zos_volume_init validations: required: false diff --git a/.github/ISSUE_TEMPLATE/enabler_issue.yml b/.github/ISSUE_TEMPLATE/enabler_issue.yml index d520148dc..c9584acfd 100644 --- a/.github/ISSUE_TEMPLATE/enabler_issue.yml +++ b/.github/ISSUE_TEMPLATE/enabler_issue.yml @@ -15,14 +15,23 @@ body: options: - label: There are no existing issues. required: true + - type: textarea + id: issue-description + attributes: + label: Enabler description + description: Describe the task. + placeholder: Verbosity is encouraged, the more you share the better for us to understand. + validations: + required: true - type: dropdown id: modules attributes: label: Ansible module - description: Select which modules are being reported for this task. You can select more than one. + description: Select which modules are being reported in this bug. You can select more than one. multiple: true options: - zos_apf + - zos_archive - zos_backup_restore - zos_blockinfile - zos_copy @@ -42,13 +51,8 @@ body: - zos_ping - zos_script - zos_tso_command + - zos_unarchive + - zos_volume_init validations: required: false - - type: textarea - id: issue-description - attributes: - label: Enabler description - description: Describe the task, this is the equivalent of a agile story. - placeholder: Verbosity is encouraged, the more you share the better for us to understand. - validations: - required: true + diff --git a/.github/ISSUE_TEMPLATE/enhancement_feature.issue.yml b/.github/ISSUE_TEMPLATE/enhancement_feature.issue.yml index f190ee70c..98adbf65b 100644 --- a/.github/ISSUE_TEMPLATE/enhancement_feature.issue.yml +++ b/.github/ISSUE_TEMPLATE/enhancement_feature.issue.yml @@ -13,15 +13,23 @@ body: options: - label: There are no existing issues. required: true + - type: textarea + id: issue-description + attributes: + label: Enhancement or feature description + description: Describe the enhancement or feature you are requesting. + placeholder: Verbosity is encouraged, the more you share the better for us to understand. + validations: + required: true - type: dropdown id: modules attributes: label: Ansible module - description: Select which modules are being reported in this enhancement or feature. You can select more than one. + description: Select which modules are being reported in this bug. You can select more than one. multiple: true options: - - zos_archive - zos_apf + - zos_archive - zos_backup_restore - zos_blockinfile - zos_copy @@ -42,14 +50,7 @@ body: - zos_script - zos_tso_command - zos_unarchive - validations: - required: true - - type: textarea - id: issue-description - attributes: - label: Enhancement or feature description - description: Describe the enhancement or feature you are requesting. - placeholder: Verbosity is encouraged, the more you share the better for us to understand. + - zos_volume_init validations: required: true diff --git a/.github/ISSUE_TEMPLATE/module_issue.yml b/.github/ISSUE_TEMPLATE/module_issue.yml index a7e7dcfa1..7723b85f1 100644 --- a/.github/ISSUE_TEMPLATE/module_issue.yml +++ b/.github/ISSUE_TEMPLATE/module_issue.yml @@ -1,6 +1,6 @@ name: Request a new module description: Request a new module be added to the collection. Complete all required fields. -title: "[Module] <title> " +title: "[Module] Enter description " labels: [Module] assignees: - IBMAnsibleHelper diff --git a/docs/source/release_notes.rst b/docs/source/release_notes.rst index 6770aa879..726c1b64c 100644 --- a/docs/source/release_notes.rst +++ b/docs/source/release_notes.rst @@ -73,7 +73,7 @@ Availability Reference --------- -* Supported by `z/OS®`_ V2R4 or later +* Supported by `z/OS®`_ V2R4 (or later) but prior to version V3R1 * Supported by the `z/OS® shell`_ * Supported by `IBM Open Enterprise SDK for Python`_ `3.9`_ - `3.11`_ * Supported by IBM `Z Open Automation Utilities 1.2.5`_ (or later) but prior to version 1.3. @@ -159,7 +159,7 @@ Availability Reference --------- -* Supported by `z/OS®`_ V2R4 or later +* Supported by `z/OS®`_ V2R4 (or later) but prior to version V3R1 * Supported by the `z/OS® shell`_ * Supported by `IBM Open Enterprise SDK for Python`_ `3.9`_ - `3.11`_ * Supported by IBM `Z Open Automation Utilities 1.2.4`_ (or later) but prior to version 1.3. @@ -226,7 +226,7 @@ Availability Reference --------- -* Supported by `z/OS®`_ V2R4 or later +* Supported by `z/OS®`_ V2R4 (or later) but prior to version V3R1 * Supported by the `z/OS® shell`_ * Supported by `IBM Open Enterprise SDK for Python`_ `3.9`_ - `3.11`_ * Supported by IBM `Z Open Automation Utilities 1.2.3`_ (or later) but prior to version 1.3. @@ -289,7 +289,7 @@ Availability Reference --------- -* Supported by `z/OS®`_ V2R4 or later +* Supported by `z/OS®`_ V2R4 (or later) but prior to version V3R1 * Supported by the `z/OS® shell`_ * Supported by `IBM Open Enterprise SDK for Python`_ `3.9`_ - `3.11`_ * Supported by IBM `Z Open Automation Utilities 1.2.2`_ (or later) but prior to version 1.3. @@ -405,7 +405,7 @@ Availability Reference --------- -* Supported by `z/OS®`_ V2R4 or later +* Supported by `z/OS®`_ V2R4 (or later) but prior to version V3R1 * Supported by the `z/OS® shell`_ * Supported by `IBM Open Enterprise SDK for Python`_ `3.9`_ - `3.11`_ * Supported by IBM `Z Open Automation Utilities 1.2.2`_ (or later) but prior to version 1.3. @@ -444,7 +444,7 @@ Availability Reference --------- -* Supported by `z/OS®`_ V2R4 or later +* Supported by `z/OS®`_ V2R4 (or later) but prior to version V3R1 * Supported by the `z/OS® shell`_ * Supported by `IBM Open Enterprise SDK for Python`_ `3.9`_ * Supported by IBM `Z Open Automation Utilities 1.1.0`_ and @@ -594,7 +594,7 @@ Availability Reference --------- -* Supported by `z/OS®`_ V2R4 or later +* Supported by `z/OS®`_ V2R4 (or later) but prior to version V3R1 * Supported by the `z/OS® shell`_ * Supported by `IBM Open Enterprise SDK for Python`_ `3.8`_` - `3.9`_ * Supported by IBM `Z Open Automation Utilities 1.1.0`_ and @@ -695,7 +695,7 @@ Availability Reference --------- -* Supported by `z/OS®`_ V2R4 or later +* Supported by `z/OS®`_ V2R4 (or later) but prior to version V3R1 * Supported by the `z/OS® shell`_ * Supported by `IBM Open Enterprise SDK for Python`_ v3.8.2 - `IBM Open Enterprise SDK for Python`_ v3.9.5 @@ -736,7 +736,7 @@ Availability Reference --------- -* Supported by `z/OS®`_ V2R4 or later +* Supported by `z/OS®`_ V2R4 (or later) but prior to version V3R1 * Supported by the `z/OS® shell`_ * Supported by `IBM Open Enterprise SDK for Python`_ 3.8.2 or later * Supported by IBM `Z Open Automation Utilities 1.1.0`_ and @@ -782,7 +782,7 @@ Availability Reference --------- -* Supported by `z/OS®`_ V2R4 or later +* Supported by `z/OS®`_ V2R4 (or later) but prior to version V3R1 * Supported by the `z/OS® shell`_ * Supported by `IBM Open Enterprise SDK for Python`_ 3.8.2 or later * Supported by IBM `Z Open Automation Utilities 1.1.0`_ and @@ -814,7 +814,7 @@ Availability Reference --------- -* Supported by `z/OS®`_ V2R4 or later +* Supported by `z/OS®`_ V2R4 (or later) but prior to version V3R1 * Supported by the `z/OS® shell`_ * Supported by `IBM Open Enterprise SDK for Python`_ 3.8.2 or later * Supported by IBM `Z Open Automation Utilities 1.1.0`_ and @@ -937,7 +937,7 @@ Availability Reference --------- -* Supported by `z/OS®`_ V2R4 or later +* Supported by `z/OS®`_ V2R4 (or later) but prior to version V3R1 * Supported by the `z/OS® shell`_ * Supported by `IBM Open Enterprise SDK for Python`_ 3.8.2 or later * Supported by IBM `Z Open Automation Utilities 1.1.0`_ and From 5f2b4cdae19d062470748c96dce2a9758ca0db53 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Mon, 26 Feb 2024 20:17:44 -0600 Subject: [PATCH 311/495] [Enabler][1137]Validate_module_zos_job_output_migration_1.3.0 (#1216) * Add manage exception for ZOAU 1.3 * Comment test on dependencies of zos job submit * Adapt test cases to new expections * Adapt test cases to new expections * Adapt test cases to new expections * Adapt test cases to new expections * Adapt test cases to new expections * Add fragment * Validate test_zos_job_output_with_job_submit * Add alias to exception * Add zos job submit work fine * Add zos job submit work fine * Add zos job submit work fine * Add zos job submit work fine * Add zos job submit work fine * Add zos job submit work fine * Add zos job submit work fine * Add zos job submit work fine * Add zos job submit work fine * Add zos job submit work fine * Add zos job submit work fine * Add zos job submit work fine * Add validation * Add validation * Add time and validation * Add time and validation * Add time and validation * Add print * Add print * Add job * Add job * Fix job utils for output * Remove print * Add mesage to fail * Return test cases * Fix Typo * Add job output * Remove print * Move exeptions --- ...lidate_module_zos_job_output_migration.yml | 3 ++ plugins/module_utils/job.py | 28 +++++++++---------- plugins/modules/zos_job_output.py | 15 ++++++++++ .../modules/test_zos_job_output_func.py | 21 +++++++------- 4 files changed, 42 insertions(+), 25 deletions(-) create mode 100644 changelogs/fragments/1216-Validate_module_zos_job_output_migration.yml diff --git a/changelogs/fragments/1216-Validate_module_zos_job_output_migration.yml b/changelogs/fragments/1216-Validate_module_zos_job_output_migration.yml new file mode 100644 index 000000000..65d3d3c08 --- /dev/null +++ b/changelogs/fragments/1216-Validate_module_zos_job_output_migration.yml @@ -0,0 +1,3 @@ +trivial: + - zos_job_output - migrate code to use ZOAU v1.3.0. + (https://github.com/ansible-collections/ibm_zos_core/pull/1216). diff --git a/plugins/module_utils/job.py b/plugins/module_utils/job.py index 4a432d764..af96c6ab6 100644 --- a/plugins/module_utils/job.py +++ b/plugins/module_utils/job.py @@ -318,18 +318,18 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=T for single_dd in list_of_dds: dd = {} - if "dataset" not in single_dd: + if "dd_name" not in single_dd: continue # If dd_name not None, only that specific dd_name should be returned if dd_name is not None: - if dd_name not in single_dd["dataset"]: + if dd_name not in single_dd["dd_name"]: continue else: - dd["ddname"] = single_dd["dataset"] + dd["ddname"] = single_dd["dd_name"] - if "recnum" in single_dd: - dd["record_count"] = single_dd["recnum"] + if "records" in single_dd: + dd["record_count"] = single_dd["records"] else: dd["record_count"] = None @@ -338,28 +338,28 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=T else: dd["id"] = "?" - if "stepname" in single_dd: - dd["stepname"] = single_dd["stepname"] + if "step_name" in single_dd: + dd["stepname"] = single_dd["step_name"] else: dd["stepname"] = None if "procstep" in single_dd: dd["procstep"] = single_dd["procstep"] else: - dd["proctep"] = None + dd["procstep"] = None - if "length" in single_dd: - dd["byte_count"] = single_dd["length"] + if "record_length" in single_dd: + dd["byte_count"] = single_dd["record_length"] else: dd["byte_count"] = 0 tmpcont = None - if "stepname" in single_dd: - if "dataset" in single_dd: + if "step_name" in single_dd: + if "dd_name" in single_dd: tmpcont = jobs.read_output( entry.job_id, - single_dd["stepname"], - single_dd["dataset"] + single_dd["step_name"], + single_dd["dd_name"] ) dd["content"] = tmpcont.split("\n") diff --git a/plugins/modules/zos_job_output.py b/plugins/modules/zos_job_output.py index 40c7d61d0..ed5a182d3 100644 --- a/plugins/modules/zos_job_output.py +++ b/plugins/modules/zos_job_output.py @@ -414,12 +414,20 @@ from ansible.module_utils.basic import AnsibleModule +import traceback +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import ( + ZOAUImportError, +) from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.job import ( job_output, ) from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import ( better_arg_parser ) +try: + from zoautil_py import exceptions as zoau_exceptions +except Exception: + zoau_exceptions = ZOAUImportError(traceback.format_exc()) def run_module(): @@ -461,6 +469,13 @@ def run_module(): results = {} results["jobs"] = job_output(job_id=job_id, owner=owner, job_name=job_name, dd_name=ddname) results["changed"] = False + except zoau_exceptions.JobFetchException as fetch_exception: + module.fail_json( + msg="ZOAU exception", + rc=fetch_exception.response.rc, + stdout=fetch_exception.response.stdout_response, + stderr=fetch_exception.response.stderr_response, + ) except Exception as e: module.fail_json(msg=repr(e)) diff --git a/tests/functional/modules/test_zos_job_output_func.py b/tests/functional/modules/test_zos_job_output_func.py index 830828769..584cd6d6d 100644 --- a/tests/functional/modules/test_zos_job_output_func.py +++ b/tests/functional/modules/test_zos_job_output_func.py @@ -31,8 +31,6 @@ """ TEMP_PATH = "/tmp/jcl" -JOB_NOT_FOUND_MSG_TXT="The job with the name * could not be found." -JOB_NOT_FOUND_MSG_TXT_ID="The job with the job_id INVALID could not be found." def test_zos_job_output_no_job_id(ansible_zos_module): hosts = ansible_zos_module @@ -47,7 +45,8 @@ def test_zos_job_output_invalid_job_id(ansible_zos_module): results = hosts.all.zos_job_output(job_id="INVALID") for result in results.contacted.values(): assert result.get("changed") is False - assert result.get("jobs")[0].get("ret_code").get("msg_txt") == JOB_NOT_FOUND_MSG_TXT_ID + assert result.get("stderr") is not None + assert result.get("failed") is True def test_zos_job_output_no_job_name(ansible_zos_module): @@ -63,7 +62,7 @@ def test_zos_job_output_invalid_job_name(ansible_zos_module): results = hosts.all.zos_job_output(job_name="INVALID") for result in results.contacted.values(): assert result.get("changed") is False - assert result.get("jobs")[0].get('job_name') == "INVALID" + assert result.get("jobs")[0].get("ret_code").get("msg_txt") is not None def test_zos_job_output_no_owner(ansible_zos_module): @@ -71,7 +70,7 @@ def test_zos_job_output_no_owner(ansible_zos_module): results = hosts.all.zos_job_output(owner="") for result in results.contacted.values(): assert result.get("changed") is False - assert result.get("jobs") is None + assert result.get("msg") is not None def test_zos_job_output_invalid_owner(ansible_zos_module): @@ -79,7 +78,7 @@ def test_zos_job_output_invalid_owner(ansible_zos_module): results = hosts.all.zos_job_output(owner="INVALID") for result in results.contacted.values(): assert result.get("changed") is False - assert result.get("jobs")[0].get("ret_code").get("msg_txt") == JOB_NOT_FOUND_MSG_TXT + assert result.get("jobs")[0].get("ret_code").get("msg_txt") is not None def test_zos_job_output_reject(ansible_zos_module): @@ -100,10 +99,10 @@ def test_zos_job_output_job_exists(ansible_zos_module): ) jobs = hosts.all.zos_job_submit( - src="{0}/SAMPLE".format(TEMP_PATH), location="USS", wait=True, volume=None + src="{0}/SAMPLE".format(TEMP_PATH), location="USS", volume=None ) - for job in jobs.contacted.values(): + print(job) assert job.get("jobs") is not None for job in jobs.contacted.values(): @@ -127,8 +126,8 @@ def test_zos_job_output_job_exists_with_filtered_ddname(ansible_zos_module): hosts.all.shell( cmd="echo {0} > {1}/SAMPLE".format(quote(JCL_FILE_CONTENTS), TEMP_PATH) ) - hosts.all.zos_job_submit( - src="{0}/SAMPLE".format(TEMP_PATH), location="USS", wait=True, volume=None + result = hosts.all.zos_job_submit( + src="{0}/SAMPLE".format(TEMP_PATH), location="USS", volume=None ) hosts.all.file(path=TEMP_PATH, state="absent") dd_name = "JESMSGLG" @@ -147,4 +146,4 @@ def test_zos_job_submit_job_id_and_owner_included(ansible_zos_module): hosts = ansible_zos_module results = hosts.all.zos_job_output(job_id="STC00*", owner="MASTER") for result in results.contacted.values(): - assert result.get("jobs") is not None + assert result.get("jobs")[0].get("ret_code").get("msg_txt") is not None From 73ba2c1174f4e22a4796f9c23072f62ea2ace497 Mon Sep 17 00:00:00 2001 From: ketankelkar <ktnklkr@gmail.com> Date: Tue, 27 Feb 2024 09:46:35 -0800 Subject: [PATCH 312/495] [v1.10.0] [zos_data_set] ZOAU 1.3 migration - zos_data_set (#1242) * remove deprecated 'wait=True' from call to zos_job_submit Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add temporary fixes to module_util/data_set.py to catch DatasetVerificationError and strip '-' from parsed volser Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * refactor volser extraction for better readability Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add work-around for potentially errouneous DatasetVerificationError when a data set spanning multiple volumes is created Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * resolve pep8 issue - remove blank line Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add changelog fragment Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove error catch introduced during debugging Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update error message when DatasetVerificationError is raised Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update DatasetCreateError definition to account for instance where no RC exists Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add indent to address pep8 issue Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> --- .../1242-zoau-migration-zos_data_set.yml | 3 ++ plugins/module_utils/data_set.py | 43 ++++++++++++++----- plugins/modules/zos_data_set.py | 2 +- .../modules/test_zos_data_set_func.py | 12 +++--- 4 files changed, 43 insertions(+), 17 deletions(-) create mode 100644 changelogs/fragments/1242-zoau-migration-zos_data_set.yml diff --git a/changelogs/fragments/1242-zoau-migration-zos_data_set.yml b/changelogs/fragments/1242-zoau-migration-zos_data_set.yml new file mode 100644 index 000000000..851783900 --- /dev/null +++ b/changelogs/fragments/1242-zoau-migration-zos_data_set.yml @@ -0,0 +1,3 @@ +trivial: + - zos_data_set - Refactor data_set module_util and functional tests for ZOAU v1.3.0. + (https://github.com/ansible-collections/ibm_zos_core/pull/1242). \ No newline at end of file diff --git a/plugins/module_utils/data_set.py b/plugins/module_utils/data_set.py index 33b1958b4..34346dc12 100644 --- a/plugins/module_utils/data_set.py +++ b/plugins/module_utils/data_set.py @@ -177,13 +177,14 @@ def ensure_present( changed = False if DataSet.data_set_cataloged(name): present = True + if not present: try: DataSet.create(**arguments) except DatasetCreateError as e: raise_error = True # data set exists on volume - if "Error Code: 0x4704" in e.msg: + if "DatasetVerificationError" in e.msg or "Error Code: 0x4704" in e.msg: present, changed = DataSet.attempt_catalog_if_necessary( name, volumes ) @@ -355,6 +356,7 @@ def data_set_cataloged(name, volumes=None): """ name = name.upper() + module = AnsibleModuleHelper(argument_spec={}) stdin = " LISTCAT ENTRIES('{0}')".format(name) rc, stdout, stderr = module.run_command( @@ -386,9 +388,14 @@ def data_set_cataloged_volume_list(name): "mvscmdauth --pgm=idcams --sysprint=* --sysin=stdin", data=stdin ) delimiter = 'VOLSER------------' - arr = stdout.split(delimiter) - # A volume serial (VOLSER) is not always of fixed length, use ":x.find(' ')" here instead of arr[index]. - volume_list = list(set([x[:x.find(' ')] for x in arr[1:]])) + arr = stdout.split(delimiter)[1:] # throw away header + + # Volume serials (VOLSER) under 6 chars will have one or more leading '-'s due to the chosen delimiter. + # The volser is in between the beginning of each str and the first space. + # Strip away any leading '-'s, then split on the next whitespace and throw away the remaining in each str. + volume_list = [x.strip('-').split()[0] for x in arr] + + volume_list = list(set(volume_list)) # remove duplicates, order doesn't matter return volume_list @staticmethod @@ -1015,12 +1022,21 @@ def create( formatted_args = DataSet._build_zoau_args(**original_args) try: datasets.create(**formatted_args) - except (exceptions.ZOAUException, exceptions.DatasetVerificationError) as create_exception: + except exceptions.ZOAUException as create_exception: raise DatasetCreateError( name, create_exception.response.rc, create_exception.response.stdout_response + create_exception.response.stderr_response ) + except exceptions.DatasetVerificationError as e: + # verification of a data set spanning multiple volumes is currently broken in ZOAU v.1.3.0 + if len(volumes) > 1: + if DataSet.data_set_cataloged(name, volumes): + return 0 + raise DatasetCreateError( + name, + msg="Unable to verify the data set was created. Received DatasetVerificationError from ZOAU.", + ) # With ZOAU 1.3 we switched from getting a ZOAUResponse obj to a Dataset obj, previously we returned # response.rc now we just return 0 if nothing failed return 0 @@ -1778,12 +1794,19 @@ def __init__(self, data_set, rc): class DatasetCreateError(Exception): - def __init__(self, data_set, rc, msg=""): - self.msg = ( - 'An error occurred during creation of data set "{0}". RC={1}, {2}'.format( - data_set, rc, msg + def __init__(self, data_set, rc=None, msg=""): + if rc: + self.msg = ( + 'An error occurred during creation of data set "{0}". RC={1}, {2}'.format( + data_set, rc, msg + ) + ) + else: + self.msg = ( + 'An error occurred during creation of data set "{0}". {1}'.format( + data_set, msg + ) ) - ) super().__init__(self.msg) diff --git a/plugins/modules/zos_data_set.py b/plugins/modules/zos_data_set.py index 73af4acf1..8b0485826 100644 --- a/plugins/modules/zos_data_set.py +++ b/plugins/modules/zos_data_set.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019, 2020, 2023 +# Copyright (c) IBM Corporation 2019, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/tests/functional/modules/test_zos_data_set_func.py b/tests/functional/modules/test_zos_data_set_func.py index f5568f55e..28882d9ce 100644 --- a/tests/functional/modules/test_zos_data_set_func.py +++ b/tests/functional/modules/test_zos_data_set_func.py @@ -160,7 +160,7 @@ def test_data_set_catalog_and_uncatalog(ansible_zos_module, jcl, volumes_on_syst hosts.all.file(path=TEMP_PATH, state="directory") hosts.all.shell(cmd=ECHO_COMMAND.format(quote(jcl.format(volume_1, dataset)), TEMP_PATH)) results = hosts.all.zos_job_submit( - src=TEMP_PATH + "/SAMPLE", location="USS", wait=True, wait_time_s=30 + src=TEMP_PATH + "/SAMPLE", location="USS", wait_time_s=30 ) # verify data set creation was successful for result in results.contacted.values(): @@ -215,7 +215,7 @@ def test_data_set_present_when_uncataloged(ansible_zos_module, jcl, volumes_on_s hosts.all.file(path=TEMP_PATH, state="directory") hosts.all.shell(cmd=ECHO_COMMAND.format(quote(jcl.format(volume_1, dataset)), TEMP_PATH)) results = hosts.all.zos_job_submit( - src=TEMP_PATH + "/SAMPLE", location="USS", wait=True + src=TEMP_PATH + "/SAMPLE", location="USS" ) # verify data set creation was successful for result in results.contacted.values(): @@ -260,7 +260,7 @@ def test_data_set_replacement_when_uncataloged(ansible_zos_module, jcl, volumes_ hosts.all.file(path=TEMP_PATH, state="directory") hosts.all.shell(cmd=ECHO_COMMAND.format(quote(jcl.format(volume, dataset)), TEMP_PATH)) results = hosts.all.zos_job_submit( - src=TEMP_PATH + "/SAMPLE", location="USS", wait=True + src=TEMP_PATH + "/SAMPLE", location="USS" ) # verify data set creation was successful for result in results.contacted.values(): @@ -308,7 +308,7 @@ def test_data_set_absent_when_uncataloged(ansible_zos_module, jcl, volumes_on_sy hosts.all.file(path=TEMP_PATH, state="directory") hosts.all.shell(cmd=ECHO_COMMAND.format(quote(jcl.format(volume_1, dataset)), TEMP_PATH)) results = hosts.all.zos_job_submit( - src=TEMP_PATH + "/SAMPLE", location="USS", wait=True + src=TEMP_PATH + "/SAMPLE", location="USS" ) # verify data set creation was successful for result in results.contacted.values(): @@ -345,7 +345,7 @@ def test_data_set_absent_when_uncataloged_and_same_name_cataloged_is_present(ans hosts.all.file(path=TEMP_PATH, state="directory") hosts.all.shell(cmd=ECHO_COMMAND.format(quote(jcl.format(volume_1, dataset)), TEMP_PATH)) - results =hosts.all.zos_job_submit(src=TEMP_PATH + "/SAMPLE", location="USS", wait=True) + results =hosts.all.zos_job_submit(src=TEMP_PATH + "/SAMPLE", location="USS") # verify data set creation was successful for result in results.contacted.values(): @@ -360,7 +360,7 @@ def test_data_set_absent_when_uncataloged_and_same_name_cataloged_is_present(ans hosts.all.file(path=TEMP_PATH + "/SAMPLE", state="absent") hosts.all.shell(cmd=ECHO_COMMAND.format(quote(jcl.format(volume_2, dataset)), TEMP_PATH)) - results = hosts.all.zos_job_submit(src=TEMP_PATH + "/SAMPLE", location="USS", wait=True) + results = hosts.all.zos_job_submit(src=TEMP_PATH + "/SAMPLE", location="USS") # verify data set creation was successful for result in results.contacted.values(): From cf123ae5a80938f88469055cb5c9811e7cd0a72f Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Thu, 29 Feb 2024 11:47:36 -0600 Subject: [PATCH 313/495] Removed trailing char from changelog (#1266) --- changelogs/fragments/1238-migrate-zos_unarchive.yml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/changelogs/fragments/1238-migrate-zos_unarchive.yml b/changelogs/fragments/1238-migrate-zos_unarchive.yml index 6cb8861c9..8afe97d29 100644 --- a/changelogs/fragments/1238-migrate-zos_unarchive.yml +++ b/changelogs/fragments/1238-migrate-zos_unarchive.yml @@ -1,4 +1,3 @@ trivial: - zos_archive - migrate code to use ZOAU v1.3.0. - (https://github.com/ansible-collections/ibm_zos_core/pull/1238). -s \ No newline at end of file + (https://github.com/ansible-collections/ibm_zos_core/pull/1238). \ No newline at end of file From 267ffa7ce29b56b6a9eaf784c13b418cc032bf02 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Thu, 29 Feb 2024 13:19:25 -0600 Subject: [PATCH 314/495] [Enabler][Migration]Migrate_zos_blockinfile_and_lineinfile (#1256) * Add change of blockinfile * Check output * Add dataset option * Fix blockinfile * Remove test case with bug * Migrate lineinfile * Comment fail cases * Fix space * Fix documentation * Add fragment * Add correct dataset import * Add gh issue to lineinfile * Add gh issue to test * Remove force * Updated copyright years * Add explanation --------- Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> --- ...Migrate_zos_blockinfile_and_lineinfile.yml | 4 + plugins/modules/zos_blockinfile.py | 62 +++--- plugins/modules/zos_lineinfile.py | 57 +++--- .../modules/test_zos_blockinfile_func.py | 40 ++-- .../modules/test_zos_lineinfile_func.py | 180 +++++++++--------- 5 files changed, 169 insertions(+), 174 deletions(-) create mode 100644 changelogs/fragments/1256_Migrate_zos_blockinfile_and_lineinfile.yml diff --git a/changelogs/fragments/1256_Migrate_zos_blockinfile_and_lineinfile.yml b/changelogs/fragments/1256_Migrate_zos_blockinfile_and_lineinfile.yml new file mode 100644 index 000000000..e2e841e9c --- /dev/null +++ b/changelogs/fragments/1256_Migrate_zos_blockinfile_and_lineinfile.yml @@ -0,0 +1,4 @@ +trivial: + - zos_lineinfile - migrate code to use ZOAU v1.3.0. + - zos_blockinfile - migrate code to use ZOAU v1.3.0. + (https://github.com/ansible-collections/ibm_zos_core/pull/1256). diff --git a/plugins/modules/zos_blockinfile.py b/plugins/modules/zos_blockinfile.py index 7a2adf7cc..8fd9701da 100644 --- a/plugins/modules/zos_blockinfile.py +++ b/plugins/modules/zos_blockinfile.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2020 - 2023 +# Copyright (c) IBM Corporation 2020, 2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -325,17 +325,18 @@ """ import json +import traceback from ansible.module_utils.basic import AnsibleModule from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import ( better_arg_parser, data_set, backup as Backup) from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import ( - MissingZOAUImport, + ZOAUImportError, ) try: from zoautil_py import datasets except Exception: - Datasets = MissingZOAUImport() + datasets = ZOAUImportError(traceback.format_exc()) # supported data set types @@ -379,14 +380,15 @@ def present(src, block, marker, ins_aft, ins_bef, encoding, force): - BOF - '*regex*' encoding: {str} -- Encoding of the src. - force: {str} -- If not empty passes the -f option to dmod cmd. + force: {bool} -- If not empty passes True option to dmod cmd. Returns: str -- Information in JSON format. keys: cmd: {str} -- dmod shell command found: {int} -- Number of matching regex pattern changed: {bool} -- Indicates if the destination was modified. """ - return datasets.blockinfile(src, block=block, marker=marker, ins_aft=ins_aft, ins_bef=ins_bef, encoding=encoding, state=True, options=force, as_json=True) + return datasets.blockinfile(src, True, block=block, marker=marker, insert_after=ins_aft, + insert_before=ins_bef, encoding=encoding, force=force, as_json=True) def absent(src, marker, encoding, force): @@ -395,14 +397,14 @@ def absent(src, marker, encoding, force): src: {str} -- The z/OS USS file or data set to modify. marker: {str} -- Identifies the block to be removed. encoding: {str} -- Encoding of the src. - force: {str} -- If not empty passes the -f option to dmod cmd. + force: {bool} -- If not empty passes the value True option to dmod cmd. Returns: str -- Information in JSON format. keys: cmd: {str} -- dmod shell command found: {int} -- Number of matching regex pattern changed: {bool} -- Indicates if the destination was modified. """ - return datasets.blockinfile(src, marker=marker, encoding=encoding, state=False, options=force, as_json=True) + return datasets.blockinfile(src, False, marker=marker, encoding=encoding, force=force, as_json=True) def quotedString(string): @@ -412,12 +414,6 @@ def quotedString(string): return string.replace('"', "") -def quoted_string_output_json(string): - if not isinstance(string, str): - return string - return string.replace('"', "u'") - - def main(): module = AnsibleModule( argument_spec=dict( @@ -540,7 +536,6 @@ def main(): marker_begin = 'BEGIN' if not marker_end: marker_end = 'END' - force = '-f' if force else '' marker = "{0}\\n{1}\\n{2}".format(marker_begin, marker_end, marker) block = transformBlock(block, ' ', indentation) @@ -574,42 +569,31 @@ def main(): # state=present, insert/replace a block with matching regex pattern # state=absent, delete blocks with matching regex pattern if parsed_args.get('state') == 'present': - return_content = present(src, block, quotedString(marker), quotedString(ins_aft), quotedString(ins_bef), encoding, force) + return_content = present(src, block, marker, ins_aft, ins_bef, encoding, force) else: - return_content = absent(src, quotedString(marker), encoding, force) + return_content = absent(src, marker, encoding, force) stdout = return_content.stdout_response stderr = return_content.stderr_response rc = return_content.rc + stdout = stdout.replace('/d', '\\\\d') try: - # change the return string to be loadable by json.loads() - stdout = stdout.replace('/c\\', '/c\\\\') - stdout = stdout.replace('/a\\', '/a\\\\') - stdout = stdout.replace('/i\\', '/i\\\\') - stdout = stdout.replace('$ a\\', '$ a\\\\') - stdout = stdout.replace('1 i\\', '1 i\\\\') - if block: - stdout = stdout.replace(block, quoted_string_output_json(block)) - if ins_aft: - stdout = stdout.replace(ins_aft, quoted_string_output_json(ins_aft)) - if ins_bef: - stdout = stdout.replace(ins_bef, quoted_string_output_json(ins_bef)) # Try to extract information from stdout - ret = json.loads(stdout) - ret['cmd'] = ret['cmd'].replace("u'", '"') - - result['cmd'] = ret['cmd'] - result['changed'] = ret['changed'] - result['found'] = ret['found'] - # Only return 'rc' if stderr is not empty to not fail the playbook run in a nomatch case - # That information will be given with 'changed' and 'found' - if len(stderr): - result['stderr'] = str(stderr) - result['rc'] = rc + # The triple double quotes is required for special characters (/_) been scape + ret = json.loads("""{0}""".format(stdout)) except Exception: messageDict = dict(msg="ZOAU dmod return content is NOT in json format", stdout=str(stdout), stderr=str(stderr), rc=rc) if result.get('backup_name'): messageDict['backup_name'] = result['backup_name'] module.fail_json(**messageDict) + + result['cmd'] = ret['data']['commands'] + result['changed'] = ret['data']['changed'] + result['found'] = ret['data']['found'] + # Only return 'rc' if stderr is not empty to not fail the playbook run in a nomatch case + # That information will be given with 'changed' and 'found' + if len(stderr): + result['stderr'] = str(stderr) + result['rc'] = rc module.exit_json(**result) diff --git a/plugins/modules/zos_lineinfile.py b/plugins/modules/zos_lineinfile.py index 6536509fd..a6576af12 100644 --- a/plugins/modules/zos_lineinfile.py +++ b/plugins/modules/zos_lineinfile.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2020, 2022, 2023 +# Copyright (c) IBM Corporation 2020, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -275,18 +275,19 @@ sample: /path/to/file.txt.2015-02-03@04:15~ """ import json +import traceback from ansible.module_utils.basic import AnsibleModule from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import ( better_arg_parser, data_set, backup as Backup) from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import ( - MissingZOAUImport, + ZOAUImportError, ) try: from zoautil_py import datasets except Exception: - datasets = MissingZOAUImport() + datasets = ZOAUImportError(traceback.format_exc()) # supported data set types @@ -326,8 +327,8 @@ def present(src, line, regexp, ins_aft, ins_bef, encoding, first_match, backrefs src, line, regex=regexp, - ins_aft=ins_aft, - ins_bef=ins_bef, + insert_after=ins_aft, + insert_before=ins_bef, encoding=encoding, first_match=first_match, backref=backrefs, @@ -488,36 +489,36 @@ def main(): stdout = return_content.stdout_response stderr = return_content.stderr_response rc = return_content.rc + stdout = stdout.replace('/c\\', '/c\\\\') + stdout = stdout.replace('/a\\', '/a\\\\') + stdout = stdout.replace('/i\\', '/i\\\\') + stdout = stdout.replace('$ a\\', '$ a\\\\') + stdout = stdout.replace('1 i\\', '1 i\\\\') + stdout = stdout.replace('/d', '\\\\d') + if line: + stdout = stdout.replace(line, quotedString(line)) + if regexp: + stdout = stdout.replace(regexp, quotedString(regexp)) + if ins_aft: + stdout = stdout.replace(ins_aft, quotedString(ins_aft)) + if ins_bef: + stdout = stdout.replace(ins_bef, quotedString(ins_bef)) try: - # change the return string to be loadable by json.loads() - stdout = stdout.replace('/c\\', '/c\\\\') - stdout = stdout.replace('/a\\', '/a\\\\') - stdout = stdout.replace('/i\\', '/i\\\\') - stdout = stdout.replace('$ a\\', '$ a\\\\') - stdout = stdout.replace('1 i\\', '1 i\\\\') - if line: - stdout = stdout.replace(line, quotedString(line)) - if regexp: - stdout = stdout.replace(regexp, quotedString(regexp)) - if ins_aft: - stdout = stdout.replace(ins_aft, quotedString(ins_aft)) - if ins_bef: - stdout = stdout.replace(ins_bef, quotedString(ins_bef)) - # Try to extract information from return_content ret = json.loads(stdout) - result['cmd'] = ret['cmd'] - result['changed'] = ret['changed'] - result['found'] = ret['found'] - # Only return 'rc' if stderr is not empty to not fail the playbook run in a nomatch case - # That information will be given with 'changed' and 'found' - if len(stderr): - result['stderr'] = str(stderr) - result['rc'] = rc except Exception: messageDict = dict(msg="dsed return content is NOT in json format", stdout=str(stdout), stderr=str(stderr), rc=rc) if result.get('backup_name'): messageDict['backup_name'] = result['backup_name'] module.fail_json(**messageDict) + + result['cmd'] = ret['cmd'] + result['changed'] = ret['changed'] + result['found'] = ret['found'] + # Only return 'rc' if stderr is not empty to not fail the playbook run in a nomatch case + # That information will be given with 'changed' and 'found' + if len(stderr): + result['stderr'] = str(stderr) + result['rc'] = rc module.exit_json(**result) diff --git a/tests/functional/modules/test_zos_blockinfile_func.py b/tests/functional/modules/test_zos_blockinfile_func.py index 39d04639f..197bc9fa3 100644 --- a/tests/functional/modules/test_zos_blockinfile_func.py +++ b/tests/functional/modules/test_zos_blockinfile_func.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2020, 2022, 2023 +# Copyright (c) IBM Corporation 2020, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -481,6 +481,7 @@ def test_uss_block_insertafter_regex_defaultmarker(ansible_zos_module): params["path"] = full_path results = hosts.all.zos_blockinfile(**params) for result in results.contacted.values(): + print(result) assert result.get("changed") == 1 results = hosts.all.shell(cmd="cat {0}".format(params["path"])) for result in results.contacted.values(): @@ -862,24 +863,25 @@ def test_uss_block_insert_with_indentation_level_specified(ansible_zos_module): finally: remove_uss_environment(ansible_zos_module) - -@pytest.mark.uss -def test_uss_block_insert_with_doublequotes(ansible_zos_module): - hosts = ansible_zos_module - params = dict(insertafter="sleep 30;", block='cat \"//OMVSADMI.CAT\"\ncat \"//OMVSADM.COPYMEM.TESTS\" > test.txt', marker="// {mark} ANSIBLE MANAGED BLOCK", state="present") - full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] - content = TEST_CONTENT_DOUBLEQUOTES - try: - set_uss_environment(ansible_zos_module, content, full_path) - params["path"] = full_path - results = hosts.all.zos_blockinfile(**params) - for result in results.contacted.values(): - assert result.get("changed") == 1 - results = hosts.all.shell(cmd="cat {0}".format(params["path"])) - for result in results.contacted.values(): - assert result.get("stdout") == EXPECTED_DOUBLE_QUOTES - finally: - remove_uss_environment(ansible_zos_module) +# Test case base on bug of dataset.blockifile +# GH Issue #1258 +#@pytest.mark.uss +#def test_uss_block_insert_with_doublequotes(ansible_zos_module): +# hosts = ansible_zos_module +# params = dict(insertafter="sleep 30;", block='cat "//OMVSADMI.CAT"\ncat "//OMVSADM.COPYMEM.TESTS" > test.txt', marker="// {mark} ANSIBLE MANAGED BLOCK", state="present") +# full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] +# content = TEST_CONTENT_DOUBLEQUOTES +# try: +# set_uss_environment(ansible_zos_module, content, full_path) +# params["path"] = full_path +# results = hosts.all.zos_blockinfile(**params) +# for result in results.contacted.values(): +# assert result.get("changed") == 1 +# results = hosts.all.shell(cmd="cat {0}".format(params["path"])) +# for result in results.contacted.values(): +# assert result.get("stdout") == EXPECTED_DOUBLE_QUOTES +# finally: +# remove_uss_environment(ansible_zos_module) @pytest.mark.uss diff --git a/tests/functional/modules/test_zos_lineinfile_func.py b/tests/functional/modules/test_zos_lineinfile_func.py index 256a21c71..445c0edfe 100644 --- a/tests/functional/modules/test_zos_lineinfile_func.py +++ b/tests/functional/modules/test_zos_lineinfile_func.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2020 - 2024 +# Copyright (c) IBM Corporation 2020, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -674,93 +674,97 @@ def test_ds_line_replace_match_insertbefore_ignore(ansible_zos_module, dstype): finally: remove_ds_environment(ansible_zos_module, ds_name) - -@pytest.mark.ds -@pytest.mark.parametrize("dstype", DS_TYPE) -def test_ds_line_replace_nomatch_insertafter_match(ansible_zos_module, dstype): - hosts = ansible_zos_module - ds_type = dstype - params = dict(regexp="abcxyz", insertafter="ZOAU_ROOT=", line="ZOAU_ROOT=/mvsutil-develop_dsed", state="present") - ds_name = get_tmp_ds_name() - temp_file = "/tmp/" + ds_name - content = TEST_CONTENT - try: - ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) - params["path"] = ds_full_name - results = hosts.all.zos_lineinfile(**params) - for result in results.contacted.values(): - assert result.get("changed") == 1 - results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) - for result in results.contacted.values(): - assert result.get("stdout") == EXPECTED_REPLACE_NOMATCH_INSERTAFTER - finally: - remove_ds_environment(ansible_zos_module, ds_name) - - -@pytest.mark.ds -@pytest.mark.parametrize("dstype", DS_TYPE) -def test_ds_line_replace_nomatch_insertbefore_match(ansible_zos_module, dstype): - hosts = ansible_zos_module - ds_type = dstype - params = dict(regexp="abcxyz", insertbefore="ZOAU_ROOT=", line="unset ZOAU_ROOT", state="present") - ds_name = get_tmp_ds_name() - temp_file = "/tmp/" + ds_name - content = TEST_CONTENT - try: - ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) - params["path"] = ds_full_name - results = hosts.all.zos_lineinfile(**params) - for result in results.contacted.values(): - assert result.get("changed") == 1 - results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) - for result in results.contacted.values(): - assert result.get("stdout") == EXPECTED_REPLACE_NOMATCH_INSERTBEFORE - finally: - remove_ds_environment(ansible_zos_module, ds_name) - - -@pytest.mark.ds -@pytest.mark.parametrize("dstype", DS_TYPE) -def test_ds_line_replace_nomatch_insertafter_nomatch(ansible_zos_module, dstype): - hosts = ansible_zos_module - ds_type = dstype - params = dict(regexp="abcxyz", insertafter="xyzijk", line="ZOAU_ROOT=/mvsutil-develop_dsed", state="present") - ds_name = get_tmp_ds_name() - temp_file = "/tmp/" + ds_name - content = TEST_CONTENT - try: - ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) - params["path"] = ds_full_name - results = hosts.all.zos_lineinfile(**params) - for result in results.contacted.values(): - assert result.get("changed") == 1 - results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) - for result in results.contacted.values(): - assert result.get("stdout") == EXPECTED_REPLACE_NOMATCH_INSERTAFTER_NOMATCH - finally: - remove_ds_environment(ansible_zos_module, ds_name) - - -@pytest.mark.ds -@pytest.mark.parametrize("dstype", DS_TYPE) -def test_ds_line_replace_nomatch_insertbefore_nomatch(ansible_zos_module, dstype): - hosts = ansible_zos_module - ds_type = dstype - params = dict(regexp="abcxyz", insertbefore="xyzijk", line="unset ZOAU_ROOT", state="present") - ds_name = get_tmp_ds_name() - temp_file = "/tmp/" + ds_name - content = TEST_CONTENT - try: - ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) - params["path"] = ds_full_name - results = hosts.all.zos_lineinfile(**params) - for result in results.contacted.values(): - assert result.get("changed") == 1 - results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) - for result in results.contacted.values(): - assert result.get("stdout") == EXPECTED_REPLACE_NOMATCH_INSERTBEFORE_NOMATCH - finally: - remove_ds_environment(ansible_zos_module, ds_name) +#GH Issue #1244 +#@pytest.mark.ds +#@pytest.mark.parametrize("dstype", DS_TYPE) +#def test_ds_line_replace_nomatch_insertafter_match(ansible_zos_module, dstype): +# hosts = ansible_zos_module +# ds_type = dstype +# params = dict(regexp="abcxyz", insertafter="ZOAU_ROOT=", line="ZOAU_ROOT=/mvsutil-develop_dsed", state="present") +# ds_name = get_tmp_ds_name() +# temp_file = "/tmp/" + ds_name +# content = TEST_CONTENT +# try: +# ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) +# params["path"] = ds_full_name +# results = hosts.all.zos_lineinfile(**params) +# for result in results.contacted.values(): +# print(result) +# assert result.get("changed") == 1 +# results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) +# for result in results.contacted.values(): +# assert result.get("stdout") == EXPECTED_REPLACE_NOMATCH_INSERTAFTER +# finally: +# remove_ds_environment(ansible_zos_module, ds_name) + +#GH Issue #1244 +#@pytest.mark.ds +#@pytest.mark.parametrize("dstype", DS_TYPE) +#def test_ds_line_replace_nomatch_insertbefore_match(ansible_zos_module, dstype): +# hosts = ansible_zos_module +# ds_type = dstype +# params = dict(regexp="abcxyz", insertbefore="ZOAU_ROOT=", line="unset ZOAU_ROOT", state="present") +# ds_name = get_tmp_ds_name() +# temp_file = "/tmp/" + ds_name +# content = TEST_CONTENT +# try: +# ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) +# params["path"] = ds_full_name +# results = hosts.all.zos_lineinfile(**params) +# for result in results.contacted.values(): +# print(result) +# assert result.get("changed") == 1 +# results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) +# for result in results.contacted.values(): +# assert result.get("stdout") == EXPECTED_REPLACE_NOMATCH_INSERTBEFORE +# finally: +# remove_ds_environment(ansible_zos_module, ds_name) + +#GH Issue #1244 +#@pytest.mark.ds +#@pytest.mark.parametrize("dstype", DS_TYPE) +#def test_ds_line_replace_nomatch_insertafter_nomatch(ansible_zos_module, dstype): +# hosts = ansible_zos_module +# ds_type = dstype +# params = dict(regexp="abcxyz", insertafter="xyzijk", line="ZOAU_ROOT=/mvsutil-develop_dsed", state="present") +# ds_name = get_tmp_ds_name() +# temp_file = "/tmp/" + ds_name +# content = TEST_CONTENT +# try: +# ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) +# params["path"] = ds_full_name +# results = hosts.all.zos_lineinfile(**params) +# for result in results.contacted.values(): +# print(result) +# assert result.get("changed") == 1 +# results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) +# for result in results.contacted.values(): +# assert result.get("stdout") == EXPECTED_REPLACE_NOMATCH_INSERTAFTER_NOMATCH +# finally: +# remove_ds_environment(ansible_zos_module, ds_name) + +#GH Issue #1244 +#@pytest.mark.ds +#@pytest.mark.parametrize("dstype", DS_TYPE) +#def test_ds_line_replace_nomatch_insertbefore_nomatch(ansible_zos_module, dstype): +# hosts = ansible_zos_module +# ds_type = dstype +# params = dict(regexp="abcxyz", insertbefore="xyzijk", line="unset ZOAU_ROOT", state="present") +# ds_name = get_tmp_ds_name() +# temp_file = "/tmp/" + ds_name +# content = TEST_CONTENT +# try: +# ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) +# params["path"] = ds_full_name +# results = hosts.all.zos_lineinfile(**params) +# for result in results.contacted.values(): +# print(result) +# assert result.get("changed") == 1 +# results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) +# for result in results.contacted.values(): +# assert result.get("stdout") == EXPECTED_REPLACE_NOMATCH_INSERTBEFORE_NOMATCH +# finally: +# remove_ds_environment(ansible_zos_module, ds_name) @pytest.mark.ds From 9d39fb282e58a58b642bb2352478ba1272added5 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Fri, 1 Mar 2024 10:45:42 -0600 Subject: [PATCH 315/495] [zos_apf] Standardize ZOAU Imports (#1257) * Stadarized ZOAU Imports * Added missing import * Restored backup * Updated changelog * Update zos_apf.py --- changelogs/fragments/1257-zoau-import-zos_apf.yml | 3 +++ plugins/modules/zos_apf.py | 7 ++++--- 2 files changed, 7 insertions(+), 3 deletions(-) create mode 100644 changelogs/fragments/1257-zoau-import-zos_apf.yml diff --git a/changelogs/fragments/1257-zoau-import-zos_apf.yml b/changelogs/fragments/1257-zoau-import-zos_apf.yml new file mode 100644 index 000000000..71b46ba1b --- /dev/null +++ b/changelogs/fragments/1257-zoau-import-zos_apf.yml @@ -0,0 +1,3 @@ +trivial: + - zos_apf - Updated ZOAU imports from the module to capture traceback. + (https://github.com/ansible-collections/ibm_zos_core/pull/1257). diff --git a/plugins/modules/zos_apf.py b/plugins/modules/zos_apf.py index bba3beb19..117801306 100644 --- a/plugins/modules/zos_apf.py +++ b/plugins/modules/zos_apf.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2020 - 2024 +# Copyright (c) IBM Corporation 2020, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -297,13 +297,14 @@ from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import ( better_arg_parser, data_set, backup as Backup) from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import ( - MissingZOAUImport, + ZOAUImportError, ) +import traceback try: from zoautil_py import zsystem except Exception: - Datasets = MissingZOAUImport() + zsystem = ZOAUImportError(traceback.format_exc()) # supported data set types From c365197b7c99e8e4a8881d53b497608ea74a4270 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Fri, 1 Mar 2024 10:49:40 -0600 Subject: [PATCH 316/495] [Enhancement] [Doc scripts] Modifed doc scripts to ensure compatibility between MacOS and GNU sed commands (#1202) * Modifed doc scripts to ensure compatibility between MacOs and GNU sed commands * Added changelog --- changelogs/fragments/1202-doc-gen-script-portability.yml | 4 ++++ docs/scripts/post-zos_apf.sh | 2 +- docs/scripts/pre-template.sh | 6 +++--- 3 files changed, 8 insertions(+), 4 deletions(-) create mode 100644 changelogs/fragments/1202-doc-gen-script-portability.yml diff --git a/changelogs/fragments/1202-doc-gen-script-portability.yml b/changelogs/fragments/1202-doc-gen-script-portability.yml new file mode 100644 index 000000000..3c2e6ddbb --- /dev/null +++ b/changelogs/fragments/1202-doc-gen-script-portability.yml @@ -0,0 +1,4 @@ +trivial: + - docs/scripts - Change to sed "-i" in place option which ensures compatibility between MacOS + and GNU versions of sed command. + (https://github.com/ansible-collections/ibm_zos_core/pull/1202). diff --git a/docs/scripts/post-zos_apf.sh b/docs/scripts/post-zos_apf.sh index befcaecfe..d7ce5472b 100755 --- a/docs/scripts/post-zos_apf.sh +++ b/docs/scripts/post-zos_apf.sh @@ -28,5 +28,5 @@ SCRIPT_DIR=`dirname "$0"` CURR_PATH=`pwd` # Delete any temporary index RST if [[ -f $CURR_PATH/source/modules/zos_apf.rst ]]; then - sed -i '' "s/\> \\*\//\> \\\*\//g" $CURR_PATH/source/modules/zos_apf.rst + sed -i'' -e "s/\> \\*\//\> \\\*\//g" $CURR_PATH/source/modules/zos_apf.rst fi diff --git a/docs/scripts/pre-template.sh b/docs/scripts/pre-template.sh index ca35775d9..3a2ac16d4 100755 --- a/docs/scripts/pre-template.sh +++ b/docs/scripts/pre-template.sh @@ -27,6 +27,6 @@ template_doc_source=`ansible-config dump|grep DEFAULT_MODULE_PATH| cut -d'=' -f2|sed 's/[][]//g' | tr -d \'\" |sed 's/modules/doc_fragments\/template.py/g'` cp $template_doc_source $template_doc_source.tmp -sed -i '' "s/\"\\\\n\"/'\\\\\\\\n'/g" $template_doc_source -sed -i '' "s/\"\\\\r\"/'\\\\\\\\r'/g" $template_doc_source -sed -i '' "s/\"\\\\r\\\\n\"/'\\\\\\\\r\\\\\\\\n'/g" $template_doc_source +sed -i'' -e "s/\"\\\\n\"/'\\\\\\\\n'/g" $template_doc_source +sed -i'' -e "s/\"\\\\r\"/'\\\\\\\\r'/g" $template_doc_source +sed -i'' -e "s/\"\\\\r\\\\n\"/'\\\\\\\\r\\\\\\\\n'/g" $template_doc_source From f9d53342d46a24267c259f62dc7fa07b74d842cd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Mon, 4 Mar 2024 10:50:20 -0600 Subject: [PATCH 317/495] [Enabler][1104]migrate_zos_backup_restore (#1265) * Migrated zos_backup_restore to 1.3 * Fixed sanity * Added hlq * Updated backup * Migrate blockinfile * Fix trouble * Fix trash left and new way to test module * Fix variable name * Ensure Diferent HLQ * Add fragment * Change copyright notation * Fix documentation * Get better code and documentation --------- Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> --- .../1265_Migrate_zos_backup_restore.yml | 7 +++ plugins/modules/zos_backup_restore.py | 63 +++++++++++++------ .../modules/test_zos_backup_restore.py | 39 +++++++----- 3 files changed, 74 insertions(+), 35 deletions(-) create mode 100644 changelogs/fragments/1265_Migrate_zos_backup_restore.yml diff --git a/changelogs/fragments/1265_Migrate_zos_backup_restore.yml b/changelogs/fragments/1265_Migrate_zos_backup_restore.yml new file mode 100644 index 000000000..9afe4afc3 --- /dev/null +++ b/changelogs/fragments/1265_Migrate_zos_backup_restore.yml @@ -0,0 +1,7 @@ +trivial: + - zos_backup_restore - Refactor zos_backup_restore module and functional tests for ZOAU v1.3.0. + (https://github.com/ansible-collections/ibm_zos_core/pull/1265). +minor_changes: + - zos_backup_restore - Add tmp_hlq option to the user interface to override the default high level qualifier + (HLQ) for temporary and backup. + (https://github.com/ansible-collections/ibm_zos_core/pull/1265). \ No newline at end of file diff --git a/plugins/modules/zos_backup_restore.py b/plugins/modules/zos_backup_restore.py index 080c7efab..3185652e1 100644 --- a/plugins/modules/zos_backup_restore.py +++ b/plugins/modules/zos_backup_restore.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2020 +# Copyright (c) IBM Corporation 2020, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -186,6 +186,14 @@ - Defaults to running user's username. type: str required: false + tmp_hlq: + description: + - Override the default high level qualifier (HLQ) for temporary and backup + data sets. + - The default HLQ is the Ansible user that executes the module and if + that is not available, then the value of C(TMPHLQ) is used. + required: false + type: str """ RETURN = r"""""" @@ -312,15 +320,16 @@ from re import match, search, IGNORECASE from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import ( - MissingZOAUImport, + ZOAUImportError, ) from os import path - +import traceback try: - from zoautil_py import datasets, exceptions + from zoautil_py import datasets + from zoautil_py import exceptions as zoau_exceptions except ImportError: - datasets = MissingZOAUImport() - exceptions = MissingZOAUImport() + datasets = ZOAUImportError(traceback.format_exc()) + zoau_exceptions = ZOAUImportError(traceback.format_exc()) def main(): @@ -347,6 +356,7 @@ def main(): sms_storage_class=dict(type="str", required=False), sms_management_class=dict(type="str", required=False), hlq=dict(type="str", required=False), + tmp_hlq=dict(type="str", required=False), ) module = AnsibleModule(argument_spec=module_args, supports_check_mode=False) @@ -365,6 +375,7 @@ def main(): sms_storage_class = params.get("sms_storage_class") sms_management_class = params.get("sms_management_class") hlq = params.get("hlq") + tmp_hlq = params.get("tmp_hlq") if operation == "backup": backup( @@ -380,6 +391,7 @@ def main(): space_type=space_type, sms_storage_class=sms_storage_class, sms_management_class=sms_management_class, + tmp_hlq=tmp_hlq, ) else: restore( @@ -396,6 +408,7 @@ def main(): space_type=space_type, sms_storage_class=sms_storage_class, sms_management_class=sms_management_class, + tmp_hlq=tmp_hlq, ) result["changed"] = True @@ -444,6 +457,7 @@ def parse_and_validate_args(params): sms_storage_class=dict(type=sms_type, required=False), sms_management_class=dict(type=sms_type, required=False), hlq=dict(type=hlq_type, default=hlq_default, dependencies=["operation"]), + tmp_hlq=dict(type=hlq_type, required=False), ) parsed_args = BetterArgParser(arg_defs).parse_args(params) @@ -466,6 +480,7 @@ def backup( space_type, sms_storage_class, sms_management_class, + tmp_hlq, ): """Backup data sets or a volume to a new data set or unix file. @@ -482,10 +497,11 @@ def backup( space_type (str): The unit of measurement to use when defining data set space. sms_storage_class (str): Specifies the storage class to use. sms_management_class (str): Specifies the management class to use. + tmp_hlq (str): Specifies the tmp hlq to temporary datasets """ args = locals() zoau_args = to_dzip_args(**args) - datasets.zip(**zoau_args) + datasets.dzip(**zoau_args) def restore( @@ -502,6 +518,7 @@ def restore( space_type, sms_storage_class, sms_management_class, + tmp_hlq, ): """[summary] @@ -523,23 +540,26 @@ def restore( space_type (str): The unit of measurement to use when defining data set space. sms_storage_class (str): Specifies the storage class to use. sms_management_class (str): Specifies the management class to use. + tmp_hlq (str): : Specifies the tmp hlq to temporary datasets """ args = locals() zoau_args = to_dunzip_args(**args) - response = datasets._unzip(**zoau_args) + output = "" + try: + rc = datasets.dunzip(**zoau_args) + except zoau_exceptions.ZOAUException as dunzip_exception: + output = dunzip_exception.response.stdout_response + output = output + dunzip_exception.response.stderr_response + rc = get_real_rc(output) failed = False - true_rc = response.rc - if response.rc > 0: - output = response.stdout_response + response.stderr_response - true_rc = get_real_rc(output) or true_rc - if true_rc > 0 and true_rc <= 4: + if rc > 0 and rc <= 4: if recover is not True: failed = True - elif true_rc > 0: + elif rc > 4: failed = True if failed: - raise exceptions.ZOAUException( - "%s,RC=%s" % (response.stderr_response, response.rc) + raise zoau_exceptions.ZOAUException( + "{0}, RC={1}".format(output, rc) ) @@ -631,7 +651,7 @@ def hlq_default(contents, dependencies): """ hlq = None if dependencies.get("operation") == "restore": - hlq = datasets.hlq() + hlq = datasets.get_hlq() return hlq @@ -791,6 +811,10 @@ def to_dzip_args(**kwargs): if kwargs.get("space_type"): size += kwargs.get("space_type") zoau_args["size"] = size + + if kwargs.get("tmp_hlq"): + zoau_args["tmphlq"] = str(kwargs.get("tmp_hlq")) + return zoau_args @@ -844,7 +868,10 @@ def to_dunzip_args(**kwargs): zoau_args["size"] = size if kwargs.get("hlq"): - zoau_args["hlq"] = kwargs.get("hlq") + zoau_args["high_level_qualifier"] = kwargs.get("hlq") + + if kwargs.get("tmp_hlq"): + zoau_args["tmphlq"] = str(kwargs.get("tmp_hlq")) return zoau_args diff --git a/tests/functional/modules/test_zos_backup_restore.py b/tests/functional/modules/test_zos_backup_restore.py index 1b44ec124..a35750b63 100644 --- a/tests/functional/modules/test_zos_backup_restore.py +++ b/tests/functional/modules/test_zos_backup_restore.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2020 +# Copyright (c) IBM Corporation 2020, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -26,7 +26,7 @@ DATA_SET_QUALIFIER2 = "{0}.PRIVATE.TESTDS2" DATA_SET_BACKUP_LOCATION = "MY.BACKUP" UNIX_BACKUP_LOCATION = "/tmp/mybackup.dzp" -NEW_HLQ = "NEWHLQ" +NEW_HLQ = "TMPHLQ" DATA_SET_RESTORE_LOCATION = DATA_SET_QUALIFIER.format(NEW_HLQ) DATA_SET_RESTORE_LOCATION2 = DATA_SET_QUALIFIER2.format(NEW_HLQ) @@ -73,6 +73,10 @@ def delete_data_set(hosts, data_set_name): def delete_file(hosts, path): hosts.all.file(path=path, state="absent") +def delete_remnants(hosts): + hosts.all.shell(cmd="drm 'ANSIBLE.*'") + hosts.all.shell(cmd="drm 'TEST.*'") + hosts.all.shell(cmd="drm 'TMPHLQ.*'") def get_unused_volume_serial(hosts): found = False @@ -87,7 +91,6 @@ def is_volume(hosts, volume): results = hosts.all.shell(cmd="vtocls ${volume}") failed = False for result in results.contacted.values(): - print(result) if result.get("failed", False) is True: failed = True if result.get("rc", 0) > 0: @@ -130,7 +133,6 @@ def assert_data_set_or_file_does_not_exist(hosts, name): def assert_data_set_exists(hosts, data_set_name): results = hosts.all.shell("dls '{0}'".format(data_set_name.upper())) for result in results.contacted.values(): - print(result) found = search( "^{0}$".format(data_set_name), result.get("stdout"), IGNORECASE | MULTILINE ) @@ -213,6 +215,7 @@ def test_backup_of_data_set(ansible_zos_module, backup_name, overwrite, recover) finally: delete_data_set_or_file(hosts, data_set_name) delete_data_set_or_file(hosts, backup_name) + delete_remnants(hosts) @pytest.mark.parametrize( @@ -249,6 +252,7 @@ def test_backup_of_data_set_when_backup_dest_exists( finally: delete_data_set_or_file(hosts, data_set_name) delete_data_set_or_file(hosts, backup_name) + delete_remnants(hosts) @pytest.mark.parametrize( @@ -269,6 +273,7 @@ def test_backup_and_restore_of_data_set( ): hosts = ansible_zos_module data_set_name = get_tmp_ds_name() + new_hlq = NEW_HLQ try: delete_data_set_or_file(hosts, data_set_name) delete_data_set_or_file(hosts, backup_name) @@ -282,19 +287,21 @@ def test_backup_and_restore_of_data_set( overwrite=overwrite, recover=recover, ) + if not overwrite: + new_hlq = "TEST" assert_module_did_not_fail(results) assert_data_set_or_file_exists(hosts, backup_name) results = hosts.all.zos_backup_restore( operation="restore", backup_name=backup_name, - hlq=NEW_HLQ, + hlq=new_hlq, overwrite=overwrite, ) assert_module_did_not_fail(results) finally: delete_data_set_or_file(hosts, data_set_name) - delete_data_set_or_file(hosts, DATA_SET_RESTORE_LOCATION) delete_data_set_or_file(hosts, backup_name) + delete_remnants(hosts) @pytest.mark.parametrize( @@ -348,8 +355,8 @@ def test_backup_and_restore_of_data_set_various_space_measurements( assert_module_did_not_fail(results) finally: delete_data_set_or_file(hosts, data_set_name) - delete_data_set_or_file(hosts, DATA_SET_RESTORE_LOCATION) delete_data_set_or_file(hosts, backup_name) + delete_remnants(hosts) @pytest.mark.parametrize( @@ -397,8 +404,8 @@ def test_backup_and_restore_of_data_set_when_restore_location_exists( assert_module_failed(results) finally: delete_data_set_or_file(hosts, data_set_name) - delete_data_set_or_file(hosts, DATA_SET_RESTORE_LOCATION) delete_data_set_or_file(hosts, backup_name) + delete_remnants(hosts) def test_backup_and_restore_of_multiple_data_sets(ansible_zos_module): @@ -428,15 +435,13 @@ def test_backup_and_restore_of_multiple_data_sets(ansible_zos_module): backup_name=DATA_SET_BACKUP_LOCATION, overwrite=True, recover=True, - hlq=NEW_HLQ, ) assert_module_did_not_fail(results) finally: delete_data_set_or_file(hosts, data_set_name) delete_data_set_or_file(hosts, data_set_name2) - delete_data_set_or_file(hosts, DATA_SET_RESTORE_LOCATION) - delete_data_set_or_file(hosts, DATA_SET_RESTORE_LOCATION2) delete_data_set_or_file(hosts, DATA_SET_BACKUP_LOCATION) + delete_remnants(hosts) def test_backup_and_restore_of_multiple_data_sets_by_hlq(ansible_zos_module): @@ -473,9 +478,8 @@ def test_backup_and_restore_of_multiple_data_sets_by_hlq(ansible_zos_module): finally: delete_data_set_or_file(hosts, data_set_name) delete_data_set_or_file(hosts, data_set_name2) - delete_data_set_or_file(hosts, DATA_SET_RESTORE_LOCATION) - delete_data_set_or_file(hosts, DATA_SET_RESTORE_LOCATION2) delete_data_set_or_file(hosts, DATA_SET_BACKUP_LOCATION) + delete_remnants(hosts) def test_backup_and_restore_exclude_from_pattern(ansible_zos_module): @@ -485,7 +489,6 @@ def test_backup_and_restore_exclude_from_pattern(ansible_zos_module): try: delete_data_set_or_file(hosts, data_set_name) delete_data_set_or_file(hosts, data_set_name2) - delete_data_set_or_file(hosts, DATA_SET_RESTORE_LOCATION) delete_data_set_or_file(hosts, DATA_SET_RESTORE_LOCATION2) delete_data_set_or_file(hosts, DATA_SET_BACKUP_LOCATION) create_sequential_data_set_with_contents( @@ -514,9 +517,9 @@ def test_backup_and_restore_exclude_from_pattern(ansible_zos_module): finally: delete_data_set_or_file(hosts, data_set_name) delete_data_set_or_file(hosts, data_set_name2) - delete_data_set_or_file(hosts, DATA_SET_RESTORE_LOCATION) delete_data_set_or_file(hosts, DATA_SET_RESTORE_LOCATION2) delete_data_set_or_file(hosts, DATA_SET_BACKUP_LOCATION) + delete_remnants(hosts) @pytest.mark.parametrize( @@ -545,7 +548,7 @@ def test_restore_of_data_set_when_backup_does_not_exist( finally: delete_data_set_or_file(hosts, DATA_SET_RESTORE_LOCATION) delete_data_set_or_file(hosts, backup_name) - + delete_remnants(hosts) @pytest.mark.parametrize( "backup_name", @@ -574,7 +577,7 @@ def test_backup_of_data_set_when_data_set_does_not_exist( finally: delete_data_set_or_file(hosts, data_set_name) delete_data_set_or_file(hosts, backup_name) - + delete_remnants(hosts) def test_backup_of_data_set_when_volume_does_not_exist(ansible_zos_module): hosts = ansible_zos_module @@ -597,6 +600,7 @@ def test_backup_of_data_set_when_volume_does_not_exist(ansible_zos_module): finally: delete_data_set_or_file(hosts, data_set_name) delete_data_set_or_file(hosts, DATA_SET_BACKUP_LOCATION) + delete_remnants(hosts) def test_restore_of_data_set_when_volume_does_not_exist(ansible_zos_module): @@ -629,6 +633,7 @@ def test_restore_of_data_set_when_volume_does_not_exist(ansible_zos_module): delete_data_set_or_file(hosts, data_set_name) delete_data_set_or_file(hosts, DATA_SET_RESTORE_LOCATION) delete_data_set_or_file(hosts, DATA_SET_BACKUP_LOCATION) + delete_remnants(hosts) # def test_backup_and_restore_of_data_set_from_volume_to_new_volume(ansible_zos_module): From ba43c842d15272683c70a336f9bc93ff13c215d0 Mon Sep 17 00:00:00 2001 From: ketankelkar <ktnklkr@gmail.com> Date: Mon, 4 Mar 2024 15:49:47 -0800 Subject: [PATCH 318/495] [1.10.0] [zos_data_set] Bugfix/1268/quick fix len of volumes work around (#1270) * add None check for volumes in create function Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add changelog fragment Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update changelog fragment name Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> --- .../fragments/1270-quick-fix-len-of-volumes-work-around.yml | 5 +++++ plugins/module_utils/data_set.py | 2 +- 2 files changed, 6 insertions(+), 1 deletion(-) create mode 100644 changelogs/fragments/1270-quick-fix-len-of-volumes-work-around.yml diff --git a/changelogs/fragments/1270-quick-fix-len-of-volumes-work-around.yml b/changelogs/fragments/1270-quick-fix-len-of-volumes-work-around.yml new file mode 100644 index 000000000..1f6ba201d --- /dev/null +++ b/changelogs/fragments/1270-quick-fix-len-of-volumes-work-around.yml @@ -0,0 +1,5 @@ +trivial: + - module_utils/data_set.py - len(volme) was always called on receiving + DatasetVerificationError from Dataset.create() even though volumes=None was + a valid possible outcome. The fix adds a null check to the conditional. + (https://github.com/ansible-collections/ibm_zos_core/pull/1270). \ No newline at end of file diff --git a/plugins/module_utils/data_set.py b/plugins/module_utils/data_set.py index 34346dc12..613bc9973 100644 --- a/plugins/module_utils/data_set.py +++ b/plugins/module_utils/data_set.py @@ -1030,7 +1030,7 @@ def create( ) except exceptions.DatasetVerificationError as e: # verification of a data set spanning multiple volumes is currently broken in ZOAU v.1.3.0 - if len(volumes) > 1: + if volumes and len(volumes) > 1: if DataSet.data_set_cataloged(name, volumes): return 0 raise DatasetCreateError( From d3e14f3717d453b1749462e513d9174e4c452339 Mon Sep 17 00:00:00 2001 From: Ivan Moreno <iamorenosoto@gmail.com> Date: Wed, 6 Mar 2024 16:22:30 -0700 Subject: [PATCH 319/495] [v1.10.0] [zos_job_submit] Handling of non-UTF8 chars in job output (#1261) * Added test to validate handling of non-UTF8 chars * Add changelog fragment * Clean up new test * Add try-except block when reading a job's output * Remove commented code * Update changelog fragment * Change job queried in test --- .../1261-job-submit-non-utf8-chars.yml | 9 ++ plugins/module_utils/job.py | 27 +++--- .../modules/test_zos_job_query_func.py | 2 +- .../modules/test_zos_job_submit_func.py | 85 ++++++++++++++++++- 4 files changed, 109 insertions(+), 14 deletions(-) create mode 100644 changelogs/fragments/1261-job-submit-non-utf8-chars.yml diff --git a/changelogs/fragments/1261-job-submit-non-utf8-chars.yml b/changelogs/fragments/1261-job-submit-non-utf8-chars.yml new file mode 100644 index 000000000..7f322afe4 --- /dev/null +++ b/changelogs/fragments/1261-job-submit-non-utf8-chars.yml @@ -0,0 +1,9 @@ +bugfixes: + - module_utils/job.py - job output containing non-printable characters would + crash modules. Fix now handles the error gracefully and returns a message + to the user inside `content` of the `ddname` that failed. + (https://github.com/ansible-collections/ibm_zos_core/pull/1261). +trivial: + - zos_job_submit - add test case to validate a bugfix in ZOAU v1.3.0 that + handles non-UTF8 characters correctly in a job's output. + (https://github.com/ansible-collections/ibm_zos_core/pull/1261). \ No newline at end of file diff --git a/plugins/module_utils/job.py b/plugins/module_utils/job.py index af96c6ab6..1afdaed55 100644 --- a/plugins/module_utils/job.py +++ b/plugins/module_utils/job.py @@ -356,11 +356,21 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=T tmpcont = None if "step_name" in single_dd: if "dd_name" in single_dd: - tmpcont = jobs.read_output( - entry.job_id, - single_dd["step_name"], - single_dd["dd_name"] - ) + # In case ZOAU fails when reading the job output, we'll + # add a message to the user telling them of this. + # ZOAU cannot read partial output from a job, so we + # have to make do with nothing from this step if it fails. + try: + tmpcont = jobs.read_output( + entry.job_id, + single_dd["step_name"], + single_dd["dd_name"] + ) + except UnicodeDecodeError: + tmpcont = ( + "Non-printable UTF-8 characters were present in this output. " + "Please access it manually." + ) dd["content"] = tmpcont.split("\n") job["ret_code"]["steps"].extend(_parse_steps(tmpcont)) @@ -393,13 +403,6 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=T job["ret_code"]["msg_code"] = None job["ret_code"]["code"] = None - # if len(list_of_dds) > 0: - # The duration should really only be returned for job submit but the code - # is used job_output as well, for now we can ignore this point unless - # we want to offer a wait_time_s for job output which might be reasonable. - # Note: Moved this to the upper time loop, so it should always be populated. - # job["duration"] = duration - final_entries.append(job) if not final_entries: final_entries = _job_not_found(job_id, owner, job_name, "unavailable") diff --git a/tests/functional/modules/test_zos_job_query_func.py b/tests/functional/modules/test_zos_job_query_func.py index 8c1f170ed..ee7b03157 100644 --- a/tests/functional/modules/test_zos_job_query_func.py +++ b/tests/functional/modules/test_zos_job_query_func.py @@ -114,7 +114,7 @@ def test_zos_job_name_query_multi_wildcards_func(ansible_zos_module): def test_zos_job_id_query_short_ids_func(ansible_zos_module): hosts = ansible_zos_module - qresults = hosts.all.zos_job_query(job_id="STC003") + qresults = hosts.all.zos_job_query(job_id="STC00002") for qresult in qresults.contacted.values(): assert qresult.get("jobs") is not None diff --git a/tests/functional/modules/test_zos_job_submit_func.py b/tests/functional/modules/test_zos_job_submit_func.py index 9de3e992a..0694cdfa0 100644 --- a/tests/functional/modules/test_zos_job_submit_func.py +++ b/tests/functional/modules/test_zos_job_submit_func.py @@ -255,10 +255,56 @@ // """ -JCL_FULL_INPUT="""//HLQ0 JOB MSGLEVEL=(1,1), +JCL_FULL_INPUT = """//HLQ0 JOB MSGLEVEL=(1,1), // MSGCLASS=A,CLASS=A,NOTIFY=&SYSUID //STEP1 EXEC PGM=BPXBATCH,PARM='PGM /bin/sleep 5'""" +C_SRC_INVALID_UTF8 = """#include <stdio.h> +int main() +{ + unsigned char a=0x64; + unsigned char b=0x2A; + unsigned char c=0xB8; + unsigned char d=0xFF; + unsigned char e=0x81; + unsigned char f=0x82; + unsigned char g=0x83; + unsigned char h=0x00; + printf("Value of a: Hex: %X, character: %c",a,a); + printf("Value of b: Hex: %X, character: %c",b,b); + printf("Value of c: Hex: %X, character: %c",c,c); + printf("Value of d: Hex: %X, character: %c",d,d); + printf("Value of a: Hex: %X, character: %c",e,e); + printf("Value of b: Hex: %X, character: %c",f,f); + printf("Value of c: Hex: %X, character: %c",g,g); + printf("Value of d: Hex: %X, character: %c",h,h); + return 0; +} +""" + +JCL_INVALID_UTF8_CHARS_EXC = """//* +//****************************************************************************** +//* Job that runs a C program that returns characters outside of the UTF-8 range +//* expected by Python. This job tests a bugfix present in ZOAU v1.3.0 onwards +//* that deals properly with these chars. +//* The JCL needs to be formatted to give it the directory where the C program +//* is located. +//****************************************************************************** +//NOEBCDIC JOB (T043JM,JM00,1,0,0,0),'NOEBCDIC - JRM', +// MSGCLASS=X,MSGLEVEL=1,NOTIFY=&SYSUID +//NOPRINT EXEC PGM=BPXBATCH +//STDPARM DD * +SH ( +cd {0}; +./noprint; +exit 0; +) +//STDIN DD DUMMY +//STDOUT DD SYSOUT=* +//STDERR DD SYSOUT=* +// +""" + TEMP_PATH = "/tmp/jcl" DATA_SET_NAME_SPECIAL_CHARS = "imstestl.im@1.xxx05" @@ -712,3 +758,40 @@ def test_negative_job_submit_local_jcl_typrun_scan(ansible_zos_module): assert re.search(r'error ? ?', repr(result.get("msg"))) assert result.get("jobs")[0].get("job_id") is not None assert result.get("jobs")[0].get("ret_code").get("msg_text") == "?" + + +# This test case is related to the following GitHub issues: +# - https://github.com/ansible-collections/ibm_zos_core/issues/677 +# - https://github.com/ansible-collections/ibm_zos_core/issues/972 +# - https://github.com/ansible-collections/ibm_zos_core/issues/1160 +# - https://github.com/ansible-collections/ibm_zos_core/issues/1255 +def test_zoau_bugfix_invalid_utf8_chars(ansible_zos_module): + try: + hosts = ansible_zos_module + + # Copy C source and compile it. + hosts.all.file(path=TEMP_PATH, state="directory") + hosts.all.shell( + cmd="echo {0} > {1}/noprint.c".format(quote(C_SRC_INVALID_UTF8), TEMP_PATH) + ) + hosts.all.shell(cmd="xlc -o {0}/noprint {0}/noprint.c") + + # Create local JCL and submit it. + tmp_file = tempfile.NamedTemporaryFile(delete=True) + with open(tmp_file.name, "w") as f: + f.write(JCL_INVALID_UTF8_CHARS_EXC.format(TEMP_PATH)) + + results = hosts.all.zos_job_submit( + src=tmp_file.name, + location="LOCAL", + wait_time_s=15 + ) + + for result in results.contacted.values(): + # We shouldn't get an error now that ZOAU handles invalid/unprintable + # UTF-8 chars correctly. + assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" + assert result.get("jobs")[0].get("ret_code").get("code") == 0 + assert result.get("changed") is True + finally: + hosts.all.file(path=TEMP_PATH, state="absent") From 9799ab1ac452acd548f18fa158fa54752648b77a Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Fri, 8 Mar 2024 10:11:07 -0600 Subject: [PATCH 320/495] [v1.10.0] [Documentation] Replaced path to src in zos_archive and zos_unarchive documentation (#1286) * Replaced path to src in zos_archive and zos_unarchive documentation * Added changelog --- .../1286-update-zos_archive-zos_unarchive-docs.yml | 5 +++++ plugins/modules/zos_archive.py | 10 +++++----- plugins/modules/zos_unarchive.py | 14 +++++++------- 3 files changed, 17 insertions(+), 12 deletions(-) create mode 100644 changelogs/fragments/1286-update-zos_archive-zos_unarchive-docs.yml diff --git a/changelogs/fragments/1286-update-zos_archive-zos_unarchive-docs.yml b/changelogs/fragments/1286-update-zos_archive-zos_unarchive-docs.yml new file mode 100644 index 000000000..ef213b06f --- /dev/null +++ b/changelogs/fragments/1286-update-zos_archive-zos_unarchive-docs.yml @@ -0,0 +1,5 @@ +trivial: + - zos_archive - Updated examples to use path instead of src. + (https://github.com/ansible-collections/ibm_zos_core/pull/1286). + - zos_unarchive - Updated examples and return dict to use path instead of src. + (https://github.com/ansible-collections/ibm_zos_core/pull/1286). \ No newline at end of file diff --git a/plugins/modules/zos_archive.py b/plugins/modules/zos_archive.py index 959d263d9..951b6bc87 100644 --- a/plugins/modules/zos_archive.py +++ b/plugins/modules/zos_archive.py @@ -325,7 +325,7 @@ # Simple archive - name: Archive file into a tar zos_archive: - path: /tmp/archive/foo.txt + src: /tmp/archive/foo.txt dest: /tmp/archive/foo_archive_test.tar format: name: tar @@ -333,7 +333,7 @@ # Archive multiple files - name: Compress list of files into a zip zos_archive: - path: + src: - /tmp/archive/foo.txt - /tmp/archive/bar.txt dest: /tmp/archive/foo_bar_archive_test.zip @@ -343,7 +343,7 @@ # Archive one data set into terse - name: Compress data set into a terse zos_archive: - path: "USER.ARCHIVE.TEST" + src: "USER.ARCHIVE.TEST" dest: "USER.ARCHIVE.RESULT.TRS" format: name: terse @@ -351,7 +351,7 @@ # Use terse with different options - name: Compress data set into a terse, specify pack algorithm and use adrdssu zos_archive: - path: "USER.ARCHIVE.TEST" + src: "USER.ARCHIVE.TEST" dest: "USER.ARCHIVE.RESULT.TRS" format: name: terse @@ -362,7 +362,7 @@ # Use a pattern to store - name: Compress data set pattern using xmit zos_archive: - path: "USER.ARCHIVE.*" + src: "USER.ARCHIVE.*" exclude_sources: "USER.ARCHIVE.EXCLUDE.*" dest: "USER.ARCHIVE.RESULT.XMIT" format: diff --git a/plugins/modules/zos_unarchive.py b/plugins/modules/zos_unarchive.py index fcbda95e1..e9b17766c 100644 --- a/plugins/modules/zos_unarchive.py +++ b/plugins/modules/zos_unarchive.py @@ -323,14 +323,14 @@ # Simple extract - name: Copy local tar file and unpack it on the managed z/OS node. zos_unarchive: - path: "./files/archive_folder_test.tar" + src: "./files/archive_folder_test.tar" format: name: tar # use include - name: Unarchive a bzip file selecting only a file to unpack. zos_unarchive: - path: "/tmp/test.bz2" + src: "/tmp/test.bz2" format: name: bz2 include: @@ -339,7 +339,7 @@ # Use exclude - name: Unarchive a terse data set and excluding data sets from unpacking. zos_unarchive: - path: "USER.ARCHIVE.RESULT.TRS" + src: "USER.ARCHIVE.RESULT.TRS" format: name: terse exclude: @@ -349,7 +349,7 @@ # List option - name: List content from XMIT zos_unarchive: - path: "USER.ARCHIVE.RESULT.XMIT" + src: "USER.ARCHIVE.RESULT.XMIT" format: name: xmit format_options: @@ -358,14 +358,14 @@ ''' RETURN = r''' -path: +src: description: - File path or data set name unarchived. + File path or data set name unpacked. type: str returned: always dest_path: description: - - Destination path where archive was extracted. + - Destination path where archive was unpacked. type: str returned: always targets: From 068a1a521e00ae8079f4ebe67cff8d510b28580b Mon Sep 17 00:00:00 2001 From: Demetri <dimatos@gmail.com> Date: Mon, 11 Mar 2024 13:34:51 -0700 Subject: [PATCH 321/495] [v1.10.0-beta.1][port forward] Documentation to update zos_ping about the deprecated scp in OpenSSH 9 or later. (#1295) * Update zos_ping to note OpenSSH deprecation of SCP Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update changelog fragement after cherry-pick Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> --- changelogs/fragments/1295-doc-zos_ping-scp.yml | 7 +++++++ docs/source/modules/zos_ping.rst | 12 ++++++++++++ plugins/modules/zos_ping.py | 12 +++++++++++- plugins/modules/zos_ping.rexx | 4 ++-- 4 files changed, 32 insertions(+), 3 deletions(-) create mode 100644 changelogs/fragments/1295-doc-zos_ping-scp.yml diff --git a/changelogs/fragments/1295-doc-zos_ping-scp.yml b/changelogs/fragments/1295-doc-zos_ping-scp.yml new file mode 100644 index 000000000..a9477150d --- /dev/null +++ b/changelogs/fragments/1295-doc-zos_ping-scp.yml @@ -0,0 +1,7 @@ +trivial: + - zos_ping - Update zos_ping documentation to instruct users how + to fall back to legacy SCP when using OpenSSH 9.0 or later. + (https://github.com/ansible-collections/ibm_zos_core/pull/1295). + - zos_ping - Update zos_ping REXX source to check for python + version 3.10 or later. + (https://github.com/ansible-collections/ibm_zos_core/pull/1295). \ No newline at end of file diff --git a/docs/source/modules/zos_ping.rst b/docs/source/modules/zos_ping.rst index a9a959dfe..a4405b473 100644 --- a/docs/source/modules/zos_ping.rst +++ b/docs/source/modules/zos_ping.rst @@ -40,10 +40,22 @@ Examples +Notes +----- +.. note:: + This module is written in REXX and relies on the SCP protocol to transfer the source to the managed z/OS node and encode it in the managed nodes default encoding, eg IBM-1047. Starting with OpenSSH 9.0, it switches from SCP to use SFTP by default, meaning transfers are no longer treated as text and are transferred as binary preserving the source files encoding resulting in a module failure. If you are using OpenSSH 9.0 (ssh -V) or later, you can instruct SSH to use SCP by adding the entry ``scp_extra_args="-O"`` into the ini file named ``ansible.cfg``. +See Also +-------- + +.. seealso:: + + - :ref:`ansible.builtin.ssh_module` + + Return Values diff --git a/plugins/modules/zos_ping.py b/plugins/modules/zos_ping.py index eb44740e8..6de0cccf0 100644 --- a/plugins/modules/zos_ping.py +++ b/plugins/modules/zos_ping.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019, 2020, 2023 +# Copyright (c) IBM Corporation 2019 - 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -31,6 +31,16 @@ - "Blake Becker (@blakeinate)" - "Demetrios Dimatos (@ddimatos)" options: {} +notes: + - This module is written in REXX and relies on the SCP protocol to transfer the source to + the managed z/OS node and encode it in the managed nodes default encoding, eg IBM-1047. + Starting with OpenSSH 9.0, it switches from SCP to use SFTP by default, meaning transfers + are no longer treated as text and are transferred as binary preserving the source files + encoding resulting in a module failure. If you are using OpenSSH 9.0 (ssh -V) or later, + you can instruct SSH to use SCP by adding the entry C(scp_extra_args="-O") into the ini + file named C(ansible.cfg). +seealso: +- module: ansible.builtin.ssh """ EXAMPLES = r""" diff --git a/plugins/modules/zos_ping.rexx b/plugins/modules/zos_ping.rexx index a4fd53340..a881146b0 100644 --- a/plugins/modules/zos_ping.rexx +++ b/plugins/modules/zos_ping.rexx @@ -62,7 +62,7 @@ Parse Arg argFile . pythonName = 'Python' majVersionPython = 3 -minVersionPython = 8 +minVersionPython = 10 warningJsonList = '' If (argFile = '') Then Do @@ -85,7 +85,7 @@ If (rc <> 0 | returnCode <> HWTJ_OK) Then Do failModule(errmsg, "", retC) End -/* Check for Python version >= 3.8 eg: 'Python 3.8.2' */ +/* Check for Python version >= 3.8 eg: 'Python 3.10.0' */ retC = bpxwunix('python3 --version', out., err.) If (err.0 > 0) Then Do Do index=1 To err.0 From 02b49be1ff50aa054bd86c5c47d1eca0dfd09e7e Mon Sep 17 00:00:00 2001 From: Demetri <dimatos@gmail.com> Date: Mon, 11 Mar 2024 13:35:53 -0700 Subject: [PATCH 322/495] [v1.10.0-beta.1][port forward] Add chained command example to zos_tso_command (#1293) * Add chained command example with folding scalar and chomp Signed-off-by: ddimatos <dimatos@gmail.com> * Update copyright year Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Updated zos_tso_command doc Signed-off-by: ddimatos <dimatos@gmail.com> * Updted changelog fragment PR Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> --- .../1292-doc-zos_tso_command-example.yml | 4 +++ docs/source/modules/zos_tso_command.rst | 33 ++++++++++------- plugins/modules/zos_tso_command.py | 36 +++++++++++-------- 3 files changed, 46 insertions(+), 27 deletions(-) create mode 100644 changelogs/fragments/1292-doc-zos_tso_command-example.yml diff --git a/changelogs/fragments/1292-doc-zos_tso_command-example.yml b/changelogs/fragments/1292-doc-zos_tso_command-example.yml new file mode 100644 index 000000000..6ed868be7 --- /dev/null +++ b/changelogs/fragments/1292-doc-zos_tso_command-example.yml @@ -0,0 +1,4 @@ +trivial: + - zos_tso_command - Added an example on how to chain multiple TSO commands such + that they are invoked together when dependent on each other. + (https://github.com/ansible-collections/ibm_zos_core/pull/1293). \ No newline at end of file diff --git a/docs/source/modules/zos_tso_command.rst b/docs/source/modules/zos_tso_command.rst index 816a859e7..f3cdb0254 100644 --- a/docs/source/modules/zos_tso_command.rst +++ b/docs/source/modules/zos_tso_command.rst @@ -55,27 +55,34 @@ Examples .. code-block:: yaml+jinja - - name: Execute TSO commands to allocate a new dataset + - name: Execute TSO commands to allocate a new dataset. zos_tso_command: - commands: - - alloc da('TEST.HILL3.TEST') like('TEST.HILL3') - - delete 'TEST.HILL3.TEST' + commands: + - alloc da('TEST.HILL3.TEST') like('TEST.HILL3') + - delete 'TEST.HILL3.TEST' - - name: Execute TSO command list user TESTUSER to obtain TSO information + - name: Execute TSO command List User (LU) for TESTUSER to obtain TSO information. zos_tso_command: - commands: - - LU TESTUSER + commands: + - LU TESTUSER - - name: Execute TSO command to list dataset data (allow 4 for no dataset listed or cert found) + - name: Execute TSO command List Dataset (LISTDSD) and allow for maximum return code of 4. zos_tso_command: - commands: - - LISTDSD DATASET('HLQ.DATA.SET') ALL GENERIC - max_rc: 4 + commands: + - LISTDSD DATASET('HLQ.DATA.SET') ALL GENERIC + max_rc: 4 - name: Execute TSO command to run explicitly a REXX script from a data set. zos_tso_command: - commands: - - EXEC HLQ.DATASET.REXX exec + commands: + - EXEC HLQ.DATASET.REXX exec + + - name: Chain multiple TSO commands into one invocation using semicolons. + zos_tso_command: + commands: >- + ALLOCATE DDNAME(IN1) DSNAME('HLQ.PDSE.DATA.SRC(INPUT)') SHR; + ALLOCATE DDNAME(OUT1) DSNAME('HLQ.PDSE.DATA.DEST(OUTPUT)') SHR; + OCOPY INDD(IN1) OUTDD(OUT1) BINARY; diff --git a/plugins/modules/zos_tso_command.py b/plugins/modules/zos_tso_command.py index 28b033a90..6c2cb6ef6 100644 --- a/plugins/modules/zos_tso_command.py +++ b/plugins/modules/zos_tso_command.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019 - 2023 +# Copyright (c) IBM Corporation 2019 - 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -26,6 +26,7 @@ author: - "Xiao Yuan Ma (@bjmaxy)" - "Rich Parker (@richp405)" + - "Demetrios Dimatos (@ddimatos)" options: commands: description: @@ -94,27 +95,34 @@ """ EXAMPLES = r""" -- name: Execute TSO commands to allocate a new dataset +- name: Execute TSO commands to allocate a new dataset. zos_tso_command: - commands: - - alloc da('TEST.HILL3.TEST') like('TEST.HILL3') - - delete 'TEST.HILL3.TEST' + commands: + - alloc da('TEST.HILL3.TEST') like('TEST.HILL3') + - delete 'TEST.HILL3.TEST' -- name: Execute TSO command list user TESTUSER to obtain TSO information +- name: Execute TSO command List User (LU) for TESTUSER to obtain TSO information. zos_tso_command: - commands: - - LU TESTUSER + commands: + - LU TESTUSER -- name: Execute TSO command to list dataset data (allow 4 for no dataset listed or cert found) +- name: Execute TSO command List Dataset (LISTDSD) and allow for maximum return code of 4. zos_tso_command: - commands: - - LISTDSD DATASET('HLQ.DATA.SET') ALL GENERIC - max_rc: 4 + commands: + - LISTDSD DATASET('HLQ.DATA.SET') ALL GENERIC + max_rc: 4 - name: Execute TSO command to run a REXX script explicitly from a data set. zos_tso_command: - commands: - - EXEC HLQ.DATASET.REXX exec + commands: + - EXEC HLQ.DATASET.REXX exec + +- name: Chain multiple TSO commands into one invocation using semicolons. + zos_tso_command: + commands: >- + ALLOCATE DDNAME(IN1) DSNAME('HLQ.PDSE.DATA.SRC(INPUT)') SHR; + ALLOCATE DDNAME(OUT1) DSNAME('HLQ.PDSE.DATA.DEST(OUTPUT)') SHR; + OCOPY INDD(IN1) OUTDD(OUT1) BINARY; """ from ansible.module_utils.basic import AnsibleModule From 1c8259210b0b200e2dceaac2c3d1aac1ffff963a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Wed, 13 Mar 2024 10:15:15 -0600 Subject: [PATCH 323/495] [Enabler][995 996]Remove_local_charset_from_zos_fetch (#1298) * First iteration * Remove latest sanity cases * Fix sanity * Remove to last local_charset left * Add fragment * Add comment of explanation * Change changelog --- .../1298-Remove_local_charset_from_zos_fetch.yml | 3 +++ plugins/action/zos_fetch.py | 14 +++++++++----- plugins/modules/zos_fetch.py | 10 ++++++---- tests/sanity/ignore-2.14.txt | 2 -- tests/sanity/ignore-2.15.txt | 2 -- tests/sanity/ignore-2.16.txt | 2 -- 6 files changed, 18 insertions(+), 15 deletions(-) create mode 100644 changelogs/fragments/1298-Remove_local_charset_from_zos_fetch.yml diff --git a/changelogs/fragments/1298-Remove_local_charset_from_zos_fetch.yml b/changelogs/fragments/1298-Remove_local_charset_from_zos_fetch.yml new file mode 100644 index 000000000..ca1ea840e --- /dev/null +++ b/changelogs/fragments/1298-Remove_local_charset_from_zos_fetch.yml @@ -0,0 +1,3 @@ +trivial: + - zos_fetch - Remove argument not documented. + (https://github.com/ansible-collections/ibm_zos_core/pull/1298). \ No newline at end of file diff --git a/plugins/action/zos_fetch.py b/plugins/action/zos_fetch.py index 087c70953..611922bf3 100644 --- a/plugins/action/zos_fetch.py +++ b/plugins/action/zos_fetch.py @@ -1,4 +1,4 @@ -# Copyright (c) IBM Corporation 2019-2023 +# Copyright (c) IBM Corporation 2019 - 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -107,7 +107,7 @@ def run(self, tmp=None, task_vars=None): src = self._task.args.get('src') dest = self._task.args.get('dest') - encoding = self._task.args.get('encoding') + encoding = self._task.args.get('encoding', None) flat = _process_boolean(self._task.args.get('flat'), default=False) is_binary = _process_boolean(self._task.args.get('is_binary')) ignore_sftp_stderr = _process_boolean( @@ -219,9 +219,13 @@ def run(self, tmp=None, task_vars=None): # Execute module on remote host # # ********************************************************** # new_module_args = self._task.args.copy() - new_module_args.update( - dict(local_charset=encode.Defaults.get_default_system_charset()) - ) + encoding_to = None + if encoding: + encoding_to = encoding.get("to", None) + if encoding is None or encoding_to is None: + new_module_args.update( + dict(encoding=dict(to=encode.Defaults.get_default_system_charset())) + ) remote_path = None try: fetch_res = self._execute_module( diff --git a/plugins/modules/zos_fetch.py b/plugins/modules/zos_fetch.py index 2b32f0760..dc4bc8071 100644 --- a/plugins/modules/zos_fetch.py +++ b/plugins/modules/zos_fetch.py @@ -584,7 +584,6 @@ def run_module(): validate_checksum=dict(required=False, default=True, type="bool"), encoding=dict(required=False, type="dict"), ignore_sftp_stderr=dict(type="bool", default=False, required=False), - local_charset=dict(type="str"), tmp_hlq=dict(required=False, type="str", default=None), ) ) @@ -606,7 +605,7 @@ def run_module(): tmp_hlq=dict(type='qualifier_or_empty', required=False, default=None), ) - if not module.params.get("encoding") and not module.params.get("is_binary"): + if not module.params.get("encoding").get("from") and not module.params.get("is_binary"): mvs_src = data_set.is_data_set(src) remote_charset = encode.Defaults.get_default_system_charset() @@ -614,10 +613,13 @@ def run_module(): "from": encode.Defaults.DEFAULT_EBCDIC_MVS_CHARSET if mvs_src else remote_charset, - "to": module.params.get("local_charset"), + "to": module.params.get("encoding").get("to"), } - if module.params.get("encoding"): + # We check encoding 'from' and 'to' because if the user pass both arguments of encoding, + # we honor those but encoding 'to' is an argument that the code obtain any time. + # Encoding will not be null and will generate problems as encoding 'from' could came empty. + if module.params.get("encoding").get("from") and module.params.get("encoding").get("to"): module.params.update( dict( from_encoding=module.params.get("encoding").get("from"), diff --git a/tests/sanity/ignore-2.14.txt b/tests/sanity/ignore-2.14.txt index 89cf4db51..55477a2d0 100644 --- a/tests/sanity/ignore-2.14.txt +++ b/tests/sanity/ignore-2.14.txt @@ -10,8 +10,6 @@ plugins/modules/zos_data_set.py validate-modules:missing-gplv3-license # License plugins/modules/zos_data_set.py validate-modules:undocumented-parameter # Keep aliases to match behavior of old module spec, but some aliases were functionally inaccurate, and detailing in docs would only confuse user. plugins/modules/zos_encode.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_fetch.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 -plugins/modules/zos_fetch.py validate-modules:parameter-type-not-in-doc # Passing args from action plugin -plugins/modules/zos_fetch.py validate-modules:undocumented-parameter # Passing args from action plugin plugins/modules/zos_find.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_job_output.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_job_query.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 diff --git a/tests/sanity/ignore-2.15.txt b/tests/sanity/ignore-2.15.txt index 89cf4db51..55477a2d0 100644 --- a/tests/sanity/ignore-2.15.txt +++ b/tests/sanity/ignore-2.15.txt @@ -10,8 +10,6 @@ plugins/modules/zos_data_set.py validate-modules:missing-gplv3-license # License plugins/modules/zos_data_set.py validate-modules:undocumented-parameter # Keep aliases to match behavior of old module spec, but some aliases were functionally inaccurate, and detailing in docs would only confuse user. plugins/modules/zos_encode.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_fetch.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 -plugins/modules/zos_fetch.py validate-modules:parameter-type-not-in-doc # Passing args from action plugin -plugins/modules/zos_fetch.py validate-modules:undocumented-parameter # Passing args from action plugin plugins/modules/zos_find.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_job_output.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_job_query.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 diff --git a/tests/sanity/ignore-2.16.txt b/tests/sanity/ignore-2.16.txt index 89cf4db51..55477a2d0 100644 --- a/tests/sanity/ignore-2.16.txt +++ b/tests/sanity/ignore-2.16.txt @@ -10,8 +10,6 @@ plugins/modules/zos_data_set.py validate-modules:missing-gplv3-license # License plugins/modules/zos_data_set.py validate-modules:undocumented-parameter # Keep aliases to match behavior of old module spec, but some aliases were functionally inaccurate, and detailing in docs would only confuse user. plugins/modules/zos_encode.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_fetch.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 -plugins/modules/zos_fetch.py validate-modules:parameter-type-not-in-doc # Passing args from action plugin -plugins/modules/zos_fetch.py validate-modules:undocumented-parameter # Passing args from action plugin plugins/modules/zos_find.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_job_output.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_job_query.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 From ce2551abd7724701e557be0af583f2de92ff5047 Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Wed, 13 Mar 2024 13:48:20 -0400 Subject: [PATCH 324/495] [v1.10.0] [Enabler] [zos_data_set] Enabler/992/fixsanityfourin110 (#1285) * catch up changes for core 1.10 changelog addition zos_data set to correct the choices value in type, space_type test module changed to request only UPPER types * record_format values, plus aliases (data_class, format, size) also capitalized examples and changed case-insensitive to sensitive in docs * added result output to test to see what failed in creation data_set: missed a comma in a choices entry * indentation error in documentation * arg name had a choice parameter (paste-o) submit call in a test had a 'wait' parameter * confusion with sms_storage_class aliase data_class versus sms_data_class with no alias * commented out catalog before delete actions in testing that pair (catalog, then absent) seems to consistently fail * pulled old print_results, moved to present when cataloged final test * changing exception handling in data_set which was accessing non-existing members * re-enabled pre-catalog, moved print_resp to line 326 * re-printing @166 on creation * change 'space_type' to default=M, so code matches docs * added default record_format to 'FB' * forcing record_format to FB if none * adding printresult do create when absent for details * needed to eliminate length check on 'volumes' which can legit be a nonetype * corrected output header in test, added secondary default to zos_data_set/record_type * expanded results @ 416 to show both creation run results. * testing changing vsam (esds) record to F record format * cleaned inline comment, removed esds from data set creation list * re-elaborating output on cat/uncat/recat testing * removed rrds as well as esds types * updated test for 413, and old_aliases to use 2 vars removed default=F for vsam type * re-enabled complext ds types, and added logging enabler to test system * expanded exception on dataset creation to show calling params * printing ensure present 253 * dumping formatted params in exception handler * changing record format enforcement * forcing the blank record_type for vsam dataset types * expanded settings replacement to after arg parser removed vvv printouts and logging import * added redundant value check, so mutually exclusive values are checked before and after arg parsing. * allow record size in batch, because it will get cleared out before use * changing 'size' from an alias to an optional parameter, because the type changed * corrected missing commas * added print to create/delete in batch * removed 'size' parameter, and removed 'old args' test and repair routines. * added print back into 184 to make sure we tripped on a junk file * correcting double-creation of data set final tests * correction of result->results for a test loop. * removed extra output from test * corrected sanity fragment to include PR# removed extra/debug output from data_set * changed exception handler in data_set to match new exception class. --- changelogs/fragments/992-fix-sanity4to6.yml | 7 + plugins/module_utils/data_set.py | 4 +- plugins/modules/zos_data_set.py | 176 ++++++++++++------ .../modules/test_zos_data_set_func.py | 97 +++++----- tests/sanity/ignore-2.10.txt | 2 - tests/sanity/ignore-2.11.txt | 3 - tests/sanity/ignore-2.12.txt | 3 - tests/sanity/ignore-2.13.txt | 3 - tests/sanity/ignore-2.14.txt | 3 - tests/sanity/ignore-2.15.txt | 3 - tests/sanity/ignore-2.16.txt | 3 - tests/sanity/ignore-2.9.txt | 3 - 12 files changed, 171 insertions(+), 136 deletions(-) create mode 100644 changelogs/fragments/992-fix-sanity4to6.yml diff --git a/changelogs/fragments/992-fix-sanity4to6.yml b/changelogs/fragments/992-fix-sanity4to6.yml new file mode 100644 index 000000000..3d9637c63 --- /dev/null +++ b/changelogs/fragments/992-fix-sanity4to6.yml @@ -0,0 +1,7 @@ +trivial: + - zos_data_set.py - Corrected references to input variable definitions + (https://github.com/ansible-collections/ibm_zos_core/pull/1285). + - data_set.py - Updated exception handler to match what was returned. + (https://github.com/ansible-collections/ibm_zos_core/pull/1285). + - test_zos_data_set_func.py - Removed test of discontinued function. + (https://github.com/ansible-collections/ibm_zos_core/pull/1285). diff --git a/plugins/module_utils/data_set.py b/plugins/module_utils/data_set.py index 613bc9973..3bd502858 100644 --- a/plugins/module_utils/data_set.py +++ b/plugins/module_utils/data_set.py @@ -1022,11 +1022,11 @@ def create( formatted_args = DataSet._build_zoau_args(**original_args) try: datasets.create(**formatted_args) - except exceptions.ZOAUException as create_exception: + except exceptions._ZOAUExtendableException as create_exception: raise DatasetCreateError( name, create_exception.response.rc, - create_exception.response.stdout_response + create_exception.response.stderr_response + create_exception.response.stdout_response + "\n" + create_exception.response.stderr_response ) except exceptions.DatasetVerificationError as e: # verification of a data set spanning multiple volumes is currently broken in ZOAU v.1.3.0 diff --git a/plugins/modules/zos_data_set.py b/plugins/modules/zos_data_set.py index 8b0485826..1969462c3 100644 --- a/plugins/modules/zos_data_set.py +++ b/plugins/modules/zos_data_set.py @@ -24,7 +24,9 @@ description: - Create, delete and set attributes of data sets. - When forcing data set replacement, contents will not be preserved. -author: "Blake Becker (@blakeinate)" +author: + - "Blake Becker (@blakeinate)" + - "Rich Parker (@richp405)" options: name: description: @@ -109,7 +111,7 @@ description: - The data set type to be used when creating a data set. (e.g C(pdse)) - C(MEMBER) expects to be used with an existing partitioned data set. - - Choices are case-insensitive. + - Choices are case-sensitive. required: false type: str choices: @@ -157,7 +159,7 @@ record_format: description: - The format of the data set. (e.g C(FB)) - - Choices are case-insensitive. + - Choices are case-sensitive. - When I(type=KSDS), I(type=ESDS), I(type=RRDS), I(type=LDS) or I(type=ZFS) then I(record_format=None), these types do not have a default I(record_format). @@ -171,6 +173,8 @@ - F type: str default: FB + aliases: + - format sms_storage_class: description: - The storage class for an SMS-managed dataset. @@ -179,6 +183,8 @@ - Note that all non-linear VSAM datasets are SMS-managed. type: str required: false + aliases: + - data_class sms_data_class: description: - The data class for an SMS-managed dataset. @@ -370,7 +376,7 @@ description: - The data set type to be used when creating a data set. (e.g C(PDSE)) - C(MEMBER) expects to be used with an existing partitioned data set. - - Choices are case-insensitive. + - Choices are case-sensitive. required: false type: str choices: @@ -418,7 +424,7 @@ record_format: description: - The format of the data set. (e.g C(FB)) - - Choices are case-insensitive. + - Choices are case-sensitive. - When I(type=KSDS), I(type=ESDS), I(type=RRDS), I(type=LDS) or I(type=ZFS) then I(record_format=None), these types do not have a default I(record_format). @@ -432,6 +438,8 @@ - F type: str default: FB + aliases: + - format sms_storage_class: description: - The storage class for an SMS-managed dataset. @@ -440,6 +448,8 @@ - Note that all non-linear VSAM datasets are SMS-managed. type: str required: false + aliases: + - data_class sms_data_class: description: - The data class for an SMS-managed dataset. @@ -539,7 +549,7 @@ - name: Create a sequential data set if it does not exist zos_data_set: name: someds.name.here - type: seq + type: SEQ state: present - name: Create a PDS data set if it does not exist @@ -548,26 +558,26 @@ type: pds space_primary: 5 space_type: M - record_format: fba + record_format: FBA record_length: 25 - name: Attempt to replace a data set if it exists zos_data_set: name: someds.name.here - type: pds + type: PDS space_primary: 5 space_type: M - record_format: u + record_format: U record_length: 25 replace: yes - name: Attempt to replace a data set if it exists. If not found in the catalog, check if it is available on volume 222222, and catalog if found. zos_data_set: name: someds.name.here - type: pds + type: PDS space_primary: 5 space_type: M - record_format: u + record_format: U record_length: 25 volumes: "222222" replace: yes @@ -575,19 +585,19 @@ - name: Create an ESDS data set if it does not exist zos_data_set: name: someds.name.here - type: esds + type: ESDS - name: Create a KSDS data set if it does not exist zos_data_set: name: someds.name.here - type: ksds + type: KSDS key_length: 8 key_offset: 0 - name: Create an RRDS data set with storage class MYDATA if it does not exist zos_data_set: name: someds.name.here - type: rrds + type: RRDS sms_storage_class: mydata - name: Delete a data set if it exists @@ -632,7 +642,7 @@ type: PDS space_primary: 5 space_type: M - record_format: fb + record_format: FB replace: yes - name: someds.name.here1(member1) type: MEMBER @@ -799,7 +809,7 @@ def space_type(contents, dependencies): """Validates provided data set unit of space is valid. Returns the unit of space.""" if dependencies.get("state") == "absent": - return None + return "M" if contents is None: return None match = re.fullmatch(r"(M|G|K|TRK|CYL)", contents, re.IGNORECASE) @@ -865,9 +875,9 @@ def record_format(contents, dependencies): """Validates data set format is valid. Returns uppercase data set format.""" if dependencies.get("state") == "absent": - return None + return "FB" if contents is None: - return None + return "FB" formats = "|".join(DATA_SET_FORMATS) if not re.fullmatch(formats, contents, re.IGNORECASE): raise ValueError( @@ -986,33 +996,7 @@ def perform_data_set_operations(name, state, **extra_args): return changed -def fix_old_size_arg(params): - """ for backwards compatibility with old styled size argument """ - match = None - if params.get("size"): - match = re.fullmatch( - r"([1-9][0-9]*)(M|G|K|TRK|CYL)", str(params.get("size")), re.IGNORECASE - ) - if not match: - raise ValueError( - 'Value {0} is invalid for size argument. Valid size measurements are "K", "M", "G", "TRK" or "CYL".'.format( - str(params.get("size")) - ) - ) - if params.get("space_primary"): - match = re.fullmatch( - r"([1-9][0-9]*)(M|G|K|TRK|CYL)", - str(params.get("space_primary")), - re.IGNORECASE, - ) - if match: - params["space_primary"] = int(match.group(1)) - params["space_type"] = match.group(2) - return params - - def parse_and_validate_args(params): - params = fix_old_size_arg(params) arg_defs = dict( # Used for batch data set args @@ -1030,9 +1014,18 @@ def parse_and_validate_args(params): default="present", choices=["present", "absent", "cataloged", "uncataloged"], ), - type=dict(type=data_set_type, required=False, dependencies=["state"]), + type=dict( + type=data_set_type, + required=False, + dependencies=["state"], + choices=DATA_SET_TYPES, + ), space_type=dict( - type=space_type, required=False, dependencies=["state"] + type=space_type, + required=False, + dependencies=["state"], + choices=["K", "M", "G", "CYL", "TRK"], + default="M", ), space_primary=dict(type="int", required=False, dependencies=["state"]), space_secondary=dict( @@ -1042,7 +1035,9 @@ def parse_and_validate_args(params): type=record_format, required=False, dependencies=["state"], + choices=["FB", "VB", "FBA", "VBA", "U", "F"], aliases=["format"], + default="FB", ), sms_management_class=dict( type=sms_class, required=False, dependencies=["state"] @@ -1114,14 +1109,22 @@ def parse_and_validate_args(params): choices=["present", "absent", "cataloged", "uncataloged"], ), type=dict(type=data_set_type, required=False, dependencies=["state"]), - space_type=dict(type=space_type, required=False, dependencies=["state"]), + space_type=dict( + type=space_type, + required=False, + dependencies=["state"], + choices=["K", "M", "G", "CYL", "TRK"], + default="M", + ), space_primary=dict(type="int", required=False, dependencies=["state"]), space_secondary=dict(type="int", required=False, dependencies=["state"]), record_format=dict( type=record_format, required=False, dependencies=["state"], + choices=["FB", "VB", "FBA", "VBA", "U", "F"], aliases=["format"], + default="FB", ), sms_management_class=dict( type=sms_class, required=False, dependencies=["state"] @@ -1179,7 +1182,7 @@ def parse_and_validate_args(params): # ["batch", "space_type"], # ["batch", "space_primary"], # ["batch", "space_secondary"], - ["batch", "record_format"], + # ["batch", "record_format"], ["batch", "sms_management_class"], ["batch", "sms_storage_class"], ["batch", "sms_data_class"], @@ -1218,11 +1221,27 @@ def run_module(): default="present", choices=["present", "absent", "cataloged", "uncataloged"], ), - type=dict(type="str", required=False, default="PDS"), - space_type=dict(type="str", required=False, default="M"), - space_primary=dict(type="int", required=False, aliases=["size"], default=5), + type=dict( + type="str", + required=False, + default="PDS", + choices=DATA_SET_TYPES, + ), + space_type=dict( + type="str", + required=False, + default="M", + choices=["K", "M", "G", "CYL", "TRK"], + ), + space_primary=dict(type="int", required=False, default=5), space_secondary=dict(type="int", required=False, default=3), - record_format=dict(type="str", required=False, aliases=["format"], default="FB"), + record_format=dict( + type="str", + required=False, + aliases=["format"], + default="FB", + choices=["FB", "VB", "FBA", "VBA", "U", "F"], + ), sms_management_class=dict(type="str", required=False), # I know this alias is odd, ZOAU used to document they supported # SMS data class when they were actually passing as storage class @@ -1267,11 +1286,27 @@ def run_module(): default="present", choices=["present", "absent", "cataloged", "uncataloged"], ), - type=dict(type="str", required=False, default="PDS"), - space_type=dict(type="str", required=False, default="M"), - space_primary=dict(type="raw", required=False, aliases=["size"], default=5), + type=dict( + type="str", + required=False, + default="PDS", + choices=DATA_SET_TYPES, + ), + space_type=dict( + type="str", + required=False, + default="M", + choices=["K", "M", "G", "CYL", "TRK"], + ), + space_primary=dict(type="int", required=False, default=5), space_secondary=dict(type="int", required=False, default=3), - record_format=dict(type="str", required=False, aliases=["format"], default="FB"), + record_format=dict( + type="str", + required=False, + aliases=["format"], + choices=["FB", "VB", "FBA", "VBA", "U", "F"], + default="FB" + ), sms_management_class=dict(type="str", required=False), # I know this alias is odd, ZOAU used to document they supported # SMS data class when they were actually passing as storage class @@ -1319,6 +1354,7 @@ def run_module(): # This evaluation will always occur as a result of the limitation on the # better arg parser, this will serve as a solution for now and ensure # the non-batch and batch arguments are correctly set + # This section is copied down inside if/check_mode false, so it modifies after the arg parser if module.params.get("batch") is not None: for entry in module.params.get("batch"): if entry.get('type') is not None and entry.get("type").upper() in DATA_SET_TYPES_VSAM: @@ -1340,7 +1376,9 @@ def run_module(): elif module.params.get("type") is not None: if module.params.get("type").upper() in DATA_SET_TYPES_VSAM: # For VSAM types set the value to nothing and let the code manage it - module.params["record_format"] = None + # module.params["record_format"] = None + if module.params.get("record_format") is not None: + del module.params["record_format"] if not module.check_mode: try: @@ -1353,6 +1391,30 @@ def run_module(): result["names"] = [d.get("name", "") for d in data_set_param_list] for data_set_params in data_set_param_list: + # This *appears* redundant, bit the parse_and_validate reinforces the default value for record_type + if data_set_params.get("batch") is not None: + for entry in data_set_params.get("batch"): + if entry.get('type') is not None and entry.get("type").upper() in DATA_SET_TYPES_VSAM: + entry["record_format"] = None + if data_set_params.get("type") is not None: + data_set_params["type"] = None + if data_set_params.get("state") is not None: + data_set_params["state"] = None + if data_set_params.get("space_type") is not None: + data_set_params["space_type"] = None + if data_set_params.get("space_primary") is not None: + data_set_params["space_primary"] = None + if data_set_params.get("space_secondary") is not None: + data_set_params["space_secondary"] = None + if data_set_params.get("replace") is not None: + data_set_params["replace"] = None + if data_set_params.get("record_format") is not None: + data_set_params["record_format"] = None + else: + if data_set_params.get("type").upper() in DATA_SET_TYPES_VSAM: + if data_set_params.get("record_format") is not None: + data_set_params["record_format"] = None + # remove unnecessary empty batch argument result["changed"] = perform_data_set_operations( **data_set_params diff --git a/tests/functional/modules/test_zos_data_set_func.py b/tests/functional/modules/test_zos_data_set_func.py index 28882d9ce..0167c1b83 100644 --- a/tests/functional/modules/test_zos_data_set_func.py +++ b/tests/functional/modules/test_zos_data_set_func.py @@ -27,13 +27,14 @@ # TODO: determine if data set names need to be more generic for testcases # TODO: add additional tests to check additional data set creation parameter combinations + data_set_types = [ - ("pds"), - ("seq"), - ("pdse"), - ("esds"), - ("rrds"), - ("lds"), + ("PDS"), + ("SEQ"), + ("PDSE"), + ("ESDS"), + ("RRDS"), + ("LDS"), ] TEMP_PATH = "/tmp/jcl" @@ -152,9 +153,9 @@ def test_data_set_catalog_and_uncatalog(ansible_zos_module, jcl, volumes_on_syst volume_1 = volumes.get_available_vol() dataset = get_tmp_ds_name(2, 2) try: - hosts.all.zos_data_set( - name=dataset, state="cataloged", volumes=volume_1 - ) + # hosts.all.zos_data_set( + # name=dataset, state="cataloged", volumes=volume_1 + # ) hosts.all.zos_data_set(name=dataset, state="absent") hosts.all.file(path=TEMP_PATH, state="directory") @@ -163,24 +164,28 @@ def test_data_set_catalog_and_uncatalog(ansible_zos_module, jcl, volumes_on_syst src=TEMP_PATH + "/SAMPLE", location="USS", wait_time_s=30 ) # verify data set creation was successful + for result in results.contacted.values(): if(result.get("jobs")[0].get("ret_code") is None): submitted_job_id = result.get("jobs")[0].get("job_id") assert submitted_job_id is not None results = hosts.all.zos_job_output(job_id=submitted_job_id) assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" + # verify first uncatalog was performed results = hosts.all.zos_data_set(name=dataset, state="uncataloged") for result in results.contacted.values(): assert result.get("changed") is True # verify second uncatalog shows uncatalog already performed results = hosts.all.zos_data_set(name=dataset, state="uncataloged") + for result in results.contacted.values(): assert result.get("changed") is False # recatalog the data set results = hosts.all.zos_data_set( name=dataset, state="cataloged", volumes=volume_1 ) + for result in results.contacted.values(): assert result.get("changed") is True # verify second catalog shows catalog already performed @@ -207,9 +212,9 @@ def test_data_set_present_when_uncataloged(ansible_zos_module, jcl, volumes_on_s volume_1 = volumes.get_available_vol() dataset = get_tmp_ds_name(2, 2) try: - hosts.all.zos_data_set( - name=dataset, state="cataloged", volumes=volume_1 - ) + # hosts.all.zos_data_set( + # name=dataset, state="cataloged", volumes=volume_1 + # ) hosts.all.zos_data_set(name=dataset, state="absent") hosts.all.file(path=TEMP_PATH, state="directory") @@ -234,6 +239,7 @@ def test_data_set_present_when_uncataloged(ansible_zos_module, jcl, volumes_on_s results = hosts.all.zos_data_set( name=dataset, state="present", volumes=volume_1 ) + for result in results.contacted.values(): assert result.get("changed") is True finally: @@ -252,9 +258,9 @@ def test_data_set_replacement_when_uncataloged(ansible_zos_module, jcl, volumes_ volume = volumes.get_available_vol() dataset = get_tmp_ds_name(2, 2) try: - hosts.all.zos_data_set( - name=dataset, state="cataloged", volumes=volume - ) + # hosts.all.zos_data_set( + # name=dataset, state="cataloged", volumes=volume + # ) hosts.all.zos_data_set(name=dataset, state="absent") hosts.all.file(path=TEMP_PATH, state="directory") @@ -300,9 +306,9 @@ def test_data_set_absent_when_uncataloged(ansible_zos_module, jcl, volumes_on_sy volume_1 = volumes.get_available_vol() hosts = ansible_zos_module dataset = get_tmp_ds_name(2, 2) - hosts.all.zos_data_set( - name=dataset, state="cataloged", volumes=volume_1 - ) + # hosts.all.zos_data_set( + # name=dataset, state="cataloged", volumes=volume_1 + # ) hosts.all.zos_data_set(name=dataset, state="absent") hosts.all.file(path=TEMP_PATH, state="directory") @@ -339,13 +345,13 @@ def test_data_set_absent_when_uncataloged_and_same_name_cataloged_is_present(ans volume_2 = volumes.get_available_vol() hosts = ansible_zos_module dataset = get_tmp_ds_name(2, 2) - hosts.all.zos_data_set(name=dataset, state="cataloged", volumes=volume_1) + # hosts.all.zos_data_set(name=dataset, state="cataloged", volumes=volume_1) hosts.all.zos_data_set(name=dataset, state="absent") hosts.all.file(path=TEMP_PATH, state="directory") hosts.all.shell(cmd=ECHO_COMMAND.format(quote(jcl.format(volume_1, dataset)), TEMP_PATH)) - results =hosts.all.zos_job_submit(src=TEMP_PATH + "/SAMPLE", location="USS") + results = hosts.all.zos_job_submit(src=TEMP_PATH + "/SAMPLE", location="USS") # verify data set creation was successful for result in results.contacted.values(): @@ -368,11 +374,14 @@ def test_data_set_absent_when_uncataloged_and_same_name_cataloged_is_present(ans hosts.all.file(path=TEMP_PATH, state="absent") - # ensure data set absent - results = hosts.all.zos_data_set(name=dataset, state="absent", volumes=volume_1) + # ensure second data set absent + results = hosts.all.zos_data_set(name=dataset, state="absent", volumes=volume_2) for result in results.contacted.values(): assert result.get("changed") is True + # ensure first data set absent + hosts.all.zos_data_set(name=dataset, state="cataloged") + results = hosts.all.zos_data_set(name=dataset, state="absent", volumes=volume_1) for result in results.contacted.values(): assert result.get("changed") is True @@ -401,7 +410,7 @@ def test_data_set_creation_when_present_replace(ansible_zos_module, dstype): try: hosts = ansible_zos_module dataset = get_tmp_ds_name(2, 2) - hosts.all.zos_data_set( + results = hosts.all.zos_data_set( name=dataset, state="present", type=dstype, replace=True ) results = hosts.all.zos_data_set( @@ -460,7 +469,7 @@ def test_batch_data_set_creation_and_deletion(ansible_zos_module): results = hosts.all.zos_data_set( batch=[ {"name": dataset, "state": "absent"}, - {"name": dataset, "type": "pds", "state": "present"}, + {"name": dataset, "type": "PDS", "state": "present"}, {"name": dataset, "state": "absent"}, ] ) @@ -477,11 +486,11 @@ def test_batch_data_set_and_member_creation(ansible_zos_module): dataset = get_tmp_ds_name(2, 2) results = hosts.all.zos_data_set( batch=[ - {"name": dataset, "type": "pds", "directory_blocks": 5}, - {"name": dataset + "(newmem1)", "type": "member"}, + {"name": dataset, "type": "PDS", "directory_blocks": 5}, + {"name": dataset + "(newmem1)", "type": "MEMBER"}, { "name": dataset + "(newmem2)", - "type": "member", + "type": "MEMBER", "state": "present", }, {"name": dataset, "state": "absent"}, @@ -525,7 +534,7 @@ def test_data_member_force_delete(ansible_zos_module): DEFAULT_DATA_SET_NAME = get_tmp_ds_name(2, 2) # set up: # create pdse - results = hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="present", type="pdse", replace=True) + results = hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="present", type="PDSE", replace=True) for result in results.contacted.values(): assert result.get("changed") is True @@ -534,25 +543,25 @@ def test_data_member_force_delete(ansible_zos_module): batch=[ { "name": DEFAULT_DATA_SET_NAME + "({0})".format(MEMBER_1), - "type": "member", + "type": "MEMBER", "state": "present", "replace": True, }, { "name": DEFAULT_DATA_SET_NAME + "({0})".format(MEMBER_2), - "type": "member", + "type": "MEMBER", "state": "present", "replace": True, }, { "name": DEFAULT_DATA_SET_NAME + "({0})".format(MEMBER_3), - "type": "member", + "type": "MEMBER", "state": "present", "replace": True, }, { "name": DEFAULT_DATA_SET_NAME + "({0})".format(MEMBER_4), - "type": "member", + "type": "MEMBER", "state": "present", "replace": True, }, @@ -769,27 +778,6 @@ def test_multi_volume_creation_uncatalog_and_catalog_vsam(ansible_zos_module, vo hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") -def test_data_set_old_aliases(ansible_zos_module, volumes_on_systems): - volumes = Volume_Handler(volumes_on_systems) - volume_1 = volumes.get_available_vol() - try: - hosts = ansible_zos_module - DEFAULT_DATA_SET_NAME = get_tmp_ds_name(2, 2) - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") - results = hosts.all.zos_data_set( - name=DEFAULT_DATA_SET_NAME, - state="present", - format="fb", - size="5m", - volume=volume_1, - ) - for result in results.contacted.values(): - assert result.get("changed") is True - assert result.get("module_stderr") is None - finally: - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") - - def test_data_set_temp_data_set_name(ansible_zos_module): try: hosts = ansible_zos_module @@ -966,7 +954,8 @@ def test_data_set_f_formats(ansible_zos_module, formats, volumes_on_systems): name=DEFAULT_DATA_SET_NAME, state="present", format=formats, - size="5m", + space_primary="5", + space_type="M", volume=volume_1, ) for result in results.contacted.values(): diff --git a/tests/sanity/ignore-2.10.txt b/tests/sanity/ignore-2.10.txt index 8778d80f9..42b415ae6 100644 --- a/tests/sanity/ignore-2.10.txt +++ b/tests/sanity/ignore-2.10.txt @@ -27,8 +27,6 @@ plugins/modules/zos_copy.py validate-modules:parameter-type-not-in-doc # Passing plugins/modules/zos_copy.py validate-modules:undocumented-parameter # Passing args from action plugin plugins/modules/zos_copy.py compile-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_copy.py import-2.6!skip # Python 2.6 is unsupported -plugins/modules/zos_data_set.py validate-modules:doc-choices-do-not-match-spec # We use our own argument parser for advanced conditional and dependent arguments. -plugins/modules/zos_data_set.py validate-modules:doc-type-does-not-match-spec # Have to use raw here for backwards compatibility with old module args, but would confuse current users if exposed. plugins/modules/zos_data_set.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_data_set.py validate-modules:undocumented-parameter # Keep aliases to match behavior of old module spec, but some aliases were functionally inaccurate, and detailing in docs would only confuse user. plugins/modules/zos_data_set.py compile-2.6!skip # Python 2.6 is unsupported diff --git a/tests/sanity/ignore-2.11.txt b/tests/sanity/ignore-2.11.txt index 9ceaf3c97..bf118f7b9 100644 --- a/tests/sanity/ignore-2.11.txt +++ b/tests/sanity/ignore-2.11.txt @@ -27,10 +27,7 @@ plugins/modules/zos_copy.py validate-modules:parameter-type-not-in-doc # Passing plugins/modules/zos_copy.py validate-modules:undocumented-parameter # Passing args from action plugin plugins/modules/zos_copy.py compile-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_copy.py import-2.6!skip # Python 2.6 is unsupported -plugins/modules/zos_data_set.py validate-modules:doc-choices-do-not-match-spec # We use our own argument parser for advanced conditional and dependent arguments. -plugins/modules/zos_data_set.py validate-modules:doc-type-does-not-match-spec # Have to use raw here for backwards compatibility with old module args, but would confuse current users if exposed. plugins/modules/zos_data_set.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 -plugins/modules/zos_data_set.py validate-modules:undocumented-parameter # Keep aliases to match behavior of old module spec, but some aliases were functionally inaccurate, and detailing in docs would only confuse user. plugins/modules/zos_data_set.py compile-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_data_set.py import-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_encode.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 diff --git a/tests/sanity/ignore-2.12.txt b/tests/sanity/ignore-2.12.txt index 9ceaf3c97..bf118f7b9 100644 --- a/tests/sanity/ignore-2.12.txt +++ b/tests/sanity/ignore-2.12.txt @@ -27,10 +27,7 @@ plugins/modules/zos_copy.py validate-modules:parameter-type-not-in-doc # Passing plugins/modules/zos_copy.py validate-modules:undocumented-parameter # Passing args from action plugin plugins/modules/zos_copy.py compile-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_copy.py import-2.6!skip # Python 2.6 is unsupported -plugins/modules/zos_data_set.py validate-modules:doc-choices-do-not-match-spec # We use our own argument parser for advanced conditional and dependent arguments. -plugins/modules/zos_data_set.py validate-modules:doc-type-does-not-match-spec # Have to use raw here for backwards compatibility with old module args, but would confuse current users if exposed. plugins/modules/zos_data_set.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 -plugins/modules/zos_data_set.py validate-modules:undocumented-parameter # Keep aliases to match behavior of old module spec, but some aliases were functionally inaccurate, and detailing in docs would only confuse user. plugins/modules/zos_data_set.py compile-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_data_set.py import-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_encode.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 diff --git a/tests/sanity/ignore-2.13.txt b/tests/sanity/ignore-2.13.txt index 70d4764e1..8176aa2bb 100644 --- a/tests/sanity/ignore-2.13.txt +++ b/tests/sanity/ignore-2.13.txt @@ -7,10 +7,7 @@ plugins/modules/zos_copy.py validate-modules:doc-type-does-not-match-spec # doc plugins/modules/zos_copy.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_copy.py validate-modules:parameter-type-not-in-doc # Passing args from action plugin plugins/modules/zos_copy.py validate-modules:undocumented-parameter # Passing args from action plugin -plugins/modules/zos_data_set.py validate-modules:doc-choices-do-not-match-spec # We use our own argument parser for advanced conditional and dependent arguments. -plugins/modules/zos_data_set.py validate-modules:doc-type-does-not-match-spec # Have to use raw here for backwards compatibility with old module args, but would confuse current users if exposed. plugins/modules/zos_data_set.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 -plugins/modules/zos_data_set.py validate-modules:undocumented-parameter # Keep aliases to match behavior of old module spec, but some aliases were functionally inaccurate, and detailing in docs would only confuse user. plugins/modules/zos_encode.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_fetch.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_fetch.py validate-modules:parameter-type-not-in-doc # Passing args from action plugin diff --git a/tests/sanity/ignore-2.14.txt b/tests/sanity/ignore-2.14.txt index 55477a2d0..0167d6c81 100644 --- a/tests/sanity/ignore-2.14.txt +++ b/tests/sanity/ignore-2.14.txt @@ -4,10 +4,7 @@ plugins/modules/zos_blockinfile.py validate-modules:missing-gplv3-license # Lice plugins/modules/zos_copy.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_copy.py validate-modules:parameter-type-not-in-doc # Passing args from action plugin plugins/modules/zos_copy.py validate-modules:undocumented-parameter # Passing args from action plugin -plugins/modules/zos_data_set.py validate-modules:doc-choices-do-not-match-spec # We use our own argument parser for advanced conditional and dependent arguments. -plugins/modules/zos_data_set.py validate-modules:doc-type-does-not-match-spec # Have to use raw here for backwards compatibility with old module args, but would confuse current users if exposed. plugins/modules/zos_data_set.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 -plugins/modules/zos_data_set.py validate-modules:undocumented-parameter # Keep aliases to match behavior of old module spec, but some aliases were functionally inaccurate, and detailing in docs would only confuse user. plugins/modules/zos_encode.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_fetch.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_find.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 diff --git a/tests/sanity/ignore-2.15.txt b/tests/sanity/ignore-2.15.txt index 55477a2d0..0167d6c81 100644 --- a/tests/sanity/ignore-2.15.txt +++ b/tests/sanity/ignore-2.15.txt @@ -4,10 +4,7 @@ plugins/modules/zos_blockinfile.py validate-modules:missing-gplv3-license # Lice plugins/modules/zos_copy.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_copy.py validate-modules:parameter-type-not-in-doc # Passing args from action plugin plugins/modules/zos_copy.py validate-modules:undocumented-parameter # Passing args from action plugin -plugins/modules/zos_data_set.py validate-modules:doc-choices-do-not-match-spec # We use our own argument parser for advanced conditional and dependent arguments. -plugins/modules/zos_data_set.py validate-modules:doc-type-does-not-match-spec # Have to use raw here for backwards compatibility with old module args, but would confuse current users if exposed. plugins/modules/zos_data_set.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 -plugins/modules/zos_data_set.py validate-modules:undocumented-parameter # Keep aliases to match behavior of old module spec, but some aliases were functionally inaccurate, and detailing in docs would only confuse user. plugins/modules/zos_encode.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_fetch.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_find.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 diff --git a/tests/sanity/ignore-2.16.txt b/tests/sanity/ignore-2.16.txt index 55477a2d0..0167d6c81 100644 --- a/tests/sanity/ignore-2.16.txt +++ b/tests/sanity/ignore-2.16.txt @@ -4,10 +4,7 @@ plugins/modules/zos_blockinfile.py validate-modules:missing-gplv3-license # Lice plugins/modules/zos_copy.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_copy.py validate-modules:parameter-type-not-in-doc # Passing args from action plugin plugins/modules/zos_copy.py validate-modules:undocumented-parameter # Passing args from action plugin -plugins/modules/zos_data_set.py validate-modules:doc-choices-do-not-match-spec # We use our own argument parser for advanced conditional and dependent arguments. -plugins/modules/zos_data_set.py validate-modules:doc-type-does-not-match-spec # Have to use raw here for backwards compatibility with old module args, but would confuse current users if exposed. plugins/modules/zos_data_set.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 -plugins/modules/zos_data_set.py validate-modules:undocumented-parameter # Keep aliases to match behavior of old module spec, but some aliases were functionally inaccurate, and detailing in docs would only confuse user. plugins/modules/zos_encode.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_fetch.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_find.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 diff --git a/tests/sanity/ignore-2.9.txt b/tests/sanity/ignore-2.9.txt index 992ec6099..62d724706 100644 --- a/tests/sanity/ignore-2.9.txt +++ b/tests/sanity/ignore-2.9.txt @@ -26,10 +26,7 @@ plugins/modules/zos_copy.py validate-modules:parameter-type-not-in-doc # Passing plugins/modules/zos_copy.py validate-modules:undocumented-parameter # Passing args from action plugin plugins/modules/zos_copy.py compile-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_copy.py import-2.6!skip # Python 2.6 is unsupported -plugins/modules/zos_data_set.py validate-modules:doc-choices-do-not-match-spec # We use our own argument parser for advanced conditional and dependent arguments. -plugins/modules/zos_data_set.py validate-modules:doc-type-does-not-match-spec # Have to use raw here for backwards compatibility with old module args, but would confuse current users if exposed. plugins/modules/zos_data_set.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 -plugins/modules/zos_data_set.py validate-modules:undocumented-parameter # Keep aliases to match behavior of old module spec, but some aliases were functionally inaccurate, and detailing in docs would only confuse user. plugins/modules/zos_data_set.py compile-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_data_set.py import-2.6!skip # Python 2.6 is unsupported plugins/modules/zos_encode.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 From b4156f8cc12bec1e3bca38c1c2eebc9e81ca78b5 Mon Sep 17 00:00:00 2001 From: Ivan Moreno <iamorenosoto@gmail.com> Date: Wed, 13 Mar 2024 13:25:51 -0700 Subject: [PATCH 325/495] [v1.10.0] [Bugfix] [zos_job_submit] Fix non-printable chars handling and testing in jobs (#1300) * Update non-printable chars test * Add support for handling JSON decode errors Add support for when ZOAU v1.3.0 and later can't read and create JSON output from a job's output. --- plugins/module_utils/job.py | 6 +- .../modules/test_zos_job_submit_func.py | 55 ++++++++++++------- 2 files changed, 39 insertions(+), 22 deletions(-) diff --git a/plugins/module_utils/job.py b/plugins/module_utils/job.py index 1afdaed55..1f49a2b26 100644 --- a/plugins/module_utils/job.py +++ b/plugins/module_utils/job.py @@ -18,6 +18,10 @@ import traceback from time import sleep from timeit import default_timer as timer +# Only importing this module so we can catch a JSONDecodeError that sometimes happens +# when a job's output has non-printable chars that conflict with JSON's control +# chars. +from json import decoder from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.better_arg_parser import ( BetterArgParser, ) @@ -366,7 +370,7 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=T single_dd["step_name"], single_dd["dd_name"] ) - except UnicodeDecodeError: + except (UnicodeDecodeError, decoder.JSONDecodeError): tmpcont = ( "Non-printable UTF-8 characters were present in this output. " "Please access it manually." diff --git a/tests/functional/modules/test_zos_job_submit_func.py b/tests/functional/modules/test_zos_job_submit_func.py index 0694cdfa0..394a087ad 100644 --- a/tests/functional/modules/test_zos_job_submit_func.py +++ b/tests/functional/modules/test_zos_job_submit_func.py @@ -262,36 +262,49 @@ C_SRC_INVALID_UTF8 = """#include <stdio.h> int main() { - unsigned char a=0x64; - unsigned char b=0x2A; - unsigned char c=0xB8; - unsigned char d=0xFF; - unsigned char e=0x81; - unsigned char f=0x82; - unsigned char g=0x83; - unsigned char h=0x00; - printf("Value of a: Hex: %X, character: %c",a,a); - printf("Value of b: Hex: %X, character: %c",b,b); - printf("Value of c: Hex: %X, character: %c",c,c); - printf("Value of d: Hex: %X, character: %c",d,d); - printf("Value of a: Hex: %X, character: %c",e,e); - printf("Value of b: Hex: %X, character: %c",f,f); - printf("Value of c: Hex: %X, character: %c",g,g); - printf("Value of d: Hex: %X, character: %c",h,h); - return 0; + unsigned char a=0x64; + unsigned char b=0x2A; + unsigned char c=0xB8; + unsigned char d=0xFF; + unsigned char e=0x81; + unsigned char f=0x82; + unsigned char g=0x83; + unsigned char h=0x00; + /* The following are non-printables from DBB. */ + unsigned char nl=0x15; + unsigned char cr=0x0D; + unsigned char lf=0x25; + unsigned char shiftOut=0x0E; + unsigned char shiftIn=0x0F; + + printf("Value of a: Hex: %X, character: %c",a,a); + printf("Value of b: Hex: %X, character: %c",b,b); + printf("Value of c: Hex: %X, character: %c",c,c); + printf("Value of d: Hex: %X, character: %c",d,d); + printf("Value of e: Hex: %X, character: %c",e,e); + printf("Value of f: Hex: %X, character: %c",f,f); + printf("Value of g: Hex: %X, character: %c",g,g); + printf("Value of h: Hex: %X, character: %c",h,h); + printf("Value of NL: Hex: %X, character: %c",nl,nl); + printf("Value of CR: Hex: %X, character: %c",cr,cr); + printf("Value of LF: Hex: %X, character: %c",lf,lf); + printf("Value of Shift-Out: Hex: %X, character: %c",shiftOut,shiftOut); + printf("Value of Shift-In: Hex: %X, character: %c",shiftIn,shiftIn); + + return 0; } """ JCL_INVALID_UTF8_CHARS_EXC = """//* //****************************************************************************** //* Job that runs a C program that returns characters outside of the UTF-8 range -//* expected by Python. This job tests a bugfix present in ZOAU v1.3.0 onwards -//* that deals properly with these chars. +//* expected by Python. This job tests a bugfix present in ZOAU v1.3.0 and +//* later that deals properly with these chars. //* The JCL needs to be formatted to give it the directory where the C program //* is located. //****************************************************************************** //NOEBCDIC JOB (T043JM,JM00,1,0,0,0),'NOEBCDIC - JRM', -// MSGCLASS=X,MSGLEVEL=1,NOTIFY=&SYSUID +// MSGCLASS=H,MSGLEVEL=1,NOTIFY=&SYSUID //NOPRINT EXEC PGM=BPXBATCH //STDPARM DD * SH ( @@ -774,7 +787,7 @@ def test_zoau_bugfix_invalid_utf8_chars(ansible_zos_module): hosts.all.shell( cmd="echo {0} > {1}/noprint.c".format(quote(C_SRC_INVALID_UTF8), TEMP_PATH) ) - hosts.all.shell(cmd="xlc -o {0}/noprint {0}/noprint.c") + hosts.all.shell(cmd="xlc -o {0}/noprint {0}/noprint.c".format(TEMP_PATH)) # Create local JCL and submit it. tmp_file = tempfile.NamedTemporaryFile(delete=True) From 5a4f768d7472dd1524f7c80b8654dbba542996bb Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Tue, 19 Mar 2024 12:43:00 -0400 Subject: [PATCH 326/495] un-commented the blind catalog action to test if other fixes corrected the catalog issue (#1303) --- .../modules/test_zos_data_set_func.py | 26 +++++++++---------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/tests/functional/modules/test_zos_data_set_func.py b/tests/functional/modules/test_zos_data_set_func.py index 0167c1b83..7ab4685c0 100644 --- a/tests/functional/modules/test_zos_data_set_func.py +++ b/tests/functional/modules/test_zos_data_set_func.py @@ -153,9 +153,9 @@ def test_data_set_catalog_and_uncatalog(ansible_zos_module, jcl, volumes_on_syst volume_1 = volumes.get_available_vol() dataset = get_tmp_ds_name(2, 2) try: - # hosts.all.zos_data_set( - # name=dataset, state="cataloged", volumes=volume_1 - # ) + hosts.all.zos_data_set( + name=dataset, state="cataloged", volumes=volume_1 + ) hosts.all.zos_data_set(name=dataset, state="absent") hosts.all.file(path=TEMP_PATH, state="directory") @@ -212,9 +212,9 @@ def test_data_set_present_when_uncataloged(ansible_zos_module, jcl, volumes_on_s volume_1 = volumes.get_available_vol() dataset = get_tmp_ds_name(2, 2) try: - # hosts.all.zos_data_set( - # name=dataset, state="cataloged", volumes=volume_1 - # ) + hosts.all.zos_data_set( + name=dataset, state="cataloged", volumes=volume_1 + ) hosts.all.zos_data_set(name=dataset, state="absent") hosts.all.file(path=TEMP_PATH, state="directory") @@ -258,9 +258,9 @@ def test_data_set_replacement_when_uncataloged(ansible_zos_module, jcl, volumes_ volume = volumes.get_available_vol() dataset = get_tmp_ds_name(2, 2) try: - # hosts.all.zos_data_set( - # name=dataset, state="cataloged", volumes=volume - # ) + hosts.all.zos_data_set( + name=dataset, state="cataloged", volumes=volume + ) hosts.all.zos_data_set(name=dataset, state="absent") hosts.all.file(path=TEMP_PATH, state="directory") @@ -306,9 +306,9 @@ def test_data_set_absent_when_uncataloged(ansible_zos_module, jcl, volumes_on_sy volume_1 = volumes.get_available_vol() hosts = ansible_zos_module dataset = get_tmp_ds_name(2, 2) - # hosts.all.zos_data_set( - # name=dataset, state="cataloged", volumes=volume_1 - # ) + hosts.all.zos_data_set( + name=dataset, state="cataloged", volumes=volume_1 + ) hosts.all.zos_data_set(name=dataset, state="absent") hosts.all.file(path=TEMP_PATH, state="directory") @@ -345,7 +345,7 @@ def test_data_set_absent_when_uncataloged_and_same_name_cataloged_is_present(ans volume_2 = volumes.get_available_vol() hosts = ansible_zos_module dataset = get_tmp_ds_name(2, 2) - # hosts.all.zos_data_set(name=dataset, state="cataloged", volumes=volume_1) + hosts.all.zos_data_set(name=dataset, state="cataloged", volumes=volume_1) hosts.all.zos_data_set(name=dataset, state="absent") From 4c2be29b07d59a7739fa6449fa7fdcaf4c682e33 Mon Sep 17 00:00:00 2001 From: IsaacVRey <isaac.vega.rey@gmail.com> Date: Tue, 19 Mar 2024 11:24:35 -0600 Subject: [PATCH 327/495] Enabler/692/add changelog lint (#1304) * Create bandit.yml * Update bandit.yml * Update bandit.yml * Update bandit.yml * Update bandit.yml * Update bandit.yml * Update bandit.yml * Added changelog action * Update changelog.yml * Create close-stale-issues * Update close-stale-issues Quite el workflow dispatch * Create bandit2.yml * Update bandit2.yml * Update zos_copy.py * Update zos_copy.py Me equivoque * Create ansible-test.yml * Update ansible-test.yml * Update ansible-test.yml * Update ansible-test.yml * Update ansible-test.yml * Update ansible-test.yml * Update ansible-test.yml * Update ansible-test.yml * Update ansible-test.yml * Added ac changelog * added lint as an option * Added documentation to ac_changelog * Changed 'lint' to 'command' on ac_changelog * Create * Create first version of the changelog action * Update changelog.yml * Fix changelog.yml * Change name of action Antsibull 'Changelog lint' to AC Changelog lint * Rename 'changelog.yml' to 'ac_changelog.yml * Create ac_changelog.yml * Update ac_changelog.yml * Update ac_changelog.yml * Update ac_changelog.yml * Change path in 'venv setup' on ac * Change ac_changelog.yml * Change ac_changelog.yml * Change ac_changelog.yml * Change ac_changelog.yml * Removed not required github actions * Update zos_copy.py * Update ac_changelog.yml * Update ac_changelog.yml * Indented steps section * Modified changed line * Added changelog --------- Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> --- .github/workflows/ac_changelog.yml | 39 +++++++++++++++++++ ac | 37 ++++++++++++++++++ .../fragments/692-changelog-lint-ac-tool.yml | 8 ++++ 3 files changed, 84 insertions(+) create mode 100644 .github/workflows/ac_changelog.yml create mode 100644 changelogs/fragments/692-changelog-lint-ac-tool.yml diff --git a/.github/workflows/ac_changelog.yml b/.github/workflows/ac_changelog.yml new file mode 100644 index 000000000..523e207b9 --- /dev/null +++ b/.github/workflows/ac_changelog.yml @@ -0,0 +1,39 @@ +name: AC Changelog Lint + +on: + pull_request: + paths: + - 'changelogs/fragments/*' + branches: + - dev + - staging* + +jobs: + lint: + runs-on: ubuntu-latest + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Set up python + uses: actions/setup-python@v5 + with: + python-version: 3.11 + + - name: Set up venv + run: | + python -m pip install --upgrade pip + pip install virtualenv + mkdir venv + virtualenv venv/venv-2.16 + + - name: Install dependencies + run: | + source venv/venv-2.16/bin/activate + pip install antsibull-changelog + + - name: Run ac-changelog + run: | + source venv/venv-2.16/bin/activate + ./ac --ac-changelog --command lint diff --git a/ac b/ac index dad00194c..bb307f4a6 100755 --- a/ac +++ b/ac @@ -241,6 +241,32 @@ ac_build(){ $VENV_BIN/ansible-galaxy collection install -f ibm-ibm_zos_core-* } +# ------------------------------------------------------------------------------ +# Run a changelog lint locally +# ------------------------------------------------------------------------------ +#->ac-changelog: +## Runs antsibull-changelog to generate the release changelog or perform a lint +## on changelog fragments or release notes. +## Usage: ac [--ac-changelog <command>] +## <command> - choose from 'init', 'lint', 'lint-changelog-yaml', 'release', 'generate' +## - generate generate the changelog +## - init set up changelog infrastructure for collection, or an other project +## - lint check changelog fragments for syntax errors +## - lint-changelog-yaml check syntax of changelogs/changelog.yaml file +## - release add a new release to the change metadata +## Example: +## $ ac --ac-changelog --command lint +## $ ac --ac-changelog --command release +## $ ac --ac-changelog +ac_changelog(){ + option_command=$1 + if [ ! "$option_command" ]; then + option_command="lint" + fi + message "Running Changelog '$option_command'" + . $VENV_BIN/activate && antsibull-changelog "${option_command}" +} + # ------------------------------------------------------------------------------ # Install an ibm_zos_core collection from galaxy (or how you have ansible.cfg configured) # ------------------------------------------------------------------------------ @@ -653,6 +679,10 @@ while true; do ensure_managed_venv_exists $1 option_submitted="--ac-build" ;; + --ac-changelog) # Command + ensure_managed_venv_exists $1 + option_submitted="--ac-changelog" + ;; --ac-install) ensure_managed_venv_exists $1 # Command option_submitted="--ac-install" @@ -716,6 +746,11 @@ while true; do ensure_managed_venv_exists $1 option_submitted="--venv-stop" ;; + --command|--command=?*) # option + command=`option_processor $1 $2` + option_sanitize $command + shift + ;; --debug|--debug=?*) # option debug=`option_processor $1 $2` option_sanitize $debug @@ -800,6 +835,8 @@ if [ "$option_submitted" ] && [ "$option_submitted" = "--ac-bandit" ] ; then ac_bandit $level elif [ "$option_submitted" ] && [ "$option_submitted" = "--ac-build" ] ; then ac_build +elif [ "$option_submitted" ] && [ "$option_submitted" = "--ac-changelog" ] ; then + ac_changelog $command elif [ "$option_submitted" ] && [ "$option_submitted" = "--ac-install" ] ; then ac_install $version elif [ "$option_submitted" ] && [ "$option_submitted" = "--ac-lint" ] ; then diff --git a/changelogs/fragments/692-changelog-lint-ac-tool.yml b/changelogs/fragments/692-changelog-lint-ac-tool.yml new file mode 100644 index 000000000..cbf6bab7d --- /dev/null +++ b/changelogs/fragments/692-changelog-lint-ac-tool.yml @@ -0,0 +1,8 @@ +trivial: + - ac - Added new command ac-changelog into ac tool to run changelog + fragments lint and changelog release generation. + (https://github.com/ansible-collections/ibm_zos_core/pull/1304). + + - workflows/ac_changelog - Added new github action that will lint + changelog fragments upon a new pull request. + (https://github.com/ansible-collections/ibm_zos_core/pull/1304). \ No newline at end of file From 9c5bab3c39214ff2e4c0ab07f28a1624a0d336ae Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Wed, 20 Mar 2024 13:37:33 -0600 Subject: [PATCH 328/495] Changed case sensitive options --- .../functional/modules/test_zos_copy_func.py | 194 +++++++++--------- 1 file changed, 97 insertions(+), 97 deletions(-) diff --git a/tests/functional/modules/test_zos_copy_func.py b/tests/functional/modules/test_zos_copy_func.py index cf7f1494b..bbd598f1c 100644 --- a/tests/functional/modules/test_zos_copy_func.py +++ b/tests/functional/modules/test_zos_copy_func.py @@ -1684,7 +1684,7 @@ def test_copy_seq_data_set_to_seq_asa(ansible_zos_module): hosts.all.zos_data_set( name=src, state="present", - type="seq", + type="PDSE", replace=True ) @@ -1733,7 +1733,7 @@ def test_copy_seq_data_set_to_partitioned_asa(ansible_zos_module): hosts.all.zos_data_set( name=src, state="present", - type="seq", + type="PDSE", replace=True ) @@ -1784,7 +1784,7 @@ def test_copy_partitioned_data_set_to_seq_asa(ansible_zos_module): hosts.all.zos_data_set( name=src, state="present", - type="pdse", + type="PDSE", replace=True ) @@ -1834,7 +1834,7 @@ def test_copy_partitioned_data_set_to_partitioned_asa(ansible_zos_module): hosts.all.zos_data_set( name=src, state="present", - type="pdse", + type="PDSE", replace=True ) @@ -1884,7 +1884,7 @@ def test_copy_asa_data_set_to_text_file(ansible_zos_module): hosts.all.zos_data_set( name=src, state="present", - type="seq", + type="PDSE", record_format="FBA", record_length=80, block_size=27920, @@ -1977,8 +1977,8 @@ def test_copy_dest_lock(ansible_zos_module, ds_type): hosts.all.zos_data_set(name=data_set_1, state="present", type=ds_type, replace=True) hosts.all.zos_data_set(name=data_set_2, state="present", type=ds_type, replace=True) if ds_type == "PDS" or ds_type == "PDSE": - hosts.all.zos_data_set(name=src_data_set, state="present", type="member", replace=True) - hosts.all.zos_data_set(name=dest_data_set, state="present", type="member", replace=True) + hosts.all.zos_data_set(name=src_data_set, state="present", type="MEMBER", replace=True) + hosts.all.zos_data_set(name=dest_data_set, state="present", type="MEMBER", replace=True) # copy text_in source hosts.all.shell(cmd="decho \"{0}\" \"{1}\"".format(DUMMY_DATA, src_data_set)) # copy/compile c program and copy jcl to hold data set lock for n seconds in background(&) @@ -2266,7 +2266,7 @@ def test_copy_file_to_empty_sequential_data_set(ansible_zos_module, src): dest = get_tmp_ds_name() try: - hosts.all.zos_data_set(name=dest, type="seq", state="present") + hosts.all.zos_data_set(name=dest, type="PDSE", state="present") if src["is_file"]: copy_result = hosts.all.zos_copy(src=src["src"], dest=dest, remote_src=src["is_remote"], force=src["force"]) @@ -2294,7 +2294,7 @@ def test_copy_file_to_non_empty_sequential_data_set(ansible_zos_module, src): dest = get_tmp_ds_name() try: - hosts.all.zos_data_set(name=dest, type="seq", state="absent") + hosts.all.zos_data_set(name=dest, type="PDSE", state="absent") hosts.all.zos_copy(content="Inline content", dest=dest) copy_result = hosts.all.zos_copy(src=src["src"], dest=dest, remote_src=src["is_remote"], force=src["force"]) @@ -2432,7 +2432,7 @@ def test_copy_ps_to_empty_ps(ansible_zos_module, force): dest = get_tmp_ds_name() try: - hosts.all.zos_data_set(name=dest, type="seq", state="present") + hosts.all.zos_data_set(name=dest, type="PDSE", state="present") copy_res = hosts.all.zos_copy(src=src_ds, dest=dest, remote_src=True, force=force) verify_copy = hosts.all.shell( @@ -2458,7 +2458,7 @@ def test_copy_ps_to_non_empty_ps(ansible_zos_module, force): dest = get_tmp_ds_name() try: - hosts.all.zos_data_set(name=dest, type="seq", state="absent") + hosts.all.zos_data_set(name=dest, type="PDSE", state="absent") hosts.all.zos_copy(content="Inline content", dest=dest) copy_res = hosts.all.zos_copy(src=src_ds, dest=dest, remote_src=True, force=force) @@ -2489,7 +2489,7 @@ def test_copy_ps_to_non_empty_ps_with_special_chars(ansible_zos_module, force): dest = get_tmp_ds_name() try: - hosts.all.zos_data_set(name=dest, type="seq", state="absent") + hosts.all.zos_data_set(name=dest, type="PDSE", state="absent") hosts.all.zos_copy(content=DUMMY_DATA_SPECIAL_CHARS, dest=dest) copy_res = hosts.all.zos_copy(src=src_ds, dest=dest, remote_src=True, force=force) @@ -2520,7 +2520,7 @@ def test_backup_sequential_data_set(ansible_zos_module, backup): dest = get_tmp_ds_name() try: - hosts.all.zos_data_set(name=dest, type="seq", state="present") + hosts.all.zos_data_set(name=dest, type="PDSE", state="present") if backup: copy_res = hosts.all.zos_copy(src=src, dest=dest, force=True, backup=True, backup_name=backup) @@ -2565,10 +2565,10 @@ def test_copy_file_to_non_existing_member(ansible_zos_module, src): try: hosts.all.zos_data_set( name=data_set, - type="pdse", + type="PDSE", space_primary=5, space_type="M", - record_format="fba", + record_format="FBA", record_length=80, replace=True ) @@ -2611,14 +2611,14 @@ def test_copy_file_to_existing_member(ansible_zos_module, src): try: hosts.all.zos_data_set( name=data_set, - type="pdse", + type="PDSE", space_primary=5, space_type="M", - record_format="fba", + record_format="FBA", record_length=80, replace=True ) - hosts.all.zos_data_set(name=dest, type="member", state="present") + hosts.all.zos_data_set(name=dest, type="MEMBER", state="present") if src["is_file"]: copy_result = hosts.all.zos_copy(src=src["src"], dest=dest, force=src["force"], remote_src=src["is_remote"]) @@ -2647,31 +2647,31 @@ def test_copy_file_to_existing_member(ansible_zos_module, src): @pytest.mark.seq @pytest.mark.pdse @pytest.mark.parametrize("args", [ - dict(type="seq", is_binary=False), - dict(type="seq", is_binary=True), - dict(type="pds", is_binary=False), - dict(type="pds", is_binary=True), - dict(type="pdse", is_binary=False), - dict(type="pdse", is_binary=True) + dict(type="PDSE", is_binary=False), + dict(type="PDSE", is_binary=True), + dict(type="PDSE", is_binary=False), + dict(type="PDSE", is_binary=True), + dict(type="PDSE", is_binary=False), + dict(type="PDSE", is_binary=True) ]) def test_copy_data_set_to_non_existing_member(ansible_zos_module, args): hosts = ansible_zos_module src_data_set = get_tmp_ds_name() - src = src_data_set if args["type"] == "seq" else "{0}(TEST)".format(src_data_set) + src = src_data_set if args["type"] == "PDSE" else "{0}(TEST)".format(src_data_set) dest_data_set = get_tmp_ds_name() dest = "{0}(MEMBER)".format(dest_data_set) try: hosts.all.zos_data_set(name=src_data_set, type=args["type"]) - if args["type"] != "seq": - hosts.all.zos_data_set(name=src, type="member") + if args["type"] != "PDSE": + hosts.all.zos_data_set(name=src, type="MEMBER") hosts.all.shell( "decho 'Records for test' '{0}'".format(src), executable=SHELL_EXECUTABLE ) - hosts.all.zos_data_set(name=dest_data_set, type="pdse", replace=True) + hosts.all.zos_data_set(name=dest_data_set, type="PDSE", replace=True) copy_result = hosts.all.zos_copy(src=src, dest=dest, is_binary=args["is_binary"], remote_src=True) verify_copy = hosts.all.shell( @@ -2694,32 +2694,32 @@ def test_copy_data_set_to_non_existing_member(ansible_zos_module, args): @pytest.mark.seq @pytest.mark.pdse @pytest.mark.parametrize("args", [ - dict(type="seq", force=False), - dict(type="seq", force=True), - dict(type="pds", force=False), - dict(type="pds", force=True), - dict(type="pdse", force=False), - dict(type="pdse", force=True) + dict(type="PDSE", force=False), + dict(type="PDSE", force=True), + dict(type="PDSE", force=False), + dict(type="PDSE", force=True), + dict(type="PDSE", force=False), + dict(type="PDSE", force=True) ]) def test_copy_data_set_to_existing_member(ansible_zos_module, args): hosts = ansible_zos_module src_data_set = get_tmp_ds_name() - src = src_data_set if args["type"] == "seq" else "{0}(TEST)".format(src_data_set) + src = src_data_set if args["type"] == "PDSE" else "{0}(TEST)".format(src_data_set) dest_data_set = get_tmp_ds_name() dest = "{0}(MEMBER)".format(dest_data_set) try: hosts.all.zos_data_set(name=src_data_set, type=args["type"]) - if args["type"] != "seq": - hosts.all.zos_data_set(name=src, type="member") + if args["type"] != "PDSE": + hosts.all.zos_data_set(name=src, type="MEMBER") hosts.all.shell( "decho 'Records for test' '{0}'".format(src), executable=SHELL_EXECUTABLE ) - hosts.all.zos_data_set(name=dest_data_set, type="pdse", replace=True) - hosts.all.zos_data_set(name=dest, type="member") + hosts.all.zos_data_set(name=dest_data_set, type="PDSE", replace=True) + hosts.all.zos_data_set(name=dest, type="MEMBER") copy_result = hosts.all.zos_copy(src=src, dest=dest, force=args["force"], remote_src=True) verify_copy = hosts.all.shell( @@ -2838,7 +2838,7 @@ def test_copy_dir_crlf_endings_to_non_existing_pdse(ansible_zos_module): @pytest.mark.uss @pytest.mark.pdse -@pytest.mark.parametrize("src_type", ["pds", "pdse"]) +@pytest.mark.parametrize("src_type", ["PDSE", "PDSE"]) def test_copy_dir_to_existing_pdse(ansible_zos_module, src_type): hosts = ansible_zos_module src_dir = "/tmp/testdir" @@ -2854,7 +2854,7 @@ def test_copy_dir_to_existing_pdse(ansible_zos_module, src_type): type=src_type, space_primary=5, space_type="M", - record_format="fba", + record_format="FBA", record_length=80, ) @@ -2877,18 +2877,18 @@ def test_copy_dir_to_existing_pdse(ansible_zos_module, src_type): @pytest.mark.seq @pytest.mark.pdse -@pytest.mark.parametrize("src_type", ["seq", "pds", "pdse"]) +@pytest.mark.parametrize("src_type", ["PDSE", "PDSE", "PDSE"]) def test_copy_data_set_to_non_existing_pdse(ansible_zos_module, src_type): hosts = ansible_zos_module src_data_set = get_tmp_ds_name() - src = src_data_set if src_type == "seq" else "{0}(TEST)".format(src_data_set) + src = src_data_set if src_type == "PDSE" else "{0}(TEST)".format(src_data_set) dest_data_set = get_tmp_ds_name() dest = "{0}(MEMBER)".format(dest_data_set) try: hosts.all.zos_data_set(name=src_data_set, type=src_type) - if src_type != "seq": - hosts.all.zos_data_set(name=src, type="member") + if src_type != "PDSE": + hosts.all.zos_data_set(name=src, type="MEMBER") hosts.all.shell( "decho 'Records for test' '{0}'".format(src), @@ -2918,10 +2918,10 @@ def test_copy_data_set_to_non_existing_pdse(ansible_zos_module, src_type): @pytest.mark.pdse @pytest.mark.parametrize("args", [ - dict(src_type="pds", dest_type="pds"), - dict(src_type="pds", dest_type="pdse"), - dict(src_type="pdse", dest_type="pds"), - dict(src_type="pdse", dest_type="pdse"), + dict(src_type="PDSE", dest_type="PDSE"), + dict(src_type="PDSE", dest_type="PDSE"), + dict(src_type="PDSE", dest_type="PDSE"), + dict(src_type="PDSE", dest_type="PDSE"), ]) def test_copy_pds_to_existing_pds(ansible_zos_module, args): hosts = ansible_zos_module @@ -2973,7 +2973,7 @@ def test_copy_pds_loadlib_member_to_pds_loadlib_member(ansible_zos_module, is_cr hosts.all.zos_data_set( name=cobol_src_pds, state="present", - type="pds", + type="PDSE", space_primary=2, record_format="FB", record_length=80, @@ -2984,7 +2984,7 @@ def test_copy_pds_loadlib_member_to_pds_loadlib_member(ansible_zos_module, is_cr hosts.all.zos_data_set( name=src_lib, state="present", - type="pdse", + type="PDSE", record_format="U", record_length=0, block_size=32760, @@ -3006,7 +3006,7 @@ def test_copy_pds_loadlib_member_to_pds_loadlib_member(ansible_zos_module, is_cr hosts.all.zos_data_set( name=dest_lib, state="present", - type="pdse", + type="PDSE", record_format="U", record_length=0, block_size=32760, @@ -3018,7 +3018,7 @@ def test_copy_pds_loadlib_member_to_pds_loadlib_member(ansible_zos_module, is_cr hosts.all.zos_data_set( name=dest_lib_aliases, state="present", - type="pdse", + type="PDSE", record_format="U", record_length=0, block_size=32760, @@ -3111,7 +3111,7 @@ def test_copy_pds_loadlib_member_to_uss_to_loadlib(ansible_zos_module): hosts.all.zos_data_set( name=src_lib, state="present", - type="pdse", + type="PDSE", record_format="U", record_length=0, block_size=32760, @@ -3122,7 +3122,7 @@ def test_copy_pds_loadlib_member_to_uss_to_loadlib(ansible_zos_module): hosts.all.zos_data_set( name=cobol_src_pds, state="present", - type="pds", + type="PDSE", space_primary=2, record_format="FB", record_length=80, @@ -3132,7 +3132,7 @@ def test_copy_pds_loadlib_member_to_uss_to_loadlib(ansible_zos_module): hosts.all.zos_data_set( name=dest_lib, state="present", - type="pdse", + type="PDSE", record_format="U", record_length=0, block_size=32760, @@ -3143,7 +3143,7 @@ def test_copy_pds_loadlib_member_to_uss_to_loadlib(ansible_zos_module): hosts.all.zos_data_set( name=dest_lib_aliases, state="present", - type="pdse", + type="PDSE", record_format="U", record_length=0, block_size=32760, @@ -3261,7 +3261,7 @@ def test_copy_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): hosts.all.zos_data_set( name=cobol_src_pds, state="present", - type="pds", + type="PDSE", space_primary=2, record_format="FB", record_length=80, @@ -3272,7 +3272,7 @@ def test_copy_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): hosts.all.zos_data_set( name=src_lib, state="present", - type="pdse", + type="PDSE", record_format="U", record_length=0, block_size=32760, @@ -3300,7 +3300,7 @@ def test_copy_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): hosts.all.zos_data_set( name=dest_lib, state="present", - type="pdse", + type="PDSE", record_format="U", record_length=0, block_size=32760, @@ -3312,7 +3312,7 @@ def test_copy_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): hosts.all.zos_data_set( name=dest_lib_aliases, state="present", - type="pdse", + type="PDSE", record_format="U", record_length=0, block_size=32760, @@ -3453,7 +3453,7 @@ def test_copy_local_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): hosts.all.zos_data_set( name=cobol_src_pds, state="present", - type="pds", + type="PDSE", space_primary=2, record_format="FB", record_length=80, @@ -3464,7 +3464,7 @@ def test_copy_local_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): hosts.all.zos_data_set( name=src_lib, state="present", - type="pdse", + type="PDSE", record_format="U", record_length=0, block_size=32760, @@ -3524,7 +3524,7 @@ def test_copy_local_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): hosts.all.zos_data_set( name=dest_lib, state="present", - type="pdse", + type="PDSE", record_format="U", record_length=0, block_size=32760, @@ -3615,7 +3615,7 @@ def test_copy_pds_loadlib_to_uss_to_pds_loadlib(ansible_zos_module): hosts.all.zos_data_set( name=cobol_src_pds, state="present", - type="pds", + type="PDSE", space_primary=2, record_format="FB", record_length=80, @@ -3626,7 +3626,7 @@ def test_copy_pds_loadlib_to_uss_to_pds_loadlib(ansible_zos_module): hosts.all.zos_data_set( name=src_lib, state="present", - type="pdse", + type="PDSE", record_format="U", record_length=0, block_size=32760, @@ -3651,7 +3651,7 @@ def test_copy_pds_loadlib_to_uss_to_pds_loadlib(ansible_zos_module): hosts.all.zos_data_set( name=dest_lib, state="present", - type="pdse", + type="PDSE", record_format="U", record_length=0, block_size=32760, @@ -3663,7 +3663,7 @@ def test_copy_pds_loadlib_to_uss_to_pds_loadlib(ansible_zos_module): hosts.all.zos_data_set( name=dest_lib_aliases, state="present", - type="pdse", + type="PDSE", record_format="U", record_length=0, block_size=32760, @@ -3827,7 +3827,7 @@ def test_copy_executables_uss_to_member(ansible_zos_module, is_created): hosts.all.zos_data_set( name=dest, state="present", - type="pdse", + type="PDSE", record_format="U", record_length=0, block_size=32760, @@ -3878,7 +3878,7 @@ def test_copy_pds_member_with_system_symbol(ansible_zos_module): hosts.all.zos_data_set( name=dest, state="present", - type="pdse", + type="PDSE", replace=True ) @@ -3914,8 +3914,8 @@ def test_copy_multiple_data_set_members(ansible_zos_module): ds_list = ["{0}({1})".format(src, member) for member in member_list] try: - hosts.all.zos_data_set(name=src, type="pds") - hosts.all.zos_data_set(name=dest, type="pds") + hosts.all.zos_data_set(name=src, type="PDSE") + hosts.all.zos_data_set(name=dest, type="PDSE") for member in ds_list: hosts.all.shell( @@ -3960,8 +3960,8 @@ def test_copy_multiple_data_set_members_in_loop(ansible_zos_module): dest_ds_list = ["{0}({1})".format(dest, member) for member in member_list] try: - hosts.all.zos_data_set(name=src, type="pds") - hosts.all.zos_data_set(name=dest, type="pds") + hosts.all.zos_data_set(name=src, type="PDSE") + hosts.all.zos_data_set(name=dest, type="PDSE") for src_member in src_ds_list: hosts.all.shell( @@ -3994,7 +3994,7 @@ def test_copy_multiple_data_set_members_in_loop(ansible_zos_module): @pytest.mark.uss @pytest.mark.pdse -@pytest.mark.parametrize("ds_type", ["pds", "pdse"]) +@pytest.mark.parametrize("ds_type", ["PDSE", "PDSE"]) def test_copy_member_to_non_existing_uss_file(ansible_zos_module, ds_type): hosts = ansible_zos_module data_set = get_tmp_ds_name() @@ -4032,10 +4032,10 @@ def test_copy_member_to_non_existing_uss_file(ansible_zos_module, ds_type): @pytest.mark.uss @pytest.mark.pdse @pytest.mark.parametrize("args", [ - dict(ds_type="pds", force=False), - dict(ds_type="pds", force=True), - dict(ds_type="pdse", force=False), - dict(ds_type="pdse", force=True) + dict(ds_type="PDSE", force=False), + dict(ds_type="PDSE", force=True), + dict(ds_type="PDSE", force=False), + dict(ds_type="PDSE", force=True) ]) def test_copy_member_to_existing_uss_file(ansible_zos_module, args): hosts = ansible_zos_module @@ -4079,7 +4079,7 @@ def test_copy_member_to_existing_uss_file(ansible_zos_module, args): @pytest.mark.uss @pytest.mark.pdse @pytest.mark.aliases -@pytest.mark.parametrize("src_type", ["pds", "pdse"]) +@pytest.mark.parametrize("src_type", ["PDSE", "PDSE"]) def test_copy_pdse_to_uss_dir(ansible_zos_module, src_type): hosts = ansible_zos_module src_ds = get_tmp_ds_name() @@ -4124,7 +4124,7 @@ def test_copy_pdse_to_uss_dir(ansible_zos_module, src_type): @pytest.mark.uss @pytest.mark.pdse @pytest.mark.aliases -@pytest.mark.parametrize("src_type", ["pds", "pdse"]) +@pytest.mark.parametrize("src_type", ["PDSE", "PDSE"]) def test_copy_member_to_uss_dir(ansible_zos_module, src_type): hosts = ansible_zos_module src_ds = get_tmp_ds_name() @@ -4170,7 +4170,7 @@ def test_copy_member_to_uss_dir(ansible_zos_module, src_type): @pytest.mark.seq @pytest.mark.pdse -@pytest.mark.parametrize("src_type", ["pds", "pdse"]) +@pytest.mark.parametrize("src_type", ["PDSE", "PDSE"]) def test_copy_member_to_non_existing_seq_data_set(ansible_zos_module, src_type): hosts = ansible_zos_module src_ds = get_tmp_ds_name() @@ -4206,10 +4206,10 @@ def test_copy_member_to_non_existing_seq_data_set(ansible_zos_module, src_type): @pytest.mark.seq @pytest.mark.pdse @pytest.mark.parametrize("args", [ - dict(type="pds", force=False), - dict(type="pds", force=True), - dict(type="pdse", force=False), - dict(type="pdse", force=True), + dict(type="PDSE", force=False), + dict(type="PDSE", force=True), + dict(type="PDSE", force=False), + dict(type="PDSE", force=True), ]) def test_copy_member_to_existing_seq_data_set(ansible_zos_module, args): hosts = ansible_zos_module @@ -4218,7 +4218,7 @@ def test_copy_member_to_existing_seq_data_set(ansible_zos_module, args): dest = get_tmp_ds_name() try: - hosts.all.zos_data_set(name=dest, type="seq", state="present", replace=True) + hosts.all.zos_data_set(name=dest, type="PDSE", state="present", replace=True) hosts.all.zos_data_set(name=src_ds, type=args["type"], state="present") for data_set in [src, dest]: @@ -4251,7 +4251,7 @@ def test_copy_member_to_existing_seq_data_set(ansible_zos_module, args): @pytest.mark.uss @pytest.mark.pdse -@pytest.mark.parametrize("dest_type", ["pds", "pdse"]) +@pytest.mark.parametrize("dest_type", ["PDSE", "PDSE"]) def test_copy_file_to_member_convert_encoding(ansible_zos_module, dest_type): hosts = ansible_zos_module src = "/etc/profile" @@ -4262,7 +4262,7 @@ def test_copy_file_to_member_convert_encoding(ansible_zos_module, dest_type): type=dest_type, space_primary=5, space_type="M", - record_format="fba", + record_format="FBA", record_length=25, ) @@ -4294,10 +4294,10 @@ def test_copy_file_to_member_convert_encoding(ansible_zos_module, dest_type): @pytest.mark.pdse @pytest.mark.parametrize("args", [ - dict(type="pds", backup=None), - dict(type="pds", backup="USER.TEST.PDS.BACKUP"), - dict(type="pdse", backup=None), - dict(type="pdse", backup="USER.TEST.PDSE.BACKUP"), + dict(type="PDSE", backup=None), + dict(type="PDSE", backup="USER.TEST.PDS.BACKUP"), + dict(type="PDSE", backup=None), + dict(type="PDSE", backup="USER.TEST.PDSE.BACKUP"), ]) def test_backup_pds(ansible_zos_module, args): hosts = ansible_zos_module @@ -4343,7 +4343,7 @@ def test_backup_pds(ansible_zos_module, args): @pytest.mark.seq @pytest.mark.pdse -@pytest.mark.parametrize("src_type", ["seq", "pds", "pdse"]) +@pytest.mark.parametrize("src_type", ["PDSE", "PDSE", "PDSE"]) def test_copy_data_set_to_volume(ansible_zos_module, volumes_on_systems, src_type): hosts = ansible_zos_module source = get_tmp_ds_name() @@ -4359,8 +4359,8 @@ def test_copy_data_set_to_volume(ansible_zos_module, volumes_on_systems, src_typ try: hosts.all.zos_data_set(name=source, type=src_type, state='present') - if src_type != "seq": - hosts.all.zos_data_set(name=source_member, type="member", state='present') + if src_type != "PDSE": + hosts.all.zos_data_set(name=source_member, type="MEMBER", state='present') copy_res = hosts.all.zos_copy( src=source, @@ -4631,7 +4631,7 @@ def test_copy_uss_file_to_existing_sequential_data_set_twice_with_tmphlq_option( src_file = "/etc/profile" tmphlq = "TMPHLQ" try: - hosts.all.zos_data_set(name=dest, type="seq", state="present") + hosts.all.zos_data_set(name=dest, type="PDSE", state="present") copy_result = hosts.all.zos_copy(src=src_file, dest=dest, remote_src=True, force=force) copy_result = hosts.all.zos_copy(src=src_file, dest=dest, remote_src=True, backup=True, tmp_hlq=tmphlq, force=force) From 73eae1c3cd1786b242b359d5cf58d84d5b5eaf96 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Wed, 20 Mar 2024 19:57:12 -0600 Subject: [PATCH 329/495] Revert "Changed case sensitive options" This reverts commit 9c5bab3c39214ff2e4c0ab07f28a1624a0d336ae. --- .../functional/modules/test_zos_copy_func.py | 194 +++++++++--------- 1 file changed, 97 insertions(+), 97 deletions(-) diff --git a/tests/functional/modules/test_zos_copy_func.py b/tests/functional/modules/test_zos_copy_func.py index bbd598f1c..cf7f1494b 100644 --- a/tests/functional/modules/test_zos_copy_func.py +++ b/tests/functional/modules/test_zos_copy_func.py @@ -1684,7 +1684,7 @@ def test_copy_seq_data_set_to_seq_asa(ansible_zos_module): hosts.all.zos_data_set( name=src, state="present", - type="PDSE", + type="seq", replace=True ) @@ -1733,7 +1733,7 @@ def test_copy_seq_data_set_to_partitioned_asa(ansible_zos_module): hosts.all.zos_data_set( name=src, state="present", - type="PDSE", + type="seq", replace=True ) @@ -1784,7 +1784,7 @@ def test_copy_partitioned_data_set_to_seq_asa(ansible_zos_module): hosts.all.zos_data_set( name=src, state="present", - type="PDSE", + type="pdse", replace=True ) @@ -1834,7 +1834,7 @@ def test_copy_partitioned_data_set_to_partitioned_asa(ansible_zos_module): hosts.all.zos_data_set( name=src, state="present", - type="PDSE", + type="pdse", replace=True ) @@ -1884,7 +1884,7 @@ def test_copy_asa_data_set_to_text_file(ansible_zos_module): hosts.all.zos_data_set( name=src, state="present", - type="PDSE", + type="seq", record_format="FBA", record_length=80, block_size=27920, @@ -1977,8 +1977,8 @@ def test_copy_dest_lock(ansible_zos_module, ds_type): hosts.all.zos_data_set(name=data_set_1, state="present", type=ds_type, replace=True) hosts.all.zos_data_set(name=data_set_2, state="present", type=ds_type, replace=True) if ds_type == "PDS" or ds_type == "PDSE": - hosts.all.zos_data_set(name=src_data_set, state="present", type="MEMBER", replace=True) - hosts.all.zos_data_set(name=dest_data_set, state="present", type="MEMBER", replace=True) + hosts.all.zos_data_set(name=src_data_set, state="present", type="member", replace=True) + hosts.all.zos_data_set(name=dest_data_set, state="present", type="member", replace=True) # copy text_in source hosts.all.shell(cmd="decho \"{0}\" \"{1}\"".format(DUMMY_DATA, src_data_set)) # copy/compile c program and copy jcl to hold data set lock for n seconds in background(&) @@ -2266,7 +2266,7 @@ def test_copy_file_to_empty_sequential_data_set(ansible_zos_module, src): dest = get_tmp_ds_name() try: - hosts.all.zos_data_set(name=dest, type="PDSE", state="present") + hosts.all.zos_data_set(name=dest, type="seq", state="present") if src["is_file"]: copy_result = hosts.all.zos_copy(src=src["src"], dest=dest, remote_src=src["is_remote"], force=src["force"]) @@ -2294,7 +2294,7 @@ def test_copy_file_to_non_empty_sequential_data_set(ansible_zos_module, src): dest = get_tmp_ds_name() try: - hosts.all.zos_data_set(name=dest, type="PDSE", state="absent") + hosts.all.zos_data_set(name=dest, type="seq", state="absent") hosts.all.zos_copy(content="Inline content", dest=dest) copy_result = hosts.all.zos_copy(src=src["src"], dest=dest, remote_src=src["is_remote"], force=src["force"]) @@ -2432,7 +2432,7 @@ def test_copy_ps_to_empty_ps(ansible_zos_module, force): dest = get_tmp_ds_name() try: - hosts.all.zos_data_set(name=dest, type="PDSE", state="present") + hosts.all.zos_data_set(name=dest, type="seq", state="present") copy_res = hosts.all.zos_copy(src=src_ds, dest=dest, remote_src=True, force=force) verify_copy = hosts.all.shell( @@ -2458,7 +2458,7 @@ def test_copy_ps_to_non_empty_ps(ansible_zos_module, force): dest = get_tmp_ds_name() try: - hosts.all.zos_data_set(name=dest, type="PDSE", state="absent") + hosts.all.zos_data_set(name=dest, type="seq", state="absent") hosts.all.zos_copy(content="Inline content", dest=dest) copy_res = hosts.all.zos_copy(src=src_ds, dest=dest, remote_src=True, force=force) @@ -2489,7 +2489,7 @@ def test_copy_ps_to_non_empty_ps_with_special_chars(ansible_zos_module, force): dest = get_tmp_ds_name() try: - hosts.all.zos_data_set(name=dest, type="PDSE", state="absent") + hosts.all.zos_data_set(name=dest, type="seq", state="absent") hosts.all.zos_copy(content=DUMMY_DATA_SPECIAL_CHARS, dest=dest) copy_res = hosts.all.zos_copy(src=src_ds, dest=dest, remote_src=True, force=force) @@ -2520,7 +2520,7 @@ def test_backup_sequential_data_set(ansible_zos_module, backup): dest = get_tmp_ds_name() try: - hosts.all.zos_data_set(name=dest, type="PDSE", state="present") + hosts.all.zos_data_set(name=dest, type="seq", state="present") if backup: copy_res = hosts.all.zos_copy(src=src, dest=dest, force=True, backup=True, backup_name=backup) @@ -2565,10 +2565,10 @@ def test_copy_file_to_non_existing_member(ansible_zos_module, src): try: hosts.all.zos_data_set( name=data_set, - type="PDSE", + type="pdse", space_primary=5, space_type="M", - record_format="FBA", + record_format="fba", record_length=80, replace=True ) @@ -2611,14 +2611,14 @@ def test_copy_file_to_existing_member(ansible_zos_module, src): try: hosts.all.zos_data_set( name=data_set, - type="PDSE", + type="pdse", space_primary=5, space_type="M", - record_format="FBA", + record_format="fba", record_length=80, replace=True ) - hosts.all.zos_data_set(name=dest, type="MEMBER", state="present") + hosts.all.zos_data_set(name=dest, type="member", state="present") if src["is_file"]: copy_result = hosts.all.zos_copy(src=src["src"], dest=dest, force=src["force"], remote_src=src["is_remote"]) @@ -2647,31 +2647,31 @@ def test_copy_file_to_existing_member(ansible_zos_module, src): @pytest.mark.seq @pytest.mark.pdse @pytest.mark.parametrize("args", [ - dict(type="PDSE", is_binary=False), - dict(type="PDSE", is_binary=True), - dict(type="PDSE", is_binary=False), - dict(type="PDSE", is_binary=True), - dict(type="PDSE", is_binary=False), - dict(type="PDSE", is_binary=True) + dict(type="seq", is_binary=False), + dict(type="seq", is_binary=True), + dict(type="pds", is_binary=False), + dict(type="pds", is_binary=True), + dict(type="pdse", is_binary=False), + dict(type="pdse", is_binary=True) ]) def test_copy_data_set_to_non_existing_member(ansible_zos_module, args): hosts = ansible_zos_module src_data_set = get_tmp_ds_name() - src = src_data_set if args["type"] == "PDSE" else "{0}(TEST)".format(src_data_set) + src = src_data_set if args["type"] == "seq" else "{0}(TEST)".format(src_data_set) dest_data_set = get_tmp_ds_name() dest = "{0}(MEMBER)".format(dest_data_set) try: hosts.all.zos_data_set(name=src_data_set, type=args["type"]) - if args["type"] != "PDSE": - hosts.all.zos_data_set(name=src, type="MEMBER") + if args["type"] != "seq": + hosts.all.zos_data_set(name=src, type="member") hosts.all.shell( "decho 'Records for test' '{0}'".format(src), executable=SHELL_EXECUTABLE ) - hosts.all.zos_data_set(name=dest_data_set, type="PDSE", replace=True) + hosts.all.zos_data_set(name=dest_data_set, type="pdse", replace=True) copy_result = hosts.all.zos_copy(src=src, dest=dest, is_binary=args["is_binary"], remote_src=True) verify_copy = hosts.all.shell( @@ -2694,32 +2694,32 @@ def test_copy_data_set_to_non_existing_member(ansible_zos_module, args): @pytest.mark.seq @pytest.mark.pdse @pytest.mark.parametrize("args", [ - dict(type="PDSE", force=False), - dict(type="PDSE", force=True), - dict(type="PDSE", force=False), - dict(type="PDSE", force=True), - dict(type="PDSE", force=False), - dict(type="PDSE", force=True) + dict(type="seq", force=False), + dict(type="seq", force=True), + dict(type="pds", force=False), + dict(type="pds", force=True), + dict(type="pdse", force=False), + dict(type="pdse", force=True) ]) def test_copy_data_set_to_existing_member(ansible_zos_module, args): hosts = ansible_zos_module src_data_set = get_tmp_ds_name() - src = src_data_set if args["type"] == "PDSE" else "{0}(TEST)".format(src_data_set) + src = src_data_set if args["type"] == "seq" else "{0}(TEST)".format(src_data_set) dest_data_set = get_tmp_ds_name() dest = "{0}(MEMBER)".format(dest_data_set) try: hosts.all.zos_data_set(name=src_data_set, type=args["type"]) - if args["type"] != "PDSE": - hosts.all.zos_data_set(name=src, type="MEMBER") + if args["type"] != "seq": + hosts.all.zos_data_set(name=src, type="member") hosts.all.shell( "decho 'Records for test' '{0}'".format(src), executable=SHELL_EXECUTABLE ) - hosts.all.zos_data_set(name=dest_data_set, type="PDSE", replace=True) - hosts.all.zos_data_set(name=dest, type="MEMBER") + hosts.all.zos_data_set(name=dest_data_set, type="pdse", replace=True) + hosts.all.zos_data_set(name=dest, type="member") copy_result = hosts.all.zos_copy(src=src, dest=dest, force=args["force"], remote_src=True) verify_copy = hosts.all.shell( @@ -2838,7 +2838,7 @@ def test_copy_dir_crlf_endings_to_non_existing_pdse(ansible_zos_module): @pytest.mark.uss @pytest.mark.pdse -@pytest.mark.parametrize("src_type", ["PDSE", "PDSE"]) +@pytest.mark.parametrize("src_type", ["pds", "pdse"]) def test_copy_dir_to_existing_pdse(ansible_zos_module, src_type): hosts = ansible_zos_module src_dir = "/tmp/testdir" @@ -2854,7 +2854,7 @@ def test_copy_dir_to_existing_pdse(ansible_zos_module, src_type): type=src_type, space_primary=5, space_type="M", - record_format="FBA", + record_format="fba", record_length=80, ) @@ -2877,18 +2877,18 @@ def test_copy_dir_to_existing_pdse(ansible_zos_module, src_type): @pytest.mark.seq @pytest.mark.pdse -@pytest.mark.parametrize("src_type", ["PDSE", "PDSE", "PDSE"]) +@pytest.mark.parametrize("src_type", ["seq", "pds", "pdse"]) def test_copy_data_set_to_non_existing_pdse(ansible_zos_module, src_type): hosts = ansible_zos_module src_data_set = get_tmp_ds_name() - src = src_data_set if src_type == "PDSE" else "{0}(TEST)".format(src_data_set) + src = src_data_set if src_type == "seq" else "{0}(TEST)".format(src_data_set) dest_data_set = get_tmp_ds_name() dest = "{0}(MEMBER)".format(dest_data_set) try: hosts.all.zos_data_set(name=src_data_set, type=src_type) - if src_type != "PDSE": - hosts.all.zos_data_set(name=src, type="MEMBER") + if src_type != "seq": + hosts.all.zos_data_set(name=src, type="member") hosts.all.shell( "decho 'Records for test' '{0}'".format(src), @@ -2918,10 +2918,10 @@ def test_copy_data_set_to_non_existing_pdse(ansible_zos_module, src_type): @pytest.mark.pdse @pytest.mark.parametrize("args", [ - dict(src_type="PDSE", dest_type="PDSE"), - dict(src_type="PDSE", dest_type="PDSE"), - dict(src_type="PDSE", dest_type="PDSE"), - dict(src_type="PDSE", dest_type="PDSE"), + dict(src_type="pds", dest_type="pds"), + dict(src_type="pds", dest_type="pdse"), + dict(src_type="pdse", dest_type="pds"), + dict(src_type="pdse", dest_type="pdse"), ]) def test_copy_pds_to_existing_pds(ansible_zos_module, args): hosts = ansible_zos_module @@ -2973,7 +2973,7 @@ def test_copy_pds_loadlib_member_to_pds_loadlib_member(ansible_zos_module, is_cr hosts.all.zos_data_set( name=cobol_src_pds, state="present", - type="PDSE", + type="pds", space_primary=2, record_format="FB", record_length=80, @@ -2984,7 +2984,7 @@ def test_copy_pds_loadlib_member_to_pds_loadlib_member(ansible_zos_module, is_cr hosts.all.zos_data_set( name=src_lib, state="present", - type="PDSE", + type="pdse", record_format="U", record_length=0, block_size=32760, @@ -3006,7 +3006,7 @@ def test_copy_pds_loadlib_member_to_pds_loadlib_member(ansible_zos_module, is_cr hosts.all.zos_data_set( name=dest_lib, state="present", - type="PDSE", + type="pdse", record_format="U", record_length=0, block_size=32760, @@ -3018,7 +3018,7 @@ def test_copy_pds_loadlib_member_to_pds_loadlib_member(ansible_zos_module, is_cr hosts.all.zos_data_set( name=dest_lib_aliases, state="present", - type="PDSE", + type="pdse", record_format="U", record_length=0, block_size=32760, @@ -3111,7 +3111,7 @@ def test_copy_pds_loadlib_member_to_uss_to_loadlib(ansible_zos_module): hosts.all.zos_data_set( name=src_lib, state="present", - type="PDSE", + type="pdse", record_format="U", record_length=0, block_size=32760, @@ -3122,7 +3122,7 @@ def test_copy_pds_loadlib_member_to_uss_to_loadlib(ansible_zos_module): hosts.all.zos_data_set( name=cobol_src_pds, state="present", - type="PDSE", + type="pds", space_primary=2, record_format="FB", record_length=80, @@ -3132,7 +3132,7 @@ def test_copy_pds_loadlib_member_to_uss_to_loadlib(ansible_zos_module): hosts.all.zos_data_set( name=dest_lib, state="present", - type="PDSE", + type="pdse", record_format="U", record_length=0, block_size=32760, @@ -3143,7 +3143,7 @@ def test_copy_pds_loadlib_member_to_uss_to_loadlib(ansible_zos_module): hosts.all.zos_data_set( name=dest_lib_aliases, state="present", - type="PDSE", + type="pdse", record_format="U", record_length=0, block_size=32760, @@ -3261,7 +3261,7 @@ def test_copy_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): hosts.all.zos_data_set( name=cobol_src_pds, state="present", - type="PDSE", + type="pds", space_primary=2, record_format="FB", record_length=80, @@ -3272,7 +3272,7 @@ def test_copy_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): hosts.all.zos_data_set( name=src_lib, state="present", - type="PDSE", + type="pdse", record_format="U", record_length=0, block_size=32760, @@ -3300,7 +3300,7 @@ def test_copy_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): hosts.all.zos_data_set( name=dest_lib, state="present", - type="PDSE", + type="pdse", record_format="U", record_length=0, block_size=32760, @@ -3312,7 +3312,7 @@ def test_copy_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): hosts.all.zos_data_set( name=dest_lib_aliases, state="present", - type="PDSE", + type="pdse", record_format="U", record_length=0, block_size=32760, @@ -3453,7 +3453,7 @@ def test_copy_local_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): hosts.all.zos_data_set( name=cobol_src_pds, state="present", - type="PDSE", + type="pds", space_primary=2, record_format="FB", record_length=80, @@ -3464,7 +3464,7 @@ def test_copy_local_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): hosts.all.zos_data_set( name=src_lib, state="present", - type="PDSE", + type="pdse", record_format="U", record_length=0, block_size=32760, @@ -3524,7 +3524,7 @@ def test_copy_local_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): hosts.all.zos_data_set( name=dest_lib, state="present", - type="PDSE", + type="pdse", record_format="U", record_length=0, block_size=32760, @@ -3615,7 +3615,7 @@ def test_copy_pds_loadlib_to_uss_to_pds_loadlib(ansible_zos_module): hosts.all.zos_data_set( name=cobol_src_pds, state="present", - type="PDSE", + type="pds", space_primary=2, record_format="FB", record_length=80, @@ -3626,7 +3626,7 @@ def test_copy_pds_loadlib_to_uss_to_pds_loadlib(ansible_zos_module): hosts.all.zos_data_set( name=src_lib, state="present", - type="PDSE", + type="pdse", record_format="U", record_length=0, block_size=32760, @@ -3651,7 +3651,7 @@ def test_copy_pds_loadlib_to_uss_to_pds_loadlib(ansible_zos_module): hosts.all.zos_data_set( name=dest_lib, state="present", - type="PDSE", + type="pdse", record_format="U", record_length=0, block_size=32760, @@ -3663,7 +3663,7 @@ def test_copy_pds_loadlib_to_uss_to_pds_loadlib(ansible_zos_module): hosts.all.zos_data_set( name=dest_lib_aliases, state="present", - type="PDSE", + type="pdse", record_format="U", record_length=0, block_size=32760, @@ -3827,7 +3827,7 @@ def test_copy_executables_uss_to_member(ansible_zos_module, is_created): hosts.all.zos_data_set( name=dest, state="present", - type="PDSE", + type="pdse", record_format="U", record_length=0, block_size=32760, @@ -3878,7 +3878,7 @@ def test_copy_pds_member_with_system_symbol(ansible_zos_module): hosts.all.zos_data_set( name=dest, state="present", - type="PDSE", + type="pdse", replace=True ) @@ -3914,8 +3914,8 @@ def test_copy_multiple_data_set_members(ansible_zos_module): ds_list = ["{0}({1})".format(src, member) for member in member_list] try: - hosts.all.zos_data_set(name=src, type="PDSE") - hosts.all.zos_data_set(name=dest, type="PDSE") + hosts.all.zos_data_set(name=src, type="pds") + hosts.all.zos_data_set(name=dest, type="pds") for member in ds_list: hosts.all.shell( @@ -3960,8 +3960,8 @@ def test_copy_multiple_data_set_members_in_loop(ansible_zos_module): dest_ds_list = ["{0}({1})".format(dest, member) for member in member_list] try: - hosts.all.zos_data_set(name=src, type="PDSE") - hosts.all.zos_data_set(name=dest, type="PDSE") + hosts.all.zos_data_set(name=src, type="pds") + hosts.all.zos_data_set(name=dest, type="pds") for src_member in src_ds_list: hosts.all.shell( @@ -3994,7 +3994,7 @@ def test_copy_multiple_data_set_members_in_loop(ansible_zos_module): @pytest.mark.uss @pytest.mark.pdse -@pytest.mark.parametrize("ds_type", ["PDSE", "PDSE"]) +@pytest.mark.parametrize("ds_type", ["pds", "pdse"]) def test_copy_member_to_non_existing_uss_file(ansible_zos_module, ds_type): hosts = ansible_zos_module data_set = get_tmp_ds_name() @@ -4032,10 +4032,10 @@ def test_copy_member_to_non_existing_uss_file(ansible_zos_module, ds_type): @pytest.mark.uss @pytest.mark.pdse @pytest.mark.parametrize("args", [ - dict(ds_type="PDSE", force=False), - dict(ds_type="PDSE", force=True), - dict(ds_type="PDSE", force=False), - dict(ds_type="PDSE", force=True) + dict(ds_type="pds", force=False), + dict(ds_type="pds", force=True), + dict(ds_type="pdse", force=False), + dict(ds_type="pdse", force=True) ]) def test_copy_member_to_existing_uss_file(ansible_zos_module, args): hosts = ansible_zos_module @@ -4079,7 +4079,7 @@ def test_copy_member_to_existing_uss_file(ansible_zos_module, args): @pytest.mark.uss @pytest.mark.pdse @pytest.mark.aliases -@pytest.mark.parametrize("src_type", ["PDSE", "PDSE"]) +@pytest.mark.parametrize("src_type", ["pds", "pdse"]) def test_copy_pdse_to_uss_dir(ansible_zos_module, src_type): hosts = ansible_zos_module src_ds = get_tmp_ds_name() @@ -4124,7 +4124,7 @@ def test_copy_pdse_to_uss_dir(ansible_zos_module, src_type): @pytest.mark.uss @pytest.mark.pdse @pytest.mark.aliases -@pytest.mark.parametrize("src_type", ["PDSE", "PDSE"]) +@pytest.mark.parametrize("src_type", ["pds", "pdse"]) def test_copy_member_to_uss_dir(ansible_zos_module, src_type): hosts = ansible_zos_module src_ds = get_tmp_ds_name() @@ -4170,7 +4170,7 @@ def test_copy_member_to_uss_dir(ansible_zos_module, src_type): @pytest.mark.seq @pytest.mark.pdse -@pytest.mark.parametrize("src_type", ["PDSE", "PDSE"]) +@pytest.mark.parametrize("src_type", ["pds", "pdse"]) def test_copy_member_to_non_existing_seq_data_set(ansible_zos_module, src_type): hosts = ansible_zos_module src_ds = get_tmp_ds_name() @@ -4206,10 +4206,10 @@ def test_copy_member_to_non_existing_seq_data_set(ansible_zos_module, src_type): @pytest.mark.seq @pytest.mark.pdse @pytest.mark.parametrize("args", [ - dict(type="PDSE", force=False), - dict(type="PDSE", force=True), - dict(type="PDSE", force=False), - dict(type="PDSE", force=True), + dict(type="pds", force=False), + dict(type="pds", force=True), + dict(type="pdse", force=False), + dict(type="pdse", force=True), ]) def test_copy_member_to_existing_seq_data_set(ansible_zos_module, args): hosts = ansible_zos_module @@ -4218,7 +4218,7 @@ def test_copy_member_to_existing_seq_data_set(ansible_zos_module, args): dest = get_tmp_ds_name() try: - hosts.all.zos_data_set(name=dest, type="PDSE", state="present", replace=True) + hosts.all.zos_data_set(name=dest, type="seq", state="present", replace=True) hosts.all.zos_data_set(name=src_ds, type=args["type"], state="present") for data_set in [src, dest]: @@ -4251,7 +4251,7 @@ def test_copy_member_to_existing_seq_data_set(ansible_zos_module, args): @pytest.mark.uss @pytest.mark.pdse -@pytest.mark.parametrize("dest_type", ["PDSE", "PDSE"]) +@pytest.mark.parametrize("dest_type", ["pds", "pdse"]) def test_copy_file_to_member_convert_encoding(ansible_zos_module, dest_type): hosts = ansible_zos_module src = "/etc/profile" @@ -4262,7 +4262,7 @@ def test_copy_file_to_member_convert_encoding(ansible_zos_module, dest_type): type=dest_type, space_primary=5, space_type="M", - record_format="FBA", + record_format="fba", record_length=25, ) @@ -4294,10 +4294,10 @@ def test_copy_file_to_member_convert_encoding(ansible_zos_module, dest_type): @pytest.mark.pdse @pytest.mark.parametrize("args", [ - dict(type="PDSE", backup=None), - dict(type="PDSE", backup="USER.TEST.PDS.BACKUP"), - dict(type="PDSE", backup=None), - dict(type="PDSE", backup="USER.TEST.PDSE.BACKUP"), + dict(type="pds", backup=None), + dict(type="pds", backup="USER.TEST.PDS.BACKUP"), + dict(type="pdse", backup=None), + dict(type="pdse", backup="USER.TEST.PDSE.BACKUP"), ]) def test_backup_pds(ansible_zos_module, args): hosts = ansible_zos_module @@ -4343,7 +4343,7 @@ def test_backup_pds(ansible_zos_module, args): @pytest.mark.seq @pytest.mark.pdse -@pytest.mark.parametrize("src_type", ["PDSE", "PDSE", "PDSE"]) +@pytest.mark.parametrize("src_type", ["seq", "pds", "pdse"]) def test_copy_data_set_to_volume(ansible_zos_module, volumes_on_systems, src_type): hosts = ansible_zos_module source = get_tmp_ds_name() @@ -4359,8 +4359,8 @@ def test_copy_data_set_to_volume(ansible_zos_module, volumes_on_systems, src_typ try: hosts.all.zos_data_set(name=source, type=src_type, state='present') - if src_type != "PDSE": - hosts.all.zos_data_set(name=source_member, type="MEMBER", state='present') + if src_type != "seq": + hosts.all.zos_data_set(name=source_member, type="member", state='present') copy_res = hosts.all.zos_copy( src=source, @@ -4631,7 +4631,7 @@ def test_copy_uss_file_to_existing_sequential_data_set_twice_with_tmphlq_option( src_file = "/etc/profile" tmphlq = "TMPHLQ" try: - hosts.all.zos_data_set(name=dest, type="PDSE", state="present") + hosts.all.zos_data_set(name=dest, type="seq", state="present") copy_result = hosts.all.zos_copy(src=src_file, dest=dest, remote_src=True, force=force) copy_result = hosts.all.zos_copy(src=src_file, dest=dest, remote_src=True, backup=True, tmp_hlq=tmphlq, force=force) From 6e08d0730800502958876813bdd7577199dc8d50 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Fri, 29 Mar 2024 09:06:26 -0600 Subject: [PATCH 330/495] Added Needs Triage on bug template (#1314) --- .github/ISSUE_TEMPLATE/bug_issue.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/ISSUE_TEMPLATE/bug_issue.yml b/.github/ISSUE_TEMPLATE/bug_issue.yml index 2193cb615..9395c85b1 100644 --- a/.github/ISSUE_TEMPLATE/bug_issue.yml +++ b/.github/ISSUE_TEMPLATE/bug_issue.yml @@ -1,7 +1,7 @@ name: Report a bug description: Request that a bug be reviewed. Complete all required fields. title: "[Bug] Enter description" -labels: [Bug] +labels: ["Bug", "Needs Triage" ] assignees: - IBMAnsibleHelper body: From 3b4951042c5ae79587e66e52dae2fa27dc922e1a Mon Sep 17 00:00:00 2001 From: IsaacVRey <isaac.vega.rey@gmail.com> Date: Fri, 29 Mar 2024 15:43:19 -0600 Subject: [PATCH 331/495] Add galaxy importer into ac as a command and create a GitHub action (#1305) * Create bandit.yml * Update bandit.yml * Update bandit.yml * Update bandit.yml * Update bandit.yml * Update bandit.yml * Update bandit.yml * Added changelog action * Update changelog.yml * Create close-stale-issues * Update close-stale-issues Quite el workflow dispatch * Create bandit2.yml * Update bandit2.yml * Update zos_copy.py * Update zos_copy.py Me equivoque * Create ansible-test.yml * Update ansible-test.yml * Update ansible-test.yml * Update ansible-test.yml * Update ansible-test.yml * Update ansible-test.yml * Update ansible-test.yml * Update ansible-test.yml * Update ansible-test.yml * Create ac_changelog.yml * Delete .github/workflows/ac_changelog.yml * Create ac_changelog.yml * Update ac_changelog.yml * Add galaxy importer to ac and create workflow with the ac command for it * Delete a jump of line * Create ac-galaxy-importer.yml * Rename action * Rename job * Update ac-galaxy-importer.yml * Fix * Fix * Rename ac-galaxy-importer to ac-galaxy-importer.yml * Acomodate function documentation in ac * Delete invasive files * Added line * Update ac * Update ac * Update ac --------- Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> --- .github/workflows/ac-galaxy-importer.yml | 40 ++++++++++++++++++++++++ ac | 18 +++++++++++ 2 files changed, 58 insertions(+) create mode 100644 .github/workflows/ac-galaxy-importer.yml diff --git a/.github/workflows/ac-galaxy-importer.yml b/.github/workflows/ac-galaxy-importer.yml new file mode 100644 index 000000000..271f01c22 --- /dev/null +++ b/.github/workflows/ac-galaxy-importer.yml @@ -0,0 +1,40 @@ +name: AC Galaxy Importer + +on: + pull_request: + branches: + - dev + - staging* + +jobs: + galaxy-importer: + runs-on: ubuntu-latest + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: 3.11 + + - name: Set up venv + run: | + python -m pip install --upgrade pip + pip install virtualenv + mkdir venv + virtualenv venv/venv-2.16 + + - name: Install dependencies + run: | + source venv/venv-2.16/bin/activate + python -m pip install --upgrade pip + pip install ansible + pip install ansible-importer + pip install galaxy-importer + + - name: Run ac-galaxy-importer + run: | + source venv/venv-2.16/bin/activate + ./ac --ac-galaxy-importer diff --git a/ac b/ac index bb307f4a6..9aee6a02d 100755 --- a/ac +++ b/ac @@ -242,6 +242,18 @@ ac_build(){ } # ------------------------------------------------------------------------------ +# Run galaxy importer on collection. +# ------------------------------------------------------------------------------ +#->ac-galaxy-importer: +## Build current branch and run galaxy importer on collection. +## Usage: ac [--ac-galaxy-importer] +## Example: +## $ ac --ac-galaxy-importer +ac_galaxy_importer(){ + message "Running Galaxy Importer" + . $VENV_BIN/activate && collection_name=$($VENV_BIN/ansible-galaxy collection build --force | awk -F/ '{print $NF}') && python -m galaxy_importer.main $collection_name +} + # Run a changelog lint locally # ------------------------------------------------------------------------------ #->ac-changelog: @@ -679,6 +691,10 @@ while true; do ensure_managed_venv_exists $1 option_submitted="--ac-build" ;; + --ac-galaxy-importer) # Command + ensure_managed_venv_exists $1 + option_submitted="--ac-galaxy-importer" + ;; --ac-changelog) # Command ensure_managed_venv_exists $1 option_submitted="--ac-changelog" @@ -835,6 +851,8 @@ if [ "$option_submitted" ] && [ "$option_submitted" = "--ac-bandit" ] ; then ac_bandit $level elif [ "$option_submitted" ] && [ "$option_submitted" = "--ac-build" ] ; then ac_build +elif [ "$option_submitted" ] && [ "$option_submitted" = "--ac-galaxy-importer" ] ; then + ac_galaxy_importer elif [ "$option_submitted" ] && [ "$option_submitted" = "--ac-changelog" ] ; then ac_changelog $command elif [ "$option_submitted" ] && [ "$option_submitted" = "--ac-install" ] ; then From 5788acdc7a16895407189603aee0c7ab965352ec Mon Sep 17 00:00:00 2001 From: IsaacVRey <isaac.vega.rey@gmail.com> Date: Fri, 29 Mar 2024 15:45:03 -0600 Subject: [PATCH 332/495] Create bandit github action using the ac command (#1310) --- .github/workflows/ac-bandit.yml | 38 +++++++++++++++++++++++++++++++++ 1 file changed, 38 insertions(+) create mode 100644 .github/workflows/ac-bandit.yml diff --git a/.github/workflows/ac-bandit.yml b/.github/workflows/ac-bandit.yml new file mode 100644 index 000000000..288fb92b1 --- /dev/null +++ b/.github/workflows/ac-bandit.yml @@ -0,0 +1,38 @@ +name: AC Bandit + +on: + pull_request: + branches: + - dev + - staging* + +jobs: + bandit: + runs-on: ubuntu-latest + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: 3.11 + + - name: Set up venv + run: | + python -m pip install --upgrade pip + pip install virtualenv + mkdir venv + virtualenv venv/venv-2.16 + + - name: Install dependencies + run: | + source venv/venv-2.16/bin/activate + python -m pip install --upgrade pip + pip install bandit + + - name: Run ac-bandit + run: | + source venv/venv-2.16/bin/activate + ./ac --ac-bandit --level l From f7e9c1bc3f27291009c5387e597e39c71405b7eb Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Fri, 29 Mar 2024 15:57:54 -0600 Subject: [PATCH 333/495] [Enabler] [zos_copy] Fix sanity issues and remove ignore files (#1307) * Removed localchartset * Fixed sanity * Updated to encoding * Updated encoding parser * Fixed encoding to null when remote_src=true * Updated condition to set encoding to none * removed size parameter * Changed src * Added full local src * Corrected base name for temp_path * Fixed pep8 issue * Changed temp_path to src logic * Added src to temp_path * Added module fail * Replaced temp name generation * Placed temporary file into tmp folder * Removing temp_path * Added latest temp path changes * Fixed lock check issue * Removed temp_path * Removed temp path * Removed is_something vasrs * Fixed comment * Added latest zos_copy_changes * Removed print statements * Removed ingore entry * removed entries * Corrected case sensitivity in tests * Fixed lowercase * Modified docs * Added changelog --- .../fragments/1307-update-sanity-zos_copy.yml | 10 + plugins/action/zos_copy.py | 61 +++-- plugins/modules/zos_copy.py | 224 ++++++++---------- .../functional/modules/test_zos_copy_func.py | 200 ++++++++-------- tests/sanity/ignore-2.14.txt | 2 - tests/sanity/ignore-2.15.txt | 2 - tests/sanity/ignore-2.16.txt | 2 - 7 files changed, 241 insertions(+), 260 deletions(-) create mode 100644 changelogs/fragments/1307-update-sanity-zos_copy.yml diff --git a/changelogs/fragments/1307-update-sanity-zos_copy.yml b/changelogs/fragments/1307-update-sanity-zos_copy.yml new file mode 100644 index 000000000..858f0b64c --- /dev/null +++ b/changelogs/fragments/1307-update-sanity-zos_copy.yml @@ -0,0 +1,10 @@ +minor_changes: + - zos_copy - Documented `group` and `owner` options. + (https://github.com/ansible-collections/ibm_zos_core/pull/1307). + +trivial: + - zos_copy - Removed many of the variables that were passed from the + action plugin to the module, reimplementing the logic inside the + module instead. Removed the use of temp_path variable inside zos_copy + in favor of using remote_src to deal with files copied to remote. + (https://github.com/ansible-collections/ibm_zos_core/pull/1307). \ No newline at end of file diff --git a/plugins/action/zos_copy.py b/plugins/action/zos_copy.py index 592126b00..e9c238b87 100644 --- a/plugins/action/zos_copy.py +++ b/plugins/action/zos_copy.py @@ -29,11 +29,10 @@ from ansible import cli from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.data_set import ( - is_member, - is_data_set + is_member ) -from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import encode, validation +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import encode from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import template @@ -69,8 +68,8 @@ def run(self, tmp=None, task_vars=None): owner = task_args.get("owner", None) group = task_args.get("group", None) - is_pds = is_src_dir = False - temp_path = is_uss = is_mvs_dest = src_member = None + is_src_dir = False + temp_path = is_uss = None if dest: if not isinstance(dest, string_types): @@ -78,7 +77,6 @@ def run(self, tmp=None, task_vars=None): return self._exit_action(result, msg, failed=True) else: is_uss = "/" in dest - is_mvs_dest = is_data_set(dest) else: msg = "Destination is required" return self._exit_action(result, msg, failed=True) @@ -96,13 +94,11 @@ def run(self, tmp=None, task_vars=None): msg = "'src' or 'dest' must not be empty" return self._exit_action(result, msg, failed=True) else: - src_member = is_member(src) if not remote_src: if src.startswith('~'): src = os.path.expanduser(src) src = os.path.realpath(src) is_src_dir = os.path.isdir(src) - is_pds = is_src_dir and is_mvs_dest if not src and not content: msg = "'src' or 'content' is required" @@ -196,11 +192,6 @@ def run(self, tmp=None, task_vars=None): src = rendered_dir - task_args["size"] = sum( - os.stat(os.path.join(validation.validate_safe_path(path), validation.validate_safe_path(f))).st_size - for path, dirs, files in os.walk(src) - for f in files - ) else: if mode == "preserve": task_args["mode"] = "0{0:o}".format( @@ -231,7 +222,6 @@ def run(self, tmp=None, task_vars=None): src = rendered_file - task_args["size"] = os.stat(src).st_size display.vvv(u"ibm_zos_copy calculated size: {0}".format(os.stat(src).st_size), host=self._play_context.remote_addr) transfer_res = self._copy_to_remote( src, is_dir=is_src_dir, ignore_stderr=ignore_sftp_stderr @@ -242,15 +232,31 @@ def run(self, tmp=None, task_vars=None): return transfer_res display.vvv(u"ibm_zos_copy temp path: {0}".format(transfer_res.get("temp_path")), host=self._play_context.remote_addr) + if not encoding: + encoding = { + "from": encode.Defaults.get_default_system_charset(), + } + + """ + We format temp_path correctly to pass it as src option to the module, + we keep the original source to return to the user and avoid confusion + by returning the temp_path created. + """ + original_src = task_args.get("src") + if original_src: + if not remote_src: + base_name = os.path.basename(original_src) + if original_src.endswith("/"): + src = temp_path + "/" + else: + src = temp_path + else: + src = temp_path + task_args.update( dict( - is_uss=is_uss, - is_pds=is_pds, - is_src_dir=is_src_dir, - src_member=src_member, - temp_path=temp_path, - is_mvs_dest=is_mvs_dest, - local_charset=encode.Defaults.get_default_system_charset() + src=src, + encoding=encoding, ) ) copy_res = self._execute_module( @@ -284,17 +290,20 @@ def run(self, tmp=None, task_vars=None): self._remote_cleanup(dest, copy_res.get("dest_exists"), task_vars) return result - return _update_result(is_binary, copy_res, self._task.args) + return _update_result(is_binary, copy_res, self._task.args, original_src) def _copy_to_remote(self, src, is_dir=False, ignore_stderr=False): """Copy a file or directory to the remote z/OS system """ - temp_path = "/{0}/{1}".format(gettempprefix(), _create_temp_path_name()) + temp_path = "/{0}/{1}/{2}".format(gettempprefix(), _create_temp_path_name(), os.path.basename(src)) + self._connection.exec_command("mkdir -p {0}".format(os.path.dirname(temp_path))) _src = src.replace("#", "\\#") _sftp_action = 'put' + full_temp_path = temp_path if is_dir: src = src.rstrip("/") if src.endswith("/") else src + temp_path = os.path.dirname(temp_path) base = os.path.basename(src) self._connection.exec_command("mkdir -p {0}/{1}".format(temp_path, base)) _sftp_action += ' -r' # add '-r` to clone the source trees @@ -379,7 +388,7 @@ def _copy_to_remote(self, src, is_dir=False, ignore_stderr=False): display.vvv(u"ibm_zos_copy SSH transfer method restored to {0}".format(user_ssh_transfer_method), host=self._play_context.remote_addr) is_ssh_transfer_method_updated = False - return dict(temp_path=temp_path) + return dict(temp_path=full_temp_path) def _remote_cleanup(self, dest, dest_exists, task_vars): """Remove all files or data sets pointed to by 'dest' on the remote @@ -417,7 +426,7 @@ def _exit_action(self, result, msg, failed=False): return result -def _update_result(is_binary, copy_res, original_args): +def _update_result(is_binary, copy_res, original_args, original_src): """ Helper function to update output result with the provided values """ ds_type = copy_res.get("ds_type") src = copy_res.get("src") @@ -431,7 +440,7 @@ def _update_result(is_binary, copy_res, original_args): invocation=dict(module_args=original_args), ) if src: - updated_result["src"] = src + updated_result["src"] = original_src if note: updated_result["note"] = note if backup_name: diff --git a/plugins/modules/zos_copy.py b/plugins/modules/zos_copy.py index a854d1cae..6991c4d81 100644 --- a/plugins/modules/zos_copy.py +++ b/plugins/modules/zos_copy.py @@ -143,7 +143,7 @@ to: description: - The encoding to be converted to - required: true + required: false type: str tmp_hlq: description: @@ -243,6 +243,15 @@ type: bool default: true required: false + group: + description: + - Name of the group that will own the file system objects. + - When left unspecified, it uses the current group of the current user + unless you are root, in which case it can preserve the previous + ownership. + - This option is only applicable if C(dest) is USS, otherwise ignored. + type: str + required: false mode: description: - The permission of the destination file or directory. @@ -261,6 +270,15 @@ the source file. type: str required: false + owner: + description: + - Name of the user that should own the filesystem object, as would be + passed to the chown command. + - When left unspecified, it uses the current user unless you are root, + in which case it can preserve the previous ownership. + - This option is only applicable if C(dest) is USS, otherwise ignored. + type: str + required: false remote_src: description: - If set to C(false), the module searches for C(src) at the local machine. @@ -803,37 +821,35 @@ """ -from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import ( - ZOAUImportError, -) -from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.mvs_cmd import ( - idcams -) -from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import ( - better_arg_parser, data_set, encode, backup, copy, validation, -) -from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.ansible_module import ( - AnsibleModuleHelper, -) -from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.data_set import ( - is_member -) -from ansible.module_utils._text import to_bytes, to_native -from ansible.module_utils.basic import AnsibleModule -from ansible.module_utils.six import PY3 -from re import IGNORECASE -from hashlib import sha256 import glob +import math +import os import shutil import stat -import math import tempfile -import os import traceback +from hashlib import sha256 +from re import IGNORECASE + +from ansible.module_utils._text import to_bytes, to_native +from ansible.module_utils.basic import AnsibleModule +from ansible.module_utils.six import PY3 +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import ( + backup, better_arg_parser, copy, data_set, encode, validation) +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.ansible_module import \ + AnsibleModuleHelper +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.data_set import ( + is_member, + is_data_set +) +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import \ + ZOAUImportError +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.mvs_cmd import \ + idcams if PY3: - from re import fullmatch import pathlib + from re import fullmatch else: from re import match as fullmatch @@ -892,7 +908,6 @@ def run_command(self, cmd, **kwargs): def copy_to_seq( self, src, - temp_path, conv_path, dest, src_type @@ -904,13 +919,11 @@ def copy_to_seq( Arguments: src {str} -- Path to USS file or data set name - temp_path {str} -- Path to the location where the control node - transferred data to conv_path {str} -- Path to the converted source file dest {str} -- Name of destination data set src_type {str} -- Type of the source """ - new_src = conv_path or temp_path or src + new_src = conv_path or src copy_args = dict() copy_args["options"] = "" @@ -1031,15 +1044,15 @@ def copy_tree(self, src_dir, dest_dir, dirs_exist_ok=False): entries = list(itr) return self._copy_tree(entries, src_dir, dest_dir, dirs_exist_ok=dirs_exist_ok) - def convert_encoding(self, src, temp_path, encoding): + def convert_encoding(self, src, encoding, remote_src): """Convert encoding for given src Arguments: src {str} -- Path to the USS source file or directory - temp_path {str} -- Path to the location where the control node - transferred data to encoding {dict} -- Charsets that the source is to be converted from and to + remote_src {bool} -- Whether the file was already on the remote + node or not. Raises: CopyOperationError -- When the encoding of a USS file is not @@ -1051,19 +1064,10 @@ def convert_encoding(self, src, temp_path, encoding): from_code_set = encoding.get("from") to_code_set = encoding.get("to") enc_utils = encode.EncodeUtils() - new_src = temp_path or src - + new_src = src if os.path.isdir(new_src): - if temp_path: - if src.endswith("/"): - new_src = "{0}/{1}".format( - temp_path, os.path.basename(os.path.dirname(src)) - ) - else: - new_src = "{0}/{1}".format(temp_path, - os.path.basename(src)) try: - if not temp_path: + if remote_src: temp_dir = tempfile.mkdtemp() shutil.copytree(new_src, temp_dir, dirs_exist_ok=True) new_src = temp_dir @@ -1081,7 +1085,7 @@ def convert_encoding(self, src, temp_path, encoding): raise CopyOperationError(msg=str(err)) else: try: - if not temp_path: + if remote_src: fd, temp_src = tempfile.mkstemp() os.close(fd) shutil.copy(new_src, temp_src) @@ -1270,24 +1274,23 @@ def copy_to_uss( src, dest, conv_path, - temp_path, src_ds_type, src_member, member_name, - force + force, + content_copy, ): """Copy a file or data set to a USS location Arguments: src {str} -- The USS source dest {str} -- Destination file or directory on USS - temp_path {str} -- Path to the location where the control node - transferred data to conv_path {str} -- Path to the converted source file or directory src_ds_type {str} -- Type of source src_member {bool} -- Whether src is a data set member member_name {str} -- The name of the source data set member force {bool} -- Whether to copy files to an already existing directory + content_copy {bool} -- Whether copy is using content option or not. Returns: {str} -- Destination where the file was copied to @@ -1322,11 +1325,11 @@ def copy_to_uss( if "File exists" not in err: raise CopyOperationError(msg=to_native(err)) - if os.path.isfile(temp_path or conv_path or src): - dest = self._copy_to_file(src, dest, conv_path, temp_path) + if os.path.isfile(conv_path or src): + dest = self._copy_to_file(src, dest, content_copy, conv_path) changed_files = None else: - dest, changed_files = self._copy_to_dir(src, dest, conv_path, temp_path, force) + dest, changed_files = self._copy_to_dir(src, dest, conv_path, force) if self.common_file_args is not None: mode = self.common_file_args.get("mode") @@ -1347,14 +1350,13 @@ def copy_to_uss( self.module.set_owner_if_different(dest, owner, False) return dest - def _copy_to_file(self, src, dest, conv_path, temp_path): + def _copy_to_file(self, src, dest, content_copy, conv_path): """Helper function to copy a USS src to USS dest. Arguments: src {str} -- USS source file path dest {str} -- USS dest file path - temp_path {str} -- Path to the location where the control node - transferred data to + content_copy {bool} -- Whether copy is using content option or not. conv_path {str} -- Path to the converted source file or directory Raises: @@ -1363,11 +1365,10 @@ def _copy_to_file(self, src, dest, conv_path, temp_path): Returns: {str} -- Destination where the file was copied to """ - src_path = os.path.basename(src) if src else "inline_copy" + src_path = os.path.basename(src) if not content_copy else "inline_copy" if os.path.isdir(dest): dest = os.path.join(validation.validate_safe_path(dest), validation.validate_safe_path(src_path)) - - new_src = temp_path or conv_path or src + new_src = conv_path or src try: if self.is_binary: copy.copy_uss2uss_binary(new_src, dest) @@ -1402,7 +1403,6 @@ def _copy_to_dir( src_dir, dest_dir, conv_path, - temp_path, force ): """Helper function to copy a USS directory to another USS directory. @@ -1413,8 +1413,6 @@ def _copy_to_dir( src_dir {str} -- USS source directory dest_dir {str} -- USS dest directory conv_path {str} -- Path to the converted source directory - temp_path {str} -- Path to the location where the control node - transferred data to force {bool} -- Whether to copy files to an already existing directory Raises: @@ -1426,14 +1424,7 @@ def _copy_to_dir( that got copied. """ copy_directory = True if not src_dir.endswith("/") else False - - if temp_path: - temp_path = "{0}/{1}".format( - temp_path, - os.path.basename(os.path.normpath(src_dir)) - ) - - new_src_dir = temp_path or conv_path or src_dir + new_src_dir = conv_path or src_dir new_src_dir = os.path.normpath(new_src_dir) dest = dest_dir changed_files, original_permissions = self._get_changed_files(new_src_dir, dest_dir, copy_directory) @@ -1661,7 +1652,6 @@ def __init__( def copy_to_pdse( self, src, - temp_path, conv_path, dest, src_ds_type, @@ -1676,8 +1666,6 @@ def copy_to_pdse( Arguments: src {str} -- Path to USS file/directory or data set name. - temp_path {str} -- Path to the location where the control node - transferred data to. conv_path {str} -- Path to the converted source file/directory. dest {str} -- Name of destination data set. src_ds_type {str} -- The type of source. @@ -1685,7 +1673,7 @@ def copy_to_pdse( dest_member {str, optional} -- Name of destination member in data set. encoding {dict, optional} -- Dictionary with encoding options. """ - new_src = conv_path or temp_path or src + new_src = conv_path or src src_members = [] dest_members = [] @@ -2660,15 +2648,10 @@ def run_module(module, arg_def): owner = module.params.get('owner') encoding = module.params.get('encoding') volume = module.params.get('volume') - is_uss = module.params.get('is_uss') - is_pds = module.params.get('is_pds') - is_src_dir = module.params.get('is_src_dir') - is_mvs_dest = module.params.get('is_mvs_dest') - temp_path = module.params.get('temp_path') - src_member = module.params.get('src_member') tmphlq = module.params.get('tmp_hlq') force = module.params.get('force') force_lock = module.params.get('force_lock') + content = module.params.get('content') dest_data_set = module.params.get('dest_data_set') if dest_data_set: @@ -2676,6 +2659,13 @@ def run_module(module, arg_def): dest_data_set["volumes"] = [volume] copy_member = is_member(dest) + # This section we initialize different variables + # that we used to pass from the action plugin. + is_src_dir = os.path.isdir(src) + is_uss = "/" in dest + is_mvs_dest = is_data_set(dest) + is_pds = is_src_dir and is_mvs_dest + src_member = is_member(src) # ******************************************************************** # When copying to and from a data set member, 'dest' or 'src' will be @@ -2722,18 +2712,17 @@ def run_module(module, arg_def): # data sets with record format 'FBA' or 'VBA'. src_has_asa_chars = dest_has_asa_chars = False try: - # If temp_path, the plugin has copied a file from the controller to USS. - if temp_path or "/" in src: + if "/" in src: src_ds_type = "USS" - if remote_src and os.path.isdir(src): + if os.path.isdir(src): is_src_dir = True # When the destination is a dataset, we'll normalize the source # file to UTF-8 for the record length computation as Python # generally uses UTF-8 as the default encoding. if not is_binary and not is_uss and not executable: - new_src = temp_path or src + new_src = src new_src = os.path.normpath(new_src) # Normalizing encoding when src is a USS file (only). encode_utils = encode.EncodeUtils() @@ -2790,9 +2779,8 @@ def run_module(module, arg_def): if is_uss: dest_ds_type = "USS" if src_ds_type == "USS" and not is_src_dir and (dest.endswith("/") or os.path.isdir(dest)): - src_basename = os.path.basename(src) if src else "inline_copy" + src_basename = os.path.basename(src) if not content else "inline_copy" dest = os.path.normpath("{0}/{1}".format(dest, src_basename)) - if dest.startswith("//"): dest = dest.replace("//", "/") @@ -2841,12 +2829,7 @@ def run_module(module, arg_def): if copy_member: dest_member_exists = dest_exists and data_set.DataSet.data_set_member_exists(dest) elif src_ds_type == "USS": - if temp_path: - root_dir = "{0}/{1}".format(temp_path, os.path.basename(os.path.normpath(src))) - root_dir = os.path.normpath(root_dir) - else: - root_dir = src - + root_dir = src dest_member_exists = dest_exists and data_set.DataSet.files_in_data_set_members(root_dir, dest) elif src_ds_type in data_set.DataSet.MVS_PARTITIONED: dest_member_exists = dest_exists and data_set.DataSet.data_set_shared_members(src, dest) @@ -2987,17 +2970,13 @@ def run_module(module, arg_def): # original one. This change applies only to the # allocate_destination_data_set call. if converted_src: - if remote_src: - original_src = src - src = converted_src - else: - original_temp = temp_path - temp_path = converted_src + original_src = src + src = converted_src try: if not is_uss: res_args["changed"], res_args["dest_data_set_attrs"] = allocate_destination_data_set( - temp_path or src, + src, dest_name, src_ds_type, dest_ds_type, dest_exists, @@ -3010,20 +2989,14 @@ def run_module(module, arg_def): ) except Exception as err: if converted_src: - if remote_src: - src = original_src - else: - temp_path = original_temp + src = original_src module.fail_json( msg="Unable to allocate destination data set: {0}".format(str(err)), dest_exists=dest_exists ) if converted_src: - if remote_src: - src = original_src - else: - temp_path = original_temp + src = original_src # ******************************************************************** # Encoding conversion is only valid if the source is a local file, @@ -3044,7 +3017,7 @@ def run_module(module, arg_def): # if is_mvs_dest: # encoding["to"] = encode.Defaults.DEFAULT_EBCDIC_MVS_CHARSET - conv_path = copy_handler.convert_encoding(src, temp_path, encoding) + conv_path = copy_handler.convert_encoding(src, encoding, remote_src) # ------------------------------- o ----------------------------------- # Copy to USS file or directory @@ -3068,17 +3041,17 @@ def run_module(module, arg_def): src, dest, conv_path, - temp_path, src_ds_type, src_member, member_name, - force + force, + bool(content) ) res_args['size'] = os.stat(dest).st_size remote_checksum = dest_checksum = None try: - remote_checksum = get_file_checksum(temp_path or src) + remote_checksum = get_file_checksum(src) dest_checksum = get_file_checksum(dest) if validate: @@ -3100,12 +3073,11 @@ def run_module(module, arg_def): elif dest_ds_type in data_set.DataSet.MVS_SEQ: # TODO: check how ASA behaves with this if src_ds_type == "USS" and not is_binary: - new_src = conv_path or temp_path or src + new_src = conv_path or src conv_path = normalize_line_endings(new_src, encoding) copy_handler.copy_to_seq( src, - temp_path, conv_path, dest, src_ds_type @@ -3117,8 +3089,6 @@ def run_module(module, arg_def): # Copy to PDS/PDSE # --------------------------------------------------------------------- elif dest_ds_type in data_set.DataSet.MVS_PARTITIONED or dest_ds_type == "LIBRARY": - if not remote_src and not copy_member and os.path.isdir(temp_path): - temp_path = os.path.join(validation.validate_safe_path(temp_path), validation.validate_safe_path(os.path.basename(src))) pdse_copy_handler = PDSECopyHandler( module, @@ -3132,7 +3102,6 @@ def run_module(module, arg_def): pdse_copy_handler.copy_to_pdse( src, - temp_path, conv_path, dest_name, src_ds_type, @@ -3163,7 +3132,7 @@ def run_module(module, arg_def): ) ) - return res_args, temp_path, conv_path + return res_args, conv_path def main(): @@ -3185,7 +3154,7 @@ def main(): ), "to": dict( type='str', - required=True, + required=False, ) } ), @@ -3255,14 +3224,6 @@ def main(): auto_reload=dict(type='bool', default=False), ) ), - is_uss=dict(type='bool'), - is_pds=dict(type='bool'), - is_src_dir=dict(type='bool'), - is_mvs_dest=dict(type='bool'), - size=dict(type='int'), - temp_path=dict(type='str'), - src_member=dict(type='bool'), - local_charset=dict(type='str'), force=dict(type='bool', default=False), force_lock=dict(type='bool', default=False), mode=dict(type='str', required=False), @@ -3333,15 +3294,16 @@ def main(): ) if ( - not module.params.get("encoding") + not module.params.get("encoding").get("to") and not module.params.get("remote_src") and not module.params.get("is_binary") and not module.params.get("executable") ): - module.params["encoding"] = { - "from": module.params.get("local_charset"), - "to": encode.Defaults.get_default_system_charset(), - } + module.params["encoding"]["to"] = encode.Defaults.get_default_system_charset() + elif ( + not module.params.get("encoding").get("to") + ): + module.params["encoding"] = None if module.params.get("encoding"): module.params.update( @@ -3357,15 +3319,15 @@ def main(): ) ) - res_args = temp_path = conv_path = None + res_args = conv_path = None try: - res_args, temp_path, conv_path = run_module(module, arg_def) + res_args, conv_path = run_module(module, arg_def) module.exit_json(**res_args) except CopyOperationError as err: cleanup([]) module.fail_json(**(err.json_args)) finally: - cleanup([temp_path, conv_path]) + cleanup([conv_path]) class EncodingConversionError(Exception): diff --git a/tests/functional/modules/test_zos_copy_func.py b/tests/functional/modules/test_zos_copy_func.py index cf7f1494b..6e6a9a073 100644 --- a/tests/functional/modules/test_zos_copy_func.py +++ b/tests/functional/modules/test_zos_copy_func.py @@ -794,6 +794,12 @@ def test_copy_subdirs_folders_and_validate_recursive_encoding_local(ansible_zos_ @pytest.mark.uss @pytest.mark.parametrize("copy_directory", [False, True]) def test_copy_local_dir_to_non_existing_dir(ansible_zos_module, copy_directory): + """ + This test evaluates the behavior of testing copy of a directory when src ends + with '/' versus only the dir name. Expectation is that when only dir name is provided + that directory is also created on the remote, when directory name ends with '/' + this means we only copy that directory contents without creating it on the remote. + """ hosts = ansible_zos_module dest_path = "/tmp/new_dir" @@ -1684,7 +1690,7 @@ def test_copy_seq_data_set_to_seq_asa(ansible_zos_module): hosts.all.zos_data_set( name=src, state="present", - type="seq", + type="SEQ", replace=True ) @@ -1733,7 +1739,7 @@ def test_copy_seq_data_set_to_partitioned_asa(ansible_zos_module): hosts.all.zos_data_set( name=src, state="present", - type="seq", + type="SEQ", replace=True ) @@ -1784,7 +1790,7 @@ def test_copy_partitioned_data_set_to_seq_asa(ansible_zos_module): hosts.all.zos_data_set( name=src, state="present", - type="pdse", + type="PDSE", replace=True ) @@ -1834,7 +1840,7 @@ def test_copy_partitioned_data_set_to_partitioned_asa(ansible_zos_module): hosts.all.zos_data_set( name=src, state="present", - type="pdse", + type="PDSE", replace=True ) @@ -1884,7 +1890,7 @@ def test_copy_asa_data_set_to_text_file(ansible_zos_module): hosts.all.zos_data_set( name=src, state="present", - type="seq", + type="SEQ", record_format="FBA", record_length=80, block_size=27920, @@ -1977,8 +1983,8 @@ def test_copy_dest_lock(ansible_zos_module, ds_type): hosts.all.zos_data_set(name=data_set_1, state="present", type=ds_type, replace=True) hosts.all.zos_data_set(name=data_set_2, state="present", type=ds_type, replace=True) if ds_type == "PDS" or ds_type == "PDSE": - hosts.all.zos_data_set(name=src_data_set, state="present", type="member", replace=True) - hosts.all.zos_data_set(name=dest_data_set, state="present", type="member", replace=True) + hosts.all.zos_data_set(name=src_data_set, state="present", type="MEMBER", replace=True) + hosts.all.zos_data_set(name=dest_data_set, state="present", type="MEMBER", replace=True) # copy text_in source hosts.all.shell(cmd="decho \"{0}\" \"{1}\"".format(DUMMY_DATA, src_data_set)) # copy/compile c program and copy jcl to hold data set lock for n seconds in background(&) @@ -2266,7 +2272,7 @@ def test_copy_file_to_empty_sequential_data_set(ansible_zos_module, src): dest = get_tmp_ds_name() try: - hosts.all.zos_data_set(name=dest, type="seq", state="present") + hosts.all.zos_data_set(name=dest, type="SEQ", state="present") if src["is_file"]: copy_result = hosts.all.zos_copy(src=src["src"], dest=dest, remote_src=src["is_remote"], force=src["force"]) @@ -2294,7 +2300,7 @@ def test_copy_file_to_non_empty_sequential_data_set(ansible_zos_module, src): dest = get_tmp_ds_name() try: - hosts.all.zos_data_set(name=dest, type="seq", state="absent") + hosts.all.zos_data_set(name=dest, type="SEQ", state="absent") hosts.all.zos_copy(content="Inline content", dest=dest) copy_result = hosts.all.zos_copy(src=src["src"], dest=dest, remote_src=src["is_remote"], force=src["force"]) @@ -2432,7 +2438,7 @@ def test_copy_ps_to_empty_ps(ansible_zos_module, force): dest = get_tmp_ds_name() try: - hosts.all.zos_data_set(name=dest, type="seq", state="present") + hosts.all.zos_data_set(name=dest, type="SEQ", state="present") copy_res = hosts.all.zos_copy(src=src_ds, dest=dest, remote_src=True, force=force) verify_copy = hosts.all.shell( @@ -2458,7 +2464,7 @@ def test_copy_ps_to_non_empty_ps(ansible_zos_module, force): dest = get_tmp_ds_name() try: - hosts.all.zos_data_set(name=dest, type="seq", state="absent") + hosts.all.zos_data_set(name=dest, type="SEQ", state="absent") hosts.all.zos_copy(content="Inline content", dest=dest) copy_res = hosts.all.zos_copy(src=src_ds, dest=dest, remote_src=True, force=force) @@ -2489,7 +2495,7 @@ def test_copy_ps_to_non_empty_ps_with_special_chars(ansible_zos_module, force): dest = get_tmp_ds_name() try: - hosts.all.zos_data_set(name=dest, type="seq", state="absent") + hosts.all.zos_data_set(name=dest, type="SEQ", state="absent") hosts.all.zos_copy(content=DUMMY_DATA_SPECIAL_CHARS, dest=dest) copy_res = hosts.all.zos_copy(src=src_ds, dest=dest, remote_src=True, force=force) @@ -2520,7 +2526,7 @@ def test_backup_sequential_data_set(ansible_zos_module, backup): dest = get_tmp_ds_name() try: - hosts.all.zos_data_set(name=dest, type="seq", state="present") + hosts.all.zos_data_set(name=dest, type="SEQ", state="present") if backup: copy_res = hosts.all.zos_copy(src=src, dest=dest, force=True, backup=True, backup_name=backup) @@ -2565,10 +2571,10 @@ def test_copy_file_to_non_existing_member(ansible_zos_module, src): try: hosts.all.zos_data_set( name=data_set, - type="pdse", + type="PDSE", space_primary=5, space_type="M", - record_format="fba", + record_format="FBA", record_length=80, replace=True ) @@ -2611,14 +2617,14 @@ def test_copy_file_to_existing_member(ansible_zos_module, src): try: hosts.all.zos_data_set( name=data_set, - type="pdse", + type="PDSE", space_primary=5, space_type="M", - record_format="fba", + record_format="FBA", record_length=80, replace=True ) - hosts.all.zos_data_set(name=dest, type="member", state="present") + hosts.all.zos_data_set(name=dest, type="MEMBER", state="present") if src["is_file"]: copy_result = hosts.all.zos_copy(src=src["src"], dest=dest, force=src["force"], remote_src=src["is_remote"]) @@ -2647,31 +2653,31 @@ def test_copy_file_to_existing_member(ansible_zos_module, src): @pytest.mark.seq @pytest.mark.pdse @pytest.mark.parametrize("args", [ - dict(type="seq", is_binary=False), - dict(type="seq", is_binary=True), - dict(type="pds", is_binary=False), - dict(type="pds", is_binary=True), - dict(type="pdse", is_binary=False), - dict(type="pdse", is_binary=True) + dict(type="SEQ", is_binary=False), + dict(type="SEQ", is_binary=True), + dict(type="PDS", is_binary=False), + dict(type="PDS", is_binary=True), + dict(type="PDSE", is_binary=False), + dict(type="PDSE", is_binary=True) ]) def test_copy_data_set_to_non_existing_member(ansible_zos_module, args): hosts = ansible_zos_module src_data_set = get_tmp_ds_name() - src = src_data_set if args["type"] == "seq" else "{0}(TEST)".format(src_data_set) + src = src_data_set if args["type"] == "SEQ" else "{0}(TEST)".format(src_data_set) dest_data_set = get_tmp_ds_name() dest = "{0}(MEMBER)".format(dest_data_set) try: hosts.all.zos_data_set(name=src_data_set, type=args["type"]) - if args["type"] != "seq": - hosts.all.zos_data_set(name=src, type="member") + if args["type"] != "SEQ": + hosts.all.zos_data_set(name=src, type="MEMBER") hosts.all.shell( "decho 'Records for test' '{0}'".format(src), executable=SHELL_EXECUTABLE ) - hosts.all.zos_data_set(name=dest_data_set, type="pdse", replace=True) + hosts.all.zos_data_set(name=dest_data_set, type="PDSE", replace=True) copy_result = hosts.all.zos_copy(src=src, dest=dest, is_binary=args["is_binary"], remote_src=True) verify_copy = hosts.all.shell( @@ -2694,32 +2700,32 @@ def test_copy_data_set_to_non_existing_member(ansible_zos_module, args): @pytest.mark.seq @pytest.mark.pdse @pytest.mark.parametrize("args", [ - dict(type="seq", force=False), - dict(type="seq", force=True), - dict(type="pds", force=False), - dict(type="pds", force=True), - dict(type="pdse", force=False), - dict(type="pdse", force=True) + dict(type="SEQ", force=False), + dict(type="SEQ", force=True), + dict(type="PDS", force=False), + dict(type="PDS", force=True), + dict(type="PDSE", force=False), + dict(type="PDSE", force=True) ]) def test_copy_data_set_to_existing_member(ansible_zos_module, args): hosts = ansible_zos_module src_data_set = get_tmp_ds_name() - src = src_data_set if args["type"] == "seq" else "{0}(TEST)".format(src_data_set) + src = src_data_set if args["type"] == "SEQ" else "{0}(TEST)".format(src_data_set) dest_data_set = get_tmp_ds_name() dest = "{0}(MEMBER)".format(dest_data_set) try: hosts.all.zos_data_set(name=src_data_set, type=args["type"]) - if args["type"] != "seq": - hosts.all.zos_data_set(name=src, type="member") + if args["type"] != "SEQ": + hosts.all.zos_data_set(name=src, type="MEMBER") hosts.all.shell( "decho 'Records for test' '{0}'".format(src), executable=SHELL_EXECUTABLE ) - hosts.all.zos_data_set(name=dest_data_set, type="pdse", replace=True) - hosts.all.zos_data_set(name=dest, type="member") + hosts.all.zos_data_set(name=dest_data_set, type="PDSE", replace=True) + hosts.all.zos_data_set(name=dest, type="MEMBER") copy_result = hosts.all.zos_copy(src=src, dest=dest, force=args["force"], remote_src=True) verify_copy = hosts.all.shell( @@ -2838,7 +2844,7 @@ def test_copy_dir_crlf_endings_to_non_existing_pdse(ansible_zos_module): @pytest.mark.uss @pytest.mark.pdse -@pytest.mark.parametrize("src_type", ["pds", "pdse"]) +@pytest.mark.parametrize("src_type", ["PDS", "PDSE"]) def test_copy_dir_to_existing_pdse(ansible_zos_module, src_type): hosts = ansible_zos_module src_dir = "/tmp/testdir" @@ -2854,7 +2860,7 @@ def test_copy_dir_to_existing_pdse(ansible_zos_module, src_type): type=src_type, space_primary=5, space_type="M", - record_format="fba", + record_format="FBA", record_length=80, ) @@ -2877,18 +2883,18 @@ def test_copy_dir_to_existing_pdse(ansible_zos_module, src_type): @pytest.mark.seq @pytest.mark.pdse -@pytest.mark.parametrize("src_type", ["seq", "pds", "pdse"]) +@pytest.mark.parametrize("src_type", ["SEQ", "PDS", "PDSE"]) def test_copy_data_set_to_non_existing_pdse(ansible_zos_module, src_type): hosts = ansible_zos_module src_data_set = get_tmp_ds_name() - src = src_data_set if src_type == "seq" else "{0}(TEST)".format(src_data_set) + src = src_data_set if src_type == "SEQ" else "{0}(TEST)".format(src_data_set) dest_data_set = get_tmp_ds_name() dest = "{0}(MEMBER)".format(dest_data_set) try: hosts.all.zos_data_set(name=src_data_set, type=src_type) - if src_type != "seq": - hosts.all.zos_data_set(name=src, type="member") + if src_type != "SEQ": + hosts.all.zos_data_set(name=src, type="MEMBER") hosts.all.shell( "decho 'Records for test' '{0}'".format(src), @@ -2918,10 +2924,10 @@ def test_copy_data_set_to_non_existing_pdse(ansible_zos_module, src_type): @pytest.mark.pdse @pytest.mark.parametrize("args", [ - dict(src_type="pds", dest_type="pds"), - dict(src_type="pds", dest_type="pdse"), - dict(src_type="pdse", dest_type="pds"), - dict(src_type="pdse", dest_type="pdse"), + dict(src_type="PDS", dest_type="PDS"), + dict(src_type="PDS", dest_type="PDSE"), + dict(src_type="PDSE", dest_type="PDS"), + dict(src_type="PDSE", dest_type="PDSE"), ]) def test_copy_pds_to_existing_pds(ansible_zos_module, args): hosts = ansible_zos_module @@ -2973,7 +2979,7 @@ def test_copy_pds_loadlib_member_to_pds_loadlib_member(ansible_zos_module, is_cr hosts.all.zos_data_set( name=cobol_src_pds, state="present", - type="pds", + type="PDS", space_primary=2, record_format="FB", record_length=80, @@ -2984,7 +2990,7 @@ def test_copy_pds_loadlib_member_to_pds_loadlib_member(ansible_zos_module, is_cr hosts.all.zos_data_set( name=src_lib, state="present", - type="pdse", + type="PDSE", record_format="U", record_length=0, block_size=32760, @@ -3006,7 +3012,7 @@ def test_copy_pds_loadlib_member_to_pds_loadlib_member(ansible_zos_module, is_cr hosts.all.zos_data_set( name=dest_lib, state="present", - type="pdse", + type="PDSE", record_format="U", record_length=0, block_size=32760, @@ -3018,7 +3024,7 @@ def test_copy_pds_loadlib_member_to_pds_loadlib_member(ansible_zos_module, is_cr hosts.all.zos_data_set( name=dest_lib_aliases, state="present", - type="pdse", + type="PDSE", record_format="U", record_length=0, block_size=32760, @@ -3111,7 +3117,7 @@ def test_copy_pds_loadlib_member_to_uss_to_loadlib(ansible_zos_module): hosts.all.zos_data_set( name=src_lib, state="present", - type="pdse", + type="PDSE", record_format="U", record_length=0, block_size=32760, @@ -3122,7 +3128,7 @@ def test_copy_pds_loadlib_member_to_uss_to_loadlib(ansible_zos_module): hosts.all.zos_data_set( name=cobol_src_pds, state="present", - type="pds", + type="PDS", space_primary=2, record_format="FB", record_length=80, @@ -3132,7 +3138,7 @@ def test_copy_pds_loadlib_member_to_uss_to_loadlib(ansible_zos_module): hosts.all.zos_data_set( name=dest_lib, state="present", - type="pdse", + type="PDSE", record_format="U", record_length=0, block_size=32760, @@ -3143,7 +3149,7 @@ def test_copy_pds_loadlib_member_to_uss_to_loadlib(ansible_zos_module): hosts.all.zos_data_set( name=dest_lib_aliases, state="present", - type="pdse", + type="PDSE", record_format="U", record_length=0, block_size=32760, @@ -3261,7 +3267,7 @@ def test_copy_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): hosts.all.zos_data_set( name=cobol_src_pds, state="present", - type="pds", + type="PDS", space_primary=2, record_format="FB", record_length=80, @@ -3272,7 +3278,7 @@ def test_copy_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): hosts.all.zos_data_set( name=src_lib, state="present", - type="pdse", + type="PDSE", record_format="U", record_length=0, block_size=32760, @@ -3300,7 +3306,7 @@ def test_copy_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): hosts.all.zos_data_set( name=dest_lib, state="present", - type="pdse", + type="PDSE", record_format="U", record_length=0, block_size=32760, @@ -3312,7 +3318,7 @@ def test_copy_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): hosts.all.zos_data_set( name=dest_lib_aliases, state="present", - type="pdse", + type="PDSE", record_format="U", record_length=0, block_size=32760, @@ -3453,7 +3459,7 @@ def test_copy_local_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): hosts.all.zos_data_set( name=cobol_src_pds, state="present", - type="pds", + type="PDS", space_primary=2, record_format="FB", record_length=80, @@ -3464,7 +3470,7 @@ def test_copy_local_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): hosts.all.zos_data_set( name=src_lib, state="present", - type="pdse", + type="PDSE", record_format="U", record_length=0, block_size=32760, @@ -3524,7 +3530,7 @@ def test_copy_local_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): hosts.all.zos_data_set( name=dest_lib, state="present", - type="pdse", + type="PDSE", record_format="U", record_length=0, block_size=32760, @@ -3615,7 +3621,7 @@ def test_copy_pds_loadlib_to_uss_to_pds_loadlib(ansible_zos_module): hosts.all.zos_data_set( name=cobol_src_pds, state="present", - type="pds", + type="PDS", space_primary=2, record_format="FB", record_length=80, @@ -3626,7 +3632,7 @@ def test_copy_pds_loadlib_to_uss_to_pds_loadlib(ansible_zos_module): hosts.all.zos_data_set( name=src_lib, state="present", - type="pdse", + type="PDSE", record_format="U", record_length=0, block_size=32760, @@ -3651,7 +3657,7 @@ def test_copy_pds_loadlib_to_uss_to_pds_loadlib(ansible_zos_module): hosts.all.zos_data_set( name=dest_lib, state="present", - type="pdse", + type="PDSE", record_format="U", record_length=0, block_size=32760, @@ -3663,7 +3669,7 @@ def test_copy_pds_loadlib_to_uss_to_pds_loadlib(ansible_zos_module): hosts.all.zos_data_set( name=dest_lib_aliases, state="present", - type="pdse", + type="PDSE", record_format="U", record_length=0, block_size=32760, @@ -3827,7 +3833,7 @@ def test_copy_executables_uss_to_member(ansible_zos_module, is_created): hosts.all.zos_data_set( name=dest, state="present", - type="pdse", + type="PDSE", record_format="U", record_length=0, block_size=32760, @@ -3878,7 +3884,7 @@ def test_copy_pds_member_with_system_symbol(ansible_zos_module): hosts.all.zos_data_set( name=dest, state="present", - type="pdse", + type="PDSE", replace=True ) @@ -3914,8 +3920,8 @@ def test_copy_multiple_data_set_members(ansible_zos_module): ds_list = ["{0}({1})".format(src, member) for member in member_list] try: - hosts.all.zos_data_set(name=src, type="pds") - hosts.all.zos_data_set(name=dest, type="pds") + hosts.all.zos_data_set(name=src, type="PDS") + hosts.all.zos_data_set(name=dest, type="PDS") for member in ds_list: hosts.all.shell( @@ -3960,8 +3966,8 @@ def test_copy_multiple_data_set_members_in_loop(ansible_zos_module): dest_ds_list = ["{0}({1})".format(dest, member) for member in member_list] try: - hosts.all.zos_data_set(name=src, type="pds") - hosts.all.zos_data_set(name=dest, type="pds") + hosts.all.zos_data_set(name=src, type="PDS") + hosts.all.zos_data_set(name=dest, type="PDS") for src_member in src_ds_list: hosts.all.shell( @@ -3994,7 +4000,7 @@ def test_copy_multiple_data_set_members_in_loop(ansible_zos_module): @pytest.mark.uss @pytest.mark.pdse -@pytest.mark.parametrize("ds_type", ["pds", "pdse"]) +@pytest.mark.parametrize("ds_type", ["PDS", "PDSE"]) def test_copy_member_to_non_existing_uss_file(ansible_zos_module, ds_type): hosts = ansible_zos_module data_set = get_tmp_ds_name() @@ -4032,10 +4038,10 @@ def test_copy_member_to_non_existing_uss_file(ansible_zos_module, ds_type): @pytest.mark.uss @pytest.mark.pdse @pytest.mark.parametrize("args", [ - dict(ds_type="pds", force=False), - dict(ds_type="pds", force=True), - dict(ds_type="pdse", force=False), - dict(ds_type="pdse", force=True) + dict(ds_type="PDS", force=False), + dict(ds_type="PDS", force=True), + dict(ds_type="PDSE", force=False), + dict(ds_type="PDSE", force=True) ]) def test_copy_member_to_existing_uss_file(ansible_zos_module, args): hosts = ansible_zos_module @@ -4079,7 +4085,7 @@ def test_copy_member_to_existing_uss_file(ansible_zos_module, args): @pytest.mark.uss @pytest.mark.pdse @pytest.mark.aliases -@pytest.mark.parametrize("src_type", ["pds", "pdse"]) +@pytest.mark.parametrize("src_type", ["PDS", "PDSE"]) def test_copy_pdse_to_uss_dir(ansible_zos_module, src_type): hosts = ansible_zos_module src_ds = get_tmp_ds_name() @@ -4124,7 +4130,7 @@ def test_copy_pdse_to_uss_dir(ansible_zos_module, src_type): @pytest.mark.uss @pytest.mark.pdse @pytest.mark.aliases -@pytest.mark.parametrize("src_type", ["pds", "pdse"]) +@pytest.mark.parametrize("src_type", ["PDS", "PDSE"]) def test_copy_member_to_uss_dir(ansible_zos_module, src_type): hosts = ansible_zos_module src_ds = get_tmp_ds_name() @@ -4170,7 +4176,7 @@ def test_copy_member_to_uss_dir(ansible_zos_module, src_type): @pytest.mark.seq @pytest.mark.pdse -@pytest.mark.parametrize("src_type", ["pds", "pdse"]) +@pytest.mark.parametrize("src_type", ["PDS", "PDSE"]) def test_copy_member_to_non_existing_seq_data_set(ansible_zos_module, src_type): hosts = ansible_zos_module src_ds = get_tmp_ds_name() @@ -4206,10 +4212,10 @@ def test_copy_member_to_non_existing_seq_data_set(ansible_zos_module, src_type): @pytest.mark.seq @pytest.mark.pdse @pytest.mark.parametrize("args", [ - dict(type="pds", force=False), - dict(type="pds", force=True), - dict(type="pdse", force=False), - dict(type="pdse", force=True), + dict(type="PDS", force=False), + dict(type="PDS", force=True), + dict(type="PDSE", force=False), + dict(type="PDSE", force=True), ]) def test_copy_member_to_existing_seq_data_set(ansible_zos_module, args): hosts = ansible_zos_module @@ -4218,7 +4224,7 @@ def test_copy_member_to_existing_seq_data_set(ansible_zos_module, args): dest = get_tmp_ds_name() try: - hosts.all.zos_data_set(name=dest, type="seq", state="present", replace=True) + hosts.all.zos_data_set(name=dest, type="SEQ", state="present", replace=True) hosts.all.zos_data_set(name=src_ds, type=args["type"], state="present") for data_set in [src, dest]: @@ -4251,7 +4257,7 @@ def test_copy_member_to_existing_seq_data_set(ansible_zos_module, args): @pytest.mark.uss @pytest.mark.pdse -@pytest.mark.parametrize("dest_type", ["pds", "pdse"]) +@pytest.mark.parametrize("dest_type", ["PDS", "PDSE"]) def test_copy_file_to_member_convert_encoding(ansible_zos_module, dest_type): hosts = ansible_zos_module src = "/etc/profile" @@ -4262,7 +4268,7 @@ def test_copy_file_to_member_convert_encoding(ansible_zos_module, dest_type): type=dest_type, space_primary=5, space_type="M", - record_format="fba", + record_format="FBA", record_length=25, ) @@ -4294,10 +4300,10 @@ def test_copy_file_to_member_convert_encoding(ansible_zos_module, dest_type): @pytest.mark.pdse @pytest.mark.parametrize("args", [ - dict(type="pds", backup=None), - dict(type="pds", backup="USER.TEST.PDS.BACKUP"), - dict(type="pdse", backup=None), - dict(type="pdse", backup="USER.TEST.PDSE.BACKUP"), + dict(type="PDS", backup=None), + dict(type="PDS", backup="USER.TEST.PDS.BACKUP"), + dict(type="PDSE", backup=None), + dict(type="PDSE", backup="USER.TEST.PDSE.BACKUP"), ]) def test_backup_pds(ansible_zos_module, args): hosts = ansible_zos_module @@ -4343,7 +4349,7 @@ def test_backup_pds(ansible_zos_module, args): @pytest.mark.seq @pytest.mark.pdse -@pytest.mark.parametrize("src_type", ["seq", "pds", "pdse"]) +@pytest.mark.parametrize("src_type", ["SEQ", "PDS", "PDSE"]) def test_copy_data_set_to_volume(ansible_zos_module, volumes_on_systems, src_type): hosts = ansible_zos_module source = get_tmp_ds_name() @@ -4359,8 +4365,8 @@ def test_copy_data_set_to_volume(ansible_zos_module, volumes_on_systems, src_typ try: hosts.all.zos_data_set(name=source, type=src_type, state='present') - if src_type != "seq": - hosts.all.zos_data_set(name=source_member, type="member", state='present') + if src_type != "SEQ": + hosts.all.zos_data_set(name=source_member, type="MEMBER", state='present') copy_res = hosts.all.zos_copy( src=source, @@ -4631,7 +4637,7 @@ def test_copy_uss_file_to_existing_sequential_data_set_twice_with_tmphlq_option( src_file = "/etc/profile" tmphlq = "TMPHLQ" try: - hosts.all.zos_data_set(name=dest, type="seq", state="present") + hosts.all.zos_data_set(name=dest, type="SEQ", state="present") copy_result = hosts.all.zos_copy(src=src_file, dest=dest, remote_src=True, force=force) copy_result = hosts.all.zos_copy(src=src_file, dest=dest, remote_src=True, backup=True, tmp_hlq=tmphlq, force=force) diff --git a/tests/sanity/ignore-2.14.txt b/tests/sanity/ignore-2.14.txt index 0167d6c81..c04ae2328 100644 --- a/tests/sanity/ignore-2.14.txt +++ b/tests/sanity/ignore-2.14.txt @@ -2,8 +2,6 @@ plugins/modules/zos_apf.py validate-modules:missing-gplv3-license # Licensed und plugins/modules/zos_backup_restore.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_blockinfile.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_copy.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 -plugins/modules/zos_copy.py validate-modules:parameter-type-not-in-doc # Passing args from action plugin -plugins/modules/zos_copy.py validate-modules:undocumented-parameter # Passing args from action plugin plugins/modules/zos_data_set.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_encode.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_fetch.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 diff --git a/tests/sanity/ignore-2.15.txt b/tests/sanity/ignore-2.15.txt index 0167d6c81..c04ae2328 100644 --- a/tests/sanity/ignore-2.15.txt +++ b/tests/sanity/ignore-2.15.txt @@ -2,8 +2,6 @@ plugins/modules/zos_apf.py validate-modules:missing-gplv3-license # Licensed und plugins/modules/zos_backup_restore.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_blockinfile.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_copy.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 -plugins/modules/zos_copy.py validate-modules:parameter-type-not-in-doc # Passing args from action plugin -plugins/modules/zos_copy.py validate-modules:undocumented-parameter # Passing args from action plugin plugins/modules/zos_data_set.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_encode.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_fetch.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 diff --git a/tests/sanity/ignore-2.16.txt b/tests/sanity/ignore-2.16.txt index 0167d6c81..c04ae2328 100644 --- a/tests/sanity/ignore-2.16.txt +++ b/tests/sanity/ignore-2.16.txt @@ -2,8 +2,6 @@ plugins/modules/zos_apf.py validate-modules:missing-gplv3-license # Licensed und plugins/modules/zos_backup_restore.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_blockinfile.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_copy.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 -plugins/modules/zos_copy.py validate-modules:parameter-type-not-in-doc # Passing args from action plugin -plugins/modules/zos_copy.py validate-modules:undocumented-parameter # Passing args from action plugin plugins/modules/zos_data_set.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_encode.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_fetch.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 From ed26cf81b29f896b477156252a23f9ba9fb645d2 Mon Sep 17 00:00:00 2001 From: Demetri <dimatos@gmail.com> Date: Tue, 2 Apr 2024 09:04:12 -0700 Subject: [PATCH 334/495] Cherry picking 1.9 into dev (#1346) * [v1.9.0] Collaboration 1246 to add typrun support for zos_job_submit (#1283) * Fixes typo in property Signed-off-by: ddimatos <dimatos@gmail.com> * Initial commit for supporting typrun=scan Signed-off-by: ddimatos <dimatos@gmail.com> * Update jobs and zos_job_submit to better support jobs in the input queue Signed-off-by: ddimatos <dimatos@gmail.com> * Update zos_job_submit to remove other typrun scans from regex Signed-off-by: ddimatos <dimatos@gmail.com> * The ret_code msg field should have only had the status in it, not the RC Signed-off-by: ddimatos <dimatos@gmail.com> * Update msg_txt for jobs JCLHOlD, HOLD Signed-off-by: ddimatos <dimatos@gmail.com> * Update test cases with typrun Signed-off-by: ddimatos <dimatos@gmail.com> * Lint updates Signed-off-by: ddimatos <dimatos@gmail.com> * Updates to controll the messages to the ret_code property Signed-off-by: ddimatos <dimatos@gmail.com> * Update wait times as result of the timer fix forced tests to add more time Signed-off-by: ddimatos <dimatos@gmail.com> * Changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> Conflicts: plugins/module_utils/job.py plugins/modules/zos_job_submit.py tests/functional/modules/test_zos_job_submit_func.py Changes to be committed: new file: changelogs/fragments/1246-bugfix-zos_job_submit-typrun.yml modified: plugins/module_utils/job.py modified: plugins/modules/zos_job_submit.py modified: tests/functional/modules/test_zos_job_submit_func.py * [v1.9.0] Document the collections SFTP requirement and file tagging. (#1296) * Fixes typo in property Signed-off-by: ddimatos <dimatos@gmail.com> * Initial commit for supporting typrun=scan Signed-off-by: ddimatos <dimatos@gmail.com> * Update jobs and zos_job_submit to better support jobs in the input queue Signed-off-by: ddimatos <dimatos@gmail.com> * Update zos_job_submit to remove other typrun scans from regex Signed-off-by: ddimatos <dimatos@gmail.com> * The ret_code msg field should have only had the status in it, not the RC Signed-off-by: ddimatos <dimatos@gmail.com> * Update msg_txt for jobs JCLHOlD, HOLD Signed-off-by: ddimatos <dimatos@gmail.com> * Update test cases with typrun Signed-off-by: ddimatos <dimatos@gmail.com> * Lint updates Signed-off-by: ddimatos <dimatos@gmail.com> * Updates to controll the messages to the ret_code property Signed-off-by: ddimatos <dimatos@gmail.com> * Update wait times as result of the timer fix forced tests to add more time Signed-off-by: ddimatos <dimatos@gmail.com> * Changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update doc for zos_archive to reference src over path Signed-off-by: ddimatos <dimatos@gmail.com> * Update docs to reference the SFTP requirement Signed-off-by: ddimatos <dimatos@gmail.com> * Update plugin doc Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update zos_copy to explain that file tagging (chtag) is performed on updated USS files Signed-off-by: ddimatos <dimatos@gmail.com> * Corrected typo Signed-off-by: ddimatos <dimatos@gmail.com> * Corrected typo Signed-off-by: ddimatos <dimatos@gmail.com> * Corrected typo Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> * update galaxy.yml Signed-off-by: ddimatos <dimatos@gmail.com> * Updte meta/runtime.yml with the version 2.15 Signed-off-by: ddimatos <dimatos@gmail.com> * Update meta collection with lastest versions Signed-off-by: ddimatos <dimatos@gmail.com> * Update README Signed-off-by: ddimatos <dimatos@gmail.com> * Update lint and galaxy to reflect 2.14 Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog summary Signed-off-by: ddimatos <dimatos@gmail.com> * update versions for zoau version checker Signed-off-by: ddimatos <dimatos@gmail.com> * Fix array syntax Signed-off-by: ddimatos <dimatos@gmail.com> * Documentation required for wtor filter Signed-off-by: ddimatos <dimatos@gmail.com> * Update changelog Signed-off-by: ddimatos <dimatos@gmail.com> * Update changelog meta Signed-off-by: ddimatos <dimatos@gmail.com> * update filters general doc Signed-off-by: ddimatos <dimatos@gmail.com> * Update submit modules doc Signed-off-by: ddimatos <dimatos@gmail.com> Conflicts: plugins/modules/zos_job_submit.py Changes to be committed: modified: plugins/modules/zos_job_submit.py * Update the rst for submit module Signed-off-by: ddimatos <dimatos@gmail.com> * Update release notes rst Signed-off-by: ddimatos <dimatos@gmail.com> * Correct lint warning Signed-off-by: ddimatos <dimatos@gmail.com> * Update zos_copy module doc Signed-off-by: ddimatos <dimatos@gmail.com> * Update RST for zos_copy Signed-off-by: ddimatos <dimatos@gmail.com> * Update copyright year Signed-off-by: ddimatos <dimatos@gmail.com> Conflicts: plugins/modules/zos_apf.py Changes to be committed: modified: docs/source/release_notes.rst modified: tests/functional/modules/test_zos_job_query_func.py * Delete changelog fragments after generating CHANGELOG Signed-off-by: ddimatos <dimatos@gmail.com> Conflicts: changelogs/fragments/1220-bugfix-zos_job_submit-default_value.yml changelogs/fragments/1261-job-submit-non-utf8-chars.yml changelogs/fragments/1292-doc-zos_tso_command-example.yml changelogs/fragments/1295-doc-zos_ping-scp.yml Changes to be committed: deleted: changelogs/fragments/1246-bugfix-zos_job_submit-typrun.yml deleted: changelogs/fragments/1296-doc-sftp-collection-requirements.yml deleted: changelogs/fragments/v1.9.0_summary.yml * Update source comment to align to code change Signed-off-by: ddimatos <dimatos@gmail.com> * Update source documentation after pull request review Signed-off-by: ddimatos <dimatos@gmail.com> Conflicts: plugins/modules/zos_job_submit.py Changes to be committed: modified: docs/source/release_notes.rst modified: plugins/filter/wtor.py modified: plugins/module_utils/job.py modified: plugins/modules/zos_copy.py modified: plugins/modules/zos_job_submit.py modified: plugins/modules/zos_ping.py modified: plugins/modules/zos_tso_command.py modified: tests/functional/modules/test_zos_job_query_func.py modified: tests/functional/modules/test_zos_job_submit_func.py * Typo correction Signed-off-by: ddimatos <dimatos@gmail.com> * Update Galaxy Signed-off-by: ddimatos <dimatos@gmail.com> * Update RST Signed-off-by: ddimatos <dimatos@gmail.com> * Changes to submit module after forward porting typrun support Signed-off-by: ddimatos <dimatos@gmail.com> * Lint corrections Signed-off-by: ddimatos <dimatos@gmail.com> * Update test cases to use upper case data set types due to choice requirments Signed-off-by: ddimatos <dimatos@gmail.com> * Updated test expected text Signed-off-by: ddimatos <dimatos@gmail.com> * Corrected typo and added test cleanup --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> --- .ansible-lint | 1 + CHANGELOG.rst | 22 +- README.md | 11 +- changelogs/.plugin-cache.yaml | 8 +- changelogs/changelog.yaml | 70 +++++ docs/source/filters.rst | 10 +- docs/source/modules/zos_archive.rst | 10 +- docs/source/modules/zos_backup_restore.rst | 9 + docs/source/modules/zos_copy.rst | 6 +- docs/source/modules/zos_data_set.rst | 28 +- docs/source/modules/zos_fetch.rst | 2 +- docs/source/modules/zos_job_submit.rst | 53 ++-- docs/source/modules/zos_script.rst | 4 +- docs/source/modules/zos_tso_command.rst | 2 +- docs/source/modules/zos_unarchive.rst | 18 +- docs/source/plugins.rst | 37 +-- docs/source/release_notes.rst | 93 +++++-- galaxy.yml | 3 +- meta/ibm_zos_core_meta.yml | 6 +- meta/runtime.yml | 2 +- plugins/filter/wtor.py | 55 ++++ plugins/module_utils/job.py | 137 ++++++---- plugins/modules/zos_copy.py | 23 +- plugins/modules/zos_fetch.py | 9 +- plugins/modules/zos_job_submit.py | 253 ++++++++++++------ plugins/modules/zos_ping.py | 2 +- plugins/modules/zos_ping.rexx | 2 +- plugins/modules/zos_script.py | 11 +- plugins/modules/zos_tso_command.py | 2 +- plugins/modules/zos_unarchive.py | 13 +- .../modules/test_zos_job_query_func.py | 6 +- .../modules/test_zos_job_submit_func.py | 232 +++++++++++++--- tests/unit/test_zoau_version_checker_unit.py | 16 +- 33 files changed, 842 insertions(+), 314 deletions(-) diff --git a/.ansible-lint b/.ansible-lint index 821806e3a..9d40faf3b 100644 --- a/.ansible-lint +++ b/.ansible-lint @@ -35,6 +35,7 @@ exclude_paths: - tests/sanity/ignore-2.11.txt - tests/sanity/ignore-2.12.txt - tests/sanity/ignore-2.13.txt + - tests/sanity/ignore-2.14.txt - venv* parseable: true quiet: false diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 505a98474..d2f69d546 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -5,18 +5,23 @@ ibm.ibm_zos_core Release Notes .. contents:: Topics -v1.9.0-beta.1 -============= +v1.9.0 +====== Release Summary --------------- -Release Date: '2024-01-31' +Release Date: '2024-03-11' This changelog describes all changes made to the modules and plugins included in this collection. The release date is the date the changelog is created. For additional details such as required dependencies and availability review the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__ +Major Changes +------------- + +- zos_job_submit - when job statuses were read, were limited to AC (active), CC (completed normally), ABEND (ended abnormally) and ? (error unknown), SEC (security error), JCLERROR (job had a jcl error). Now the additional statuses are supported, CANCELLED (job was cancelled), CAB (converter abend), CNV (converter error), SYS (system failure) and FLU (job was flushed). (https://github.com/ansible-collections/ibm_zos_core/pull/1283). + Minor Changes ------------- @@ -32,11 +37,22 @@ Minor Changes Bugfixes -------- +- module_utils/job.py - job output containing non-printable characters would crash modules. Fix now handles the error gracefully and returns a message to the user inside `content` of the `ddname` that failed. (https://github.com/ansible-collections/ibm_zos_core/pull/1288). +- zos_apf - When operation=list was selected and more than one data set entry was fetched, the module only returned one data set. Fix now returns the complete list. (https://github.com/ansible-collections/ibm_zos_core/pull/1236). - zos_copy - When copying an executable data set with aliases and destination did not exist, destination data set was created with wrong attributes. Fix now creates destination data set with the same attributes as the source. (https://github.com/ansible-collections/ibm_zos_core/pull/1066). - zos_copy - When performing a copy operation to an existing file, the copied file resulted in having corrupted contents. Fix now implements a workaround to not use the specific copy routine that corrupts the file contents. (https://github.com/ansible-collections/ibm_zos_core/pull/1064). +- zos_data_set - Fixes a small parsing bug in module_utils/data_set function which extracts volume serial(s) from a LISTCAT command output. Previously a leading '-' was left behind for volser strings under 6 chars. (https://github.com/ansible-collections/ibm_zos_core/pull/1247). - zos_job_output - When passing a job ID or name less than 8 characters long, the module sent the full stack trace as the module's message. Change now allows the use of a shorter job ID or name, as well as wildcards. (https://github.com/ansible-collections/ibm_zos_core/pull/1078). - zos_job_query - The module handling ZOAU import errors obscured the original traceback when an import error ocurred. Fix now passes correctly the context to the user. (https://github.com/ansible-collections/ibm_zos_core/pull/1042). - zos_job_query - When passing a job ID or name less than 8 characters long, the module sent the full stack trace as the module's message. Change now allows the use of a shorter job ID or name, as well as wildcards. (https://github.com/ansible-collections/ibm_zos_core/pull/1078). +- zos_job_submit - Was ignoring the default value for location=DATA_SET, now when location is not specified it will default to DATA_SET. (https://github.com/ansible-collections/ibm_zos_core/pull/1120). +- zos_job_submit - when a JCL error occurred, the ret_code[msg_code] contained JCLERROR followed by an integer where the integer appeared to be a reason code when actually it is a multi line marker used to coordinate errors spanning more than one line. Now when a JCLERROR occurs, only the JCLERROR is returned for property ret_code[msg_code]. (https://github.com/ansible-collections/ibm_zos_core/pull/1283). +- zos_job_submit - when a response was returned, it contained an undocumented property; ret_code[msg_text]. Now when a response is returned, it correctly returns property ret_code[msg_txt]. (https://github.com/ansible-collections/ibm_zos_core/pull/1283). +- zos_job_submit - when typrun=copy was used in JCL it would fail the module with an improper message and error condition. While this case continues to be considered a failure, the message has been corrected and it fails under the condition that not enough time has been added to the modules execution. (https://github.com/ansible-collections/ibm_zos_core/pull/1283). +- zos_job_submit - when typrun=hold was used in JCL it would fail the module with an improper message and error condition. While this case continues to be considered a failure, the message has been corrected and it fails under the condition that not enough time has been added to the modules execution. (https://github.com/ansible-collections/ibm_zos_core/pull/1283). +- zos_job_submit - when typrun=jchhold was used in JCL it would fail the module with an improper message and error condition. While this case continues to be considered a failure, the message has been corrected and it fails under the condition that not enough time has been added to the modules execution. (https://github.com/ansible-collections/ibm_zos_core/pull/1283). +- zos_job_submit - when typrun=scan was used in JCL, it would fail the module. Now typrun=scan no longer fails the module and an appropriate message is returned with appropriate return code values. (https://github.com/ansible-collections/ibm_zos_core/pull/1283). +- zos_job_submit - when wait_time_s was used, the duration would run approximately 5 second longer than reported in the duration. Now the when duration is returned, it is the actual accounting from when the job is submitted to when the module reads the job output. (https://github.com/ansible-collections/ibm_zos_core/pull/1283). - zos_operator - The module handling ZOAU import errors obscured the original traceback when an import error ocurred. Fix now passes correctly the context to the user. (https://github.com/ansible-collections/ibm_zos_core/pull/1042). - zos_unarchive - Using a local file with a USS format option failed when sending to remote because dest_data_set option had an empty dictionary. Fix now leaves dest_data_set as None when using a USS format option. (https://github.com/ansible-collections/ibm_zos_core/pull/1045). - zos_unarchive - When unarchiving USS files, the module left temporary files on the remote. Change now removes temporary files. (https://github.com/ansible-collections/ibm_zos_core/pull/1073). diff --git a/README.md b/README.md index da3b114d4..b2345c118 100644 --- a/README.md +++ b/README.md @@ -51,7 +51,7 @@ and ansible-doc to automate tasks on z/OS. Ansible version compatibility ============================= -This collection has been tested against **Ansible Core** versions >=2.14. +This collection has been tested against **Ansible Core** versions >=2.15. The Ansible Core versions supported for this collection align to the [ansible-core support matrix](https://docs.ansible.com/ansible/latest/reference_appendices/release_and_maintenance.html#ansible-core-support-matrix). Review the [Ansible community changelogs](https://docs.ansible.com/ansible/latest/reference_appendices/release_and_maintenance.html#ansible-community-changelogs) for corresponding **Ansible community packages** @@ -64,11 +64,12 @@ for more more information on supported versions of Ansible. Other Dependencies ================== -This release of the **IBM z/OS core collection** requires the z/OS managed node have: -- [z/OS](https://www.ibm.com/docs/en/zos) V2R4 or later. +This release of the **IBM z/OS core collection** requires the z/OS managed node have the following: +- [z/OS](https://www.ibm.com/docs/en/zos) - [z/OS shell](https://www.ibm.com/support/knowledgecenter/en/SSLTBW_2.4.0/com.ibm.zos.v2r4.bpxa400/part1.htm). -- [IBM Open Enterprise SDK for Python](https://www.ibm.com/products/open-enterprise-python-zos) 3.9 - 3.11. -- [IBM Z Open Automation Utilities](https://www.ibm.com/docs/en/zoau/1.2.x) 1.2.5 (or later) but prior to version 1.3. +- [IBM Open Enterprise SDK for Python](https://www.ibm.com/products/open-enterprise-python-zos) +- [IBM Z Open Automation Utilities](https://www.ibm.com/docs/en/zoau/1.2.x) +For specific dependency versions, please review the [release notes](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html) for the version of the IBM Ansible z/OS core installed. Copyright ========= diff --git a/changelogs/.plugin-cache.yaml b/changelogs/.plugin-cache.yaml index 899014cd9..4e2979ebb 100644 --- a/changelogs/.plugin-cache.yaml +++ b/changelogs/.plugin-cache.yaml @@ -6,7 +6,11 @@ plugins: callback: {} cliconf: {} connection: {} - filter: {} + filter: + filter_wtor_messages: + description: Filter a list of WTOR messages + name: filter_wtor_messages + version_added: 1.2.0 httpapi: {} inventory: {} lookup: {} @@ -131,4 +135,4 @@ plugins: strategy: {} test: {} vars: {} -version: 1.9.0-beta.1 +version: 1.9.0 diff --git a/changelogs/changelog.yaml b/changelogs/changelog.yaml index c05af6436..a8404bf84 100644 --- a/changelogs/changelog.yaml +++ b/changelogs/changelog.yaml @@ -1176,6 +1176,76 @@ releases: name: zos_script namespace: '' release_date: '2023-10-24' + 1.9.0: + changes: + bugfixes: + - module_utils/job.py - job output containing non-printable characters would + crash modules. Fix now handles the error gracefully and returns a message + to the user inside `content` of the `ddname` that failed. (https://github.com/ansible-collections/ibm_zos_core/pull/1288). + - zos_apf - When operation=list was selected and more than one data set entry + was fetched, the module only returned one data set. Fix now returns the complete + list. (https://github.com/ansible-collections/ibm_zos_core/pull/1236). + - zos_data_set - Fixes a small parsing bug in module_utils/data_set function + which extracts volume serial(s) from a LISTCAT command output. Previously + a leading '-' was left behind for volser strings under 6 chars. (https://github.com/ansible-collections/ibm_zos_core/pull/1247). + - zos_job_submit - Was ignoring the default value for location=DATA_SET, now + when location is not specified it will default to DATA_SET. (https://github.com/ansible-collections/ibm_zos_core/pull/1120). + - zos_job_submit - when a JCL error occurred, the ret_code[msg_code] contained + JCLERROR followed by an integer where the integer appeared to be a reason + code when actually it is a multi line marker used to coordinate errors spanning + more than one line. Now when a JCLERROR occurs, only the JCLERROR is returned + for property ret_code[msg_code]. (https://github.com/ansible-collections/ibm_zos_core/pull/1283). + - zos_job_submit - when a response was returned, it contained an undocumented + property; ret_code[msg_text]. Now when a response is returned, it correctly + returns property ret_code[msg_txt]. (https://github.com/ansible-collections/ibm_zos_core/pull/1283). + - zos_job_submit - when typrun=copy was used in JCL it would fail the module + with an improper message and error condition. While this case continues to + be considered a failure, the message has been corrected and it fails under + the condition that not enough time has been added to the modules execution. + (https://github.com/ansible-collections/ibm_zos_core/pull/1283). + - zos_job_submit - when typrun=hold was used in JCL it would fail the module + with an improper message and error condition. While this case continues to + be considered a failure, the message has been corrected and it fails under + the condition that not enough time has been added to the modules execution. + (https://github.com/ansible-collections/ibm_zos_core/pull/1283). + - zos_job_submit - when typrun=jchhold was used in JCL it would fail the module + with an improper message and error condition. While this case continues to + be considered a failure, the message has been corrected and it fails under + the condition that not enough time has been added to the modules execution. + (https://github.com/ansible-collections/ibm_zos_core/pull/1283). + - zos_job_submit - when typrun=scan was used in JCL, it would fail the module. + Now typrun=scan no longer fails the module and an appropriate message is returned + with appropriate return code values. (https://github.com/ansible-collections/ibm_zos_core/pull/1283). + - zos_job_submit - when wait_time_s was used, the duration would run approximately + 5 second longer than reported in the duration. Now the when duration is returned, + it is the actual accounting from when the job is submitted to when the module + reads the job output. (https://github.com/ansible-collections/ibm_zos_core/pull/1283). + major_changes: + - zos_job_submit - when job statuses were read, were limited to AC (active), + CC (completed normally), ABEND (ended abnormally) and ? (error unknown), SEC + (security error), JCLERROR (job had a jcl error). Now the additional statuses + are supported, CANCELLED (job was cancelled), CAB (converter abend), CNV (converter + error), SYS (system failure) and FLU (job was flushed). (https://github.com/ansible-collections/ibm_zos_core/pull/1283). + release_summary: 'Release Date: ''2024-03-11'' + + This changelog describes all changes made to the modules and plugins included + + in this collection. The release date is the date the changelog is created. + + For additional details such as required dependencies and availability review + + the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__' + fragments: + - 1120-bugfix-zos_job_submit-default_value.yml + - 1236-bugfix-zos_apf-return-list.yml + - 1246-bugfix-zos_job_submit-typrun.yml + - 1247-volser-parsing-leading-dash-bugfix.yml + - 1288-job-submit-non-utf8-chars.yml + - 1292-doc-zos_tso_command-example.yml + - 1294-doc-zos_ping-scp.yml + - 1296-doc-sftp-collection-requirements.yml + - v1.9.0_summary.yml + release_date: '2024-03-16' 1.9.0-beta.1: changes: bugfixes: diff --git a/docs/source/filters.rst b/docs/source/filters.rst index 51e3a034f..bbf24c6d4 100644 --- a/docs/source/filters.rst +++ b/docs/source/filters.rst @@ -5,13 +5,9 @@ Filters ======= -Filters in Ansible are from Jinja2, and are used to transform data inside -a template expression. The templates operate on the Ansible controller, and not -on the target host. Therefore, filters execute on the controller as they augment -the data locally. - -Jinja2 ships with many filters as does Ansible, and also allows users to add -their own custom filters. +Filters are used to transform data inside a template expression. The templates +operate on the Ansible controller, not on the managed node. Therefore, +filters execute on the controller as they augment the data locally. The **IBM z/OS core collection** includes filters and their usage in sample playbooks. Unlike collections that can be identified at the top level using the diff --git a/docs/source/modules/zos_archive.rst b/docs/source/modules/zos_archive.rst index 525c7c0be..fe93474f0 100644 --- a/docs/source/modules/zos_archive.rst +++ b/docs/source/modules/zos_archive.rst @@ -342,7 +342,7 @@ Examples # Simple archive - name: Archive file into a tar zos_archive: - path: /tmp/archive/foo.txt + src: /tmp/archive/foo.txt dest: /tmp/archive/foo_archive_test.tar format: name: tar @@ -350,7 +350,7 @@ Examples # Archive multiple files - name: Compress list of files into a zip zos_archive: - path: + src: - /tmp/archive/foo.txt - /tmp/archive/bar.txt dest: /tmp/archive/foo_bar_archive_test.zip @@ -360,7 +360,7 @@ Examples # Archive one data set into terse - name: Compress data set into a terse zos_archive: - path: "USER.ARCHIVE.TEST" + src: "USER.ARCHIVE.TEST" dest: "USER.ARCHIVE.RESULT.TRS" format: name: terse @@ -368,7 +368,7 @@ Examples # Use terse with different options - name: Compress data set into a terse, specify pack algorithm and use adrdssu zos_archive: - path: "USER.ARCHIVE.TEST" + src: "USER.ARCHIVE.TEST" dest: "USER.ARCHIVE.RESULT.TRS" format: name: terse @@ -379,7 +379,7 @@ Examples # Use a pattern to store - name: Compress data set pattern using xmit zos_archive: - path: "USER.ARCHIVE.*" + src: "USER.ARCHIVE.*" exclude_sources: "USER.ARCHIVE.EXCLUDE.*" dest: "USER.ARCHIVE.RESULT.XMIT" format: diff --git a/docs/source/modules/zos_backup_restore.rst b/docs/source/modules/zos_backup_restore.rst index cc6c60d66..d70efc7a1 100644 --- a/docs/source/modules/zos_backup_restore.rst +++ b/docs/source/modules/zos_backup_restore.rst @@ -200,6 +200,15 @@ hlq | **type**: str +tmp_hlq + Override the default high level qualifier (HLQ) for temporary and backup data sets. + + The default HLQ is the Ansible user that executes the module and if that is not available, then the value of ``TMPHLQ`` is used. + + | **required**: False + | **type**: str + + Examples diff --git a/docs/source/modules/zos_copy.rst b/docs/source/modules/zos_copy.rst index 86a3a9463..00e274b00 100644 --- a/docs/source/modules/zos_copy.rst +++ b/docs/source/modules/zos_copy.rst @@ -91,6 +91,8 @@ dest If ``dest`` is a nonexistent USS file, it will be created. + If ``dest`` is a new USS file or replacement, the file will be appropriately tagged with either the system's default locale or the encoding option defined. If the USS file is a replacement, the user must have write authority to the file either through ownership, group or other permissions, else the copy will fail. + If ``dest`` is a nonexistent data set, it will be created following the process outlined here and in the ``volume`` option. If ``dest`` is a nonexistent data set, the attributes assigned will depend on the type of ``src``. If ``src`` is a USS file, ``dest`` will have a Fixed Block (FB) record format and the remaining attributes will be computed. If *is_binary=true*, ``dest`` will have a Fixed Block (FB) record format with a record length of 80, block size of 32760, and the remaining attributes will be computed. If *executable=true*,``dest`` will have an Undefined (U) record format with a record length of 0, block size of 32760, and the remaining attributes will be computed. @@ -787,9 +789,9 @@ Notes For supported character sets used to encode data, refer to the `documentation <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/resources/character_set.html>`_. - `zos_copy <./zos_copy.html>`_ uses SFTP (Secure File Transfer Protocol) for the underlying transfer protocol; Co:Z SFTP is not supported. In the case of Co:z SFTP, you can exempt the Ansible userid on z/OS from using Co:Z thus falling back to using standard SFTP. + This module uses SFTP (Secure File Transfer Protocol) for the underlying transfer protocol; SCP (secure copy protocol) and Co:Z SFTP are not supported. In the case of Co:z SFTP, you can exempt the Ansible user id on z/OS from using Co:Z thus falling back to using standard SFTP. If the module detects SCP, it will temporarily use SFTP for transfers, if not available, the module will fail. - Beginning in version 1.8.x, zos_copy will no longer attempt to autocorrect a copy of a data type member into a PDSE that contains program objects. You can control this behavior using module option executable that will signify an executable is being copied into a PDSE with other executables. Mixing data type members with program objects will be responded with a (FSUM8976,./zos_copy.html) error. + Beginning in version 1.8.x, zos_copy will no longer attempt to correct a copy of a data type member into a PDSE that contains program objects. You can control this behavior using module option ``executable`` that will signify an executable is being copied into a PDSE with other executables. Mixing data type members with program objects will result in a (FSUM8976,./zos_copy.html) error. diff --git a/docs/source/modules/zos_data_set.rst b/docs/source/modules/zos_data_set.rst index 70e798a08..0ea34875f 100644 --- a/docs/source/modules/zos_data_set.rst +++ b/docs/source/modules/zos_data_set.rst @@ -97,7 +97,7 @@ type ``MEMBER`` expects to be used with an existing partitioned data set. - Choices are case-insensitive. + Choices are case-sensitive. | **required**: False | **type**: str @@ -139,7 +139,7 @@ space_type record_format The format of the data set. (e.g ``FB``) - Choices are case-insensitive. + Choices are case-sensitive. When *type=KSDS*, *type=ESDS*, *type=RRDS*, *type=LDS* or *type=ZFS* then *record_format=None*, these types do not have a default *record_format*. @@ -370,7 +370,7 @@ batch ``MEMBER`` expects to be used with an existing partitioned data set. - Choices are case-insensitive. + Choices are case-sensitive. | **required**: False | **type**: str @@ -412,7 +412,7 @@ batch record_format The format of the data set. (e.g ``FB``) - Choices are case-insensitive. + Choices are case-sensitive. When *type=KSDS*, *type=ESDS*, *type=RRDS*, *type=LDS* or *type=ZFS* then *record_format=None*, these types do not have a default *record_format*. @@ -568,7 +568,7 @@ Examples - name: Create a sequential data set if it does not exist zos_data_set: name: someds.name.here - type: seq + type: SEQ state: present - name: Create a PDS data set if it does not exist @@ -577,26 +577,26 @@ Examples type: pds space_primary: 5 space_type: M - record_format: fba + record_format: FBA record_length: 25 - name: Attempt to replace a data set if it exists zos_data_set: name: someds.name.here - type: pds + type: PDS space_primary: 5 space_type: M - record_format: u + record_format: U record_length: 25 replace: yes - name: Attempt to replace a data set if it exists. If not found in the catalog, check if it is available on volume 222222, and catalog if found. zos_data_set: name: someds.name.here - type: pds + type: PDS space_primary: 5 space_type: M - record_format: u + record_format: U record_length: 25 volumes: "222222" replace: yes @@ -604,19 +604,19 @@ Examples - name: Create an ESDS data set if it does not exist zos_data_set: name: someds.name.here - type: esds + type: ESDS - name: Create a KSDS data set if it does not exist zos_data_set: name: someds.name.here - type: ksds + type: KSDS key_length: 8 key_offset: 0 - name: Create an RRDS data set with storage class MYDATA if it does not exist zos_data_set: name: someds.name.here - type: rrds + type: RRDS sms_storage_class: mydata - name: Delete a data set if it exists @@ -661,7 +661,7 @@ Examples type: PDS space_primary: 5 space_type: M - record_format: fb + record_format: FB replace: yes - name: someds.name.here1(member1) type: MEMBER diff --git a/docs/source/modules/zos_fetch.rst b/docs/source/modules/zos_fetch.rst index 21b573a2a..87a50a65a 100644 --- a/docs/source/modules/zos_fetch.rst +++ b/docs/source/modules/zos_fetch.rst @@ -204,7 +204,7 @@ Notes For supported character sets used to encode data, refer to the `documentation <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/resources/character_set.html>`_. - `zos_fetch <./zos_fetch.html>`_ uses SFTP (Secure File Transfer Protocol) for the underlying transfer protocol; Co:Z SFTP is not supported. In the case of Co:z SFTP, you can exempt the Ansible userid on z/OS from using Co:Z thus falling back to using standard SFTP. + This module uses SFTP (Secure File Transfer Protocol) for the underlying transfer protocol; SCP (secure copy protocol) and Co:Z SFTP are not supported. In the case of Co:z SFTP, you can exempt the Ansible user id on z/OS from using Co:Z thus falling back to using standard SFTP. If the module detects SCP, it will temporarily use SFTP for transfers, if not available, the module will fail. diff --git a/docs/source/modules/zos_job_submit.rst b/docs/source/modules/zos_job_submit.rst index 6cff37a6a..8f4dda61b 100644 --- a/docs/source/modules/zos_job_submit.rst +++ b/docs/source/modules/zos_job_submit.rst @@ -16,9 +16,8 @@ zos_job_submit -- Submit JCL Synopsis -------- -- Submit JCL from a data set, USS, or from the controller. -- Submit a job and optionally monitor for completion. -- Optionally, wait a designated time until the job finishes. +- Submit JCL in a data set, USS file, or file on the controller. +- Submit a job and monitor for completion. - For an uncataloged dataset, specify the volume serial number. @@ -57,18 +56,6 @@ location | **choices**: DATA_SET, USS, LOCAL -wait - Setting this option will yield no change, it is deprecated. There is no no need to set *wait*; setting *wait_times_s* is the correct way to configure the amount of tme to wait for a job to execute. - - Configuring wait used by the `zos_job_submit <./zos_job_submit.html>`_ module has been deprecated and will be removed in ibm.ibm_zos_core collection. - - See option *wait_time_s*. - - | **required**: False - | **type**: bool - | **default**: False - - wait_time_s Option *wait_time_s* is the total time that module `zos_job_submit <./zos_job_submit.html>`_ will wait for a submitted job to complete. The time begins when the module is executed on the managed node. @@ -333,6 +320,8 @@ Notes .. note:: For supported character sets used to encode data, refer to the `documentation <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/resources/character_set.html>`_. + This module uses `zos_copy <./zos_copy.html>`_ to copy local scripts to the remote machine which uses SFTP (Secure File Transfer Protocol) for the underlying transfer protocol; SCP (secure copy protocol) and Co:Z SFTP are not supported. In the case of Co:z SFTP, you can exempt the Ansible user id on z/OS from using Co:Z thus falling back to using standard SFTP. If the module detects SCP, it will temporarily use SFTP for transfers, if not available, the module will fail. + @@ -689,24 +678,46 @@ jobs } msg - Return code resulting from the job submission. Jobs that take longer to assign a value can have a value of '?'. + Job status resulting from the job submission. + + Job status `ABEND` indicates the job ended abnormally. + + Job status `AC` indicates the job is active, often a started task or job taking long. + + Job status `CAB` indicates a converter abend. + + Job status `CANCELED` indicates the job was canceled. + + Job status `CNV` indicates a converter error. + + Job status `FLU` indicates the job was flushed. + + Job status `JCLERR` or `JCL ERROR` indicates the JCL has an error. + + Job status `SEC` or `SEC ERROR` indicates the job as encountered a security error. + + Job status `SYS` indicates a system failure. + + Job status `?` indicates status can not be determined. | **type**: str - | **sample**: CC 0000 + | **sample**: AC msg_code - Return code extracted from the `msg` so that it can be evaluated as a string. Jobs that take longer to assign a value can have a value of '?'. + The return code from the submitted job as a string. | **type**: str msg_txt - Returns additional information related to the job. Jobs that take longer to assign a value can have a value of '?'. + Returns additional information related to the submitted job. | **type**: str - | **sample**: The job completion code (CC) was not available in the job output, please review the job log." + | **sample**: The job JOB00551 was run with special job processing TYPRUN=SCAN. This will result in no completion, return code or job steps and changed will be false. code - Return code converted to an integer value (when possible). For JCL ERRORs, this will be None. + The return code converted to an integer value when available. + + Jobs which have no return code will return NULL, such is the case of a job that errors or is active. | **type**: int diff --git a/docs/source/modules/zos_script.rst b/docs/source/modules/zos_script.rst index f51096361..31b237588 100644 --- a/docs/source/modules/zos_script.rst +++ b/docs/source/modules/zos_script.rst @@ -296,9 +296,7 @@ Notes For supported character sets used to encode data, refer to the `documentation <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/resources/character_set.html>`_. - This module uses `zos_copy <./zos_copy.html>`_ to copy local scripts to the remote machine. - - `zos_copy <./zos_copy.html>`_ uses SFTP (Secure File Transfer Protocol) for the underlying transfer protocol; Co:Z SFTP is not supported. In the case of Co:z SFTP, you can exempt the Ansible userid on z/OS from using Co:Z thus falling back to using standard SFTP. + This module uses `zos_copy <./zos_copy.html>`_ to copy local scripts to the remote machine which uses SFTP (Secure File Transfer Protocol) for the underlying transfer protocol; SCP (secure copy protocol) and Co:Z SFTP are not supported. In the case of Co:z SFTP, you can exempt the Ansible user id on z/OS from using Co:Z thus falling back to using standard SFTP. If the module detects SCP, it will temporarily use SFTP for transfers, if not available, the module will fail. This module executes scripts inside z/OS UNIX System Services. For running REXX scripts contained in data sets or CLISTs, consider issuing a TSO command with `zos_tso_command <./zos_tso_command.html>`_. diff --git a/docs/source/modules/zos_tso_command.rst b/docs/source/modules/zos_tso_command.rst index f3cdb0254..4af6b1b52 100644 --- a/docs/source/modules/zos_tso_command.rst +++ b/docs/source/modules/zos_tso_command.rst @@ -72,7 +72,7 @@ Examples - LISTDSD DATASET('HLQ.DATA.SET') ALL GENERIC max_rc: 4 - - name: Execute TSO command to run explicitly a REXX script from a data set. + - name: Execute TSO command to run a REXX script explicitly from a data set. zos_tso_command: commands: - EXEC HLQ.DATASET.REXX exec diff --git a/docs/source/modules/zos_unarchive.rst b/docs/source/modules/zos_unarchive.rst index da80bd31a..91fa597ee 100644 --- a/docs/source/modules/zos_unarchive.rst +++ b/docs/source/modules/zos_unarchive.rst @@ -362,14 +362,14 @@ Examples # Simple extract - name: Copy local tar file and unpack it on the managed z/OS node. zos_unarchive: - path: "./files/archive_folder_test.tar" + src: "./files/archive_folder_test.tar" format: name: tar # use include - name: Unarchive a bzip file selecting only a file to unpack. zos_unarchive: - path: "/tmp/test.bz2" + src: "/tmp/test.bz2" format: name: bz2 include: @@ -378,7 +378,7 @@ Examples # Use exclude - name: Unarchive a terse data set and excluding data sets from unpacking. zos_unarchive: - path: "USER.ARCHIVE.RESULT.TRS" + src: "USER.ARCHIVE.RESULT.TRS" format: name: terse exclude: @@ -388,7 +388,7 @@ Examples # List option - name: List content from XMIT zos_unarchive: - path: "USER.ARCHIVE.RESULT.XMIT" + src: "USER.ARCHIVE.RESULT.XMIT" format: name: xmit format_options: @@ -404,6 +404,8 @@ Notes .. note:: VSAMs are not supported. + This module uses `zos_copy <./zos_copy.html>`_ to copy local scripts to the remote machine which uses SFTP (Secure File Transfer Protocol) for the underlying transfer protocol; SCP (secure copy protocol) and Co:Z SFTP are not supported. In the case of Co:z SFTP, you can exempt the Ansible user id on z/OS from using Co:Z thus falling back to using standard SFTP. If the module detects SCP, it will temporarily use SFTP for transfers, if not available, the module will fail. + See Also @@ -411,7 +413,7 @@ See Also .. seealso:: - - :ref:`zos_unarchive_module` + - :ref:`zos_archive_module` @@ -420,14 +422,14 @@ Return Values ------------- -path - File path or data set name unarchived. +src + File path or data set name unpacked. | **returned**: always | **type**: str dest_path - Destination path where archive was extracted. + Destination path where archive was unpacked. | **returned**: always | **type**: str diff --git a/docs/source/plugins.rst b/docs/source/plugins.rst index 5c8605ad3..ef0f6c183 100644 --- a/docs/source/plugins.rst +++ b/docs/source/plugins.rst @@ -5,30 +5,33 @@ Plugins ======= -Plugins that come with the **IBM z/OS core collection** augment Ansible's core +Plugins that come with the **IBM z/OS core collection** complement Ansible's core functionality. Ansible uses a plugin architecture to enable a rich, flexible and expandable feature set. Action ------ -* ``zos_ping``: Manages the REXX source transferred to the z/OS managed node for - `zos_ping`_. -* ``zos_copy``: Used to `copy data`_ from the controller to the z/OS managed - node. -* ``zos_fetch``: Used to `fetch data`_ from the z/OS managed node to the - controller. -* ``zos_job_submit``: Used to `submit a job`_ from the controller and optionally - monitor the job completion. +Action plugins integrate local processing and local data with module functionality. +Action plugins are executed by default when an associated module is used; no additional +user action is required, this documentation is reference only. -.. _normal: - https://github.com/ansible/ansible/blob/devel/lib/ansible/plugins/action/normal.py -.. _zos_ping: - modules/zos_ping.html -.. _copy data: +* `zos_copy`_: Used to copy data from the controller to the z/OS manage node. +* `zos_fetch`_: Used to fetch data from the z/OS managed node to the controller. +* `zos_job_submit`_: Used to submit a job from the controller to the z/OS manage node. +* `zos_ping`_: Used to transfer the modules REXX source to the z/OS managed node. +* `zos_script`_: Used to transfer scripts from the controller to the z/OS manage node. +* `_zos_unarchive`_: Used to transfer archives from the controller to the z/OS manage node. + +.. _zos_copy: modules/zos_copy.html -.. _fetch data: +.. _zos_fetch: modules/zos_fetch.html -.. _submit a job: +.. _zos_job_submit: modules/zos_job_submit.html - +.. _zos_ping: + modules/zos_ping.html +.. _zos_script: + modules/zos_script.html +.. _zos_unarchive: + modules/zos_unarchive.html diff --git a/docs/source/release_notes.rst b/docs/source/release_notes.rst index 726c1b64c..7c2c3a929 100644 --- a/docs/source/release_notes.rst +++ b/docs/source/release_notes.rst @@ -1,13 +1,22 @@ .. ........................................................................... -.. © Copyright IBM Corporation 2020, 2021, 2023 . +.. © Copyright IBM Corporation 2020, 2024 . .. ........................................................................... ======== Releases ======== -Version 1.9.0-beta.1 -==================== +Version 1.9.0 +============= + +Major Changes +------------- + - IBM Ansible z/OS core collection (**ibm_zos_core**) version 1.9.0 will be the last release to support ZOAU 1.2.x. + + - IBM Ansible z/OS core version 1.9.0 will continue to receive security updates and bug fixes. + + - Starting with IBM Ansible z/OS core version 1.10.0, ZOAU version 1.3.0 will be required. + - IBM Open Enterprise SDK for Python version 3.9.x is no longer supported. Minor Changes ------------- @@ -21,7 +30,24 @@ Minor Changes - Improved messages in the action plugin. - Improved the action plugin performance, flow and use of undocumented variables. - Improved the modules handling of ZOAU import errors allowing for the traceback to flow back to the source. -- ``zos_tso_command`` - Has been updated with a new example demonstrating how to explicitly execute a REXX script in a data set. + - Improved job status support, now the supported statuses for property **ret_code[msg]** are: + + - Job status **ABEND** indicates the job ended abnormally. + - Job status **AC** indicates the job is active, often a started task or job taking long. + - Job status **CAB** indicates a converter abend. + - Job status **CANCELED** indicates the job was canceled. + - Job status **CNV** indicates a converter error. + - Job status **FLU** indicates the job was flushed. + - Job status **JCLERR** or **JCL ERROR** indicates the JCL has an error. + - Job status **SEC** or **SEC ERROR** indicates the job as encountered a security error. + - Job status **SYS** indicates a system failure. + - Job status **?** indicates status can not be determined. + +- ``zos_tso_command`` + + - Has been updated with a new example demonstrating how to explicitly execute a REXX script in a data set. + - Has been updated with a new example demonstrating how to chain multiple TSO commands into one invocation using semicolons. + - ``zos_mvs_raw`` - Has been enhanced to ensure that **instream-data** for option **dd_input** contain blanks in columns 1 and 2 while retaining a maximum length @@ -33,40 +59,69 @@ Minor Changes Bugfixes -------- +- ``zos_apf`` - Fixed an issue that when **operation=list** was selected and more than one data set entry was fetched, only one + data set was returned, now the complete list is returned. + - ``zos_copy`` - - Fixed an issue when copying an aliased executable from a data set to a non-existent data set, the destination data sets primary - and secondary extents would not match the source data set extent sizes. + - Fixed an issue that when copying an aliased executable from a data set to a non-existent data set, the destination + datasets primary and secondary extents would not match the source data set extent sizes. - Fixed an issue when performing a copy operation to an existing file, the copied file resulted in having corrupted contents. -- ``zos_job_output`` - Fixed an issue that when using a job ID with less than 8 characters would result in a traceback. The fix +- ``zos_job_submit`` + + - Fixed an issue that when no **location** is set, the default is not correctly configured to **location=DATA_SET**. + - Fixed an issue that when a JCL error is encountered, the **ret_code[msg_code]** no longer will contain the multi line marker used to coordinate errors. + - Fixed an issue that when a response was returned, the property **ret_code[msg_text]** was incorrectly returned over **ret_code[msg_txt]**. + - Fixed an issue that when JCL contained **TYPRUN=SCAN**, the module would fail. The module no longer fails and an appropriate message and response is returned. + - Fixed an issue that when JCL contained either **TYPRUN=COPY**, **TYPRUN=HOLD**, or **TYPRUN=JCLHOLD** an improper message was returned and the job submission failed. + Now the job will fail under the condition that the module has exceeded its wait time and return a proper message. + - Fixed an issue where when option **wait_time_s** was used, the duration would be approximately 5 seconds longer than what was reported in the duration. + Now the duration is from when the job is submitted to when the module reads the job output. + +- ``zos_job_output`` - Fixed an issue that when using a job ID with less than 8 characters, would result in a traceback. The fix supports shorter job IDs as well as the use of wildcards. -- ``zos_job_query`` - Fixed an issue that when using a job ID with less than 8 characters would result in a traceback. The fix +- ``zos_job_query`` - Fixed an issue that when using a job ID with less than 8 characters, would result in a traceback. The fix supports shorter job IDs as well as the use of wildcards. - ``zos_unarchive`` - - Fixed an issue when using a local file with the USS format option that would fail sending it to the managed node. - - Fixed an issue that occurred when unarchiving USS files that would leave temporary files behind on the managed node. + - Fixed an issue that when using a local file with the USS format option, the module would fail to send the archive to the managed node. + - Fixed an issue that occurred when unarchiving USS files, the module would leave temporary files behind on the managed node. + +- ``module_utils`` + + - ``job.py`` - Improved exception handling and added a message inside the **content** of the **ddname** when a non-printable + character (character that can not be converted to UTF-8) is encountered. + - ``data_set.py`` - Fixed an issue that when a volser name less than 6 characters was encountered, the volser name was padded with hyphens to have length 6. + Known Issues ------------ Several modules have reported UTF-8 decoding errors when interacting with results that contain non-printable UTF-8 characters in the response. -This occurs when a module receives content that does not correspond to a UTF-8 value. These include modules ``zos_job_submit``, ``zos_job_output``, -``zos_operator_action_query``` but are not limited to this list. This will be addressed in **ibm_zos_core** version 1.10.0-beta.1. Each case is -unique, some options to work around the error are below. +- This occurs when a module receives content that does not correspond to a UTF-8 value. These include modules ``zos_job_submit``, ``zos_job_output``, + ``zos_operator_action_query``` but are not limited to this list. This has been addressed in this release and corrected with **ZOAU version 1.2.5.6**. +- If the appropriate level of ZOAU can not be installed, some options are to: -- Specify that the ASA assembler option be enabled to instruct the assembler to use ANSI control characters instead of machine code control characters. -- Add **ignore_errors:true** to the playbook task so the task error will not fail the playbook. -- If the error is resulting from a batch job, add **ignore_errors:true** to the task and capture the output into a variable and extract the job ID with - a regular expression and then use ``zos_job_output`` to display the DD without the non-printable character such as the DD **JESMSGLG**. + - Specify that the ASA assembler option be enabled to instruct the assembler to use ANSI control characters instead of machine code control characters. + - Ignore module errors by using **ignore_errors:true** for a specific playbook task. + - If the error is resulting from a batch job, add **ignore_errors:true** to the task and capture the output into a registered variable to extract the + job ID with a regular expression. Then use ``zos_job_output`` to display the DD without the non-printable character such as the DD **JESMSGLG**. + - If the error is the result of a batch job, set option **return_output** to false so that no DDs are read which could contain the non-printable UTF-8 characters. + +An undocumented option **size** was defined in module **zos_data_set**, this has been removed to satisfy collection certification, use the intended +and documented **space_primary** option. + +In the past, choices could be defined in either lower or upper case. Now, only the case that is identified in the docs can be set, +this is so that the collection can continue to maintain certified status. Availability ------------ +* `Automation Hub`_ * `Galaxy`_ * `GitHub`_ @@ -75,7 +130,7 @@ Reference * Supported by `z/OS®`_ V2R4 (or later) but prior to version V3R1 * Supported by the `z/OS® shell`_ -* Supported by `IBM Open Enterprise SDK for Python`_ `3.9`_ - `3.11`_ +* Supported by `IBM Open Enterprise SDK for Python`_ `3.10`_ - `3.12`_ * Supported by IBM `Z Open Automation Utilities 1.2.5`_ (or later) but prior to version 1.3. Version 1.8.0 @@ -978,6 +1033,8 @@ Known issues https://www.ibm.com/docs/en/python-zos/3.10 .. _3.11: https://www.ibm.com/docs/en/python-zos/3.11 +.. _3.12: + https://www.ibm.com/docs/en/python-zos/3.12 .. _Z Open Automation Utilities 1.1.0: https://www.ibm.com/docs/en/zoau/1.1.x .. _Z Open Automation Utilities 1.1.1: diff --git a/galaxy.yml b/galaxy.yml index 93af5d038..c408424aa 100644 --- a/galaxy.yml +++ b/galaxy.yml @@ -6,7 +6,7 @@ namespace: ibm name: ibm_zos_core # The collection version -version: 1.9.0-beta.1 +version: 1.10.0-beta.1 # Collection README file readme: README.md @@ -96,4 +96,5 @@ build_ignore: - tests/sanity/ignore-2.11.txt - tests/sanity/ignore-2.12.txt - tests/sanity/ignore-2.13.txt + - tests/sanity/ignore-2.14.txt - venv* diff --git a/meta/ibm_zos_core_meta.yml b/meta/ibm_zos_core_meta.yml index abab47f9c..7e24bc280 100644 --- a/meta/ibm_zos_core_meta.yml +++ b/meta/ibm_zos_core_meta.yml @@ -1,10 +1,10 @@ name: ibm_zos_core -version: "1.9.0-beta.1" +version: "1.10.0-beta.1" managed_requirements: - name: "IBM Open Enterprise SDK for Python" - version: ">=3.9" + version: ">=3.10" - name: "Z Open Automation Utilities" version: - - "1.2.5" + - "1.3.0" diff --git a/meta/runtime.yml b/meta/runtime.yml index be99ccf4b..898ad8ff5 100644 --- a/meta/runtime.yml +++ b/meta/runtime.yml @@ -1,2 +1,2 @@ --- -requires_ansible: '>=2.14.0' +requires_ansible: '>=2.15.0' diff --git a/plugins/filter/wtor.py b/plugins/filter/wtor.py index 28e908376..17b530218 100644 --- a/plugins/filter/wtor.py +++ b/plugins/filter/wtor.py @@ -12,6 +12,61 @@ from __future__ import absolute_import, division, print_function __metaclass__ = type + +DOCUMENTATION = r""" +name: filter_wtor_messages +author: Demetrios Dimatos (@ddimatos) +version_added: "1.2.0" +short_description: Filter a list of WTOR messages +description: + - Filter a list of WTOR (write to operator with reply) messages found by + module zos_operator_action_query. + - Filter using a string or regular expression. +options: + wtor_response: + description: + - A list containing response property `message_text`, provided the + module zos_operator_action_query. + - The list can be the outstanding messages found in the modules + response under the `actions` property or the entire module + response. + type: list + required: true + text: + description: + - String of text to match or a regular expression to use as filter criteria. + type: str + required: true + ingore_case: + description: + - Should the filter enable case sensitivity when performing a match. + type: bool + required: false + default: false +""" + +EXAMPLES = r""" +- name: Filter actionable messages that match 'IEE094D SPECIFY OPERAND' and if so, set is_specify_operand = true. + set_fact: + is_specify_operand: "{{ result | ibm.ibm_zos_core.filter_wtor_messages('IEE094D SPECIFY OPERAND') }}" + when: result is defined and not result.failed + +- name: Evaluate if there are any existing dump messages matching 'IEE094D SPECIFY OPERAND' + assert: + that: + - is_specify_operand is defined + - bool_zos_operator_action_continue + success_msg: "Found 'IEE094D SPECIFY OPERAND' message." + fail_msg: "Did not find 'IEE094D SPECIFY OPERAND' message." +""" + +RETURN = r""" + _value: + description: A list containing dictionaries matching the WTOR. + type: list + elements: dict +""" + import re diff --git a/plugins/module_utils/job.py b/plugins/module_utils/job.py index 1f49a2b26..25483b45d 100644 --- a/plugins/module_utils/job.py +++ b/plugins/module_utils/job.py @@ -1,4 +1,4 @@ -# Copyright (c) IBM Corporation 2019 - 2024 +# Copyright (c) IBM Corporation 2019, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -21,7 +21,7 @@ # Only importing this module so we can catch a JSONDecodeError that sometimes happens # when a job's output has non-printable chars that conflict with JSON's control # chars. -from json import decoder +from json import JSONDecodeError from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.better_arg_parser import ( BetterArgParser, ) @@ -29,6 +29,12 @@ ZOAUImportError ) +try: + from zoautil_py import exceptions +except ImportError: + exceptions = ZOAUImportError(traceback.format_exc()) + + try: # For files that import individual functions from a ZOAU module, # we'll replace the imports to instead get the module. @@ -40,6 +46,18 @@ except Exception: jobs = ZOAUImportError(traceback.format_exc()) +JOB_ERROR_STATUSES = frozenset(["ABEND", # ZOAU job ended abnormally + "SEC ERROR", # Security error (legacy Ansible code) + "SEC", # ZOAU security error + "JCL ERROR", # Job had a JCL error (legacy Ansible code) + "JCLERR", # ZOAU job had a JCL error + "CANCELED", # ZOAU job was cancelled + "CAB", # ZOAU converter abend + "CNV", # ZOAU converter error + "SYS", # ZOAU system failure + "FLU" # ZOAU job was flushed + ]) + def job_output(job_id=None, owner=None, job_name=None, dd_name=None, dd_scan=True, duration=0, timeout=0, start_time=timer()): """Get the output from a z/OS job based on various search criteria. @@ -89,11 +107,6 @@ def job_output(job_id=None, owner=None, job_name=None, dd_name=None, dd_scan=Tru start_time=start_time ) - # while ((job_detail is None or len(job_detail) == 0) and duration <= timeout): - # current_time = timer() - # duration = round(current_time - start_time) - # sleep(1) - if len(job_detail) == 0: # some systems have issues with "*" while some require it to see results job_id = "" if job_id == "*" else job_id @@ -238,17 +251,9 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=T # Preserve the original job_id for the failure path job_id_temp = job_id - # jls output: owner=job[0], name=job[1], id=job[2], status=job[3], rc=job[4] - # e.g.: OMVSADM HELLO JOB00126 JCLERR ? - # jobs.listing(job_id, owner) in 1.2.0 has owner param, 1.1 does not - # jls output has expanded in zoau 1.2.3 and later: jls -l -v shows headers - # jobclass=job[5] serviceclass=job[6] priority=job[7] asid=job[8] - # creationdatetime=job[9] queueposition=job[10] - # starting in zoau 1.2.4, program_name[11] was added. In 1.3.0, include_extended - # has to be set to true so we get the program name for a job. - # Testing has shown that the program_name impact is minor, so we're removing that option - final_entries = [] + + # In 1.3.0, include_extended has to be set to true so we get the program name for a job. entries = jobs.fetch_multiple(job_id=job_id_temp, include_extended=True) while ((entries is None or len(entries) == 0) and duration <= timeout): @@ -276,25 +281,17 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=T job["system"] = "" job["owner"] = entry.owner - job["ret_code"] = dict() - # From v1.3.0, ZOAU sets unavailable job fields as None, instead of '?'. - # This new way of constructing msg allows for a better empty message. - # "" instead of "None None". - job["ret_code"]["msg"] = "{0} {1}".format( - entry.status if entry.status else "", - entry.return_code if entry.return_code else "" - ).strip() - + job["ret_code"] = {} + job["ret_code"]["msg"] = entry.status job["ret_code"]["msg_code"] = entry.return_code job["ret_code"]["code"] = None if entry.return_code and len(entry.return_code) > 0: if entry.return_code.isdigit(): job["ret_code"]["code"] = int(entry.return_code) - job["ret_code"]["msg_text"] = entry.status if entry.status else "?" + job["ret_code"]["msg_txt"] = entry.status - # Beginning in ZOAU v1.3.0, the Job class changes svc_class to - # service_class. + # Beginning in ZOAU v1.3.0, the Job class changes svc_class to service_class. job["svc_class"] = entry.service_class job["job_class"] = entry.job_class job["priority"] = entry.priority @@ -310,16 +307,45 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=T job["duration"] = duration if dd_scan: - list_of_dds = jobs.list_dds(entry.job_id) - while ((list_of_dds is None or len(list_of_dds) == 0) and duration <= timeout): + # If true, it means the job is not ready for DD queries and the duration and + # timeout should apply here instructing the user to add more time + is_dd_query_exception = False + is_jesjcl = False + list_of_dds = [] + + try: + list_of_dds = jobs.list_dds(entry.job_id) + except exceptions.DDQueryException as err: + if 'BGYSC5201E' in str(err): + is_dd_query_exception = True + pass + + # Check if the Job has JESJCL, if not, its in the JES INPUT queue, thus wait the full wait_time_s. + # Idea here is to force a TYPRUN{HOLD|JCLHOLD|COPY} job to go the full wait duration since we have + # currently no way to detect them, but if we know the job is one of the JOB_ERROR_STATUS lets + # exit the wait time supplied as we know it is a job failure. + is_jesjcl = True if search_dictionaries("dd_name", "JESJCL", list_of_dds) else False + is_job_error_status = True if entry.status in JOB_ERROR_STATUSES else False + + while ((list_of_dds is None or len(list_of_dds) == 0 or is_dd_query_exception) and + (not is_jesjcl and not is_job_error_status and duration <= timeout)): current_time = timer() duration = round(current_time - start_time) sleep(1) - list_of_dds = jobs.list_dds(entry.job_id) + try: + # Note, in the event of an exception, eg job has TYPRUN=HOLD + # list_of_dds will still be populated with valuable content + list_of_dds = jobs.list_dds(entry.job_id) + is_jesjcl = True if search_dictionaries("dd_name", "JESJCL", list_of_dds) else False + is_job_error_status = True if entry.status in JOB_ERROR_STATUSES else False + except exceptions.DDQueryException as err: + if 'BGYSC5201E' in str(err): + is_dd_query_exception = True + continue job["duration"] = duration - for single_dd in list_of_dds: + dd = {} if "dd_name" not in single_dd: @@ -360,23 +386,24 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=T tmpcont = None if "step_name" in single_dd: if "dd_name" in single_dd: - # In case ZOAU fails when reading the job output, we'll - # add a message to the user telling them of this. - # ZOAU cannot read partial output from a job, so we - # have to make do with nothing from this step if it fails. + # In case ZOAU fails when reading the job output, we'll add a + # message to the user telling them of this. ZOAU cannot read + # partial output from a job, so we have to make do with nothing + # from this step if it fails. try: tmpcont = jobs.read_output( entry.job_id, single_dd["step_name"], single_dd["dd_name"] ) - except (UnicodeDecodeError, decoder.JSONDecodeError): + except (UnicodeDecodeError, JSONDecodeError, TypeError, KeyError) as e: tmpcont = ( "Non-printable UTF-8 characters were present in this output. " - "Please access it manually." + "Please access it from the job log." ) dd["content"] = tmpcont.split("\n") + job["ret_code"]["steps"].extend(_parse_steps(tmpcont)) job["ddnames"].append(dd) @@ -397,16 +424,6 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=T job["subsystem"] = (tmptext.split("\n")[ 0]).replace(" ", "") - # Extract similar: "19.49.44 JOB06848 IEFC452I DOCEASYT - JOB NOT RUN - JCL ERROR 029 " - # then further reduce down to: 'JCL ERROR 029' - if job["ret_code"]["msg_code"] == "?": - if "JOB NOT RUN -" in tmpcont: - tmptext = tmpcont.split( - "JOB NOT RUN -")[1].split("\n")[0] - job["ret_code"]["msg"] = tmptext.strip() - job["ret_code"]["msg_code"] = None - job["ret_code"]["code"] = None - final_entries.append(job) if not final_entries: final_entries = _job_not_found(job_id, owner, job_name, "unavailable") @@ -439,3 +456,25 @@ def _ddname_pattern(contents, resolve_dependencies): ) ) return str(contents) + + +def search_dictionaries(key, value, list_of_dictionaries): + """ Searches a list of dictionaries given key and returns + the value dictionary. + + Arguments: + key {str} -- dictionary key to search for. + value {str} -- value to match for the dictionary key + list {str} -- list of dictionaries + + Returns: + dictionary -- dictionary matching the key and value + + Raises: + TypeError -- When input is not a list of dictionaries + """ + if not isinstance(list_of_dictionaries, list): + raise TypeError( + "Unsupported type for 'list_of_dictionaries', must be a list of dictionaries") + + return [element for element in list_of_dictionaries if element[key] == value] diff --git a/plugins/modules/zos_copy.py b/plugins/modules/zos_copy.py index 6991c4d81..9acb3c1c6 100644 --- a/plugins/modules/zos_copy.py +++ b/plugins/modules/zos_copy.py @@ -94,6 +94,10 @@ - C(dest) can be a USS file, directory or MVS data set name. - If C(dest) has missing parent directories, they will be created. - If C(dest) is a nonexistent USS file, it will be created. + - If C(dest) is a new USS file or replacement, the file will be appropriately tagged with + either the system's default locale or the encoding option defined. If the USS file is + a replacement, the user must have write authority to the file either through ownership, + group or other permissions, else the module will fail. - If C(dest) is a nonexistent data set, it will be created following the process outlined here and in the C(volume) option. - If C(dest) is a nonexistent data set, the attributes assigned will depend on the type of @@ -467,15 +471,16 @@ - VSAM data sets can only be copied to other VSAM data sets. - For supported character sets used to encode data, refer to the L(documentation,https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/resources/character_set.html). - - L(zos_copy,./zos_copy.html) uses SFTP (Secure File Transfer Protocol) for the underlying - transfer protocol; Co:Z SFTP is not supported. In the case of Co:z SFTP, - you can exempt the Ansible userid on z/OS from using Co:Z thus falling back - to using standard SFTP. - - Beginning in version 1.8.x, zos_copy will no longer attempt to autocorrect a copy of a data type member - into a PDSE that contains program objects. You can control this behavior using module option - executable that will signify an executable is being copied into a PDSE with other - executables. Mixing data type members with program objects will be responded with a - (FSUM8976,./zos_copy.html) error. + - This module uses SFTP (Secure File Transfer Protocol) for the underlying + transfer protocol; SCP (secure copy protocol) and Co:Z SFTP are not supported. In the + case of Co:z SFTP, you can exempt the Ansible user id on z/OS from using Co:Z thus falling + back to using standard SFTP. If the module detects SCP, it will temporarily use SFTP for + transfers, if not available, the module will fail. + - Beginning in version 1.8.x, zos_copy will no longer attempt to correct a copy of + a data type member into a PDSE that contains program objects. You can control this + behavior using module option C(executable) that will signify an executable is being + copied into a PDSE with other executables. Mixing data type members with program + objects will result in a (FSUM8976,./zos_copy.html) error. seealso: - module: zos_fetch - module: zos_data_set diff --git a/plugins/modules/zos_fetch.py b/plugins/modules/zos_fetch.py index dc4bc8071..cc26b622b 100644 --- a/plugins/modules/zos_fetch.py +++ b/plugins/modules/zos_fetch.py @@ -146,10 +146,11 @@ - Fetching HFS or ZFS type data sets is currently not supported. - For supported character sets used to encode data, refer to the L(documentation,https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/resources/character_set.html). - - L(zos_fetch,./zos_fetch.html) uses SFTP (Secure File Transfer Protocol) for the underlying - transfer protocol; Co:Z SFTP is not supported. In the case of Co:z SFTP, - you can exempt the Ansible userid on z/OS from using Co:Z thus falling back - to using standard SFTP. + - This module uses SFTP (Secure File Transfer Protocol) for the underlying + transfer protocol; SCP (secure copy protocol) and Co:Z SFTP are not supported. In the + case of Co:z SFTP, you can exempt the Ansible user id on z/OS from using Co:Z thus falling + back to using standard SFTP. If the module detects SCP, it will temporarily use SFTP for + transfers, if not available, the module will fail. seealso: - module: zos_data_set - module: zos_copy diff --git a/plugins/modules/zos_job_submit.py b/plugins/modules/zos_job_submit.py index 1fd5030b5..7c66c2543 100644 --- a/plugins/modules/zos_job_submit.py +++ b/plugins/modules/zos_job_submit.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019 - 2024 +# Copyright (c) IBM Corporation 2019, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -25,9 +25,8 @@ - "Demetrios Dimatos (@ddimatos)" short_description: Submit JCL description: - - Submit JCL from a data set, USS, or from the controller. - - Submit a job and optionally monitor for completion. - - Optionally, wait a designated time until the job finishes. + - Submit JCL in a data set, USS file, or file on the controller. + - Submit a job and monitor for completion. - For an uncataloged dataset, specify the volume serial number. version_added: "1.0.0" options: @@ -126,6 +125,13 @@ notes: - For supported character sets used to encode data, refer to the L(documentation,https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/resources/character_set.html). + - This module uses L(zos_copy,./zos_copy.html) to copy local scripts to + the remote machine which uses SFTP (Secure File Transfer Protocol) for the + underlying transfer protocol; SCP (secure copy protocol) and Co:Z SFTP are not + supported. In the case of Co:z SFTP, you can exempt the Ansible user id on z/OS + from using Co:Z thus falling back to using standard SFTP. If the module detects + SCP, it will temporarily use SFTP for transfers, if not available, the module + will fail. """ RETURN = r""" @@ -217,28 +223,40 @@ contains: msg: description: - Return code resulting from the job submission. Jobs that take - longer to assign a value can have a value of '?'. + - Job status resulting from the job submission. + - Job status `ABEND` indicates the job ended abnormally. + - Job status `AC` indicates the job is active, often a started task or job taking long. + - Job status `CAB` indicates a converter abend. + - Job status `CANCELED` indicates the job was canceled. + - Job status `CNV` indicates a converter error. + - Job status `FLU` indicates the job was flushed. + - Job status `JCLERR` or `JCL ERROR` indicates the JCL has an error. + - Job status `SEC` or `SEC ERROR` indicates the job as encountered a security error. + - Job status `SYS` indicates a system failure. + - Job status `?` indicates status can not be determined. + - Jobs where status can not be determined will result in None (NULL). type: str - sample: CC 0000 + sample: AC msg_code: description: - Return code extracted from the `msg` so that it can be evaluated - as a string. Jobs that take longer to assign a value can have a - value of '?'. + - The return code from the submitted job as a string. + - Jobs which have no return code will result in None (NULL), such + is the case of a job that errors or is active. type: str sample: 0000 msg_txt: description: - Returns additional information related to the job. Jobs that take - longer to assign a value can have a value of '?'. + - Returns additional information related to the submitted job. + - Jobs which have no additional information will result in None (NULL). type: str - sample: The job completion code (CC) was not available in the job - output, please review the job log." + sample: The job JOB00551 was run with special job processing TYPRUN=SCAN. + This will result in no completion, return code or job steps and + changed will be false. code: description: - Return code converted to an integer value (when possible). - For JCL ERRORs, this will be None. + - The return code converted to an integer value when available. + - Jobs which have no return code will result in None (NULL), such + is the case of a job that errors or is active. type: int sample: 0 steps: @@ -537,15 +555,10 @@ "system": "STL1" } ] -message: - description: This option is being deprecated - returned: success - type: str - sample: Submit JCL operation succeeded. """ EXAMPLES = r""" -- name: Submit JCL in a PDSE member +- name: Submit JCL in a PDSE member. zos_job_submit: src: HLQ.DATA.LLQ(SAMPLE) location: DATA_SET @@ -597,7 +610,7 @@ BetterArgParser, ) from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.job import ( - job_output, + job_output, search_dictionaries, JOB_ERROR_STATUSES ) from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import ( ZOAUImportError, @@ -627,8 +640,10 @@ jobs = ZOAUImportError(traceback.format_exc()) -JOB_COMPLETION_MESSAGES = frozenset(["CC", "ABEND", "SEC ERROR", "JCL ERROR", "JCLERR"]) -JOB_ERROR_MESSAGES = frozenset(["ABEND", "SEC ERROR", "SEC", "JCL ERROR", "JCLERR"]) +JOB_STATUSES = list(dict.fromkeys(JOB_ERROR_STATUSES)) +JOB_STATUSES.append("CC") + +JOB_SPECIAL_PROCESSING = frozenset(["TYPRUN"]) MAX_WAIT_TIME_S = 86400 @@ -693,23 +708,39 @@ def submit_src_jcl(module, src, src_name=None, timeout=0, is_unix=True, volume=N # which is what ZOAU sends back, opitonally we can check the 'status' as # that is sent back as `AC` when the job is not complete but the problem # with monitoring 'AC' is that STARTED tasks never exit the AC status. + job_fetched = None + job_fetch_rc = None + job_fetch_status = None + if job_submitted: - job_fetch_rc = jobs.fetch_multiple(job_submitted.job_id)[0].return_code - job_fetch_status = jobs.fetch_multiple(job_submitted.job_id)[0].status + try: + job_fetched = jobs.fetch_multiple(job_submitted.job_id)[0] + job_fetch_rc = job_fetched.return_code + job_fetch_status = job_fetched.status + except zoau_exceptions.JobFetchException: + pass # Before moving forward lets ensure our job has completed but if we see - # status that matches one in JOB_ERROR_MESSAGES, don't wait, let the code - # drop through and get analyzed in the main as it will scan the job ouput. - # Any match to JOB_ERROR_MESSAGES ends our processing and wait times. - while (job_fetch_status not in JOB_ERROR_MESSAGES and + # status that matches one in JOB_STATUSES, don't wait, let the code + # drop through and get analyzed in the main as it will scan the job ouput + # Any match to JOB_STATUSES ends our processing and wait times + while (job_fetch_status not in JOB_STATUSES and job_fetch_status == 'AC' and ((job_fetch_rc is None or len(job_fetch_rc) == 0 or job_fetch_rc == '?') and duration < timeout)): current_time = timer() duration = round(current_time - start_time) sleep(1) - job_fetch_rc = jobs.fetch_multiple(job_submitted.job_id)[0].return_code - job_fetch_status = jobs.fetch_multiple(job_submitted.job_id)[0].status + try: + job_fetched = jobs.fetch_multiple(job_submitted.job_id)[0] + job_fetch_rc = job_fetched.return_code + job_fetch_status = job_fetched.status + # Allow for jobs that need more time to be fectched to run the wait_time_s + except zoau_exceptions.JobFetchException as err: + if duration >= timeout: + raise err + else: + continue # ZOAU throws a JobSubmitException when the job sumbission fails thus there is no # JCL RC to share with the user, if there is a RC, that will be processed @@ -736,11 +767,12 @@ def submit_src_jcl(module, src, src_name=None, timeout=0, is_unix=True, volume=N result["stderr"] = to_text(err) result["duration"] = duration result["job_id"] = job_submitted.job_id + _msg_detail = "the job with status {0}".format(job_fetch_status) if job_fetch_status else "its status" result["msg"] = ("The JCL has been submitted {0} with ID {1} but there was an " - "error while fetching its status within the allocated time of {2} " + "error while fetching {2} within the allocated time of {3} " "seconds. Consider using module zos_job_query to poll for the " "job for more information. Standard error may have additional " - "information.".format(src_name, job_submitted.job_id, str(timeout))) + "information.".format(src_name, job_submitted.job_id, _msg_detail, str(timeout))) module.fail_json(**result) # Between getting a job_submitted and the jobs.fetch_multiple(job_submitted.job_id)[0].return_code @@ -882,7 +914,7 @@ def run_module(): if wait_time_s <= 0 or wait_time_s > MAX_WAIT_TIME_S: result["failed"] = True - result["msg"] = ("The value for option `wait_time_s` is not valid, it must " + result["msg"] = ("The value for option 'wait_time_s' is not valid, it must " "be greater than 0 and less than {0}.".format(str(MAX_WAIT_TIME_S))) module.fail_json(**result) @@ -899,29 +931,39 @@ def run_module(): job_submitted_id, duration = submit_src_jcl( module, src, src_name=src, timeout=wait_time_s, is_unix=True) - try: - # Explictly pass None for the unused args else a default of '*' will be - # used and return undersirable results - job_output_txt = None + # Explictly pass None for the unused args else a default of '*' will be + # used and return undersirable results + job_output_txt = None + try: job_output_txt = job_output( job_id=job_submitted_id, owner=None, job_name=None, dd_name=None, dd_scan=return_output, duration=duration, timeout=wait_time_s, start_time=start_time) + # This is resolvig a bug where the duration coming from job_output is passed by value, duration + # being an immutable type can not be changed and must be returned or accessed from the job.py. + if job_output is not None: + duration = job_output_txt[0].get("duration") if not None else duration + result["duration"] = duration if duration >= wait_time_s: result["failed"] = True result["changed"] = False + _msg = ("The JCL submitted with job id {0} but appears to be a long " + "running job that exceeded its maximum wait time of {1} " + "second(s). Consider using module zos_job_query to poll for " + "a long running job or increase option 'wait_times_s' to a value " + "greater than {2}.".format(str(job_submitted_id), str(wait_time_s), str(duration))) + _msg_suffix = ("Consider using module zos_job_query to poll for " + "a long running job or increase option 'wait_times_s' to a value " + "greater than {0}.".format(str(duration))) + if job_output_txt is not None: result["jobs"] = job_output_txt - result["msg"] = ( - "The JCL submitted with job id {0} but appears to be a long " - "running job that exceeded its maximum wait time of {1} " - "second(s). Consider using module zos_job_query to poll for " - "a long running job or increase option 'wait_times_s` to a value " - "greater than {2}.".format( - str(job_submitted_id), str(wait_time_s), str(duration))) + job_ret_code = job_output_txt[0].get("ret_code") + job_ret_code.update({"msg_txt": _msg_suffix}) + result["msg"] = _msg module.exit_json(**result) # Job has submitted, the module changed the managed node @@ -932,35 +974,76 @@ def run_module(): job_ret_code = job_output_txt[0].get("ret_code") if job_ret_code: - job_msg = job_ret_code.get("msg") - job_code = job_ret_code.get("code") - - # retcode["msg"] should never be empty where a retcode["code"] can be None, - # "msg" could be an ABEND which has no corresponding "code" - if job_msg is None: - _msg = ("Unable to find a 'msg' in the 'ret_code' dictionary, " - "please review the job log.") - result["stderr"] = _msg - raise Exception(_msg) + job_ret_code_msg = job_ret_code.get("msg") + job_ret_code_code = job_ret_code.get("code") + job_ret_code_msg_code = job_ret_code.get("msg_code") if return_output is True and max_rc is not None: - is_changed = assert_valid_return_code(max_rc, job_code, job_ret_code) - - if re.search("^(?:{0})".format("|".join(JOB_COMPLETION_MESSAGES)), job_msg): - # If the job_msg doesn't have a CC, it is an improper completion (error/abend) - if re.search("^(?:CC)", job_msg) is None: - _msg = ("The job completion code (CC) was not in the job log. " - "Please review the error {0} and the job log.".format(job_msg)) - result["stderr"] = _msg + is_changed = assert_valid_return_code(max_rc, job_ret_code_code, job_ret_code, result) + + if job_ret_code_msg is not None: + if re.search("^(?:{0})".format("|".join(JOB_STATUSES)), job_ret_code_msg): + # If the job_ret_code_msg doesn't have a CC (completion code), the job failed. + if re.search("^(?:CC)", job_ret_code_msg) is None: + _msg = ("The job completion code (CC) was not in the job log. " + "please review the job log for status {0}.".format(job_ret_code_msg)) + result["stderr"] = _msg + job_ret_code.update({"msg_txt": _msg}) + raise Exception(_msg) + + if job_ret_code_code is None: + # If there is no job_ret_code_code (Job return code) it may NOT be an error, + # some jobs will never return have an RC, eg Jobs with TYPRUN=*, + # Started tasks (which are not supported) so further analyze the + # JESJCL DD to figure out if its a TYPRUN job + + job_dd_names = job_output_txt[0].get("ddnames") + jes_jcl_dd = search_dictionaries("ddname", "JESJCL", job_dd_names) + + # Its possible jobs don't have a JESJCL which are active and this would + # cause an index out of range error. + if not jes_jcl_dd: + _msg_detail = " for status {0}.".format(job_ret_code_msg) if job_ret_code_msg else "." + _msg = ("The job return code was not available in the job log, " + "please review the job log{0}".format(_msg_detail)) + job_ret_code.update({"msg_txt": _msg}) raise Exception(_msg) - if job_code is None: - raise Exception("The job return code was not available in the job log, " - "please review the job log and error {0}.".format(job_msg)) - - if job_code != 0 and max_rc is None: - raise Exception("The job return code {0} was non-zero in the " - "job output, this job has failed.".format(str(job_code))) + jes_jcl_dd_content = jes_jcl_dd[0].get("content") + jes_jcl_dd_content_str = " ".join(jes_jcl_dd_content) + + # The regex can be r"({0})\s*=\s*(COPY|HOLD|JCLHOLD|SCAN)" once zoau support is in. + special_processing_keyword = re.search(r"({0})\s*=\s*(SCAN)" + .format("|".join(JOB_SPECIAL_PROCESSING)), jes_jcl_dd_content_str) + + if special_processing_keyword: + job_ret_code.update({"msg": special_processing_keyword[0]}) + job_ret_code.update({"code": None}) + job_ret_code.update({"msg_code": None}) + job_ret_code.update({"msg_txt": "The job {0} was run with special job " + "processing {1}. This will result in no completion, " + "return code or job steps and changed will be false." + .format(job_submitted_id, special_processing_keyword[0])}) + is_changed = False + else: + # The job_ret_code_code is None at this point, but the job_ret_code_msg_code could be populated + # so check both and provide a proper response. + + if job_ret_code_msg_code is None: + _msg_detail = " for status {0}.".format(job_ret_code_msg) if job_ret_code_msg else "." + _msg = ("The job return code was not available in the job log, " + "please review the job log{0}".format(_msg_detail)) + job_ret_code.update({"msg_txt": _msg}) + raise Exception(_msg) + + # raise Exception("The job return code was not available in the job log, " + # "please review the job log and error {0}.".format(job_ret_code_msg)) + elif job_ret_code_code != 0 and max_rc is None: + _msg = ("The job return code {0} was non-zero in the " + "job output, this job has failed.".format(str(job_ret_code_code))) + job_ret_code.update({"msg_txt": _msg}) + result["stderr"] = _msg + raise Exception(_msg) if not return_output: for job in result.get("jobs", []): @@ -975,7 +1058,6 @@ def run_module(): result["stderr"] = _msg result["jobs"] = None raise Exception(_msg) - except Exception as err: result["failed"] = True result["changed"] = False @@ -995,27 +1077,32 @@ def run_module(): module.exit_json(**result) -def assert_valid_return_code(max_rc, job_rc, ret_code): +def assert_valid_return_code(max_rc, job_rc, ret_code, result): if job_rc is None: raise Exception( "The job return code (ret_code[code]) was not available in the jobs output, " "this job has failed.") if job_rc > max_rc: - raise Exception("The job return code, 'ret_code[code]' {0} for the submitted job is " - "greater than the value set for option 'max_rc' {1}. " - "Increase the value for 'max_rc' otherwise this job submission " - "has failed.".format(str(job_rc), str(max_rc))) + _msg = ("The job return code, 'ret_code[code]' {0} for the submitted job is " + "greater than the value set for option 'max_rc' {1}. " + "Increase the value for 'max_rc' otherwise this job submission " + "has failed.".format(str(job_rc), str(max_rc))) + ret_code.update({"msg_txt": _msg}) + result["stderr"] = _msg + raise Exception(_msg) for step in ret_code["steps"]: step_cc_rc = int(step["step_cc"]) step_name_for_rc = step["step_name"] if step_cc_rc > max_rc: - raise Exception("The step name {0} with return code {1} for the submitted job is " - "greater than the value set for option 'max_rc' {2}. " - "Increase the value for 'max_rc' otherwise this job submission " - "has failed.".format(step_name_for_rc, str(step_cc_rc), str(max_rc))) - + _msg = ("The step name {0} with return code {1} for the submitted job is " + "greater than the value set for option 'max_rc' {2}. " + "Increase the value for 'max_rc' otherwise this job submission " + "has failed.".format(step_name_for_rc, str(step_cc_rc), str(max_rc))) + ret_code.update({"msg_txt": _msg}) + result["stderr"] = _msg + raise Exception(_msg) # If there is NO exception rasied it means that max_rc is larger than the # actual RC from the submitted job. In this case, the ansible changed status # should NOT be 'changed=true' even though the user did override the return code, diff --git a/plugins/modules/zos_ping.py b/plugins/modules/zos_ping.py index 6de0cccf0..5f134cd90 100644 --- a/plugins/modules/zos_ping.py +++ b/plugins/modules/zos_ping.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019 - 2024 +# Copyright (c) IBM Corporation 2019, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/plugins/modules/zos_ping.rexx b/plugins/modules/zos_ping.rexx index a881146b0..beca54c3b 100644 --- a/plugins/modules/zos_ping.rexx +++ b/plugins/modules/zos_ping.rexx @@ -85,7 +85,7 @@ If (rc <> 0 | returnCode <> HWTJ_OK) Then Do failModule(errmsg, "", retC) End -/* Check for Python version >= 3.8 eg: 'Python 3.10.0' */ +/* Check for Python version >= 3.10 eg: 'Python 3.10.0' */ retC = bpxwunix('python3 --version', out., err.) If (err.0 > 0) Then Do Do index=1 To err.0 diff --git a/plugins/modules/zos_script.py b/plugins/modules/zos_script.py index b69d70b2d..0677d187d 100644 --- a/plugins/modules/zos_script.py +++ b/plugins/modules/zos_script.py @@ -116,11 +116,12 @@ - For supported character sets used to encode data, refer to the L(documentation,https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/resources/character_set.html). - This module uses L(zos_copy,./zos_copy.html) to copy local scripts to - the remote machine. - - L(zos_copy,./zos_copy.html) uses SFTP (Secure File Transfer Protocol) - for the underlying transfer protocol; Co:Z SFTP is not supported. In - the case of Co:z SFTP, you can exempt the Ansible userid on z/OS from - using Co:Z thus falling back to using standard SFTP. + the remote machine which uses SFTP (Secure File Transfer Protocol) for the + underlying transfer protocol; SCP (secure copy protocol) and Co:Z SFTP are not + supported. In the case of Co:z SFTP, you can exempt the Ansible user id on z/OS + from using Co:Z thus falling back to using standard SFTP. If the module detects + SCP, it will temporarily use SFTP for transfers, if not available, the module + will fail. - This module executes scripts inside z/OS UNIX System Services. For running REXX scripts contained in data sets or CLISTs, consider issuing a TSO command with L(zos_tso_command,./zos_tso_command.html). diff --git a/plugins/modules/zos_tso_command.py b/plugins/modules/zos_tso_command.py index 6c2cb6ef6..17e190fb2 100644 --- a/plugins/modules/zos_tso_command.py +++ b/plugins/modules/zos_tso_command.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019 - 2024 +# Copyright (c) IBM Corporation 2019, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/plugins/modules/zos_unarchive.py b/plugins/modules/zos_unarchive.py index e9b17766c..aa315b3fb 100644 --- a/plugins/modules/zos_unarchive.py +++ b/plugins/modules/zos_unarchive.py @@ -29,8 +29,6 @@ - Supported sources are USS (UNIX System Services) or z/OS data sets. - Mixing MVS data sets with USS files for unarchiving is not supported. - The archive is sent to the remote as binary, so no encoding is performed. - - options: src: description: @@ -311,12 +309,17 @@ type: bool required: false default: false - notes: - VSAMs are not supported. - + - This module uses L(zos_copy,./zos_copy.html) to copy local scripts to + the remote machine which uses SFTP (Secure File Transfer Protocol) for the + underlying transfer protocol; SCP (secure copy protocol) and Co:Z SFTP are not + supported. In the case of Co:z SFTP, you can exempt the Ansible user id on z/OS + from using Co:Z thus falling back to using standard SFTP. If the module detects + SCP, it will temporarily use SFTP for transfers, if not available, the module + will fail. seealso: - - module: zos_unarchive + - module: zos_archive ''' EXAMPLES = r''' diff --git a/tests/functional/modules/test_zos_job_query_func.py b/tests/functional/modules/test_zos_job_query_func.py index ee7b03157..8f6c6e072 100644 --- a/tests/functional/modules/test_zos_job_query_func.py +++ b/tests/functional/modules/test_zos_job_query_func.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019, 2020, 2023 +# Copyright (c) IBM Corporation 2019, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -57,7 +57,7 @@ def test_zos_job_id_query_multi_wildcards_func(ansible_zos_module): cmd="echo {0} > {1}/SAMPLE".format(quote(JCLQ_FILE_CONTENTS), TEMP_PATH) ) hosts.all.zos_data_set( - name=JDATA_SET_NAME, state="present", type="pds", replace=True + name=JDATA_SET_NAME, state="present", type="PDS", replace=True ) hosts.all.shell( cmd="cp {0}/SAMPLE \"//'{1}(SAMPLE)'\"".format(TEMP_PATH, JDATA_SET_NAME) @@ -90,7 +90,7 @@ def test_zos_job_name_query_multi_wildcards_func(ansible_zos_module): cmd="echo {0} > {1}/SAMPLE".format(quote(JCLQ_FILE_CONTENTS), TEMP_PATH) ) hosts.all.zos_data_set( - name=NDATA_SET_NAME, state="present", type="pds", replace=True + name=NDATA_SET_NAME, state="present", type="PDS", replace=True ) hosts.all.shell( cmd="cp {0}/SAMPLE \"//'{1}(SAMPLE)'\"".format(TEMP_PATH, NDATA_SET_NAME) diff --git a/tests/functional/modules/test_zos_job_submit_func.py b/tests/functional/modules/test_zos_job_submit_func.py index 394a087ad..bae4dbb36 100644 --- a/tests/functional/modules/test_zos_job_submit_func.py +++ b/tests/functional/modules/test_zos_job_submit_func.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019 - 2024 +# Copyright (c) IBM Corporation 2019, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -163,7 +163,7 @@ //****************************************************************************** //* Job containing a non existent DSN that will force an error. //* Returns: -//* ret_code->(code=null, msg=JCLERR ?, msg_text=JCLERR, msg_code=?) +//* ret_code->(code=null, msg=JCLERR, msg_txt=JCLERR, msg_code=None) //* msg --> The JCL submitted with job id JOB00532 but there was an error, //* please review the error for further details: The job completion //* code (CC) was not in the job log. Please review the error @@ -198,7 +198,7 @@ //* Another job containing no job card resulting in a JCLERROR with an value. It //* won't always be 952, it will increment. //* Returns: -//* ret_code->(code=null, msg=JCL ERROR 952, msg_text=JCLERR, msg_code=null) +//* ret_code->(code=null, msg=JCLERR, msg_text=JCLERR, msg_code=null) //* msg --> The JCL submitted with job id JOB00728 but there was an error, //* please review the error for further details: The job completion //* code (CC) was not in the job log. Please review the error @@ -214,11 +214,11 @@ //* Job containing a USER=FOOBAR that will cause JES to return a SEC ERROR which //* is a security error. //* Returns: -//* ret_code->(code=null, msg=SEC ?, msg_text=SEC, msg_code=?) -//* msg --> The JCL submitted with job id JOB00464 but there was an error, +//* ret_code->(code=None, msg=SEC, msg_txt=<msg>, msg_code=?) +//* msg --> The JCL submitted with job id JOB01062 but there was an error, //* please review the error for further details: The job return code -//* was not available in the job log, please review the job log -//* and error SEC ?.", +//* was not available in the job log, please review the job log and +//* status SEC. //****************************************************************************** //INVUSER JOB (T043JM,JM00,1,0,0,0),'HELLO WORLD - JRM',CLASS=R, // MSGCLASS=X,MSGLEVEL=1,NOTIFY=S0JM,USER=FOOBAR @@ -234,22 +234,102 @@ JCL_FILE_CONTENTS_TYPRUN_SCAN = """//* //****************************************************************************** -//* Job containing a TYPRUN=SCAN that will cause JES to run a syntax check and -//* not actually run the JCL. +//* Job containing a TYPRUN=SCAN will cause JES to run a syntax check and +//* not actually run the JCL. The job will be put on the H output queue, DDs +//* JESJCL and JESMSGLG are available. Ansible considers this a passing job. //* Returns: -//* ret_code->(code=null, msg=? ?, msg_text=?, msg_code=?) -//* msg --> The JCL submitted with job id JOB00620 but there was an error, -//* please review the error for further details: The job return code -//* was not available in the job log, please review the job log -//* and error ? ?.", +//* ret_code->(code=null, msg=TYPRUN=SCAN, msg_txt=<msg>, msg_code=null) +//* msg --> The job JOB00551 was run with special job processing TYPRUN=SCAN. +//* This will result in no completion, return code or job steps and +//* changed will be false." //****************************************************************************** -//TYPESCAN JOB (T043JM,JM00,1,0,0,0),'HELLO WORLD - JRM',CLASS=R, -// MSGCLASS=X,MSGLEVEL=1,NOTIFY=S0JM,TYPRUN=SCAN +//SCAN JOB (T043JM,JM00,1,0,0,0),'HELLO WORLD - JRM',CLASS=R, +// MSGCLASS=H,MSGLEVEL=1,NOTIFY=S0JM,TYPRUN=SCAN //STEP0001 EXEC PGM=IEBGENER //SYSIN DD DUMMY //SYSPRINT DD SYSOUT=* //SYSUT1 DD * -HELLO, WORLD +HELLO, WORLD. SCAN OPERATION +/* +//SYSUT2 DD SYSOUT=* +// +""" + +JCL_FILE_CONTENTS_TYPRUN_COPY = """//* +//****************************************************************************** +//* Job containing a TYPRUN=COPY will cause JES to copy the input job +//* (source content) stream directly to a sysout data set (device specified in +//* the message class parameter (H)) and schedule it for output processing, in +//* other words, the job will be put on the H output queue; DD's +//* JESMSGLG and JESJCLIN are available. Ansible considers this a failing job +//* given currently the jobs status can not be determined so it times out. +//* Returns: +//* ret_code->(code=None, msg=None, msg_txt=<msg>, msg_code=None) +//* msg --> The JCL submitted with job id JOB00555 but appears to be a long +//* running job that exceeded its maximum wait time of 10 second(s). +//* Consider using module zos_job_query to poll for a long running +//* job or increase option 'wait_times_s' to a value greater than 11. +//****************************************************************************** +//COPY JOB (T043JM,JM00,1,0,0,0),'HELLO WORLD - JRM',CLASS=R, +// MSGCLASS=H,MSGLEVEL=1,NOTIFY=S0JM,TYPRUN=COPY +//STEP0001 EXEC PGM=IEBGENER +//SYSIN DD DUMMY +//SYSPRINT DD SYSOUT=* +//SYSUT1 DD * +HELLO, WORLD. COPY OPERATION +/* +//SYSUT2 DD SYSOUT=* +// +""" + +JCL_FILE_CONTENTS_TYPRUN_HOLD = """//* +//****************************************************************************** +//* Job containing a TYPRUN=HOLD will cause JES to hold this JCL without +//* executing it until a special event occurs at which time, the operator will +//* release the job from HOLD and allow the job to continue processing. +//* Ansible considers this a failing job +//* given currently the jobs status can not be determined so it times out. +//* Returns: +//* ret_code->(code=None, msg=None, msg_txt=<msg>, msg_code=None) +//* msg --> The JCL submitted with job id JOB00555 but appears to be a long +//* running job that exceeded its maximum wait time of 10 second(s). +//* Consider using module zos_job_query to poll for a long running +//* job or increase option 'wait_times_s' to a value greater than 11. +//****************************************************************************** +//HOLD JOB (T043JM,JM00,1,0,0,0),'HELLO WORLD - JRM',CLASS=R, +// MSGCLASS=H,MSGLEVEL=1,NOTIFY=S0JM,TYPRUN=HOLD +//STEP0001 EXEC PGM=IEBGENER +//SYSIN DD DUMMY +//SYSPRINT DD SYSOUT=* +//SYSUT1 DD * +HELLO, WORLD. HOLD OPERATION +/* +//SYSUT2 DD SYSOUT=* +// +""" + +JCL_FILE_CONTENTS_TYPRUN_JCLHOLD = """//* +//****************************************************************************** +//* Job containing a TYPRUN=JCLHOLD will cause JES to will keep the submitted +//* job in the input queue until it's released by an operator or by the default +//* time assigned to the class parameter. As the operator you enter 'A' or 'R' +//* to release it from the queue. +//* Ansible considers this a failing job +//* given currently the jobs status can not be determined so it times out. +//* Returns: +//* ret_code->(code=None, msg=None, msg_txt=<msg>, msg_code=None) +//* msg --> The JCL submitted with job id JOB00555 but appears to be a long +//* running job that exceeded its maximum wait time of 10 second(s). +//* Consider using module zos_job_query to poll for a long running +//* job or increase option 'wait_times_s' to a value greater than 11. +//****************************************************************************** +//JCLHOLD JOB (T043JM,JM00,1,0,0,0),'HELLO WORLD - JRM',CLASS=R, +// MSGCLASS=H,MSGLEVEL=1,NOTIFY=S0JM,TYPRUN=JCLHOLD +//STEP0001 EXEC PGM=IEBGENER +//SYSIN DD DUMMY +//SYSPRINT DD SYSOUT=* +//SYSUT1 DD * +HELLO, WORLD. JCLHOLD OPERATION /* //SYSUT2 DD SYSOUT=* // @@ -342,9 +422,11 @@ def test_job_submit_PDS(ansible_zos_module, location): hosts.all.shell( cmd="echo {0} > {1}/SAMPLE".format(quote(JCL_FILE_CONTENTS), TEMP_PATH) ) + hosts.all.zos_data_set( - name=data_set_name, state="present", type="pds", replace=True + name=data_set_name, state="present", type="PDS", replace=True ) + hosts.all.shell( cmd="cp {0}/SAMPLE \"//'{1}(SAMPLE)'\"".format(TEMP_PATH, data_set_name) ) @@ -362,8 +444,8 @@ def test_job_submit_PDS(ansible_zos_module, location): assert result.get("jobs")[0].get("ret_code").get("code") == 0 assert result.get("changed") is True finally: - hosts.all.file(path=TEMP_PATH, state="absent") - hosts.all.zos_data_set(name=data_set_name, state="absent") + hosts.all.file(path=TEMP_PATH, state="absent") + hosts.all.zos_data_set(name=data_set_name, state="absent") def test_job_submit_PDS_special_characters(ansible_zos_module): @@ -374,7 +456,7 @@ def test_job_submit_PDS_special_characters(ansible_zos_module): cmd="echo {0} > {1}/SAMPLE".format(quote(JCL_FILE_CONTENTS), TEMP_PATH) ) hosts.all.zos_data_set( - name=DATA_SET_NAME_SPECIAL_CHARS, state="present", type="pds", replace=True + name=DATA_SET_NAME_SPECIAL_CHARS, state="present", type="PDS", replace=True ) hosts.all.shell( cmd="cp {0}/SAMPLE \"//'{1}(SAMPLE)'\"".format( @@ -465,7 +547,7 @@ def test_job_submit_PDS_volume(ansible_zos_module, volumes_on_systems): ) hosts.all.zos_data_set( - name=data_set_name, state="present", type="pds", replace=True, volumes=volume_1 + name=data_set_name, state="present", type="PDS", replace=True, volumes=volume_1 ) hosts.all.shell( @@ -473,7 +555,7 @@ def test_job_submit_PDS_volume(ansible_zos_module, volumes_on_systems): ) hosts.all.zos_data_set( - name=data_set_name, state="uncataloged", type="pds" + name=data_set_name, state="uncataloged", type="PDS" ) results = hosts.all.zos_job_submit(src=data_set_name+"(SAMPLE)", location="DATA_SET", volume=volume_1) @@ -498,7 +580,7 @@ def test_job_submit_PDS_5_SEC_JOB_WAIT_15(ansible_zos_module): ) hosts.all.zos_data_set( - name=data_set_name, state="present", type="pds", replace=True + name=data_set_name, state="present", type="PDS", replace=True ) hosts.all.shell( @@ -531,7 +613,7 @@ def test_job_submit_PDS_30_SEC_JOB_WAIT_60(ansible_zos_module): ) hosts.all.zos_data_set( - name=data_set_name, state="present", type="pds", replace=True + name=data_set_name, state="present", type="PDS", replace=True ) hosts.all.shell( @@ -564,7 +646,7 @@ def test_job_submit_PDS_30_SEC_JOB_WAIT_10_negative(ansible_zos_module): ) hosts.all.zos_data_set( - name=data_set_name, state="present", type="pds", replace=True + name=data_set_name, state="present", type="PDS", replace=True ) hosts.all.shell( @@ -734,43 +816,113 @@ def test_negative_job_submit_local_jcl_no_dsn(ansible_zos_module): with open(tmp_file.name, "w") as f: f.write(JCL_FILE_CONTENTS_NO_DSN) hosts = ansible_zos_module - results = hosts.all.zos_job_submit(src=tmp_file.name, location="LOCAL") + results = hosts.all.zos_job_submit(src=tmp_file.name, wait_time_s=20, location="LOCAL") + import pprint for result in results.contacted.values(): - # Expecting: The job completion code (CC) was not in the job log....." assert result.get("changed") is False assert re.search(r'completion code', repr(result.get("msg"))) assert result.get("jobs")[0].get("job_id") is not None -# Should have a JCL ERROR <int> def test_negative_job_submit_local_jcl_invalid_user(ansible_zos_module): tmp_file = tempfile.NamedTemporaryFile(delete=True) with open(tmp_file.name, "w") as f: f.write(JCL_FILE_CONTENTS_INVALID_USER) hosts = ansible_zos_module results = hosts.all.zos_job_submit(src=tmp_file.name, location="LOCAL") + for result in results.contacted.values(): - # Expecting: The job completion code (CC) was not in the job log....." assert result.get("changed") is False - assert re.search(r'return code was not available', repr(result.get("msg"))) - assert re.search(r'error SEC', repr(result.get("msg"))) + assert re.search(r'please review the error for further details', repr(result.get("msg"))) + assert re.search(r'please review the job log for status SEC', repr(result.get("msg"))) assert result.get("jobs")[0].get("job_id") is not None - assert re.search(r'SEC', repr(result.get("jobs")[0].get("ret_code").get("msg_text"))) + assert re.search(r'please review the job log for status SEC', repr(result.get("jobs")[0].get("ret_code").get("msg_txt"))) -def test_negative_job_submit_local_jcl_typrun_scan(ansible_zos_module): +def test_job_submit_local_jcl_typrun_scan(ansible_zos_module): tmp_file = tempfile.NamedTemporaryFile(delete=True) with open(tmp_file.name, "w") as f: f.write(JCL_FILE_CONTENTS_TYPRUN_SCAN) hosts = ansible_zos_module - results = hosts.all.zos_job_submit(src=tmp_file.name, location="LOCAL") + results = hosts.all.zos_job_submit(src=tmp_file.name, + location="LOCAL", + wait_time_s=20, + encoding={ + "from": "UTF-8", + "to": "IBM-1047" + },) + for result in results.contacted.values(): + assert result.get("changed") is False + assert result.get("jobs")[0].get("job_id") is not None + assert re.search(r'run with special job processing TYPRUN=SCAN', repr(result.get("jobs")[0].get("ret_code").get("msg_txt"))) + assert result.get("jobs")[0].get("ret_code").get("code") is None + assert result.get("jobs")[0].get("ret_code").get("msg") == "TYPRUN=SCAN" + assert result.get("jobs")[0].get("ret_code").get("msg_code") is None + + +def test_job_submit_local_jcl_typrun_copy(ansible_zos_module): + tmp_file = tempfile.NamedTemporaryFile(delete=True) + with open(tmp_file.name, "w") as f: + f.write(JCL_FILE_CONTENTS_TYPRUN_COPY) + hosts = ansible_zos_module + results = hosts.all.zos_job_submit(src=tmp_file.name, + location="LOCAL", + wait_time_s=20, + encoding={ + "from": "UTF-8", + "to": "IBM-1047" + },) + import pprint + for result in results.contacted.values(): + pprint.pprint(result) + assert result.get("changed") is False + assert result.get("jobs")[0].get("job_id") is not None + assert re.search(r'please review the job log', repr(result.get("jobs")[0].get("ret_code").get("msg_txt"))) + assert result.get("jobs")[0].get("ret_code").get("code") is None + assert result.get("jobs")[0].get("ret_code").get("msg") is None + assert result.get("jobs")[0].get("ret_code").get("msg_code") is None + + +def test_job_submit_local_jcl_typrun_hold(ansible_zos_module): + tmp_file = tempfile.NamedTemporaryFile(delete=True) + with open(tmp_file.name, "w") as f: + f.write(JCL_FILE_CONTENTS_TYPRUN_HOLD) + hosts = ansible_zos_module + results = hosts.all.zos_job_submit(src=tmp_file.name, + location="LOCAL", + wait_time_s=20, + encoding={ + "from": "UTF-8", + "to": "IBM-1047" + },) for result in results.contacted.values(): - # Expecting: The job completion code (CC) was not in the job log....." assert result.get("changed") is False - assert re.search(r'return code was not available', repr(result.get("msg"))) - assert re.search(r'error ? ?', repr(result.get("msg"))) assert result.get("jobs")[0].get("job_id") is not None - assert result.get("jobs")[0].get("ret_code").get("msg_text") == "?" + assert re.search(r'long running job', repr(result.get("jobs")[0].get("ret_code").get("msg_txt"))) + assert result.get("jobs")[0].get("ret_code").get("code") is None + assert result.get("jobs")[0].get("ret_code").get("msg") == "AC" + assert result.get("jobs")[0].get("ret_code").get("msg_code") is None + + +def test_job_submit_local_jcl_typrun_jclhold(ansible_zos_module): + tmp_file = tempfile.NamedTemporaryFile(delete=True) + with open(tmp_file.name, "w") as f: + f.write(JCL_FILE_CONTENTS_TYPRUN_JCLHOLD) + hosts = ansible_zos_module + results = hosts.all.zos_job_submit(src=tmp_file.name, + location="LOCAL", + wait_time_s=20, + encoding={ + "from": "UTF-8", + "to": "IBM-1047" + },) + for result in results.contacted.values(): + assert result.get("changed") is False + assert result.get("jobs")[0].get("job_id") is not None + assert re.search(r'long running job', repr(result.get("jobs")[0].get("ret_code").get("msg_txt"))) + assert result.get("jobs")[0].get("ret_code").get("code") is None + assert result.get("jobs")[0].get("ret_code").get("msg") == "AC" + assert result.get("jobs")[0].get("ret_code").get("msg_code") is None # This test case is related to the following GitHub issues: @@ -807,4 +959,4 @@ def test_zoau_bugfix_invalid_utf8_chars(ansible_zos_module): assert result.get("jobs")[0].get("ret_code").get("code") == 0 assert result.get("changed") is True finally: - hosts.all.file(path=TEMP_PATH, state="absent") + hosts.all.file(path=TEMP_PATH, state="absent") \ No newline at end of file diff --git a/tests/unit/test_zoau_version_checker_unit.py b/tests/unit/test_zoau_version_checker_unit.py index 96031f4a1..15bcce58b 100644 --- a/tests/unit/test_zoau_version_checker_unit.py +++ b/tests/unit/test_zoau_version_checker_unit.py @@ -45,10 +45,24 @@ (['1','2','1'], "2022/08/17 21:25:13 CUT V1.2.1"), (['1','2','1'], "2022/08/25 21:44:21 CUT V1.2.1 31163ab 1856"), (['1','2','1'], "2022/09/07 15:26:50 CUT V1.2.1 d2f6557 1880"), + (['1','2','1','1'], ""), (['1','2','3'], "2022/12/03 13:33:22 CUT V1.2.3 6113dc9 2512"), (['1','2','2'], "2022/12/06 20:44:00 CUT V1.2.2 ee30137 2525"), (['1','2','3'], "2023/03/16 18:17:00 CUT V1.2.3 1aa591fb 2148 PH50145"), - (['1', '2', '4', '0'], "2023/06/02 13:28:30 CUT V1.2.4.0 3b866824 2873 PH52034 826 267d9646"), + (['1','2','3','1'], ""), + (['1','2','3','2'], ""), + (['1','2','4','0'], "2023/06/02 13:28:30 CUT V1.2.4.0 3b866824 2873 PH52034 826 267d9646"), + (['1','2','4','1'], ""), + (['1','2','4','2'], ""), + (['1','2','4','3'], ""), + (['1','2','4','4'], ""), + (['1','2','4','5'], ""), + (['1','2','5','0'], ""), + (['1','2','5','1'], ""), + (['1','2','5','2'], ""), + (['1','2','5','3'], ""), + (['1','2','5','4'], ""), + (['1','2','5','6'], ""), ] From 5f743e6df0c97378c1215c10950143108c2fff21 Mon Sep 17 00:00:00 2001 From: IsaacVRey <isaac.vega.rey@gmail.com> Date: Tue, 2 Apr 2024 17:36:46 -0600 Subject: [PATCH 335/495] Enabler/add ansible sanity action (#1313) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Create bandit.yml * Update bandit.yml * Update bandit.yml * Update bandit.yml * Update bandit.yml * Update bandit.yml * Update bandit.yml * Added changelog action * Update changelog.yml * Create close-stale-issues * Update close-stale-issues Quite el workflow dispatch * Create bandit2.yml * Update bandit2.yml * Update zos_copy.py * Update zos_copy.py Me equivoque * Create ansible-test.yml * Update ansible-test.yml * Update ansible-test.yml * Update ansible-test.yml * Update ansible-test.yml * Update ansible-test.yml * Update ansible-test.yml * Update ansible-test.yml * Update ansible-test.yml * Added ac changelog * added lint as an option * Added documentation to ac_changelog * Changed 'lint' to 'command' on ac_changelog * Create * Create first version of the changelog action * Update changelog.yml * Fix changelog.yml * Change name of action Antsibull 'Changelog lint' to AC Changelog lint * Rename 'changelog.yml' to 'ac_changelog.yml * Create ac_changelog.yml * Update ac_changelog.yml * Update ac_changelog.yml * Update ac_changelog.yml * Change path in 'venv setup' on ac * Change ac_changelog.yml * Change ac_changelog.yml * Change ac_changelog.yml * Change ac_changelog.yml * Removed not required github actions * Update zos_copy.py * Update ac_changelog.yml * Create 'ac-ansible-test.yml' * Test * Delete test changelog * Fix ac ansible sanity * Fix ac ansible sanity * Fix ac ansible sanity * Fix ac ansible sanity * Fix ac ansible sanity * Fix ac ansible sanity * Fix ac ansible sanity * Fix ac ansible sanity * Fix ac ansible sanity * Fix ac ansible sanity * Fix ac ansible sanity * Fix paths * Delete commented lines * Delete weird changes * Delete weird changes * Update ac-ansible-test-sanity.yml --------- Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> --- .github/workflows/ac-ansible-test-sanity.yml | 71 ++++++++++++++++++++ 1 file changed, 71 insertions(+) create mode 100644 .github/workflows/ac-ansible-test-sanity.yml diff --git a/.github/workflows/ac-ansible-test-sanity.yml b/.github/workflows/ac-ansible-test-sanity.yml new file mode 100644 index 000000000..1354195a5 --- /dev/null +++ b/.github/workflows/ac-ansible-test-sanity.yml @@ -0,0 +1,71 @@ +name: AC Ansible sanity + +on: + pull_request: + branches: + - dev + - staging* + paths-ignore: + - '**.tar.gz' + - 'pycache/**' + - '.ansible-lint' + - 'cache/**' + - '.DS_Store' + - '.git/**' + - '.github/**' + - '.gitignore' + - '.python-version' + - '.pytest_cache/**' + - '.vscode/**' + - 'Jenkinsfile' + - 'ac' + - 'ansible.cfg' + - 'changelogs/**' + - 'collections/**' + - 'docs/**' + - 'scripts/**' + - 'test_config.yml' + - 'tests/*.ini' + - 'tests/*.py' + - 'tests/.pytest_cache' + - 'tests/pycache' + - 'tests/functional' + - 'tests/helpers' + - 'tests/requirements.txt' + - 'tests/unit' + - 'tests/sanity/ignore-*' + - 'venv*' + +jobs: + ansible-sanity: + runs-on: ubuntu-latest + env: + branch: ${{ github.event.pull_request.head.ref }} + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: 3.11 + + - name: Set up venv + run: | + python -m pip install --upgrade pip + pip install virtualenv + mkdir venv + virtualenv venv/venv-2.16 + + - name: Install dependencies + run: | + source venv/venv-2.16/bin/activate + python -m pip install --upgrade pip + pip install ansible + + - name: Run ac-sanity + run: | + source venv/venv-2.16/bin/activate + ./ac --ac-build + ./ac --ac-sanity From 3d248c42e09bfb45d0c50938236b50378ed07256 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Tue, 2 Apr 2024 17:38:18 -0600 Subject: [PATCH 336/495] [Bugfix][1201]Zos_mvs_raw_ignores_tmp_hlq (#1320) * Add first iteration * Fix mvs_raw * Add another format * Add define * Add parms to avoid fails * Quick fix to not avoid tmphlq * Fix sanity issues * Fix white spaces * Return call of hlq * Add fragment * Fix capital letters * Change fragment * Fix case sensitive data set * Fix not exist dataset * Return dataset * Fix upper case for latest dataset and change of datasize from dtouch * Fix upper case and lower case * Change typo * Fix documentation * Fix not match * Unit testing to uppercase * Fis uppercases in mvs raw * Add uppercase * New problem ID * Remove unnecesary function and add KSDS solution --- .../1320-Zos_mvs_raw_ignores_tmp_hlq.yml | 5 + plugins/module_utils/zos_mvs_raw.py | 6 +- plugins/modules/zos_mvs_raw.py | 260 ++++++++---------- .../modules/test_zos_mvs_raw_func.py | 88 +++--- tests/unit/test_zos_mvs_raw_unit.py | 80 +++--- 5 files changed, 210 insertions(+), 229 deletions(-) create mode 100644 changelogs/fragments/1320-Zos_mvs_raw_ignores_tmp_hlq.yml diff --git a/changelogs/fragments/1320-Zos_mvs_raw_ignores_tmp_hlq.yml b/changelogs/fragments/1320-Zos_mvs_raw_ignores_tmp_hlq.yml new file mode 100644 index 000000000..058faf66e --- /dev/null +++ b/changelogs/fragments/1320-Zos_mvs_raw_ignores_tmp_hlq.yml @@ -0,0 +1,5 @@ +bugfixes: + - zos_mvs_raw - The module ignored the value of `tmp_hlq` option when creating temporary data sets. + Fix now honors the value if provided and uses it as High Level Qualifier for temporary data sets created + during the module execution. + (https://github.com/ansible-collections/ibm_zos_core/pull/1320). \ No newline at end of file diff --git a/plugins/module_utils/zos_mvs_raw.py b/plugins/module_utils/zos_mvs_raw.py index 7c2badf84..466775939 100644 --- a/plugins/module_utils/zos_mvs_raw.py +++ b/plugins/module_utils/zos_mvs_raw.py @@ -24,7 +24,7 @@ class MVSCmd(object): """ @staticmethod - def execute(pgm, dds, parm="", debug=False, verbose=False): + def execute(pgm, dds, parm="", debug=False, verbose=False, tmp_hlq=None): """Execute an unauthorized MVS command. Args: @@ -36,9 +36,10 @@ def execute(pgm, dds, parm="", debug=False, verbose=False): MVSCmdResponse: The response of the command. """ module = AnsibleModuleHelper(argument_spec={}) - command = "mvscmd {0} {1} {2} ".format( + command = "mvscmd {0} {1} {2} {3}".format( "-d" if debug else "", "-v" if verbose else "", + "--tmphlq={0}".format(tmp_hlq.upper()) if tmp_hlq else "", MVSCmd._build_command(pgm, dds, parm), ) rc, out, err = module.run_command(command) @@ -64,7 +65,6 @@ def execute_authorized(pgm, dds, parm="", debug=False, verbose=False, tmp_hlq=No "--tmphlq={0}".format(tmp_hlq.upper()) if tmp_hlq else "", MVSCmd._build_command(pgm, dds, parm), ) - rc, out, err = module.run_command(command) return MVSCmdResponse(rc, out, err) diff --git a/plugins/modules/zos_mvs_raw.py b/plugins/modules/zos_mvs_raw.py index 502d2ead7..a440c31c6 100644 --- a/plugins/modules/zos_mvs_raw.py +++ b/plugins/modules/zos_mvs_raw.py @@ -96,16 +96,16 @@ - Maps to DSNTYPE on z/OS. type: str choices: - - library - - pds - - pdse - - large - - basic - - seq - - rrds - - esds - - lds - - ksds + - LIBRARY + - PDS + - PDSE + - LARGE + - BASIC + - SEQ + - RRDS + - ESDS + - LDS + - KSDS disposition: description: - I(disposition) indicates the status of a data set. @@ -174,12 +174,12 @@ using I(space_primary) and I(space_secondary). type: str choices: - - trk - - cyl - - b - - k - - m - - g + - TRK + - CYL + - B + - K + - M + - G space_primary: description: - The primary amount of space to allocate for a new data set. @@ -325,11 +325,11 @@ - The format and characteristics of the records for new data set. type: str choices: - - u - - vb - - vba - - fb - - fba + - U + - VB + - VBA + - FB + - FBA return_content: description: - Determines how content should be returned to the user. @@ -505,11 +505,11 @@ a UNIX file would normally be treated as a stream of bytes. type: str choices: - - u - - vb - - vba - - fb - - fba + - U + - VB + - VBA + - FB + - FBA return_content: description: - Determines how content should be returned to the user. @@ -717,16 +717,16 @@ - Maps to DSNTYPE on z/OS. type: str choices: - - library - - pds - - pdse - - large - - basic - - seq - - rrds - - esds - - lds - - ksds + - LIBRARY + - PDS + - PDSE + - LARGE + - BASIC + - SEQ + - RRDS + - ESDS + - LDS + - KSDS disposition: description: - I(disposition) indicates the status of a data set. @@ -795,12 +795,12 @@ using I(space_primary) and I(space_secondary). type: str choices: - - trk - - cyl - - b - - k - - m - - g + - TRK + - CYL + - B + - K + - M + - G space_primary: description: - The primary amount of space to allocate for a new data set. @@ -946,11 +946,11 @@ - The format and characteristics of the records for new data set. type: str choices: - - u - - vb - - vba - - fb - - fba + - U + - VB + - VBA + - FB + - FBA return_content: description: - Determines how content should be returned to the user. @@ -1124,11 +1124,11 @@ a UNIX file would normally be treated as a stream of bytes. type: str choices: - - u - - vb - - vba - - fb - - fba + - U + - VB + - VBA + - FB + - FBA return_content: description: - Determines how content should be returned to the user. @@ -1300,13 +1300,13 @@ data_set_name: mypgm.output.ds disposition: new reuse: yes - type: seq + type: SEQ space_primary: 5 space_secondary: 1 - space_type: m + space_type: M volumes: - "000000" - record_format: fb + record_format: FB return_content: type: text - dd_input: @@ -1324,13 +1324,13 @@ data_set_name: mypgm.output.ds disposition: new reuse: yes - type: seq + type: SEQ space_primary: 5 space_secondary: 1 - space_type: m + space_type: M volumes: - "000000" - record_format: fb + record_format: FB return_content: type: text - dd_input: @@ -1369,13 +1369,13 @@ data_set_name: mypgm.output.ds disposition: new reuse: yes - type: seq + type: SEQ space_primary: 5 space_secondary: 1 - space_type: m + space_type: M volumes: - "000000" - record_format: fb + record_format: FB return_content: type: text - dd_input: @@ -1398,15 +1398,15 @@ disposition: new replace: yes backup: yes - type: seq + type: SEQ space_primary: 5 space_secondary: 1 - space_type: m + space_type: M volumes: - "000000" - "111111" - "SCR002" - record_format: fb + record_format: FB return_content: type: text - dd_input: @@ -1628,10 +1628,6 @@ backups = [] -# Use of global tmphlq to keep coherent classes definitions -g_tmphlq = "" - - def run_module(): """Executes all module-related functions. @@ -1651,7 +1647,7 @@ def run_module(): type="str", choices=["delete", "keep", "catalog", "uncatalog", "catlg", "uncatlg"], ), - space_type=dict(type="str", choices=["trk", "cyl", "b", "k", "m", "g"]), + space_type=dict(type="str", choices=["TRK", "CYL", "B", "K", "M", "G"]), space_primary=dict(type="int"), space_secondary=dict(type="int"), volumes=dict(type="raw"), @@ -1664,16 +1660,16 @@ def run_module(): type=dict( type="str", choices=[ - "library", - "pds", - "pdse", - "seq", - "basic", - "large", - "ksds", - "rrds", - "lds", - "esds", + "LIBRARY", + "PDS", + "PDSE", + "SEQ", + "BASIC", + "LARGE", + "KSDS", + "RRDS", + "LDS", + "ESDS", ], ), encryption_key_1=dict( @@ -1695,7 +1691,7 @@ def run_module(): key_length=dict(type="int", no_log=False), key_offset=dict(type="int", no_log=False), record_length=dict(type="int"), - record_format=dict(type="str", choices=["u", "vb", "vba", "fb", "fba"]), + record_format=dict(type="str", choices=["U", "VB", "VBA", "FB", "FBA"]), return_content=dict( type="dict", options=dict( @@ -1770,7 +1766,7 @@ def run_module(): ), block_size=dict(type="int"), record_length=dict(type="int"), - record_format=dict(type="str", choices=["u", "vb", "vba", "fb", "fba"]), + record_format=dict(type="str", choices=["U", "VB", "VBA", "FB", "FBA"]), return_content=dict( type="dict", options=dict( @@ -1839,8 +1835,7 @@ def run_module(): if not module.check_mode: try: parms = parse_and_validate_args(module.params) - global g_tmphlq - g_tmphlq = parms.get("tmp_hlq") + tmphlq = parms.get("tmp_hlq") dd_statements = build_dd_statements(parms) program = parms.get("program_name") program_parm = parms.get("parm") @@ -1852,6 +1847,7 @@ def run_module(): dd_statements=dd_statements, authorized=authorized, verbose=verbose, + tmp_hlq=tmphlq, ) if program_response.rc != 0 and program_response.stderr: raise ZOSRawError( @@ -1894,7 +1890,7 @@ def parse_and_validate_args(params): type="str", choices=["delete", "keep", "catalog", "uncatalog", "catlg", "uncatlg"], ), - space_type=dict(type="str", choices=["trk", "cyl", "b", "k", "m", "g"]), + space_type=dict(type="str", choices=["TRK", "CYL", "B", "K", "M", "G"]), space_primary=dict(type="int"), space_secondary=dict(type="int"), volumes=dict(type=volumes), @@ -1907,16 +1903,16 @@ def parse_and_validate_args(params): type=dict( type="str", choices=[ - "library", - "pds", - "pdse", - "seq", - "basic", - "large", - "ksds", - "rrds", - "lds", - "esds", + "LIBRARY", + "PDS", + "PDSE", + "SEQ", + "BASIC", + "LARGE", + "KSDS", + "RRDS", + "LDS", + "ESDS", ], ), encryption_key_1=dict( @@ -1940,7 +1936,7 @@ def parse_and_validate_args(params): type=key_offset, default=key_offset_default, dependencies=["type"] ), record_length=dict(type="int"), - record_format=dict(type="str", choices=["u", "vb", "vba", "fb", "fba"]), + record_format=dict(type="str", choices=["U", "VB", "VBA", "FB", "FBA"]), return_content=dict( type="dict", options=dict( @@ -1996,7 +1992,7 @@ def parse_and_validate_args(params): ), block_size=dict(type="int"), record_length=dict(type="int"), - record_format=dict(type="str", choices=["u", "vb", "vba", "fb", "fba"]), + record_format=dict(type="str", choices=["U", "VB", "VBA", "FB", "FBA"]), return_content=dict( type="dict", options=dict( @@ -2088,8 +2084,8 @@ def key_length(contents, dependencies): """ if contents is None: return contents - if contents is not None and dependencies.get("type") != "ksds": - raise ValueError('key_length is only valid when "type=ksds".') + if contents is not None and dependencies.get("type") != "KSDS": + raise ValueError('key_length is only valid when "type=KSDS".') if not re.fullmatch(r"[0-9]+", str(contents)): raise ValueError( 'Invalid argument "{0}" for type "key_length".'.format(str(contents)) @@ -2109,8 +2105,8 @@ def key_offset(contents, dependencies): """ if contents is None: return contents - if contents is not None and dependencies.get("type") != "ksds": - raise ValueError('key_offset is only valid when "type=ksds".') + if contents is not None and dependencies.get("type") != "KSDS": + raise ValueError('key_offset is only valid when "type=KSDS".') if not re.fullmatch(r"[0-9]+", str(contents)): raise ValueError( @@ -2131,9 +2127,9 @@ def key_length_default(contents, dependencies): """ KEY_LENGTH = 5 length = None - if contents is None and dependencies.get("type") == "ksds": + if contents is None and dependencies.get("type") == "KSDS": length = KEY_LENGTH - elif dependencies.get("type") == "ksds": + elif dependencies.get("type") == "KSDS": length = contents return length @@ -2149,9 +2145,9 @@ def key_offset_default(contents, dependencies): """ KEY_OFFSET = 0 offset = None - if contents is None and dependencies.get("type") == "ksds": + if contents is None and dependencies.get("type") == "KSDS": offset = KEY_OFFSET - elif dependencies.get("type") == "ksds": + elif dependencies.get("type") == "KSDS": offset = contents return offset @@ -2408,7 +2404,7 @@ def build_dd_statements(parms): dd_statements = [] for dd in parms.get("dds"): dd_name = get_dd_name(dd) - dd = set_extra_attributes_in_dd(dd) + dd = set_extra_attributes_in_dd(dd, parms) data_definition = build_data_definition(dd) if data_definition is None: raise ValueError("No valid data definition found.") @@ -2444,26 +2440,27 @@ def get_dd_name(dd): return dd_name -def set_extra_attributes_in_dd(dd): +def set_extra_attributes_in_dd(dd, parms): """ - Set any extra attributes in dds like in global g_tmphlq. + Set any extra attributes in dds like in global tmp_hlq. Args: dd (dict): A single DD parm as specified in module parms. Returns: dd (dict): A single DD parm as specified in module parms. """ + tmphlq = parms.get("tmp_hlq") if dd.get("dd_data_set"): - dd.get("dd_data_set")["tmphlq"] = g_tmphlq + dd.get("dd_data_set")["tmphlq"] = tmphlq elif dd.get("dd_input"): - dd.get("dd_input")["tmphlq"] = g_tmphlq + dd.get("dd_input")["tmphlq"] = tmphlq elif dd.get("dd_output"): - dd.get("dd_output")["tmphlq"] = g_tmphlq + dd.get("dd_output")["tmphlq"] = tmphlq elif dd.get("dd_vio"): - dd.get("dd_vio")["tmphlq"] = g_tmphlq + dd.get("dd_vio")["tmphlq"] = tmphlq elif dd.get("dd_concat"): for single_dd in dd.get("dd_concat").get("dds", []): - set_extra_attributes_in_dd(single_dd) + set_extra_attributes_in_dd(single_dd, parms) return dd @@ -2572,6 +2569,7 @@ def __init__( """ self.backup = None self.return_content = ReturnContent(**(return_content or {})) + self.tmphlq = tmphlq primary_unit = space_type secondary_unit = space_type key_label1 = None @@ -2698,7 +2696,6 @@ def __init__( ) -# TODO: potentially extend the available parameters to end user class RawInputDefinition(InputDefinition): """Wrapper around InputDefinition to contain information about desired return contents. @@ -2707,7 +2704,7 @@ class RawInputDefinition(InputDefinition): InputDefinition (InputDefinition): Input DD data type to be used in a DDStatement. """ - def __init__(self, content="", return_content=None, **kwargs): + def __init__(self, content="", return_content=None, tmphlq="", **kwargs): """Initialize RawInputDefinition Args: @@ -2715,7 +2712,7 @@ def __init__(self, content="", return_content=None, **kwargs): return_content (dict, optional): Determines how content should be returned to the user. Defaults to {}. """ self.return_content = ReturnContent(**(return_content or {})) - super().__init__(content=content) + super().__init__(content=content, tmphlq=tmphlq) class RawOutputDefinition(OutputDefinition): @@ -2726,7 +2723,7 @@ class RawOutputDefinition(OutputDefinition): OutputDefinition (OutputDefinition): Output DD data type to be used in a DDStatement. """ - def __init__(self, return_content=None, **kwargs): + def __init__(self, return_content=None, tmphlq="", **kwargs): """Initialize RawOutputDefinition Args: @@ -2734,7 +2731,7 @@ def __init__(self, return_content=None, **kwargs): return_content (dict, optional): Determines how content should be returned to the user. Defaults to {}. """ self.return_content = ReturnContent(**(return_content or {})) - super().__init__() + super().__init__(tmphlq=tmphlq) class ReturnContent(object): @@ -2761,28 +2758,6 @@ def __init__(self, type=None, src_encoding=None, response_encoding=None): self.response_encoding = response_encoding -def to_bytes(size, unit): - """Convert sizes of various units to bytes. - - Args: - size (int): The size to convert. - unit (str): The unit of size. - - Returns: - int: The size converted to bytes. - """ - num_bytes = 0 - if unit == "b": - num_bytes = size - elif unit == "k": - num_bytes = size * 1024 - elif unit == "m": - num_bytes = size * 1048576 - elif unit == "g": - num_bytes = size * 1073741824 - return num_bytes - - def rename_parms(parms, name_map): """Rename parms based on a provided dictionary. @@ -2839,7 +2814,7 @@ def data_set_exists(name, volumes=None): def run_zos_program( - program, parm="", dd_statements=None, authorized=False, verbose=False + program, parm="", dd_statements=None, authorized=False, verbose=False, tmp_hlq=None ): """Run a program on z/OS. @@ -2848,6 +2823,7 @@ def run_zos_program( parm (str, optional): Additional argument string if required. Defaults to "". dd_statements (list[DDStatement], optional): DD statements to allocate for the program. Defaults to []. authorized (bool, optional): Determines if program will execute as an authorized user. Defaults to False. + tmp_hlq (str, optional): Arguments overwrite variable tmp_hlq Returns: MVSCmdResponse: Holds the response information for program execution. @@ -2857,11 +2833,11 @@ def run_zos_program( response = None if authorized: response = MVSCmd.execute_authorized( - pgm=program, parm=parm, dds=dd_statements, verbose=verbose + pgm=program, parm=parm, dds=dd_statements, verbose=verbose, tmp_hlq=tmp_hlq ) else: response = MVSCmd.execute( - pgm=program, parm=parm, dds=dd_statements, verbose=verbose + pgm=program, parm=parm, dds=dd_statements, verbose=verbose, tmp_hlq=tmp_hlq ) return response diff --git a/tests/functional/modules/test_zos_mvs_raw_func.py b/tests/functional/modules/test_zos_mvs_raw_func.py index fd20a6a92..ca5b6384d 100644 --- a/tests/functional/modules/test_zos_mvs_raw_func.py +++ b/tests/functional/modules/test_zos_mvs_raw_func.py @@ -62,7 +62,7 @@ def test_disposition_new(ansible_zos_module): dd_name=SYSPRINT_DD, data_set_name=default_data_set, disposition="new", - type="seq", + type="SEQ", return_content=dict(type="text"), ), ), @@ -86,7 +86,7 @@ def test_dispositions_for_existing_data_set(ansible_zos_module, disposition): hosts = ansible_zos_module default_data_set = get_tmp_ds_name() hosts.all.zos_data_set( - name=default_data_set, type="seq", state="present", replace=True + name=default_data_set, type="SEQ", state="present", replace=True ) results = hosts.all.zos_mvs_raw( program_name="idcams", @@ -118,7 +118,7 @@ def test_list_cat_for_existing_data_set_with_tmp_hlq_option(ansible_zos_module, default_volume = volumes.get_available_vol() default_data_set = get_tmp_ds_name()[:25] hosts.all.zos_data_set( - name=default_data_set, type="seq", state="present", replace=True + name=default_data_set, type="SEQ", state="present", replace=True ) results = hosts.all.zos_mvs_raw( program_name="idcams", @@ -133,12 +133,12 @@ def test_list_cat_for_existing_data_set_with_tmp_hlq_option(ansible_zos_module, return_content=dict(type="text"), replace=True, backup=True, - type="seq", + type="SEQ", space_primary=5, space_secondary=1, - space_type="m", + space_type="M", volumes=default_volume, - record_format="fb" + record_format="FB" ), ), dict(dd_input=dict(dd_name=SYSIN_DD, content=IDCAMS_STDIN)), @@ -172,7 +172,7 @@ def test_new_disposition_for_data_set_members(ansible_zos_module): dd_name=SYSPRINT_DD, data_set_name=DEFAULT_DATA_SET_WITH_MEMBER, disposition="new", - type="pds", + type="PDS", directory_blocks=15, return_content=dict(type="text"), ), @@ -197,7 +197,7 @@ def test_dispositions_for_existing_data_set_members(ansible_zos_module, disposit default_data_set = get_tmp_ds_name() DEFAULT_DATA_SET_WITH_MEMBER = default_data_set + '(MEM)' hosts.all.zos_data_set( - name=default_data_set, type="pds", state="present", replace=True + name=default_data_set, type="PDS", state="present", replace=True ) results = hosts.all.zos_mvs_raw( program_name="idcams", @@ -234,7 +234,7 @@ def test_normal_dispositions_data_set(ansible_zos_module, normal_disposition, ch default_data_set = get_tmp_ds_name() results = hosts.all.zos_data_set( name=default_data_set, - type="seq", + type="SEQ", state="present", replace=True, volumes=[volume_1], @@ -267,11 +267,11 @@ def test_normal_dispositions_data_set(ansible_zos_module, normal_disposition, ch @pytest.mark.parametrize( "space_type,primary,secondary,expected", [ - ("trk", 3, 1, 169992), - ("cyl", 3, 1, 2549880), - ("b", 3, 1, 56664), - ("k", 3, 1, 56664), - ("m", 3, 1, 2889864), + ("TRK", 3, 1, 169992), + ("CYL", 3, 1, 2549880), + ("B", 3, 1, 56664), + ("K", 3, 1, 56664), + ("M", 3, 1, 3003192), ], ) def test_space_types(ansible_zos_module, space_type, primary, secondary, expected): @@ -288,7 +288,7 @@ def test_space_types(ansible_zos_module, space_type, primary, secondary, expecte dd_name=SYSPRINT_DD, data_set_name=default_data_set, disposition="new", - type="seq", + type="SEQ", space_primary=primary, space_secondary=secondary, space_type=space_type, @@ -315,7 +315,7 @@ def test_space_types(ansible_zos_module, space_type, primary, secondary, expecte @pytest.mark.parametrize( "data_set_type", - ["pds", "pdse", "large", "basic", "seq"], + ["PDS", "PDSE", "LARGE", "BASIC", "SEQ"], ) def test_data_set_types_non_vsam(ansible_zos_module, data_set_type, volumes_on_systems): try: @@ -351,7 +351,7 @@ def test_data_set_types_non_vsam(ansible_zos_module, data_set_type, volumes_on_s @pytest.mark.parametrize( "data_set_type", - ["ksds", "rrds", "lds", "esds"], + ["KSDS", "RRDS", "LDS", "ESDS"], ) def test_data_set_types_vsam(ansible_zos_module, data_set_type, volumes_on_systems): try: @@ -374,7 +374,7 @@ def test_data_set_types_vsam(ansible_zos_module, data_set_type, volumes_on_syste volumes=[volume_1], ), ) - if data_set_type != "ksds" + if data_set_type != "KSDS" else dict( dd_data_set=dict( dd_name=SYSPRINT_DD, @@ -393,14 +393,14 @@ def test_data_set_types_vsam(ansible_zos_module, data_set_type, volumes_on_syste # * because that means data set exists and is VSAM so we can't read it results = hosts.all.command(cmd="head \"//'{0}'\"".format(default_data_set)) for result in results.contacted.values(): - assert "EDC5041I" in result.get("stderr", "") + assert "EDC5041I" or "EDC5049I" in result.get("stderr", "") finally: hosts.all.zos_data_set(name=default_data_set, state="absent") @pytest.mark.parametrize( "record_format", - ["u", "vb", "vba", "fb", "fba"], + ["U", "VB", "VBA", "FB", "FBA"], ) def test_record_formats(ansible_zos_module, record_format, volumes_on_systems): try: @@ -453,7 +453,7 @@ def test_return_content_type(ansible_zos_module, return_content_type, expected, default_data_set = get_tmp_ds_name() results = hosts.all.zos_data_set( name=default_data_set, - type="seq", + type="SEQ", state="present", replace=True, volumes=[volume_1], @@ -505,7 +505,7 @@ def test_return_text_content_encodings( default_data_set = get_tmp_ds_name() results = hosts.all.zos_data_set( name=default_data_set, - type="seq", + type="SEQ", state="present", replace=True, volumes=[volume_1], @@ -544,7 +544,7 @@ def test_reuse_existing_data_set(ansible_zos_module): hosts = ansible_zos_module default_data_set = get_tmp_ds_name() hosts.all.zos_data_set( - name=default_data_set, type="seq", state="present", replace=True + name=default_data_set, type="SEQ", state="present", replace=True ) results = hosts.all.zos_mvs_raw( program_name="IDCAMS", @@ -555,7 +555,7 @@ def test_reuse_existing_data_set(ansible_zos_module): dd_name=SYSPRINT_DD, data_set_name=default_data_set, disposition="new", - type="seq", + type="SEQ", reuse=True, return_content=dict(type="text"), ), @@ -577,7 +577,7 @@ def test_replace_existing_data_set(ansible_zos_module): hosts = ansible_zos_module default_data_set = get_tmp_ds_name() hosts.all.zos_data_set( - name=default_data_set, type="seq", state="present", replace=True + name=default_data_set, type="SEQ", state="present", replace=True ) results = hosts.all.zos_mvs_raw( program_name="IDCAMS", @@ -588,7 +588,7 @@ def test_replace_existing_data_set(ansible_zos_module): dd_name=SYSPRINT_DD, data_set_name=default_data_set, disposition="new", - type="seq", + type="SEQ", replace=True, return_content=dict(type="text"), ), @@ -619,7 +619,7 @@ def test_replace_existing_data_set_make_backup(ansible_zos_module): dd_name=SYSPRINT_DD, data_set_name=default_data_set, disposition="new", - type="seq", + type="SEQ", replace=True, return_content=dict(type="text"), ), @@ -636,7 +636,7 @@ def test_replace_existing_data_set_make_backup(ansible_zos_module): dd_name=SYSPRINT_DD, data_set_name=default_data_set, disposition="new", - type="seq", + type="SEQ", replace=True, backup=True, return_content=dict(type="text"), @@ -687,7 +687,7 @@ def test_input_empty(ansible_zos_module): dd_name=SYSPRINT_DD, data_set_name=default_data_set, disposition="new", - type="seq", + type="SEQ", return_content=dict(type="text"), ), ), @@ -719,7 +719,7 @@ def test_input_large(ansible_zos_module): dd_name=SYSPRINT_DD, data_set_name=default_data_set, disposition="new", - type="seq", + type="SEQ", return_content=dict(type="text"), ), ), @@ -752,7 +752,7 @@ def test_input_provided_as_list(ansible_zos_module): dd_name=SYSPRINT_DD, data_set_name=default_data_set, disposition="new", - type="seq", + type="SEQ", return_content=dict(type="text"), ), ), @@ -792,7 +792,7 @@ def test_input_return_content_types(ansible_zos_module, return_content_type, exp dd_name=SYSPRINT_DD, data_set_name=default_data_set, disposition="new", - type="seq", + type="SEQ", ), ), dict( @@ -844,7 +844,7 @@ def test_input_return_text_content_encodings( dd_name=SYSPRINT_DD, data_set_name=default_data_set, disposition="new", - type="seq", + type="SEQ", ), ), dict( @@ -1164,7 +1164,7 @@ def test_file_record_length(ansible_zos_module, record_length): @pytest.mark.parametrize( "record_format", - ["u", "vb", "vba", "fb", "fba"], + ["U", "VB", "VBA", "FB", "FBA"], ) def test_file_record_format(ansible_zos_module, record_format): try: @@ -1353,7 +1353,7 @@ def test_concatenation_with_data_set_dd_and_response(ansible_zos_module): dd_data_set=dict( data_set_name=default_data_set, disposition="new", - type="seq", + type="SEQ", return_content=dict(type="text"), ) ), @@ -1361,7 +1361,7 @@ def test_concatenation_with_data_set_dd_and_response(ansible_zos_module): dd_data_set=dict( data_set_name=DEFAULT_DATA_SET_2, disposition="new", - type="seq", + type="SEQ", ) ), ], @@ -1391,8 +1391,8 @@ def test_concatenation_with_data_set_dd_with_replace_and_backup(ansible_zos_modu hosts = ansible_zos_module default_data_set = get_tmp_ds_name() DEFAULT_DATA_SET_2 = get_tmp_ds_name() - hosts.all.zos_data_set(name=default_data_set, state="present", type="seq") - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_2, state="present", type="seq") + hosts.all.zos_data_set(name=default_data_set, state="present", type="SEQ") + hosts.all.zos_data_set(name=DEFAULT_DATA_SET_2, state="present", type="SEQ") results = hosts.all.zos_mvs_raw( program_name="idcams", auth=True, @@ -1405,7 +1405,7 @@ def test_concatenation_with_data_set_dd_with_replace_and_backup(ansible_zos_modu dd_data_set=dict( data_set_name=default_data_set, disposition="new", - type="seq", + type="SEQ", replace=True, backup=True, return_content=dict(type="text"), @@ -1415,7 +1415,7 @@ def test_concatenation_with_data_set_dd_with_replace_and_backup(ansible_zos_modu dd_data_set=dict( data_set_name=DEFAULT_DATA_SET_2, disposition="new", - type="seq", + type="SEQ", replace=True, backup=True, ) @@ -1462,7 +1462,7 @@ def test_concatenation_with_data_set_member(ansible_zos_module): default_data_set = get_tmp_ds_name() DEFAULT_DATA_SET_2 = get_tmp_ds_name() DEFAULT_DATA_SET_WITH_MEMBER = default_data_set + '(MEM)' - hosts.all.zos_data_set(name=default_data_set, state="present", type="pds") + hosts.all.zos_data_set(name=default_data_set, state="present", type="PDS") hosts.all.zos_data_set(name=DEFAULT_DATA_SET_2, state="absent") results = hosts.all.zos_mvs_raw( program_name="idcams", @@ -1482,7 +1482,7 @@ def test_concatenation_with_data_set_member(ansible_zos_module): dd_data_set=dict( data_set_name=DEFAULT_DATA_SET_2, disposition="new", - type="seq", + type="SEQ", ) ), ], @@ -1538,7 +1538,7 @@ def test_concatenation_with_unix_dd_and_response_datasets(ansible_zos_module): dd_data_set=dict( data_set_name=DEFAULT_DATA_SET_2, disposition="new", - type="seq", + type="SEQ", ) ), ], @@ -1766,7 +1766,7 @@ def test_concatenation_all_dd_types(ansible_zos_module, dds, input_pos, input_co try: hosts = ansible_zos_module default_data_set = "ANSIBLE.USER.PRIVATE.TEST" - hosts.all.zos_data_set(name=default_data_set, state="present", type="seq") + hosts.all.zos_data_set(name=default_data_set, state="present", type="SEQ") hosts.all.file(path=DEFAULT_PATH, state="directory") hosts.all.file(path=DEFAULT_PATH_WITH_FILE, state="absent") results = hosts.all.zos_mvs_raw(program_name="idcams", auth=True, dds=dds) diff --git a/tests/unit/test_zos_mvs_raw_unit.py b/tests/unit/test_zos_mvs_raw_unit.py index e50734756..f528412da 100644 --- a/tests/unit/test_zos_mvs_raw_unit.py +++ b/tests/unit/test_zos_mvs_raw_unit.py @@ -59,7 +59,7 @@ def run_command(self, *args, **kwargs): "new", "keep", "keep", - "cyl", + "CYL", 5, 1, "smsclas1", @@ -67,17 +67,17 @@ def run_command(self, *args, **kwargs): "smsclas1", 80, "SOMEKEYLAB100", - "library", + "LIBRARY", {"label": "keyforme", "encoding": "h"}, {"label": "keyforme2", "encoding": "h"}, - "u", + "U", ), ( "data.set.name(mem1)", "shr", "delete", "keep", - "trk", + "TRK", "5", 1, "smsclas1", @@ -85,17 +85,17 @@ def run_command(self, *args, **kwargs): "smsclas3", 120, "somekeylab1", - "basic", + "BASIC", {"label": "keyforme", "encoding": "l"}, {"label": "keyforme2", "encoding": "h"}, - "fb", + "FB", ), ( "DATA.NAME.HERE.NOW", "old", "catalog", "uncatalog", - "b", + "B", 55, "100", "SMSCLASS", @@ -103,17 +103,17 @@ def run_command(self, *args, **kwargs): "smscD@s3", 120, "keyfor342fdsme", - "large", + "LARGE", {"label": "keyforME", "encoding": "l"}, {"label": "KEY4me", "encoding": "h"}, - "fba", + "FBA", ), ( "DAT@.now", "mod", "delete", "uncatalog", - "g", + "G", 1, "9", "SMSCLASS", @@ -121,17 +121,17 @@ def run_command(self, *args, **kwargs): "", 120, "keyfor342fdsme", - "pdse", + "PDSE", {"label": "keyforME", "encoding": "l"}, {"label": "KEY4me", "encoding": "h"}, - "vb", + "VB", ), ( "DAT$.now", "new", "delete", "keep", - "m", + "M", 1, 9, "SMSCLASS", @@ -139,10 +139,10 @@ def run_command(self, *args, **kwargs): "", 0, "", - "lds", + "LDS", {"label": "keyforME", "encoding": "l"}, {"label": "keyyyyy343asdfasfsdfa", "encoding": "l"}, - "vba", + "VBA", ), ], ) @@ -237,7 +237,7 @@ def test_argument_parsing_data_set( "delete", 0, 100, - "fb", + "FB", "record", "r", ["ocreat", "oappend", "onoctty"], @@ -248,14 +248,14 @@ def test_argument_parsing_data_set( "delete", 200, "100", - "fba", + "FBA", "record", "w", ["oappend", "osync"], ), - ("/u/OEUSR01", "keep", "delete", 0, 100, "vb", "binary", "rw", ["ononblock"]), - ("/u/testmeee", "keep", "delete", 0, 100, "vba", "record", "read_only", []), - ("/u/hellow/d/or4ld", "keep", "keep", 0, 100, "u", "text", "write_only", []), + ("/u/OEUSR01", "keep", "delete", 0, 100, "VB", "binary", "rw", ["ononblock"]), + ("/u/testmeee", "keep", "delete", 0, 100, "VBA", "record", "read_only", []), + ("/u/hellow/d/or4ld", "keep", "keep", 0, 100, "U", "text", "write_only", []), ], ) def test_argument_parsing_unix( @@ -338,7 +338,7 @@ def test_argument_parsing_unix( "old", "keep", "keep", - "cyl", + "CYL", 5, 1, "smsclas1", @@ -346,17 +346,17 @@ def test_argument_parsing_unix( "smsclas1", 80, "SOMEKEYLAB100", - "library", + "LIBRARY", {"label": "keyforme", "encoding": "h"}, {"label": "keyforme2", "encoding": "h"}, - "u", + "U", ), ( "data.set.name(mem1waytoolong)", "excl", "delete", "keep", - "trk", + "TRK", "5", 1, "smsclas1", @@ -364,10 +364,10 @@ def test_argument_parsing_unix( "smsclas3", 120, "somekeylab1", - "basic", + "BASIC", {"label": "keyforme", "encoding": "l"}, {"label": "keyforme2", "encoding": "h"}, - "fb", + "FB", ), ( "DATA.NAME.HERE.NOW", @@ -382,17 +382,17 @@ def test_argument_parsing_unix( "smscD@s3", 120, "keyfor342fdsme", - "large", + "LARGE", {"label": "keyforME", "encoding": "l"}, {"label": "KEY4me", "encoding": "h"}, - "fba", + "FBA", ), ( "DAT@.now", "mod", "delete", "uncatalog", - "g", + "G", 1, "9", "SMSCLASSsss", @@ -400,17 +400,17 @@ def test_argument_parsing_unix( "", 120, "keyfor342fdsme", - "pdse", + "PDSE", {"label": "keyforME", "encoding": "l"}, {"label": "KEY4me", "encoding": "h"}, - "vb", + "VB", ), ( "DAT$.now", "new", "delete", "meep", - "m", + "M", 1, 9, "SMSCLASS", @@ -418,10 +418,10 @@ def test_argument_parsing_unix( "", 0, "", - "ksdss", + "KSDSS", {"label": "keyforME", "encoding": "l"}, {"label": "keyyyyy343asdfasfsdfa", "encoding": "l"}, - "vba", + "VBA", ), ], ) @@ -525,7 +525,7 @@ def test_argument_parsing_data_set_failure_path( "delete", 200, "100", - "fba", + "FBA", "record", "w", ["append", "osync"], @@ -537,12 +537,12 @@ def test_argument_parsing_data_set_failure_path( "delete", 0, 100, - "vba", + "VBA", "record", "read_only", ["hello"], ), - ("/u/hellow/d/or4ld", "meep", "keep", 0, 100, "u", "text", None, []), + ("/u/hellow/d/or4ld", "meep", "keep", 0, 100, "U", "text", None, []), ], ) def test_argument_parsing_unix_failure_path( @@ -620,7 +620,7 @@ def test_ksds_defaults( "dd_name": "MYDD1", "data_set_name": "my.ds", "disposition": "new", - "type": "ksds", + "type": "KSDS", } }, ], @@ -663,7 +663,7 @@ def test_ksds_exception_key_length( "dd_name": "MYDD1", "data_set_name": "my.ds", "disposition": "new", - "type": "esds", + "type": "ESDS", "key_length": 5, } }, @@ -693,7 +693,7 @@ def test_ksds_exception_key_offset( "dd_name": "MYDD1", "data_set_name": "my.ds", "disposition": "new", - "type": "esds", + "type": "ESDS", "key_offset": 5, } }, From 2697e32b474ec33832e2977c3e73246904c3e5ad Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Thu, 4 Apr 2024 12:23:10 -0400 Subject: [PATCH 337/495] Removed a test condition that obscured duration (#1364) * removed a function in a test that would obscure if null durations are coming back it appears this issue is resolved. * added changelog --------- Co-authored-by: Demetri <dimatos@gmail.com> --- changelogs/fragments/1032-clean-job_submit-test.yml | 3 +++ tests/functional/modules/test_zos_job_submit_func.py | 6 ++---- 2 files changed, 5 insertions(+), 4 deletions(-) create mode 100644 changelogs/fragments/1032-clean-job_submit-test.yml diff --git a/changelogs/fragments/1032-clean-job_submit-test.yml b/changelogs/fragments/1032-clean-job_submit-test.yml new file mode 100644 index 000000000..bb4248aec --- /dev/null +++ b/changelogs/fragments/1032-clean-job_submit-test.yml @@ -0,0 +1,3 @@ +trivial: + - test_zos_job_submit_func.py - Removed test setting that was covering a missing duration value. + (https://github.com/ansible-collections/ibm_zos_core/pull/1364). diff --git a/tests/functional/modules/test_zos_job_submit_func.py b/tests/functional/modules/test_zos_job_submit_func.py index bae4dbb36..c148b6223 100644 --- a/tests/functional/modules/test_zos_job_submit_func.py +++ b/tests/functional/modules/test_zos_job_submit_func.py @@ -695,10 +695,8 @@ def test_job_submit_max_rc(ansible_zos_module, args): #Expecting: - "The job return code 8 was non-zero in the job output, this job has failed" # - Consider using module zos_job_query to poll for a long running job or # increase option \\'wait_times_s` to a value greater than 10.", - if result.get('duration'): - duration = result.get('duration') - else: - duration = 0 + + duration = result.get('duration') if duration >= args["wait_time_s"]: re.search(r'long running job', repr(result.get("msg"))) From aeafa82cb02c19068f8f704b093a6b07dec15392 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Fri, 5 Apr 2024 11:20:17 -0600 Subject: [PATCH 338/495] Updated actions to only run when PR is not draft (#1412) * Updated actions to only run when PR is not draft * Add test * Modified draft condition * Update zos_apf.py * Modified workflows * test * test --- .github/workflows/ac-ansible-test-sanity.yml | 2 ++ .github/workflows/ac-bandit.yml | 6 +++- .github/workflows/ac-galaxy-importer.yml | 34 +++++++++++++++++++- .github/workflows/ac_changelog.yml | 2 ++ 4 files changed, 42 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ac-ansible-test-sanity.yml b/.github/workflows/ac-ansible-test-sanity.yml index 1354195a5..d0c4b58d2 100644 --- a/.github/workflows/ac-ansible-test-sanity.yml +++ b/.github/workflows/ac-ansible-test-sanity.yml @@ -2,6 +2,7 @@ name: AC Ansible sanity on: pull_request: + types: [opened, synchronize, reopened, ready_for_review] branches: - dev - staging* @@ -38,6 +39,7 @@ on: jobs: ansible-sanity: + if: github.event.pull_request.draft == false runs-on: ubuntu-latest env: branch: ${{ github.event.pull_request.head.ref }} diff --git a/.github/workflows/ac-bandit.yml b/.github/workflows/ac-bandit.yml index 288fb92b1..1b93e40a4 100644 --- a/.github/workflows/ac-bandit.yml +++ b/.github/workflows/ac-bandit.yml @@ -2,12 +2,16 @@ name: AC Bandit on: pull_request: + types: [opened, synchronize, reopened, ready_for_review] branches: - dev - staging* - + paths: + - 'plugins/**' + jobs: bandit: + if: github.event.pull_request.draft == false runs-on: ubuntu-latest steps: diff --git a/.github/workflows/ac-galaxy-importer.yml b/.github/workflows/ac-galaxy-importer.yml index 271f01c22..563d37ada 100644 --- a/.github/workflows/ac-galaxy-importer.yml +++ b/.github/workflows/ac-galaxy-importer.yml @@ -2,12 +2,44 @@ name: AC Galaxy Importer on: pull_request: + types: [opened, synchronize, reopened, ready_for_review] branches: - dev - staging* - + paths-ignore: + - '**.tar.gz' + - 'pycache/**' + - '.ansible-lint' + - 'cache/**' + - '.DS_Store' + - '.git/**' + - '.github/**' + - '.gitignore' + - '.python-version' + - '.pytest_cache/**' + - '.vscode/**' + - 'Jenkinsfile' + - 'ac' + - 'ansible.cfg' + - 'changelogs/**' + - 'collections/**' + - 'docs/**' + - 'scripts/**' + - 'test_config.yml' + - 'tests/*.ini' + - 'tests/*.py' + - 'tests/.pytest_cache' + - 'tests/pycache' + - 'tests/functional' + - 'tests/helpers' + - 'tests/requirements.txt' + - 'tests/unit' + - 'tests/sanity/ignore-*' + - 'venv*' + jobs: galaxy-importer: + if: github.event.pull_request.draft == false runs-on: ubuntu-latest steps: diff --git a/.github/workflows/ac_changelog.yml b/.github/workflows/ac_changelog.yml index 523e207b9..e3b3f3cc4 100644 --- a/.github/workflows/ac_changelog.yml +++ b/.github/workflows/ac_changelog.yml @@ -2,6 +2,7 @@ name: AC Changelog Lint on: pull_request: + types: [opened, synchronize, reopened, ready_for_review] paths: - 'changelogs/fragments/*' branches: @@ -10,6 +11,7 @@ on: jobs: lint: + if: github.event.pull_request.draft == false runs-on: ubuntu-latest steps: From d8b87a42117c99144bedd93e4f0b5f7964fc112c Mon Sep 17 00:00:00 2001 From: IsaacVRey <isaac.vega.rey@gmail.com> Date: Fri, 5 Apr 2024 11:20:49 -0600 Subject: [PATCH 339/495] [Documentation][encode] Add and standarize docstring to encode.py (#1322) * Add and estandarize docstring to encode.py * Create changelog fragment * Modified the google style to numpy * Update changelog fragment * Standarize numpy style * Update encode.py added newline to address pep8 error * Fixed some dcostrings * Modified docstrings --------- Co-authored-by: Rich Parker <richp405@gmail.com> Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> --- .../1322-update-docstring-encode.yml | 3 + plugins/module_utils/encode.py | 357 +++++++++++++----- 2 files changed, 269 insertions(+), 91 deletions(-) create mode 100644 changelogs/fragments/1322-update-docstring-encode.yml diff --git a/changelogs/fragments/1322-update-docstring-encode.yml b/changelogs/fragments/1322-update-docstring-encode.yml new file mode 100644 index 000000000..dd5eb5389 --- /dev/null +++ b/changelogs/fragments/1322-update-docstring-encode.yml @@ -0,0 +1,3 @@ +trivial: + - encode - Updated docstrings to numpy style for visual aid to developers. + (https://github.com/ansible-collections/ibm_zos_core/pull/1322). \ No newline at end of file diff --git a/plugins/module_utils/encode.py b/plugins/module_utils/encode.py index 195802583..f68a8ab77 100644 --- a/plugins/module_utils/encode.py +++ b/plugins/module_utils/encode.py @@ -56,10 +56,12 @@ class Defaults: @staticmethod def get_default_system_charset(): - """Get the default encoding of the current machine + """Get the default encoding of the current machine. - Returns: - str -- The encoding of the current machine + Returns + ------- + str + The encoding of the current machine. """ system_charset = locale.getdefaultlocale()[1] if system_charset is None: @@ -80,15 +82,24 @@ def get_default_system_charset(): class EncodeUtils(object): def __init__(self): """Call the coded character set conversion utility iconv - to convert a USS file from one coded character set to another - - Arguments: - module {AnsibleModule} -- The AnsibleModule object from currently running module + to convert a USS file from one coded character set to another. """ self.module = AnsibleModuleHelper(argument_spec={}) self.tmphlq = None def _validate_data_set_name(self, ds): + """Validate data set name using BetterArgParser. + + Parameters + ---------- + ds : str + The source data set name. + + Returns + ------- + str + Parsed data set name. + """ arg_defs = dict( ds=dict(arg_type="data_set"), ) @@ -97,6 +108,18 @@ def _validate_data_set_name(self, ds): return parsed_args.get("ds") def _validate_path(self, path): + """Validate path using BetterArgParser. + + Parameters + ---------- + path : str + The path. + + Returns + ------- + str + Parsed path. + """ arg_defs = dict( path=dict(arg_type="path"), ) @@ -105,6 +128,18 @@ def _validate_path(self, path): return parsed_args.get("path") def _validate_data_set_or_path(self, path): + """Validate data set or path using BetterArgParser. + + Parameters + ---------- + path : str + The path. + + Returns + ------- + str + Parsed path. + """ arg_defs = dict( path=dict(arg_type="data_set_or_path"), ) @@ -113,6 +148,18 @@ def _validate_data_set_or_path(self, path): return parsed_args.get("path") def _validate_encoding(self, encoding): + """Validate encoding using BetterArgParser. + + Parameters + --------- + encoding : str + The encoding. + + Returns + ------- + str + Parsed encoding. + """ arg_defs = dict( encoding=dict(arg_type="encoding"), ) @@ -122,16 +169,24 @@ def _validate_encoding(self, encoding): def listdsi_data_set(self, ds): """Invoke IDCAMS LISTCAT command to get the record length and space used - to estimate the space used by the VSAM data set - - Arguments: - ds: {str} -- The VSAM data set to be checked. - - Raises: - EncodeError: When any exception is raised during the conversion. - Returns: - int -- The maximum record length of the VSAM data set. - int -- The space used by the VSAM data set(KB). + to estimate the space used by the VSAM data set. + + Parameters + ---------- + ds : str + The VSAM data set to be checked. + + Returns + ------- + int + The maximum record length of the VSAM data set. + int + The space used by the VSAM data set(KB). + + Raises + ------ + EncodeError + When any exception is raised during the conversion. """ ds = self._validate_data_set_name(ds) reclen = 80 @@ -179,17 +234,24 @@ def listdsi_data_set(self, ds): return reclen, space_u def temp_data_set(self, reclen, space_u): - """Creates a temporary data set with the given record length and size - - Arguments: - size {str} -- The size of the data set - lrecl {int} -- The record length of the data set - - Returns: - str -- Name of the allocated data set - - Raises: - ZOAUException: When any exception is raised during the data set allocation. + """Creates a temporary data set with the given record length and size. + + Parameters + ---------- + lrecl : int + The record length of the data set. + space_u : str + The size of the data set. + + Returns + ------- + str + Name of the allocated data set. + + Raises + ------ + ZOAUException + When any exception is raised during the data set allocation. DatasetVerificationError: When the data set creation could not be verified. """ size = str(space_u * 2) + "K" @@ -208,12 +270,17 @@ def temp_data_set(self, reclen, space_u): return temporary_data_set.name def get_codeset(self): - """Get the list of supported encodings from the USS command 'iconv -l' + """Get the list of supported encodings from the USS command 'iconv -l'. + + Returns + ------- + Union[str] + The code set list supported in current USS platform. - Raises: - EncodeError: When any exception is raised during the conversion - Returns: - list -- The code set list supported in current USS platform + Raises + ------ + EncodeError + When any exception is raised during the conversion. """ code_set = None iconv_list_cmd = ["iconv", "-l"] @@ -226,17 +293,26 @@ def get_codeset(self): return code_set def string_convert_encoding(self, src, from_encoding, to_encoding): - """Convert the encoding of the data when the src is a normal string - - Arguments: - from_code_set: {str} -- The source code set of the string - to_code_set: {str} -- The destination code set for the string - src: {str} -- The input string content - - Raises: - EncodeError: When any exception is raised during the conversion - Returns: - str -- The string content after the encoding + """Convert the encoding of the data when the src is a normal string. + + Parameters + ---------- + src : str + The input string content. + from_encoding : str + The source code set of the string. + to_encoding : str + The destination code set for the string. + + Returns + ------- + str + The string content after the encoding. + + Raises + ------ + EncodeError + When any exception is raised during the conversion. """ from_encoding = self._validate_encoding(from_encoding) to_encoding = self._validate_encoding(to_encoding) @@ -249,19 +325,30 @@ def string_convert_encoding(self, src, from_encoding, to_encoding): return out def uss_convert_encoding(self, src, dest, from_code, to_code): - """Convert the encoding of the data in a USS file - - Arguments: - from_code: {str} -- The source code set of the input file - to_code: {str} -- The destination code set for the output file - src: {str} -- The input file name, it should be a uss file - dest: {str} -- The output file name, it should be a uss file - - Raises: - EncodeError: When any exception is raised during the conversion. - MoveFileError: When any exception is raised during moving files. - Returns: - boolean -- Indicate whether the conversion is successful or not. + """Convert the encoding of the data in a USS file. + + Parameters + ---------- + src : str + The input file name, it should be a uss file. + dest : str + The output file name, it should be a uss file. + from_code : str + The source code set of the input file. + to_code : str + The destination code set for the output file. + + Returns + ------- + bool + Indicate whether the conversion is successful or not. + + Raises + ------ + EncodeError + When any exception is raised during the conversion. + MoveFileError + When any exception is raised during moving files. """ src = self._validate_path(src) dest = self._validate_path(dest) @@ -306,18 +393,28 @@ def uss_convert_encoding(self, src, dest, from_code, to_code): def uss_convert_encoding_prev(self, src, dest, from_code, to_code): """For multiple files conversion, such as a USS path or MVS PDS data set, - use this method to split then do the conversion - - Arguments: - from_code: {str} -- The source code set of the input path - to_code: {str} -- The destination code set for the output path - src: {str} -- The input uss path or a file - dest: {str} -- The output uss path or a file - - Raises: - EncodeError: When direcotry is empty or copy multiple files to a single file - Returns: - boolean -- Indicate whether the conversion is successful or not + use this method to split then do the conversion. + + Parameters + ---------- + src : str + The input uss path or a file. + dest : str + The output uss path or a file. + from_code : str + The source code set of the input path. + to_code : str + The destination code set for the output path. + + Returns + ------- + bool + Indicate whether the conversion is successful or not. + + Raises + ------ + EncodeError + When directory is empty or copy multiple files to a single file. """ src = self._validate_path(src) dest = self._validate_path(dest) @@ -375,18 +472,28 @@ def mvs_convert_encoding( 2) MVS to USS 3) MVS to MVS - Arguments: - src: {str} -- The input MVS data set or USS path to be converted - dest: {str} -- The output MVS data set or USS path to be converted - from_code: {str} -- The source code set of the input MVS data set - to_code: {str} -- The destination code set of the output MVS data set - - Keyword Arguments: - src_type {[type]} -- The input MVS data set or type: PS, PDS, PDSE, VSAM(KSDS) (default: {None}) - dest_type {[type]} -- The output MVS data set type (default: {None}) - - Returns: - boolean -- Indicate whether the conversion is successful or not + Parameters + ---------- + src : str + The input MVS data set or USS path to be converted. + dest : str + The output MVS data set or USS path to be converted. + from_code : str + The source code set of the input MVS data set. + to_code : str + The destination code set of the output MVS data set. + + Keyword Parameters + ----------------- + src_type : str + The input MVS data set or type: PS, PDS, PDSE, VSAM(KSDS). + dest_type : str + The output MVS data set type. + + Returns + ------- + bool + Indicate whether the conversion is successful or not. """ src = self._validate_data_set_or_path(src) dest = self._validate_data_set_or_path(dest) @@ -458,11 +565,18 @@ def uss_tag_encoding(self, file_path, tag): """Tag the file/directory specified with the given code set. If `file_path` is a directory, all of the files and subdirectories will be tagged recursively. - Arguments: - file_path {str} -- Absolute file path to tag. - tag {str} -- Code set to tag the file/directory. - Raises: - TaggingError: When the chtag command fails. + + Parameters + ---------- + file_path : str + Absolute file path to tag. + tag : str + Code set to tag the file/directory. + + Raises + ------ + TaggingError + When the chtag command fails. """ is_dir = os.path.isdir(file_path) @@ -473,11 +587,18 @@ def uss_tag_encoding(self, file_path, tag): def uss_file_tag(self, file_path): """Returns the current tag set for a file. - Arguments: - file_path {str} -- USS path to the file. - Returns: - str -- Current tag set for the file, as returned by 'ls -T' - None -- If the file does not exist or the command fails. + + Parameters + ---------- + file_path : str + USS path to the file. + + Returns + ------- + str + Current tag set for the file, as returned by 'ls -T'. + None + If the file does not exist or the command fails. """ if not os.path.exists(file_path): return None @@ -500,12 +621,50 @@ def uss_file_tag(self, file_path): class EncodeError(Exception): def __init__(self, message): + """Error during encoding. + + Parameters + ---------- + message : str + Human readable string describing the exception. + + Attributes + ---------- + msg : str + Human readable string describing the exception. + """ self.msg = 'An error occurred during encoding: "{0}"'.format(message) super(EncodeError, self).__init__(self.msg) class TaggingError(Exception): def __init__(self, file_path, tag, rc, stdout, stderr): + """Error during tagging. + + Parameters + ---------- + file_path : str + File to tag. + tag : str + Tag to put in the file. + rc : int + Return code. + stdout : str + Standard output. + stderr : str + Standard error. + + Attributes + ---------- + msg : str + Human readable string describing the exception. + rc : int + Return code. + stdout : str + Standard output. + stderr : str + Standard error. + """ self.msg = 'An error occurred during tagging of {0} to {1}'.format( file_path, tag @@ -518,5 +677,21 @@ def __init__(self, file_path, tag, rc, stdout, stderr): class MoveFileError(Exception): def __init__(self, src, dest, e): + """Error while moving a file. + + Parameters + ---------- + src : str + From where the file moves. + dest : str + To where the file moves. + e : str + Exception message. + + Attributes + ---------- + msg : str + Human readable string describing the exception. + """ self.msg = "Failed when moving {0} to {1}: {2}".format(src, dest, e) super().__init__(self.msg) From 5b239b1afe04ec4800b93e044f3857ebc10e0d0c Mon Sep 17 00:00:00 2001 From: Ivan Moreno <iamorenosoto@gmail.com> Date: Tue, 16 Apr 2024 08:46:04 -0700 Subject: [PATCH 340/495] [v1.10.0] [Enabler] Standardization of choices in modules (#1388) * Update zos_archive choices * Update zos_backup_restore choices * Update zos_copy choices * Update zos_data_set choices * Update module docs * Update zos_job_submit choices * Update zos_mount choices * Update zos_unarchive choices * Fix zos_archive and update its tests This also includes major work on zos_data_set since half of the test suite for zos_archive depends on creating data sets. * Update zos_backup_restore tests * Update zos_blockinfile tests * Update more modules * Updated more tests * Update zos_unarchive and zos_mount * Update zos_backup_restore unit tests * Update zos_mvs_raw * Update zos_copy tests * Fix some sanity issues * Fix zos_copy KSDS test * Update zos_copy some more * Fix ZFS call * Update zos_unarchive tests * Add massive changelog fragment * Fix call to zos_data_set * Fix more test issues in zos_fetch * Fix zos_find tests * Generate updated docs --- .../fragments/1388-lowercase-choices.yml | 87 +++++ docs/source/modules/zos_apf.rst | 68 ++-- docs/source/modules/zos_apf.rst-e | 318 +++++++++++++++ docs/source/modules/zos_archive.rst | 102 ++--- docs/source/modules/zos_backup_restore.rst | 80 ++-- docs/source/modules/zos_blockinfile.rst | 52 +-- docs/source/modules/zos_copy.rst | 226 ++++++----- docs/source/modules/zos_data_set.rst | 222 +++++------ docs/source/modules/zos_encode.rst | 32 +- docs/source/modules/zos_fetch.rst | 18 +- docs/source/modules/zos_find.rst | 20 +- docs/source/modules/zos_gather_facts.rst | 14 +- docs/source/modules/zos_job_output.rst | 16 +- docs/source/modules/zos_job_query.rst | 20 +- docs/source/modules/zos_job_submit.rst | 95 +++-- docs/source/modules/zos_lineinfile.rst | 68 ++-- docs/source/modules/zos_mount.rst | 124 +++--- docs/source/modules/zos_mvs_raw.rst | 364 +++++++++--------- docs/source/modules/zos_operator.rst | 2 +- .../modules/zos_operator_action_query.rst | 20 +- docs/source/modules/zos_ping.rst | 8 +- docs/source/modules/zos_script.rst | 32 +- docs/source/modules/zos_tso_command.rst | 4 +- docs/source/modules/zos_unarchive.rst | 68 ++-- docs/source/modules/zos_volume_init.rst | 34 +- plugins/action/zos_copy.py | 12 +- plugins/action/zos_job_submit.py | 6 +- plugins/action/zos_unarchive.py | 6 +- plugins/module_utils/data_set.py | 2 +- plugins/modules/zos_archive.py | 84 ++-- plugins/modules/zos_backup_restore.py | 32 +- plugins/modules/zos_copy.py | 93 ++--- plugins/modules/zos_data_set.py | 354 +++++++++-------- plugins/modules/zos_job_submit.py | 52 +-- plugins/modules/zos_mount.py | 138 +++---- plugins/modules/zos_mvs_raw.py | 252 ++++++------ plugins/modules/zos_unarchive.py | 62 +-- .../modules/test_zos_archive_func.py | 90 ++--- .../modules/test_zos_backup_restore.py | 20 +- .../modules/test_zos_blockinfile_func.py | 18 +- .../functional/modules/test_zos_copy_func.py | 326 ++++++++-------- .../modules/test_zos_data_set_func.py | 80 ++-- .../modules/test_zos_encode_func.py | 16 +- .../functional/modules/test_zos_fetch_func.py | 32 +- .../functional/modules/test_zos_find_func.py | 16 +- .../modules/test_zos_job_output_func.py | 4 +- .../modules/test_zos_job_query_func.py | 8 +- .../modules/test_zos_job_submit_func.py | 58 +-- .../modules/test_zos_lineinfile_func.py | 17 +- .../functional/modules/test_zos_mount_func.py | 38 +- .../modules/test_zos_mvs_raw_func.py | 86 ++--- .../modules/test_zos_unarchive_func.py | 104 ++--- tests/unit/test_zos_backup_restore_unit.py | 2 +- tests/unit/test_zos_mvs_raw_unit.py | 80 ++-- 54 files changed, 2302 insertions(+), 1880 deletions(-) create mode 100644 changelogs/fragments/1388-lowercase-choices.yml create mode 100644 docs/source/modules/zos_apf.rst-e diff --git a/changelogs/fragments/1388-lowercase-choices.yml b/changelogs/fragments/1388-lowercase-choices.yml new file mode 100644 index 000000000..0f14f42fe --- /dev/null +++ b/changelogs/fragments/1388-lowercase-choices.yml @@ -0,0 +1,87 @@ +breaking_changes: + - zos_archive - option ``terse_pack`` no longer accepts uppercase choices, + users should replace them with lowercase ones. + Suboption ``type`` of ``dest_data_set`` no longer accepts uppercase + choices, users should replace them with lowercase ones. + Suboption ``space_type`` of ``dest_data_set`` no longer accepts + uppercase choices, users should replace them with lowercase ones. + Suboption ``record_format`` of ``dest_data_set`` no longer accepts + uppercase choices, users should replace them with lowercase ones. + (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_backup_restore - option ``space_type`` no longer accepts uppercase + choices, users should replace them with lowercase ones. + (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_copy - suboption ``type`` of ``dest_data_set`` no longer accepts + uppercase choices, users should replace them with lowercase ones. + Suboption ``space_type`` of ``dest_data_set`` no longer accepts + uppercase choices, users should replace them with lowercase ones. + Suboption ``record_format`` of ``dest_data_set`` no longer accepts + uppercase choices, users should replace them with lowercase ones. + (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_data_set - option ``type`` no longer accepts uppercase choices, + users should replace them with lowercase ones. + Option ``space_type`` no longer accepts uppercase choices, users + should replace them with lowercase ones. + Option ``record_format`` no longer accepts uppercase choices, users + should replace them with lowercase ones. + Options inside ``batch`` no longer accept uppercase choices, users should + replace them with lowercase ones. + (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_job_submit - option ``location`` no longer accepts uppercase choices, + users should replace them with lowercase ones. + (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_mount - option ``fs_type`` no longer accepts uppercase choices, + users should replace them with lowercase ones. + Option ``unmount_opts`` no longer accepts uppercase choices, users + should replace them with lowercase ones. + Option ``mount_opts`` no longer accepts uppercase choices, users + should replace them with lowercase ones. + Option ``tag_untagged`` no longer accepts uppercase choices, users + should replace them with lowercase ones. + Option ``automove`` no longer accepts uppercase choices, users + should replace them with lowercase ones. + (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_mvs_raw - suboption ``type`` of ``dd_data_set`` no longer accepts + uppercase choices, users should replace them with lowercase ones. + Suboptions ``disposition_normal`` and ``disposition_abnormal`` of + ``dd_data_set`` no longer accept ``catlg`` and ``uncatlg`` as choices. + This also applies when defining a ``dd_data_set`` inside ``dd_concat``. + Suboption ``space_type`` of ``dd_data_set`` no longer accepts + uppercase choices, users should replace them with lowercase ones. + Suboption ``record_format`` of ``dd_data_set`` no longer accepts + uppercase choices, users should replace them with lowercase ones. + Suboption ``record_format`` of ``dd_unix`` no longer accepts + uppercase choices, users should replace them with lowercase ones. + Options inside ``dd_concat`` no longer accept uppercase choices, + users should replace them with lowercase ones. + (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_unarchive - suboption ``type`` of ``dest_data_set`` no longer accepts + uppercase choices, users should replace them with lowercase ones. + Suboption ``space_type`` of ``dest_data_set`` no longer accepts + uppercase choices, users should replace them with lowercase ones. + Suboption ``record_format`` of ``dest_data_set`` no longer accepts + uppercase choices, users should replace them with lowercase ones. + (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + +trivial: + - zos_blockinfile - updated tests to use lowercase options when calling + another module in the collection. + (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_find - updated tests to use lowercase options when calling + another module in the collection. + (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_lineinfile - updated tests to use lowercase options when calling + another module in the collection. + (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_encode - updated tests to use lowercase options when calling + another module in the collection. + (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_fetch - updated tests to use lowercase options when calling + another module in the collection. + (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_job_output - updated tests to use lowercase options when calling + another module in the collection. + (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_job_query - updated tests to use lowercase options when calling + another module in the collection. + (https://github.com/ansible-collections/ibm_zos_core/pull/1388). \ No newline at end of file diff --git a/docs/source/modules/zos_apf.rst b/docs/source/modules/zos_apf.rst index e9a55c007..73d616e76 100644 --- a/docs/source/modules/zos_apf.rst +++ b/docs/source/modules/zos_apf.rst @@ -37,7 +37,7 @@ library state - Ensure that the library is added ``state=present`` or removed ``state=absent``. + Ensure that the library is added \ :literal:`state=present`\ or removed \ :literal:`state=absent`\ . The APF list format has to be "DYNAMIC". @@ -58,24 +58,24 @@ force_dynamic volume - The identifier for the volume containing the library specified in the ``library`` parameter. The values must be one the following. + The identifier for the volume containing the library specified in the \ :literal:`library`\ parameter. The values must be one the following. 1. The volume serial number. - 2. Six asterisks (******), indicating that the system must use the volume serial number of the current system residence (SYSRES) volume. + 2. Six asterisks (\*\*\*\*\*\*), indicating that the system must use the volume serial number of the current system residence (SYSRES) volume. - 3. *MCAT*, indicating that the system must use the volume serial number of the volume containing the master catalog. + 3. \*MCAT\*, indicating that the system must use the volume serial number of the volume containing the master catalog. - If ``volume`` is not specified, ``library`` has to be cataloged. + If \ :literal:`volume`\ is not specified, \ :literal:`library`\ has to be cataloged. | **required**: False | **type**: str sms - Indicates that the library specified in the ``library`` parameter is managed by the storage management subsystem (SMS), and therefore no volume is associated with the library. + Indicates that the library specified in the \ :literal:`library`\ parameter is managed by the storage management subsystem (SMS), and therefore no volume is associated with the library. - If ``sms=True``, ``volume`` value will be ignored. + If \ :literal:`sms=True`\ , \ :literal:`volume`\ value will be ignored. | **required**: False | **type**: bool @@ -83,13 +83,13 @@ sms operation - Change APF list format to "DYNAMIC" ``operation=set_dynamic`` or "STATIC" ``operation=set_static`` + Change APF list format to "DYNAMIC" \ :literal:`operation=set\_dynamic`\ or "STATIC" \ :literal:`operation=set\_static`\ - Display APF list current format ``operation=check_format`` + Display APF list current format \ :literal:`operation=check\_format`\ - Display APF list entries when ``operation=list`` ``library``, ``volume`` and ``sms`` will be used as filters. + Display APF list entries when \ :literal:`operation=list`\ \ :literal:`library`\ , \ :literal:`volume`\ and \ :literal:`sms`\ will be used as filters. - If ``operation`` is not set, add or remove operation will be ignored. + If \ :literal:`operation`\ is not set, add or remove operation will be ignored. | **required**: False | **type**: str @@ -99,23 +99,23 @@ operation tmp_hlq Override the default high level qualifier (HLQ) for temporary and backup datasets. - The default HLQ is the Ansible user used to execute the module and if that is not available, then the value ``TMPHLQ`` is used. + The default HLQ is the Ansible user used to execute the module and if that is not available, then the value \ :literal:`TMPHLQ`\ is used. | **required**: False | **type**: str persistent - Add/remove persistent entries to or from *data_set_name* + Add/remove persistent entries to or from \ :emphasis:`data\_set\_name`\ - ``library`` will not be persisted or removed if ``persistent=None`` + \ :literal:`library`\ will not be persisted or removed if \ :literal:`persistent=None`\ | **required**: False | **type**: dict data_set_name - The data set name used for persisting or removing a ``library`` from the APF list. + The data set name used for persisting or removing a \ :literal:`library`\ from the APF list. | **required**: True | **type**: str @@ -124,13 +124,13 @@ persistent marker The marker line template. - ``{mark}`` will be replaced with "BEGIN" and "END". + \ :literal:`{mark}`\ will be replaced with "BEGIN" and "END". - Using a custom marker without the ``{mark}`` variable may result in the block being repeatedly inserted on subsequent playbook runs. + Using a custom marker without the \ :literal:`{mark}`\ variable may result in the block being repeatedly inserted on subsequent playbook runs. - ``{mark}`` length may not exceed 72 characters. + \ :literal:`{mark}`\ length may not exceed 72 characters. - The timestamp (<timestamp>) used in the default marker follows the '+%Y%m%d-%H%M%S' date format + The timestamp (\<timestamp\>) used in the default marker follows the '+%Y%m%d-%H%M%S' date format | **required**: False | **type**: str @@ -138,9 +138,9 @@ persistent backup - Creates a backup file or backup data set for *data_set_name*, including the timestamp information to ensure that you retrieve the original APF list defined in *data_set_name*". + Creates a backup file or backup data set for \ :emphasis:`data\_set\_name`\ , including the timestamp information to ensure that you retrieve the original APF list defined in \ :emphasis:`data\_set\_name`\ ". - *backup_name* can be used to specify a backup file name if *backup=true*. + \ :emphasis:`backup\_name`\ can be used to specify a backup file name if \ :emphasis:`backup=true`\ . The backup file name will be return on either success or failure of module execution such that data can be retrieved. @@ -152,11 +152,11 @@ persistent backup_name Specify the USS file name or data set name for the destination backup. - If the source *data_set_name* is a USS file or path, the backup_name name must be a file or path name, and the USS file or path must be an absolute path name. + If the source \ :emphasis:`data\_set\_name`\ is a USS file or path, the backup\_name name must be a file or path name, and the USS file or path must be an absolute path name. - If the source is an MVS data set, the backup_name must be an MVS data set name. + If the source is an MVS data set, the backup\_name must be an MVS data set name. - If the backup_name is not provided, the default backup_name will be used. If the source is a USS file or path, the name of the backup file will be the source file or path name appended with a timestamp. For example, ``/path/file_name.2020-04-23-08-32-29-bak.tar``. + If the backup\_name is not provided, the default backup\_name will be used. If the source is a USS file or path, the name of the backup file will be the source file or path name appended with a timestamp. For example, \ :literal:`/path/file\_name.2020-04-23-08-32-29-bak.tar`\ . If the source is an MVS data set, it will be a data set with a random name generated by calling the ZOAU API. The MVS backup data set recovery can be done by renaming it. @@ -168,9 +168,9 @@ persistent batch A list of dictionaries for adding or removing libraries. - This is mutually exclusive with ``library``, ``volume``, ``sms`` + This is mutually exclusive with \ :literal:`library`\ , \ :literal:`volume`\ , \ :literal:`sms`\ - Can be used with ``persistent`` + Can be used with \ :literal:`persistent`\ | **required**: False | **type**: list @@ -185,24 +185,24 @@ batch volume - The identifier for the volume containing the library specified on the ``library`` parameter. The values must be one of the following. + The identifier for the volume containing the library specified on the \ :literal:`library`\ parameter. The values must be one of the following. 1. The volume serial number - 2. Six asterisks (******), indicating that the system must use the volume serial number of the current system residence (SYSRES) volume. + 2. Six asterisks (\*\*\*\*\*\*), indicating that the system must use the volume serial number of the current system residence (SYSRES) volume. - 3. *MCAT*, indicating that the system must use the volume serial number of the volume containing the master catalog. + 3. \*MCAT\*, indicating that the system must use the volume serial number of the volume containing the master catalog. - If ``volume`` is not specified, ``library`` has to be cataloged. + If \ :literal:`volume`\ is not specified, \ :literal:`library`\ has to be cataloged. | **required**: False | **type**: str sms - Indicates that the library specified in the ``library`` parameter is managed by the storage management subsystem (SMS), and therefore no volume is associated with the library. + Indicates that the library specified in the \ :literal:`library`\ parameter is managed by the storage management subsystem (SMS), and therefore no volume is associated with the library. - If true ``volume`` will be ignored. + If true \ :literal:`volume`\ will be ignored. | **required**: False | **type**: bool @@ -283,9 +283,9 @@ Return Values stdout The stdout from ZOAU command apfadm. Output varies based on the type of operation. - state> stdout of the executed operator command (opercmd), "SETPROG" from ZOAU command apfadm + state\> stdout of the executed operator command (opercmd), "SETPROG" from ZOAU command apfadm - operation> stdout of operation options list> Returns a list of dictionaries of APF list entries [{'vol': 'PP0L6P', 'ds': 'DFH.V5R3M0.CICS.SDFHAUTH'}, {'vol': 'PP0L6P', 'ds': 'DFH.V5R3M0.CICS.SDFJAUTH'}, ...] set_dynamic> Set to DYNAMIC set_static> Set to STATIC check_format> DYNAMIC or STATIC + operation\> stdout of operation options list\> Returns a list of dictionaries of APF list entries [{'vol': 'PP0L6P', 'ds': 'DFH.V5R3M0.CICS.SDFHAUTH'}, {'vol': 'PP0L6P', 'ds': 'DFH.V5R3M0.CICS.SDFJAUTH'}, ...] set\_dynamic\> Set to DYNAMIC set\_static\> Set to STATIC check\_format\> DYNAMIC or STATIC | **returned**: always | **type**: str diff --git a/docs/source/modules/zos_apf.rst-e b/docs/source/modules/zos_apf.rst-e new file mode 100644 index 000000000..ec8e6824c --- /dev/null +++ b/docs/source/modules/zos_apf.rst-e @@ -0,0 +1,318 @@ + +:github_url: https://github.com/ansible-collections/ibm_zos_core/blob/dev/plugins/modules/zos_apf.py + +.. _zos_apf_module: + + +zos_apf -- Add or remove libraries to Authorized Program Facility (APF) +======================================================================= + + + +.. contents:: + :local: + :depth: 1 + + +Synopsis +-------- +- Adds or removes libraries to Authorized Program Facility (APF). +- Manages APF statement persistent entries to a data set or data set member. +- Changes APF list format to "DYNAMIC" or "STATIC". +- Gets the current APF list entries. + + + + + +Parameters +---------- + + +library + The library name to be added or removed from the APF list. + + | **required**: False + | **type**: str + + +state + Ensure that the library is added \ :literal:`state=present`\ or removed \ :literal:`state=absent`\ . + + The APF list format has to be "DYNAMIC". + + | **required**: False + | **type**: str + | **default**: present + | **choices**: absent, present + + +force_dynamic + Will force the APF list format to "DYNAMIC" before adding or removing libraries. + + If the format is "STATIC", the format will be changed to "DYNAMIC". + + | **required**: False + | **type**: bool + | **default**: False + + +volume + The identifier for the volume containing the library specified in the \ :literal:`library`\ parameter. The values must be one the following. + + 1. The volume serial number. + + 2. Six asterisks (\*\*\*\*\*\*), indicating that the system must use the volume serial number of the current system residence (SYSRES) volume. + + 3. \*MCAT\*, indicating that the system must use the volume serial number of the volume containing the master catalog. + + If \ :literal:`volume`\ is not specified, \ :literal:`library`\ has to be cataloged. + + | **required**: False + | **type**: str + + +sms + Indicates that the library specified in the \ :literal:`library`\ parameter is managed by the storage management subsystem (SMS), and therefore no volume is associated with the library. + + If \ :literal:`sms=True`\ , \ :literal:`volume`\ value will be ignored. + + | **required**: False + | **type**: bool + | **default**: False + + +operation + Change APF list format to "DYNAMIC" \ :literal:`operation=set\_dynamic`\ or "STATIC" \ :literal:`operation=set\_static`\ + + Display APF list current format \ :literal:`operation=check\_format`\ + + Display APF list entries when \ :literal:`operation=list`\ \ :literal:`library`\ , \ :literal:`volume`\ and \ :literal:`sms`\ will be used as filters. + + If \ :literal:`operation`\ is not set, add or remove operation will be ignored. + + | **required**: False + | **type**: str + | **choices**: set_dynamic, set_static, check_format, list + + +tmp_hlq + Override the default high level qualifier (HLQ) for temporary and backup datasets. + + The default HLQ is the Ansible user used to execute the module and if that is not available, then the value \ :literal:`TMPHLQ`\ is used. + + | **required**: False + | **type**: str + + +persistent + Add/remove persistent entries to or from \ :emphasis:`data\_set\_name`\ + + \ :literal:`library`\ will not be persisted or removed if \ :literal:`persistent=None`\ + + | **required**: False + | **type**: dict + + + data_set_name + The data set name used for persisting or removing a \ :literal:`library`\ from the APF list. + + | **required**: True + | **type**: str + + + marker + The marker line template. + + \ :literal:`{mark}`\ will be replaced with "BEGIN" and "END". + + Using a custom marker without the \ :literal:`{mark}`\ variable may result in the block being repeatedly inserted on subsequent playbook runs. + + \ :literal:`{mark}`\ length may not exceed 72 characters. + + The timestamp (\<timestamp\>) used in the default marker follows the '+%Y%m%d-%H%M%S' date format + + | **required**: False + | **type**: str + | **default**: /* {mark} ANSIBLE MANAGED BLOCK <timestamp> */ + + + backup + Creates a backup file or backup data set for \ :emphasis:`data\_set\_name`\ , including the timestamp information to ensure that you retrieve the original APF list defined in \ :emphasis:`data\_set\_name`\ ". + + \ :emphasis:`backup\_name`\ can be used to specify a backup file name if \ :emphasis:`backup=true`\ . + + The backup file name will be return on either success or failure of module execution such that data can be retrieved. + + | **required**: False + | **type**: bool + | **default**: False + + + backup_name + Specify the USS file name or data set name for the destination backup. + + If the source \ :emphasis:`data\_set\_name`\ is a USS file or path, the backup\_name name must be a file or path name, and the USS file or path must be an absolute path name. + + If the source is an MVS data set, the backup\_name must be an MVS data set name. + + If the backup\_name is not provided, the default backup\_name will be used. If the source is a USS file or path, the name of the backup file will be the source file or path name appended with a timestamp. For example, \ :literal:`/path/file\_name.2020-04-23-08-32-29-bak.tar`\ . + + If the source is an MVS data set, it will be a data set with a random name generated by calling the ZOAU API. The MVS backup data set recovery can be done by renaming it. + + | **required**: False + | **type**: str + + + +batch + A list of dictionaries for adding or removing libraries. + + This is mutually exclusive with \ :literal:`library`\ , \ :literal:`volume`\ , \ :literal:`sms`\ + + Can be used with \ :literal:`persistent`\ + + | **required**: False + | **type**: list + | **elements**: dict + + + library + The library name to be added or removed from the APF list. + + | **required**: True + | **type**: str + + + volume + The identifier for the volume containing the library specified on the \ :literal:`library`\ parameter. The values must be one of the following. + + 1. The volume serial number + + 2. Six asterisks (\*\*\*\*\*\*), indicating that the system must use the volume serial number of the current system residence (SYSRES) volume. + + 3. \*MCAT\*, indicating that the system must use the volume serial number of the volume containing the master catalog. + + If \ :literal:`volume`\ is not specified, \ :literal:`library`\ has to be cataloged. + + | **required**: False + | **type**: str + + + sms + Indicates that the library specified in the \ :literal:`library`\ parameter is managed by the storage management subsystem (SMS), and therefore no volume is associated with the library. + + If true \ :literal:`volume`\ will be ignored. + + | **required**: False + | **type**: bool + | **default**: False + + + + + +Examples +-------- + +.. code-block:: yaml+jinja + + + - name: Add a library to the APF list + zos_apf: + library: SOME.SEQUENTIAL.DATASET + volume: T12345 + - name: Add a library (cataloged) to the APF list and persistence + zos_apf: + library: SOME.SEQUENTIAL.DATASET + force_dynamic: True + persistent: + data_set_name: SOME.PARTITIONED.DATASET(MEM) + - name: Remove a library from the APF list and persistence + zos_apf: + state: absent + library: SOME.SEQUENTIAL.DATASET + volume: T12345 + persistent: + data_set_name: SOME.PARTITIONED.DATASET(MEM) + - name: Batch libraries with custom marker, persistence for the APF list + zos_apf: + persistent: + data_set_name: "SOME.PARTITIONED.DATASET(MEM)" + marker: "/* {mark} PROG001 USR0010 */" + batch: + - library: SOME.SEQ.DS1 + - library: SOME.SEQ.DS2 + sms: True + - library: SOME.SEQ.DS3 + volume: T12345 + - name: Print the APF list matching library pattern or volume serial number + zos_apf: + operation: list + library: SOME.SEQ.* + volume: T12345 + - name: Set the APF list format to STATIC + zos_apf: + operation: set_static + + + + +Notes +----- + +.. note:: + It is the playbook author or user's responsibility to ensure they have appropriate authority to the RACF® FACILITY resource class. A user is described as the remote user, configured either for the playbook or playbook tasks, who can also obtain escalated privileges to execute as root or another user. + + To add or delete the APF list entry for library libname, you must have UPDATE authority to the RACF® FACILITY resource class entity CSVAPF.libname, or there must be no FACILITY class profile that protects that entity. + + To change the format of the APF list to dynamic, you must have UPDATE authority to the RACF FACILITY resource class profile CSVAPF.MVS.SETPROG.FORMAT.DYNAMIC, or there must be no FACILITY class profile that protects that entity. + + To change the format of the APF list back to static, you must have UPDATE authority to the RACF FACILITY resource class profile CSVAPF.MVS.SETPROG.FORMAT.STATIC, or there must be no FACILITY class profile that protects that entity. + + + + + + + +Return Values +------------- + + +stdout + The stdout from ZOAU command apfadm. Output varies based on the type of operation. + + state\> stdout of the executed operator command (opercmd), "SETPROG" from ZOAU command apfadm + + operation\> stdout of operation options list\> Returns a list of dictionaries of APF list entries [{'vol': 'PP0L6P', 'ds': 'DFH.V5R3M0.CICS.SDFHAUTH'}, {'vol': 'PP0L6P', 'ds': 'DFH.V5R3M0.CICS.SDFJAUTH'}, ...] set\_dynamic\> Set to DYNAMIC set\_static\> Set to STATIC check\_format\> DYNAMIC or STATIC + + | **returned**: always + | **type**: str + +stderr + The error messages from ZOAU command apfadm + + | **returned**: always + | **type**: str + | **sample**: BGYSC1310E ADD Error: Dataset COMMON.LINKLIB volume COMN01 is already present in APF list. + +rc + The return code from ZOAU command apfadm + + | **returned**: always + | **type**: int + +msg + The module messages + + | **returned**: failure + | **type**: str + | **sample**: Parameter verification failed + +backup_name + Name of the backup file or data set that was created. + + | **returned**: if backup=true, always + | **type**: str + diff --git a/docs/source/modules/zos_archive.rst b/docs/source/modules/zos_archive.rst index fe93474f0..3249f3ba8 100644 --- a/docs/source/modules/zos_archive.rst +++ b/docs/source/modules/zos_archive.rst @@ -20,7 +20,7 @@ Synopsis - Sources for archiving must be on the remote z/OS system. - Supported sources are USS (UNIX System Services) or z/OS data sets. - The archive remains on the remote z/OS system. -- For supported archive formats, see option ``format``. +- For supported archive formats, see option \ :literal:`format`\ . @@ -35,7 +35,7 @@ src USS file paths should be absolute paths. - MVS data sets supported types are: ``SEQ``, ``PDS``, ``PDSE``. + MVS data sets supported types are: \ :literal:`SEQ`\ , \ :literal:`PDS`\ , \ :literal:`PDSE`\ . VSAMs are not supported. @@ -68,7 +68,7 @@ format terse_pack - Compression option for use with the terse format, *name=terse*. + Compression option for use with the terse format, \ :emphasis:`name=terse`\ . Pack will compress records in a data set so that the output results in lossless data compression. @@ -78,7 +78,7 @@ format | **required**: False | **type**: str - | **choices**: PACK, SPACK + | **choices**: pack, spack xmit_log_data_set @@ -88,14 +88,14 @@ format If the data set provided exists, the data set must have the following attributes: LRECL=255, BLKSIZE=3120, and RECFM=VB - When providing the *xmit_log_data_set* name, ensure there is adequate space. + When providing the \ :emphasis:`xmit\_log\_data\_set`\ name, ensure there is adequate space. | **required**: False | **type**: str use_adrdssu - If set to true, the ``zos_archive`` module will use Data Facility Storage Management Subsystem data set services (DFSMSdss) program ADRDSSU to compress data sets into a portable format before using ``xmit`` or ``terse``. + If set to true, the \ :literal:`zos\_archive`\ module will use Data Facility Storage Management Subsystem data set services (DFSMSdss) program ADRDSSU to compress data sets into a portable format before using \ :literal:`xmit`\ or \ :literal:`terse`\ . | **required**: False | **type**: bool @@ -107,19 +107,19 @@ format dest The remote absolute path or data set where the archive should be created. - *dest* can be a USS file or MVS data set name. + \ :emphasis:`dest`\ can be a USS file or MVS data set name. - If *dest* has missing parent directories, they will be created. + If \ :emphasis:`dest`\ has missing parent directories, they will be created. - If *dest* is a nonexistent USS file, it will be created. + If \ :emphasis:`dest`\ is a nonexistent USS file, it will be created. - If *dest* is an existing file or data set and *force=true*, the existing *dest* will be deleted and recreated with attributes defined in the *dest_data_set* option or computed by the module. + If \ :emphasis:`dest`\ is an existing file or data set and \ :emphasis:`force=true`\ , the existing \ :emphasis:`dest`\ will be deleted and recreated with attributes defined in the \ :emphasis:`dest\_data\_set`\ option or computed by the module. - If *dest* is an existing file or data set and *force=false* or not specified, the module exits with a note to the user. + If \ :emphasis:`dest`\ is an existing file or data set and \ :emphasis:`force=false`\ or not specified, the module exits with a note to the user. - Destination data set attributes can be set using *dest_data_set*. + Destination data set attributes can be set using \ :emphasis:`dest\_data\_set`\ . - Destination data set space will be calculated based on space of source data sets provided and/or found by expanding the pattern name. Calculating space can impact module performance. Specifying space attributes in the *dest_data_set* option will improve performance. + Destination data set space will be calculated based on space of source data sets provided and/or found by expanding the pattern name. Calculating space can impact module performance. Specifying space attributes in the \ :emphasis:`dest\_data\_set`\ option will improve performance. | **required**: True | **type**: str @@ -128,9 +128,9 @@ dest exclude Remote absolute path, glob, or list of paths, globs or data set name patterns for the file, files or data sets to exclude from src list and glob expansion. - Patterns (wildcards) can contain one of the following, `?`, `*`. + Patterns (wildcards) can contain one of the following, \`?\`, \`\*\`. - * matches everything. + \* matches everything. ? matches any single character. @@ -144,7 +144,7 @@ group When left unspecified, it uses the current group of the current use unless you are root, in which case it can preserve the previous ownership. - This option is only applicable if ``dest`` is USS, otherwise ignored. + This option is only applicable if \ :literal:`dest`\ is USS, otherwise ignored. | **required**: False | **type**: str @@ -153,13 +153,13 @@ group mode The permission of the destination archive file. - If ``dest`` is USS, this will act as Unix file mode, otherwise ignored. + If \ :literal:`dest`\ is USS, this will act as Unix file mode, otherwise ignored. - It should be noted that modes are octal numbers. The user must either add a leading zero so that Ansible's YAML parser knows it is an octal number (like ``0644`` or ``01777``)or quote it (like ``'644'`` or ``'1777'``) so Ansible receives a string and can do its own conversion from string into number. Giving Ansible a number without following one of these rules will end up with a decimal number which will have unexpected results. + It should be noted that modes are octal numbers. The user must either add a leading zero so that Ansible's YAML parser knows it is an octal number (like \ :literal:`0644`\ or \ :literal:`01777`\ )or quote it (like \ :literal:`'644'`\ or \ :literal:`'1777'`\ ) so Ansible receives a string and can do its own conversion from string into number. Giving Ansible a number without following one of these rules will end up with a decimal number which will have unexpected results. The mode may also be specified as a symbolic mode (for example, 'u+rwx' or 'u=rw,g=r,o=r') or a special string 'preserve'. - *mode=preserve* means that the file will be given the same permissions as the src file. + \ :emphasis:`mode=preserve`\ means that the file will be given the same permissions as the src file. | **required**: False | **type**: str @@ -170,14 +170,14 @@ owner When left unspecified, it uses the current user unless you are root, in which case it can preserve the previous ownership. - This option is only applicable if ``dest`` is USS, otherwise ignored. + This option is only applicable if \ :literal:`dest`\ is USS, otherwise ignored. | **required**: False | **type**: str remove - Remove any added source files , trees or data sets after module `zos_archive <./zos_archive.html>`_ adds them to the archive. Source files, trees and data sets are identified with option *src*. + Remove any added source files , trees or data sets after module \ `zos\_archive <./zos_archive.html>`__\ adds them to the archive. Source files, trees and data sets are identified with option \ :emphasis:`src`\ . | **required**: False | **type**: bool @@ -185,7 +185,7 @@ remove dest_data_set - Data set attributes to customize a ``dest`` data set to be archived into. + Data set attributes to customize a \ :literal:`dest`\ data set to be archived into. | **required**: False | **type**: dict @@ -203,23 +203,23 @@ dest_data_set | **required**: False | **type**: str - | **default**: SEQ - | **choices**: SEQ + | **default**: seq + | **choices**: seq space_primary - If the destination *dest* data set does not exist , this sets the primary space allocated for the data set. + If the destination \ :emphasis:`dest`\ data set does not exist , this sets the primary space allocated for the data set. - The unit of space used is set using *space_type*. + The unit of space used is set using \ :emphasis:`space\_type`\ . | **required**: False | **type**: int space_secondary - If the destination *dest* data set does not exist , this sets the secondary space allocated for the data set. + If the destination \ :emphasis:`dest`\ data set does not exist , this sets the secondary space allocated for the data set. - The unit of space used is set using *space_type*. + The unit of space used is set using \ :emphasis:`space\_type`\ . | **required**: False | **type**: int @@ -228,21 +228,21 @@ dest_data_set space_type If the destination data set does not exist, this sets the unit of measurement to use when defining primary and secondary space. - Valid units of size are ``K``, ``M``, ``G``, ``CYL``, and ``TRK``. + Valid units of size are \ :literal:`k`\ , \ :literal:`m`\ , \ :literal:`g`\ , \ :literal:`cyl`\ , and \ :literal:`trk`\ . | **required**: False | **type**: str - | **choices**: K, M, G, CYL, TRK + | **choices**: k, m, g, cyl, trk record_format - If the destination data set does not exist, this sets the format of the data set. (e.g ``FB``) + If the destination data set does not exist, this sets the format of the data set. (e.g \ :literal:`FB`\ ) - Choices are case-insensitive. + Choices are case-sensitive. | **required**: False | **type**: str - | **choices**: FB, VB, FBA, VBA, U + | **choices**: fb, vb, fba, vba, u record_length @@ -313,18 +313,18 @@ dest_data_set tmp_hlq Override the default high level qualifier (HLQ) for temporary data sets. - The default HLQ is the Ansible user used to execute the module and if that is not available, then the environment variable value ``TMPHLQ`` is used. + The default HLQ is the Ansible user used to execute the module and if that is not available, then the environment variable value \ :literal:`TMPHLQ`\ is used. | **required**: False | **type**: str force - If set to ``true`` and the remote file or data set ``dest`` will be deleted. Otherwise it will be created with the ``dest_data_set`` attributes or default values if ``dest_data_set`` is not specified. + If set to \ :literal:`true`\ and the remote file or data set \ :literal:`dest`\ will be deleted. Otherwise it will be created with the \ :literal:`dest\_data\_set`\ attributes or default values if \ :literal:`dest\_data\_set`\ is not specified. - If set to ``false``, the file or data set will only be copied if the destination does not exist. + If set to \ :literal:`false`\ , the file or data set will only be copied if the destination does not exist. - If set to ``false`` and destination exists, the module exits with a note to the user. + If set to \ :literal:`false`\ and destination exists, the module exits with a note to the user. | **required**: False | **type**: bool @@ -373,7 +373,7 @@ Examples format: name: terse format_options: - terse_pack: "SPACK" + terse_pack: "spack" use_adrdssu: True # Use a pattern to store @@ -392,11 +392,11 @@ Notes ----- .. note:: - This module does not perform a send or transmit operation to a remote node. If you want to transport the archive you can use zos_fetch to retrieve to the controller and then zos_copy or zos_unarchive for copying to a remote or send to the remote and then unpack the archive respectively. + This module does not perform a send or transmit operation to a remote node. If you want to transport the archive you can use zos\_fetch to retrieve to the controller and then zos\_copy or zos\_unarchive for copying to a remote or send to the remote and then unpack the archive respectively. - When packing and using ``use_adrdssu`` flag the module will take up to two times the space indicated in ``dest_data_set``. + When packing and using \ :literal:`use\_adrdssu`\ flag the module will take up to two times the space indicated in \ :literal:`dest\_data\_set`\ . - tar, zip, bz2 and pax are archived using python ``tarfile`` library which uses the latest version available for each format, for compatibility when opening from system make sure to use the latest available version for the intended format. + tar, zip, bz2 and pax are archived using python \ :literal:`tarfile`\ library which uses the latest version available for each format, for compatibility when opening from system make sure to use the latest available version for the intended format. @@ -416,27 +416,27 @@ Return Values state - The state of the input ``src``. + The state of the input \ :literal:`src`\ . - ``absent`` when the source files or data sets were removed. + \ :literal:`absent`\ when the source files or data sets were removed. - ``present`` when the source files or data sets were not removed. + \ :literal:`present`\ when the source files or data sets were not removed. - ``incomplete`` when ``remove`` was true and the source files or data sets were not removed. + \ :literal:`incomplete`\ when \ :literal:`remove`\ was true and the source files or data sets were not removed. | **returned**: always | **type**: str dest_state - The state of the *dest* file or data set. + The state of the \ :emphasis:`dest`\ file or data set. - ``absent`` when the file does not exist. + \ :literal:`absent`\ when the file does not exist. - ``archive`` when the file is an archive. + \ :literal:`archive`\ when the file is an archive. - ``compress`` when the file is compressed, but not an archive. + \ :literal:`compress`\ when the file is compressed, but not an archive. - ``incomplete`` when the file is an archive, but some files under *src* were not found. + \ :literal:`incomplete`\ when the file is an archive, but some files under \ :emphasis:`src`\ were not found. | **returned**: success | **type**: str @@ -454,7 +454,7 @@ archived | **type**: list arcroot - If ``src`` is a list of USS files, this returns the top most parent folder of the list of files, otherwise is empty. + If \ :literal:`src`\ is a list of USS files, this returns the top most parent folder of the list of files, otherwise is empty. | **returned**: always | **type**: str diff --git a/docs/source/modules/zos_backup_restore.rst b/docs/source/modules/zos_backup_restore.rst index d70efc7a1..6833279fa 100644 --- a/docs/source/modules/zos_backup_restore.rst +++ b/docs/source/modules/zos_backup_restore.rst @@ -47,34 +47,34 @@ data_sets include - When *operation=backup*, specifies a list of data sets or data set patterns to include in the backup. + When \ :emphasis:`operation=backup`\ , specifies a list of data sets or data set patterns to include in the backup. - When *operation=restore*, specifies a list of data sets or data set patterns to include when restoring from a backup. + When \ :emphasis:`operation=restore`\ , specifies a list of data sets or data set patterns to include when restoring from a backup. - The single asterisk, ``*``, is used in place of exactly one qualifier. In addition, it can be used to indicate to DFSMSdss that only part of a qualifier has been specified. + The single asterisk, \ :literal:`\*`\ , is used in place of exactly one qualifier. In addition, it can be used to indicate to DFSMSdss that only part of a qualifier has been specified. - When used with other qualifiers, the double asterisk, ``**``, indicates either the nonexistence of leading, trailing, or middle qualifiers, or the fact that they play no role in the selection process. + When used with other qualifiers, the double asterisk, \ :literal:`\*\*`\ , indicates either the nonexistence of leading, trailing, or middle qualifiers, or the fact that they play no role in the selection process. Two asterisks are the maximum permissible in a qualifier. If there are two asterisks in a qualifier, they must be the first and last characters. - A question mark ``?`` or percent sign ``%`` matches a single character. + A question mark \ :literal:`?`\ or percent sign \ :literal:`%`\ matches a single character. | **required**: False | **type**: raw exclude - When *operation=backup*, specifies a list of data sets or data set patterns to exclude from the backup. + When \ :emphasis:`operation=backup`\ , specifies a list of data sets or data set patterns to exclude from the backup. - When *operation=restore*, specifies a list of data sets or data set patterns to exclude when restoring from a backup. + When \ :emphasis:`operation=restore`\ , specifies a list of data sets or data set patterns to exclude when restoring from a backup. - The single asterisk, ``*``, is used in place of exactly one qualifier. In addition, it can be used to indicate that only part of a qualifier has been specified." + The single asterisk, \ :literal:`\*`\ , is used in place of exactly one qualifier. In addition, it can be used to indicate that only part of a qualifier has been specified." - When used with other qualifiers, the double asterisk, ``**``, indicates either the nonexistence of leading, trailing, or middle qualifiers, or the fact that they play no role in the selection process. + When used with other qualifiers, the double asterisk, \ :literal:`\*\*`\ , indicates either the nonexistence of leading, trailing, or middle qualifiers, or the fact that they play no role in the selection process. Two asterisks are the maximum permissible in a qualifier. If there are two asterisks in a qualifier, they must be the first and last characters. - A question mark ``?`` or percent sign ``%`` matches a single character. + A question mark \ :literal:`?`\ or percent sign \ :literal:`%`\ matches a single character. | **required**: False | **type**: raw @@ -84,22 +84,22 @@ data_sets volume This applies to both data set restores and volume restores. - When *operation=backup* and *data_sets* are provided, specifies the volume that contains the data sets to backup. + When \ :emphasis:`operation=backup`\ and \ :emphasis:`data\_sets`\ are provided, specifies the volume that contains the data sets to backup. - When *operation=restore*, specifies the volume the backup should be restored to. + When \ :emphasis:`operation=restore`\ , specifies the volume the backup should be restored to. - *volume* is required when restoring a full volume backup. + \ :emphasis:`volume`\ is required when restoring a full volume backup. | **required**: False | **type**: str full_volume - When *operation=backup* and *full_volume=True*, specifies that the entire volume provided to *volume* should be backed up. + When \ :emphasis:`operation=backup`\ and \ :emphasis:`full\_volume=True`\ , specifies that the entire volume provided to \ :emphasis:`volume`\ should be backed up. - When *operation=restore* and *full_volume=True*, specifies that the volume should be restored (default is dataset). + When \ :emphasis:`operation=restore`\ and \ :emphasis:`full\_volume=True`\ , specifies that the volume should be restored (default is dataset). - *volume* must be provided when *full_volume=True*. + \ :emphasis:`volume`\ must be provided when \ :emphasis:`full\_volume=True`\ . | **required**: False | **type**: bool @@ -109,18 +109,18 @@ full_volume temp_volume Specifies a particular volume on which the temporary data sets should be created during the backup and restore process. - When *operation=backup* and *backup_name* is a data set, specifies the volume the backup should be placed in. + When \ :emphasis:`operation=backup`\ and \ :emphasis:`backup\_name`\ is a data set, specifies the volume the backup should be placed in. | **required**: False | **type**: str backup_name - When *operation=backup*, the destination data set or UNIX file to hold the backup. + When \ :emphasis:`operation=backup`\ , the destination data set or UNIX file to hold the backup. - When *operation=restore*, the destination data set or UNIX file backup to restore. + When \ :emphasis:`operation=restore`\ , the destination data set or UNIX file backup to restore. - There are no enforced conventions for backup names. However, using a common extension like ``.dzp`` for UNIX files and ``.DZP`` for data sets will improve readability. + There are no enforced conventions for backup names. However, using a common extension like \ :literal:`.dzp`\ for UNIX files and \ :literal:`.DZP`\ for data sets will improve readability. | **required**: True | **type**: str @@ -135,9 +135,9 @@ recover overwrite - When *operation=backup*, specifies if an existing data set or UNIX file matching *backup_name* should be deleted. + When \ :emphasis:`operation=backup`\ , specifies if an existing data set or UNIX file matching \ :emphasis:`backup\_name`\ should be deleted. - When *operation=restore*, specifies if the module should overwrite existing data sets with matching name on the target device. + When \ :emphasis:`operation=restore`\ , specifies if the module should overwrite existing data sets with matching name on the target device. | **required**: False | **type**: bool @@ -145,35 +145,35 @@ overwrite sms_storage_class - When *operation=restore*, specifies the storage class to use. The storage class will also be used for temporary data sets created during restore process. + When \ :emphasis:`operation=restore`\ , specifies the storage class to use. The storage class will also be used for temporary data sets created during restore process. - When *operation=backup*, specifies the storage class to use for temporary data sets created during backup process. + When \ :emphasis:`operation=backup`\ , specifies the storage class to use for temporary data sets created during backup process. - If neither of *sms_storage_class* or *sms_management_class* are specified, the z/OS system's Automatic Class Selection (ACS) routines will be used. + If neither of \ :emphasis:`sms\_storage\_class`\ or \ :emphasis:`sms\_management\_class`\ are specified, the z/OS system's Automatic Class Selection (ACS) routines will be used. | **required**: False | **type**: str sms_management_class - When *operation=restore*, specifies the management class to use. The management class will also be used for temporary data sets created during restore process. + When \ :emphasis:`operation=restore`\ , specifies the management class to use. The management class will also be used for temporary data sets created during restore process. - When *operation=backup*, specifies the management class to use for temporary data sets created during backup process. + When \ :emphasis:`operation=backup`\ , specifies the management class to use for temporary data sets created during backup process. - If neither of *sms_storage_class* or *sms_management_class* are specified, the z/OS system's Automatic Class Selection (ACS) routines will be used. + If neither of \ :emphasis:`sms\_storage\_class`\ or \ :emphasis:`sms\_management\_class`\ are specified, the z/OS system's Automatic Class Selection (ACS) routines will be used. | **required**: False | **type**: str space - If *operation=backup*, specifies the amount of space to allocate for the backup. Please note that even when backing up to a UNIX file, backup contents will be temporarily held in a data set. + If \ :emphasis:`operation=backup`\ , specifies the amount of space to allocate for the backup. Please note that even when backing up to a UNIX file, backup contents will be temporarily held in a data set. - If *operation=restore*, specifies the amount of space to allocate for data sets temporarily created during the restore process. + If \ :emphasis:`operation=restore`\ , specifies the amount of space to allocate for data sets temporarily created during the restore process. - The unit of space used is set using *space_type*. + The unit of space used is set using \ :emphasis:`space\_type`\ . - When *full_volume=True*, *space* defaults to ``1``, otherwise default is ``25`` + When \ :emphasis:`full\_volume=True`\ , \ :emphasis:`space`\ defaults to \ :literal:`1`\ , otherwise default is \ :literal:`25`\ | **required**: False | **type**: int @@ -182,13 +182,13 @@ space space_type The unit of measurement to use when defining data set space. - Valid units of size are ``K``, ``M``, ``G``, ``CYL``, and ``TRK``. + Valid units of size are \ :literal:`k`\ , \ :literal:`m`\ , \ :literal:`g`\ , \ :literal:`cyl`\ , and \ :literal:`trk`\ . - When *full_volume=True*, *space_type* defaults to ``G``, otherwise default is ``M`` + When \ :emphasis:`full\_volume=True`\ , \ :emphasis:`space\_type`\ defaults to \ :literal:`g`\ , otherwise default is \ :literal:`m`\ | **required**: False | **type**: str - | **choices**: K, M, G, CYL, TRK + | **choices**: k, m, g, cyl, trk hlq @@ -203,7 +203,7 @@ hlq tmp_hlq Override the default high level qualifier (HLQ) for temporary and backup data sets. - The default HLQ is the Ansible user that executes the module and if that is not available, then the value of ``TMPHLQ`` is used. + The default HLQ is the Ansible user that executes the module and if that is not available, then the value of \ :literal:`TMPHLQ`\ is used. | **required**: False | **type**: str @@ -251,7 +251,7 @@ Examples include: user.** backup_name: MY.BACKUP.DZP space: 100 - space_type: M + space_type: m - name: Backup all datasets matching the pattern USER.** that are present on the volume MYVOL1 to data set MY.BACKUP.DZP, @@ -263,7 +263,7 @@ Examples volume: MYVOL1 backup_name: MY.BACKUP.DZP space: 100 - space_type: M + space_type: m - name: Backup an entire volume, MYVOL1, to the UNIX file /tmp/temp_backup.dzp, allocate 1GB for data sets used in backup process. @@ -273,7 +273,7 @@ Examples volume: MYVOL1 full_volume: yes space: 1 - space_type: G + space_type: g - name: Restore data sets from backup stored in the UNIX file /tmp/temp_backup.dzp. Use z/OS username as new HLQ. @@ -317,7 +317,7 @@ Examples full_volume: yes backup_name: MY.BACKUP.DZP space: 1 - space_type: G + space_type: g - name: Restore data sets from backup stored in the UNIX file /tmp/temp_backup.dzp. Specify DB2SMS10 for the SMS storage and management classes to use for the restored diff --git a/docs/source/modules/zos_blockinfile.rst b/docs/source/modules/zos_blockinfile.rst index f3eef5967..8cd6f756c 100644 --- a/docs/source/modules/zos_blockinfile.rst +++ b/docs/source/modules/zos_blockinfile.rst @@ -38,9 +38,9 @@ src state - Whether the block should be inserted or replaced using *state=present*. + Whether the block should be inserted or replaced using \ :emphasis:`state=present`\ . - Whether the block should be removed using *state=absent*. + Whether the block should be removed using \ :emphasis:`state=absent`\ . | **required**: False | **type**: str @@ -51,9 +51,9 @@ state marker The marker line template. - ``{mark}`` will be replaced with the values ``in marker_begin`` (default="BEGIN") and ``marker_end`` (default="END"). + \ :literal:`{mark}`\ will be replaced with the values \ :literal:`in marker\_begin`\ (default="BEGIN") and \ :literal:`marker\_end`\ (default="END"). - Using a custom marker without the ``{mark}`` variable may result in the block being repeatedly inserted on subsequent playbook runs. + Using a custom marker without the \ :literal:`{mark}`\ variable may result in the block being repeatedly inserted on subsequent playbook runs. | **required**: False | **type**: str @@ -63,7 +63,7 @@ marker block The text to insert inside the marker lines. - Multi-line can be separated by '\n'. + Multi-line can be separated by '\\n'. Any double-quotation marks will be removed. @@ -74,11 +74,11 @@ block insertafter If specified, the block will be inserted after the last match of the specified regular expression. - A special value ``EOF`` for inserting a block at the end of the file is available. + A special value \ :literal:`EOF`\ for inserting a block at the end of the file is available. - If a specified regular expression has no matches, ``EOF`` will be used instead. + If a specified regular expression has no matches, \ :literal:`EOF`\ will be used instead. - Choices are EOF or '*regex*'. + Choices are EOF or '\*regex\*'. Default is EOF. @@ -89,18 +89,18 @@ insertafter insertbefore If specified, the block will be inserted before the last match of specified regular expression. - A special value ``BOF`` for inserting the block at the beginning of the file is available. + A special value \ :literal:`BOF`\ for inserting the block at the beginning of the file is available. If a specified regular expression has no matches, the block will be inserted at the end of the file. - Choices are BOF or '*regex*'. + Choices are BOF or '\*regex\*'. | **required**: False | **type**: str marker_begin - This will be inserted at ``{mark}`` in the opening ansible block marker. + This will be inserted at \ :literal:`{mark}`\ in the opening ansible block marker. | **required**: False | **type**: str @@ -108,7 +108,7 @@ marker_begin marker_end - This will be inserted at ``{mark}`` in the closing ansible block marker. + This will be inserted at \ :literal:`{mark}`\ in the closing ansible block marker. | **required**: False | **type**: str @@ -116,9 +116,9 @@ marker_end backup - Specifies whether a backup of destination should be created before editing the source *src*. + Specifies whether a backup of destination should be created before editing the source \ :emphasis:`src`\ . - When set to ``true``, the module creates a backup file or data set. + When set to \ :literal:`true`\ , the module creates a backup file or data set. The backup file name will be returned on either success or failure of module execution such that data can be retrieved. @@ -130,15 +130,15 @@ backup backup_name Specify the USS file name or data set name for the destination backup. - If the source *src* is a USS file or path, the backup_name name must be a file or path name, and the USS file or path must be an absolute path name. + If the source \ :emphasis:`src`\ is a USS file or path, the backup\_name name must be a file or path name, and the USS file or path must be an absolute path name. - If the source is an MVS data set, the backup_name name must be an MVS data set name, and the dataset must not be preallocated. + If the source is an MVS data set, the backup\_name name must be an MVS data set name, and the dataset must not be preallocated. - If the backup_name is not provided, the default backup_name name will be used. If the source is a USS file or path, the name of the backup file will be the source file or path name appended with a timestamp, e.g. ``/path/file_name.2020-04-23-08-32-29-bak.tar``. + If the backup\_name is not provided, the default backup\_name name will be used. If the source is a USS file or path, the name of the backup file will be the source file or path name appended with a timestamp, e.g. \ :literal:`/path/file\_name.2020-04-23-08-32-29-bak.tar`\ . If the source is an MVS data set, it will be a data set with a random name generated by calling the ZOAU API. The MVS backup data set recovery can be done by renaming it. - If *src* is a data set member and backup_name is not provided, the data set member will be backed up to the same partitioned data set with a randomly generated member name. + If \ :emphasis:`src`\ is a data set member and backup\_name is not provided, the data set member will be backed up to the same partitioned data set with a randomly generated member name. | **required**: False | **type**: str @@ -147,14 +147,14 @@ backup_name tmp_hlq Override the default high level qualifier (HLQ) for temporary and backup datasets. - The default HLQ is the Ansible user used to execute the module and if that is not available, then the value ``TMPHLQ`` is used. + The default HLQ is the Ansible user used to execute the module and if that is not available, then the value \ :literal:`TMPHLQ`\ is used. | **required**: False | **type**: str encoding - The character set of the source *src*. `zos_blockinfile <./zos_blockinfile.html>`_ requires it to be provided with correct encoding to read the content of a USS file or data set. If this parameter is not provided, this module assumes that USS file or data set is encoded in IBM-1047. + The character set of the source \ :emphasis:`src`\ . \ `zos\_blockinfile <./zos_blockinfile.html>`__\ requires it to be provided with correct encoding to read the content of a USS file or data set. If this parameter is not provided, this module assumes that USS file or data set is encoded in IBM-1047. Supported character sets rely on the charset conversion utility (iconv) version; the most common character sets are supported. @@ -168,7 +168,7 @@ force This is helpful when a data set is being used in a long running process such as a started task and you are wanting to update or read. - The ``force`` option enables sharing of data sets through the disposition *DISP=SHR*. + The \ :literal:`force`\ option enables sharing of data sets through the disposition \ :emphasis:`DISP=SHR`\ . | **required**: False | **type**: bool @@ -290,13 +290,13 @@ Notes .. note:: It is the playbook author or user's responsibility to avoid files that should not be encoded, such as binary files. A user is described as the remote user, configured either for the playbook or playbook tasks, who can also obtain escalated privileges to execute as root or another user. - All data sets are always assumed to be cataloged. If an uncataloged data set needs to be encoded, it should be cataloged first. The `zos_data_set <./zos_data_set.html>`_ module can be used to catalog uncataloged data sets. + All data sets are always assumed to be cataloged. If an uncataloged data set needs to be encoded, it should be cataloged first. The \ `zos\_data\_set <./zos_data_set.html>`__\ module can be used to catalog uncataloged data sets. - For supported character sets used to encode data, refer to the `documentation <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/resources/character_set.html>`_. + For supported character sets used to encode data, refer to the \ `documentation <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/resources/character_set.html>`__\ . - When using ``with_*`` loops be aware that if you do not set a unique mark the block will be overwritten on each iteration. + When using \`\`with\_\*\`\` loops be aware that if you do not set a unique mark the block will be overwritten on each iteration. - When more then one block should be handled in a file you must change the *marker* per task. + When more then one block should be handled in a file you must change the \ :emphasis:`marker`\ per task. @@ -315,7 +315,7 @@ Return Values changed - Indicates if the source was modified. Value of 1 represents `true`, otherwise `false`. + Indicates if the source was modified. Value of 1 represents \`true\`, otherwise \`false\`. | **returned**: success | **type**: bool diff --git a/docs/source/modules/zos_copy.rst b/docs/source/modules/zos_copy.rst index 00e274b00..5ea5bf3ef 100644 --- a/docs/source/modules/zos_copy.rst +++ b/docs/source/modules/zos_copy.rst @@ -16,7 +16,7 @@ zos_copy -- Copy data to z/OS Synopsis -------- -- The `zos_copy <./zos_copy.html>`_ module copies a file or data set from a local or a remote machine to a location on the remote machine. +- The \ `zos\_copy <./zos_copy.html>`__\ module copies a file or data set from a local or a remote machine to a location on the remote machine. @@ -27,17 +27,17 @@ Parameters asa_text - If set to ``true``, indicates that either ``src`` or ``dest`` or both contain ASA control characters. + If set to \ :literal:`true`\ , indicates that either \ :literal:`src`\ or \ :literal:`dest`\ or both contain ASA control characters. - When ``src`` is a USS file and ``dest`` is a data set, the copy will preserve ASA control characters in the destination. + When \ :literal:`src`\ is a USS file and \ :literal:`dest`\ is a data set, the copy will preserve ASA control characters in the destination. - When ``src`` is a data set containing ASA control characters and ``dest`` is a USS file, the copy will put all control characters as plain text in the destination. + When \ :literal:`src`\ is a data set containing ASA control characters and \ :literal:`dest`\ is a USS file, the copy will put all control characters as plain text in the destination. - If ``dest`` is a non-existent data set, it will be created with record format Fixed Block with ANSI format (FBA). + If \ :literal:`dest`\ is a non-existent data set, it will be created with record format Fixed Block with ANSI format (FBA). - If neither ``src`` or ``dest`` have record format Fixed Block with ANSI format (FBA) or Variable Block with ANSI format (VBA), the module will fail. + If neither \ :literal:`src`\ or \ :literal:`dest`\ have record format Fixed Block with ANSI format (FBA) or Variable Block with ANSI format (VBA), the module will fail. - This option is only valid for text files. If ``is_binary`` is ``true`` or ``executable`` is ``true`` as well, the module will fail. + This option is only valid for text files. If \ :literal:`is\_binary`\ is \ :literal:`true`\ or \ :literal:`executable`\ is \ :literal:`true`\ as well, the module will fail. | **required**: False | **type**: bool @@ -47,7 +47,7 @@ asa_text backup Specifies whether a backup of the destination should be created before copying data. - When set to ``true``, the module creates a backup file or data set. + When set to \ :literal:`true`\ , the module creates a backup file or data set. The backup file name will be returned on either success or failure of module execution such that data can be retrieved. @@ -59,24 +59,24 @@ backup backup_name Specify a unique USS file name or data set name for the destination backup. - If the destination ``dest`` is a USS file or path, the ``backup_name`` must be an absolute path name. + If the destination \ :literal:`dest`\ is a USS file or path, the \ :literal:`backup\_name`\ must be an absolute path name. - If the destination is an MVS data set name, the ``backup_name`` provided must meet data set naming conventions of one or more qualifiers, each from one to eight characters long, that are delimited by periods. + If the destination is an MVS data set name, the \ :literal:`backup\_name`\ provided must meet data set naming conventions of one or more qualifiers, each from one to eight characters long, that are delimited by periods. - If the ``backup_name`` is not provided, the default ``backup_name`` will be used. If the ``dest`` is a USS file or USS path, the name of the backup file will be the destination file or path name appended with a timestamp, e.g. ``/path/file_name.2020-04-23-08-32-29-bak.tar``. If the ``dest`` is an MVS data set, it will be a data set with a randomly generated name. + If the \ :literal:`backup\_name`\ is not provided, the default \ :literal:`backup\_name`\ will be used. If the \ :literal:`dest`\ is a USS file or USS path, the name of the backup file will be the destination file or path name appended with a timestamp, e.g. \ :literal:`/path/file\_name.2020-04-23-08-32-29-bak.tar`\ . If the \ :literal:`dest`\ is an MVS data set, it will be a data set with a randomly generated name. - If ``dest`` is a data set member and ``backup_name`` is not provided, the data set member will be backed up to the same partitioned data set with a randomly generated member name. + If \ :literal:`dest`\ is a data set member and \ :literal:`backup\_name`\ is not provided, the data set member will be backed up to the same partitioned data set with a randomly generated member name. | **required**: False | **type**: str content - When used instead of ``src``, sets the contents of a file or data set directly to the specified value. + When used instead of \ :literal:`src`\ , sets the contents of a file or data set directly to the specified value. - Works only when ``dest`` is a USS file, sequential data set, or a partitioned data set member. + Works only when \ :literal:`dest`\ is a USS file, sequential data set, or a partitioned data set member. - If ``dest`` is a directory, then content will be copied to ``/path/to/dest/inline_copy``. + If \ :literal:`dest`\ is a directory, then content will be copied to \ :literal:`/path/to/dest/inline\_copy`\ . | **required**: False | **type**: str @@ -85,27 +85,27 @@ content dest The remote absolute path or data set where the content should be copied to. - ``dest`` can be a USS file, directory or MVS data set name. + \ :literal:`dest`\ can be a USS file, directory or MVS data set name. - If ``dest`` has missing parent directories, they will be created. + If \ :literal:`dest`\ has missing parent directories, they will be created. - If ``dest`` is a nonexistent USS file, it will be created. + If \ :literal:`dest`\ is a nonexistent USS file, it will be created. - If ``dest`` is a new USS file or replacement, the file will be appropriately tagged with either the system's default locale or the encoding option defined. If the USS file is a replacement, the user must have write authority to the file either through ownership, group or other permissions, else the copy will fail. + If \ :literal:`dest`\ is a new USS file or replacement, the file will be appropriately tagged with either the system's default locale or the encoding option defined. If the USS file is a replacement, the user must have write authority to the file either through ownership, group or other permissions, else the module will fail. - If ``dest`` is a nonexistent data set, it will be created following the process outlined here and in the ``volume`` option. + If \ :literal:`dest`\ is a nonexistent data set, it will be created following the process outlined here and in the \ :literal:`volume`\ option. - If ``dest`` is a nonexistent data set, the attributes assigned will depend on the type of ``src``. If ``src`` is a USS file, ``dest`` will have a Fixed Block (FB) record format and the remaining attributes will be computed. If *is_binary=true*, ``dest`` will have a Fixed Block (FB) record format with a record length of 80, block size of 32760, and the remaining attributes will be computed. If *executable=true*,``dest`` will have an Undefined (U) record format with a record length of 0, block size of 32760, and the remaining attributes will be computed. + If \ :literal:`dest`\ is a nonexistent data set, the attributes assigned will depend on the type of \ :literal:`src`\ . If \ :literal:`src`\ is a USS file, \ :literal:`dest`\ will have a Fixed Block (FB) record format and the remaining attributes will be computed. If \ :emphasis:`is\_binary=true`\ , \ :literal:`dest`\ will have a Fixed Block (FB) record format with a record length of 80, block size of 32760, and the remaining attributes will be computed. If \ :emphasis:`executable=true`\ ,\ :literal:`dest`\ will have an Undefined (U) record format with a record length of 0, block size of 32760, and the remaining attributes will be computed. - When ``dest`` is a data set, precedence rules apply. If ``dest_data_set`` is set, this will take precedence over an existing data set. If ``dest`` is an empty data set, the empty data set will be written with the expectation its attributes satisfy the copy. Lastly, if no precendent rule has been exercised, ``dest`` will be created with the same attributes of ``src``. + When \ :literal:`dest`\ is a data set, precedence rules apply. If \ :literal:`dest\_data\_set`\ is set, this will take precedence over an existing data set. If \ :literal:`dest`\ is an empty data set, the empty data set will be written with the expectation its attributes satisfy the copy. Lastly, if no precendent rule has been exercised, \ :literal:`dest`\ will be created with the same attributes of \ :literal:`src`\ . - When the ``dest`` is an existing VSAM (KSDS) or VSAM (ESDS), then source can be an ESDS, a KSDS or an RRDS. The VSAM (KSDS) or VSAM (ESDS) ``dest`` will be deleted and recreated following the process outlined in the ``volume`` option. + When the \ :literal:`dest`\ is an existing VSAM (KSDS) or VSAM (ESDS), then source can be an ESDS, a KSDS or an RRDS. The VSAM (KSDS) or VSAM (ESDS) \ :literal:`dest`\ will be deleted and recreated following the process outlined in the \ :literal:`volume`\ option. - When the ``dest`` is an existing VSAM (RRDS), then the source must be an RRDS. The VSAM (RRDS) will be deleted and recreated following the process outlined in the ``volume`` option. + When the \ :literal:`dest`\ is an existing VSAM (RRDS), then the source must be an RRDS. The VSAM (RRDS) will be deleted and recreated following the process outlined in the \ :literal:`volume`\ option. - When ``dest`` is and existing VSAM (LDS), then source must be an LDS. The VSAM (LDS) will be deleted and recreated following the process outlined in the ``volume`` option. + When \ :literal:`dest`\ is and existing VSAM (LDS), then source must be an LDS. The VSAM (LDS) will be deleted and recreated following the process outlined in the \ :literal:`volume`\ option. - When ``dest`` is a data set, you can override storage management rules by specifying ``volume`` if the storage class being used has GUARANTEED_SPACE=YES specified, otherwise, the allocation will fail. See ``volume`` for more volume related processes. + When \ :literal:`dest`\ is a data set, you can override storage management rules by specifying \ :literal:`volume`\ if the storage class being used has GUARANTEED\_SPACE=YES specified, otherwise, the allocation will fail. See \ :literal:`volume`\ for more volume related processes. | **required**: True | **type**: str @@ -114,9 +114,9 @@ dest encoding Specifies which encodings the destination file or data set should be converted from and to. - If ``encoding`` is not provided, the module determines which local and remote charsets to convert the data from and to. Note that this is only done for text data and not binary data. + If \ :literal:`encoding`\ is not provided, the module determines which local and remote charsets to convert the data from and to. Note that this is only done for text data and not binary data. - Only valid if ``is_binary`` is false. + Only valid if \ :literal:`is\_binary`\ is false. | **required**: False | **type**: dict @@ -132,7 +132,7 @@ encoding to The encoding to be converted to - | **required**: True + | **required**: False | **type**: str @@ -140,22 +140,22 @@ encoding tmp_hlq Override the default high level qualifier (HLQ) for temporary and backup datasets. - The default HLQ is the Ansible user used to execute the module and if that is not available, then the value ``TMPHLQ`` is used. + The default HLQ is the Ansible user used to execute the module and if that is not available, then the value \ :literal:`TMPHLQ`\ is used. | **required**: False | **type**: str force - If set to ``true`` and the remote file or data set ``dest`` is empty, the ``dest`` will be reused. + If set to \ :literal:`true`\ and the remote file or data set \ :literal:`dest`\ is empty, the \ :literal:`dest`\ will be reused. - If set to ``true`` and the remote file or data set ``dest`` is NOT empty, the ``dest`` will be deleted and recreated with the ``src`` data set attributes, otherwise it will be recreated with the ``dest`` data set attributes. + If set to \ :literal:`true`\ and the remote file or data set \ :literal:`dest`\ is NOT empty, the \ :literal:`dest`\ will be deleted and recreated with the \ :literal:`src`\ data set attributes, otherwise it will be recreated with the \ :literal:`dest`\ data set attributes. - To backup data before any deletion, see parameters ``backup`` and ``backup_name``. + To backup data before any deletion, see parameters \ :literal:`backup`\ and \ :literal:`backup\_name`\ . - If set to ``false``, the file or data set will only be copied if the destination does not exist. + If set to \ :literal:`false`\ , the file or data set will only be copied if the destination does not exist. - If set to ``false`` and destination exists, the module exits with a note to the user. + If set to \ :literal:`false`\ and destination exists, the module exits with a note to the user. | **required**: False | **type**: bool @@ -163,11 +163,11 @@ force force_lock - By default, when ``dest`` is a MVS data set and is being used by another process with DISP=SHR or DISP=OLD the module will fail. Use ``force_lock`` to bypass this check and continue with copy. + By default, when \ :literal:`dest`\ is a MVS data set and is being used by another process with DISP=SHR or DISP=OLD the module will fail. Use \ :literal:`force\_lock`\ to bypass this check and continue with copy. - If set to ``true`` and destination is a MVS data set opened by another process then zos_copy will try to copy using DISP=SHR. + If set to \ :literal:`true`\ and destination is a MVS data set opened by another process then zos\_copy will try to copy using DISP=SHR. - Using ``force_lock`` uses operations that are subject to race conditions and can lead to data loss, use with caution. + Using \ :literal:`force\_lock`\ uses operations that are subject to race conditions and can lead to data loss, use with caution. If a data set member has aliases, and is not a program object, copying that member to a dataset that is in use will result in the aliases not being preserved in the target dataset. When this scenario occurs the module will fail. @@ -177,9 +177,9 @@ force_lock ignore_sftp_stderr - During data transfer through SFTP, the module fails if the SFTP command directs any content to stderr. The user is able to override this behavior by setting this parameter to ``true``. By doing so, the module would essentially ignore the stderr stream produced by SFTP and continue execution. + During data transfer through SFTP, the module fails if the SFTP command directs any content to stderr. The user is able to override this behavior by setting this parameter to \ :literal:`true`\ . By doing so, the module would essentially ignore the stderr stream produced by SFTP and continue execution. - When Ansible verbosity is set to greater than 3, either through the command line interface (CLI) using **-vvvv** or through environment variables such as **verbosity = 4**, then this parameter will automatically be set to ``true``. + When Ansible verbosity is set to greater than 3, either through the command line interface (CLI) using \ :strong:`-vvvv`\ or through environment variables such as \ :strong:`verbosity = 4`\ , then this parameter will automatically be set to \ :literal:`true`\ . | **required**: False | **type**: bool @@ -187,11 +187,11 @@ ignore_sftp_stderr is_binary - If set to ``true``, indicates that the file or data set to be copied is a binary file or data set. + If set to \ :literal:`true`\ , indicates that the file or data set to be copied is a binary file or data set. - When *is_binary=true*, no encoding conversion is applied to the content, all content transferred retains the original state. + When \ :emphasis:`is\_binary=true`\ , no encoding conversion is applied to the content, all content transferred retains the original state. - Use *is_binary=true* when copying a Database Request Module (DBRM) to retain the original state of the serialized SQL statements of a program. + Use \ :emphasis:`is\_binary=true`\ when copying a Database Request Module (DBRM) to retain the original state of the serialized SQL statements of a program. | **required**: False | **type**: bool @@ -199,15 +199,15 @@ is_binary executable - If set to ``true``, indicates that the file or library to be copied is an executable. + If set to \ :literal:`true`\ , indicates that the file or library to be copied is an executable. - If the ``src`` executable has an alias, the alias information is also copied. If the ``dest`` is Unix, the alias is not visible in Unix, even though the information is there and will be visible if copied to a library. + If the \ :literal:`src`\ executable has an alias, the alias information is also copied. If the \ :literal:`dest`\ is Unix, the alias is not visible in Unix, even though the information is there and will be visible if copied to a library. - If *executable=true*, and ``dest`` is a data set, it must be a PDS or PDSE (library). + If \ :emphasis:`executable=true`\ , and \ :literal:`dest`\ is a data set, it must be a PDS or PDSE (library). - If ``dest`` is a nonexistent data set, the library attributes assigned will be Undefined (U) record format with a record length of 0, block size of 32760 and the remaining attributes will be computed. + If \ :literal:`dest`\ is a nonexistent data set, the library attributes assigned will be Undefined (U) record format with a record length of 0, block size of 32760 and the remaining attributes will be computed. - If ``dest`` is a file, execute permission for the user will be added to the file (``u+x``). + If \ :literal:`dest`\ is a file, execute permission for the user will be added to the file (\`\`u+x\`\`). | **required**: False | **type**: bool @@ -215,9 +215,9 @@ executable aliases - If set to ``true``, indicates that any aliases found in the source (USS file, USS dir, PDS/E library or member) are to be preserved during the copy operation. + If set to \ :literal:`true`\ , indicates that any aliases found in the source (USS file, USS dir, PDS/E library or member) are to be preserved during the copy operation. - Aliases are implicitly preserved when libraries are copied over to USS destinations. That is, when ``executable=True`` and ``dest`` is a USS file or directory, this option will be ignored. + Aliases are implicitly preserved when libraries are copied over to USS destinations. That is, when \ :literal:`executable=True`\ and \ :literal:`dest`\ is a USS file or directory, this option will be ignored. Copying of aliases for text-based data sets from USS sources or to USS destinations is not currently supported. @@ -234,25 +234,47 @@ local_follow | **default**: True +group + Name of the group that will own the file system objects. + + When left unspecified, it uses the current group of the current user unless you are root, in which case it can preserve the previous ownership. + + This option is only applicable if \ :literal:`dest`\ is USS, otherwise ignored. + + | **required**: False + | **type**: str + + mode The permission of the destination file or directory. - If ``dest`` is USS, this will act as Unix file mode, otherwise ignored. + If \ :literal:`dest`\ is USS, this will act as Unix file mode, otherwise ignored. + + It should be noted that modes are octal numbers. The user must either add a leading zero so that Ansible's YAML parser knows it is an octal number (like \ :literal:`0644`\ or \ :literal:`01777`\ )or quote it (like \ :literal:`'644'`\ or \ :literal:`'1777'`\ ) so Ansible receives a string and can do its own conversion from string into number. Giving Ansible a number without following one of these rules will end up with a decimal number which will have unexpected results. + + The mode may also be specified as a symbolic mode (for example, \`\`u+rwx\`\` or \`\`u=rw,g=r,o=r\`\`) or a special string \`preserve\`. - It should be noted that modes are octal numbers. The user must either add a leading zero so that Ansible's YAML parser knows it is an octal number (like ``0644`` or ``01777``)or quote it (like ``'644'`` or ``'1777'``) so Ansible receives a string and can do its own conversion from string into number. Giving Ansible a number without following one of these rules will end up with a decimal number which will have unexpected results. + \ :emphasis:`mode=preserve`\ means that the file will be given the same permissions as the source file. + + | **required**: False + | **type**: str + + +owner + Name of the user that should own the filesystem object, as would be passed to the chown command. - The mode may also be specified as a symbolic mode (for example, ``u+rwx`` or ``u=rw,g=r,o=r``) or a special string `preserve`. + When left unspecified, it uses the current user unless you are root, in which case it can preserve the previous ownership. - *mode=preserve* means that the file will be given the same permissions as the source file. + This option is only applicable if \ :literal:`dest`\ is USS, otherwise ignored. | **required**: False | **type**: str remote_src - If set to ``false``, the module searches for ``src`` at the local machine. + If set to \ :literal:`false`\ , the module searches for \ :literal:`src`\ at the local machine. - If set to ``true``, the module goes to the remote/target machine for ``src``. + If set to \ :literal:`true`\ , the module goes to the remote/target machine for \ :literal:`src`\ . | **required**: False | **type**: bool @@ -262,23 +284,23 @@ remote_src src Path to a file/directory or name of a data set to copy to remote z/OS system. - If ``remote_src`` is true, then ``src`` must be the path to a Unix System Services (USS) file, name of a data set, or data set member. + If \ :literal:`remote\_src`\ is true, then \ :literal:`src`\ must be the path to a Unix System Services (USS) file, name of a data set, or data set member. - If ``src`` is a local path or a USS path, it can be absolute or relative. + If \ :literal:`src`\ is a local path or a USS path, it can be absolute or relative. - If ``src`` is a directory, ``dest`` must be a partitioned data set or a USS directory. + If \ :literal:`src`\ is a directory, \ :literal:`dest`\ must be a partitioned data set or a USS directory. - If ``src`` is a file and ``dest`` ends with "/" or is a directory, the file is copied to the directory with the same filename as ``src``. + If \ :literal:`src`\ is a file and \ :literal:`dest`\ ends with "/" or is a directory, the file is copied to the directory with the same filename as \ :literal:`src`\ . - If ``src`` is a directory and ends with "/", the contents of it will be copied into the root of ``dest``. If it doesn't end with "/", the directory itself will be copied. + If \ :literal:`src`\ is a directory and ends with "/", the contents of it will be copied into the root of \ :literal:`dest`\ . If it doesn't end with "/", the directory itself will be copied. - If ``src`` is a directory or a file, file names will be truncated and/or modified to ensure a valid name for a data set or member. + If \ :literal:`src`\ is a directory or a file, file names will be truncated and/or modified to ensure a valid name for a data set or member. - If ``src`` is a VSAM data set, ``dest`` must also be a VSAM. + If \ :literal:`src`\ is a VSAM data set, \ :literal:`dest`\ must also be a VSAM. Wildcards can be used to copy multiple PDS/PDSE members to another PDS/PDSE. - Required unless using ``content``. + Required unless using \ :literal:`content`\ . | **required**: False | **type**: str @@ -295,22 +317,22 @@ validate volume - If ``dest`` does not exist, specify which volume ``dest`` should be allocated to. + If \ :literal:`dest`\ does not exist, specify which volume \ :literal:`dest`\ should be allocated to. Only valid when the destination is an MVS data set. The volume must already be present on the device. - If no volume is specified, storage management rules will be used to determine the volume where ``dest`` will be allocated. + If no volume is specified, storage management rules will be used to determine the volume where \ :literal:`dest`\ will be allocated. - If the storage administrator has specified a system default unit name and you do not set a ``volume`` name for non-system-managed data sets, then the system uses the volumes associated with the default unit name. Check with your storage administrator to determine whether a default unit name has been specified. + If the storage administrator has specified a system default unit name and you do not set a \ :literal:`volume`\ name for non-system-managed data sets, then the system uses the volumes associated with the default unit name. Check with your storage administrator to determine whether a default unit name has been specified. | **required**: False | **type**: str dest_data_set - Data set attributes to customize a ``dest`` data set to be copied into. + Data set attributes to customize a \ :literal:`dest`\ data set to be copied into. | **required**: False | **type**: dict @@ -321,22 +343,22 @@ dest_data_set | **required**: True | **type**: str - | **choices**: KSDS, ESDS, RRDS, LDS, SEQ, PDS, PDSE, MEMBER, BASIC, LIBRARY + | **choices**: ksds, esds, rrds, lds, seq, pds, pdse, member, basic, library space_primary - If the destination *dest* data set does not exist , this sets the primary space allocated for the data set. + If the destination \ :emphasis:`dest`\ data set does not exist , this sets the primary space allocated for the data set. - The unit of space used is set using *space_type*. + The unit of space used is set using \ :emphasis:`space\_type`\ . | **required**: False | **type**: int space_secondary - If the destination *dest* data set does not exist , this sets the secondary space allocated for the data set. + If the destination \ :emphasis:`dest`\ data set does not exist , this sets the secondary space allocated for the data set. - The unit of space used is set using *space_type*. + The unit of space used is set using \ :emphasis:`space\_type`\ . | **required**: False | **type**: int @@ -345,21 +367,21 @@ dest_data_set space_type If the destination data set does not exist, this sets the unit of measurement to use when defining primary and secondary space. - Valid units of size are ``K``, ``M``, ``G``, ``CYL``, and ``TRK``. + Valid units of size are \ :literal:`k`\ , \ :literal:`m`\ , \ :literal:`g`\ , \ :literal:`cyl`\ , and \ :literal:`trk`\ . | **required**: False | **type**: str - | **choices**: K, M, G, CYL, TRK + | **choices**: k, m, g, cyl, trk record_format - If the destination data set does not exist, this sets the format of the data set. (e.g ``FB``) + If the destination data set does not exist, this sets the format of the data set. (e.g \ :literal:`fb`\ ) - Choices are case-insensitive. + Choices are case-sensitive. | **required**: False | **type**: str - | **choices**: FB, VB, FBA, VBA, U + | **choices**: fb, vb, fba, vba, u record_length @@ -390,9 +412,9 @@ dest_data_set key_offset The key offset to use when creating a KSDS data set. - *key_offset* is required when *type=KSDS*. + \ :emphasis:`key\_offset`\ is required when \ :emphasis:`type=ksds`\ . - *key_offset* should only be provided when *type=KSDS* + \ :emphasis:`key\_offset`\ should only be provided when \ :emphasis:`type=ksds`\ | **required**: False | **type**: int @@ -401,9 +423,9 @@ dest_data_set key_length The key length to use when creating a KSDS data set. - *key_length* is required when *type=KSDS*. + \ :emphasis:`key\_length`\ is required when \ :emphasis:`type=ksds`\ . - *key_length* should only be provided when *type=KSDS* + \ :emphasis:`key\_length`\ should only be provided when \ :emphasis:`type=ksds`\ | **required**: False | **type**: int @@ -450,13 +472,13 @@ dest_data_set use_template - Whether the module should treat ``src`` as a Jinja2 template and render it before continuing with the rest of the module. + Whether the module should treat \ :literal:`src`\ as a Jinja2 template and render it before continuing with the rest of the module. - Only valid when ``src`` is a local file or directory. + Only valid when \ :literal:`src`\ is a local file or directory. - All variables defined in inventory files, vars files and the playbook will be passed to the template engine, as well as `Ansible special variables <https://docs.ansible.com/ansible/latest/reference_appendices/special_variables.html#special-variables>`_, such as ``playbook_dir``, ``ansible_version``, etc. + All variables defined in inventory files, vars files and the playbook will be passed to the template engine, as well as \ `Ansible special variables <https://docs.ansible.com/ansible/latest/reference_appendices/special_variables.html#special-variables>`__\ , such as \ :literal:`playbook\_dir`\ , \ :literal:`ansible\_version`\ , etc. - If variables defined in different scopes share the same name, Ansible will apply variable precedence to them. You can see the complete precedence order `in Ansible's documentation <https://docs.ansible.com/ansible/latest/playbook_guide/playbooks_variables.html#understanding-variable-precedence>`_ + If variables defined in different scopes share the same name, Ansible will apply variable precedence to them. You can see the complete precedence order \ `in Ansible's documentation <https://docs.ansible.com/ansible/latest/playbook_guide/playbooks_variables.html#understanding-variable-precedence>`__\ | **required**: False | **type**: bool @@ -466,9 +488,9 @@ use_template template_parameters Options to set the way Jinja2 will process templates. - Jinja2 already sets defaults for the markers it uses, you can find more information at its `official documentation <https://jinja.palletsprojects.com/en/latest/templates/>`_. + Jinja2 already sets defaults for the markers it uses, you can find more information at its \ `official documentation <https://jinja.palletsprojects.com/en/latest/templates/>`__\ . - These options are ignored unless ``use_template`` is true. + These options are ignored unless \ :literal:`use\_template`\ is true. | **required**: False | **type**: dict @@ -547,7 +569,7 @@ template_parameters trim_blocks Whether Jinja2 should remove the first newline after a block is removed. - Setting this option to ``False`` will result in newlines being added to the rendered template. This could create invalid code when working with JCL templates or empty records in destination data sets. + Setting this option to \ :literal:`False`\ will result in newlines being added to the rendered template. This could create invalid code when working with JCL templates or empty records in destination data sets. | **required**: False | **type**: bool @@ -743,11 +765,11 @@ Examples remote_src: true volume: '222222' dest_data_set: - type: SEQ + type: seq space_primary: 10 space_secondary: 3 - space_type: K - record_format: VB + space_type: k + record_format: vb record_length: 150 - name: Copy a Program Object and its aliases on a remote system to a new PDSE member MYCOBOL @@ -781,17 +803,17 @@ Notes .. note:: Destination data sets are assumed to be in catalog. When trying to copy to an uncataloged data set, the module assumes that the data set does not exist and will create it. - Destination will be backed up if either ``backup`` is ``true`` or ``backup_name`` is provided. If ``backup`` is ``false`` but ``backup_name`` is provided, task will fail. + Destination will be backed up if either \ :literal:`backup`\ is \ :literal:`true`\ or \ :literal:`backup\_name`\ is provided. If \ :literal:`backup`\ is \ :literal:`false`\ but \ :literal:`backup\_name`\ is provided, task will fail. When copying local files or directories, temporary storage will be used on the remote z/OS system. The size of the temporary storage will correspond to the size of the file or directory being copied. Temporary files will always be deleted, regardless of success or failure of the copy task. VSAM data sets can only be copied to other VSAM data sets. - For supported character sets used to encode data, refer to the `documentation <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/resources/character_set.html>`_. + For supported character sets used to encode data, refer to the \ `documentation <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/resources/character_set.html>`__\ . This module uses SFTP (Secure File Transfer Protocol) for the underlying transfer protocol; SCP (secure copy protocol) and Co:Z SFTP are not supported. In the case of Co:z SFTP, you can exempt the Ansible user id on z/OS from using Co:Z thus falling back to using standard SFTP. If the module detects SCP, it will temporarily use SFTP for transfers, if not available, the module will fail. - Beginning in version 1.8.x, zos_copy will no longer attempt to correct a copy of a data type member into a PDSE that contains program objects. You can control this behavior using module option ``executable`` that will signify an executable is being copied into a PDSE with other executables. Mixing data type members with program objects will result in a (FSUM8976,./zos_copy.html) error. + Beginning in version 1.8.x, zos\_copy will no longer attempt to correct a copy of a data type member into a PDSE that contains program objects. You can control this behavior using module option \ :literal:`executable`\ that will signify an executable is being copied into a PDSE with other executables. Mixing data type members with program objects will result in a (FSUM8976,./zos\_copy.html) error. @@ -846,12 +868,12 @@ destination_attributes { "block_size": 32760, - "record_format": "FB", + "record_format": "fb", "record_length": 45, "space_primary": 2, "space_secondary": 1, - "space_type": "K", - "type": "PDSE" + "space_type": "k", + "type": "pdse" } block_size @@ -864,7 +886,7 @@ destination_attributes Record format of the dataset. | **type**: str - | **sample**: FB + | **sample**: fb record_length Record length of the dataset. @@ -888,17 +910,17 @@ destination_attributes Unit of measurement for space. | **type**: str - | **sample**: K + | **sample**: k type Type of dataset allocated. | **type**: str - | **sample**: PDSE + | **sample**: pdse checksum - SHA256 checksum of the file after running zos_copy. + SHA256 checksum of the file after running zos\_copy. | **returned**: When ``validate=true`` and if ``dest`` is USS | **type**: str diff --git a/docs/source/modules/zos_data_set.rst b/docs/source/modules/zos_data_set.rst index 0ea34875f..3300c7d40 100644 --- a/docs/source/modules/zos_data_set.rst +++ b/docs/source/modules/zos_data_set.rst @@ -28,11 +28,11 @@ Parameters name - The name of the data set being managed. (e.g ``USER.TEST``) + The name of the data set being managed. (e.g \ :literal:`USER.TEST`\ ) - If *name* is not provided, a randomized data set name will be generated with the HLQ matching the module-runners username. + If \ :emphasis:`name`\ is not provided, a randomized data set name will be generated with the HLQ matching the module-runners username. - Required if *type=MEMBER* or *state!=present* and not using *batch*. + Required if \ :emphasis:`type=member`\ or \ :emphasis:`state!=present`\ and not using \ :emphasis:`batch`\ . | **required**: False | **type**: str @@ -41,49 +41,49 @@ name state The final state desired for specified data set. - If *state=absent* and the data set does not exist on the managed node, no action taken, module completes successfully with *changed=False*. + If \ :emphasis:`state=absent`\ and the data set does not exist on the managed node, no action taken, module completes successfully with \ :emphasis:`changed=False`\ . - If *state=absent* and the data set does exist on the managed node, remove the data set, module completes successfully with *changed=True*. + If \ :emphasis:`state=absent`\ and the data set does exist on the managed node, remove the data set, module completes successfully with \ :emphasis:`changed=True`\ . - If *state=absent* and *type=MEMBER* and *force=True*, the data set will be opened with *DISP=SHR* such that the entire data set can be accessed by other processes while the specified member is deleted. + If \ :emphasis:`state=absent`\ and \ :emphasis:`type=member`\ and \ :emphasis:`force=True`\ , the data set will be opened with \ :emphasis:`DISP=SHR`\ such that the entire data set can be accessed by other processes while the specified member is deleted. - If *state=absent* and *volumes* is provided, and the data set is not found in the catalog, the module attempts to perform catalog using supplied *name* and *volumes*. If the attempt to catalog the data set catalog is successful, then the data set is removed. Module completes successfully with *changed=True*. + If \ :emphasis:`state=absent`\ and \ :emphasis:`volumes`\ is provided, and the data set is not found in the catalog, the module attempts to perform catalog using supplied \ :emphasis:`name`\ and \ :emphasis:`volumes`\ . If the attempt to catalog the data set catalog is successful, then the data set is removed. Module completes successfully with \ :emphasis:`changed=True`\ . - If *state=absent* and *volumes* is provided, and the data set is not found in the catalog, the module attempts to perform catalog using supplied *name* and *volumes*. If the attempt to catalog the data set catalog fails, then no action is taken. Module completes successfully with *changed=False*. + If \ :emphasis:`state=absent`\ and \ :emphasis:`volumes`\ is provided, and the data set is not found in the catalog, the module attempts to perform catalog using supplied \ :emphasis:`name`\ and \ :emphasis:`volumes`\ . If the attempt to catalog the data set catalog fails, then no action is taken. Module completes successfully with \ :emphasis:`changed=False`\ . - If *state=absent* and *volumes* is provided, and the data set is found in the catalog, the module compares the catalog volume attributes to the provided *volumes*. If the volume attributes are different, the cataloged data set will be uncataloged temporarily while the requested data set be deleted is cataloged. The module will catalog the original data set on completion, if the attempts to catalog fail, no action is taken. Module completes successfully with *changed=False*. + If \ :emphasis:`state=absent`\ and \ :emphasis:`volumes`\ is provided, and the data set is found in the catalog, the module compares the catalog volume attributes to the provided \ :emphasis:`volumes`\ . If the volume attributes are different, the cataloged data set will be uncataloged temporarily while the requested data set be deleted is cataloged. The module will catalog the original data set on completion, if the attempts to catalog fail, no action is taken. Module completes successfully with \ :emphasis:`changed=False`\ . - If *state=present* and the data set does not exist on the managed node, create and catalog the data set, module completes successfully with *changed=True*. + If \ :emphasis:`state=present`\ and the data set does not exist on the managed node, create and catalog the data set, module completes successfully with \ :emphasis:`changed=True`\ . - If *state=present* and *replace=True* and the data set is present on the managed node the existing data set is deleted, and a new data set is created and cataloged with the desired attributes, module completes successfully with *changed=True*. + If \ :emphasis:`state=present`\ and \ :emphasis:`replace=True`\ and the data set is present on the managed node the existing data set is deleted, and a new data set is created and cataloged with the desired attributes, module completes successfully with \ :emphasis:`changed=True`\ . - If *state=present* and *replace=False* and the data set is present on the managed node, no action taken, module completes successfully with *changed=False*. + If \ :emphasis:`state=present`\ and \ :emphasis:`replace=False`\ and the data set is present on the managed node, no action taken, module completes successfully with \ :emphasis:`changed=False`\ . - If *state=present* and *type=MEMBER* and the member does not exist in the data set, create a member formatted to store data, module completes successfully with *changed=True*. Note, a PDSE does not allow a mixture of formats such that there is executables (program objects) and data. The member created is formatted to store data, not an executable. + If \ :emphasis:`state=present`\ and \ :emphasis:`type=member`\ and the member does not exist in the data set, create a member formatted to store data, module completes successfully with \ :emphasis:`changed=True`\ . Note, a PDSE does not allow a mixture of formats such that there is executables (program objects) and data. The member created is formatted to store data, not an executable. - If *state=cataloged* and *volumes* is provided and the data set is already cataloged, no action taken, module completes successfully with *changed=False*. + If \ :emphasis:`state=cataloged`\ and \ :emphasis:`volumes`\ is provided and the data set is already cataloged, no action taken, module completes successfully with \ :emphasis:`changed=False`\ . - If *state=cataloged* and *volumes* is provided and the data set is not cataloged, module attempts to perform catalog using supplied *name* and *volumes*. If the attempt to catalog the data set catalog is successful, module completes successfully with *changed=True*. + If \ :emphasis:`state=cataloged`\ and \ :emphasis:`volumes`\ is provided and the data set is not cataloged, module attempts to perform catalog using supplied \ :emphasis:`name`\ and \ :emphasis:`volumes`\ . If the attempt to catalog the data set catalog is successful, module completes successfully with \ :emphasis:`changed=True`\ . - If *state=cataloged* and *volumes* is provided and the data set is not cataloged, module attempts to perform catalog using supplied *name* and *volumes*. If the attempt to catalog the data set catalog fails, returns failure with *changed=False*. + If \ :emphasis:`state=cataloged`\ and \ :emphasis:`volumes`\ is provided and the data set is not cataloged, module attempts to perform catalog using supplied \ :emphasis:`name`\ and \ :emphasis:`volumes`\ . If the attempt to catalog the data set catalog fails, returns failure with \ :emphasis:`changed=False`\ . - If *state=uncataloged* and the data set is not found, no action taken, module completes successfully with *changed=False*. + If \ :emphasis:`state=uncataloged`\ and the data set is not found, no action taken, module completes successfully with \ :emphasis:`changed=False`\ . - If *state=uncataloged* and the data set is found, the data set is uncataloged, module completes successfully with *changed=True*. + If \ :emphasis:`state=uncataloged`\ and the data set is found, the data set is uncataloged, module completes successfully with \ :emphasis:`changed=True`\ . | **required**: False @@ -93,22 +93,22 @@ state type - The data set type to be used when creating a data set. (e.g ``pdse``) + The data set type to be used when creating a data set. (e.g \ :literal:`pdse`\ ). - ``MEMBER`` expects to be used with an existing partitioned data set. + \ :literal:`member`\ expects to be used with an existing partitioned data set. Choices are case-sensitive. | **required**: False | **type**: str - | **default**: PDS - | **choices**: KSDS, ESDS, RRDS, LDS, SEQ, PDS, PDSE, LIBRARY, BASIC, LARGE, MEMBER, HFS, ZFS + | **default**: pds + | **choices**: ksds, esds, rrds, lds, seq, pds, pdse, library, basic, large, member, hfs, zfs space_primary The amount of primary space to allocate for the dataset. - The unit of space used is set using *space_type*. + The unit of space used is set using \ :emphasis:`space\_type`\ . | **required**: False | **type**: int @@ -118,7 +118,7 @@ space_primary space_secondary The amount of secondary space to allocate for the dataset. - The unit of space used is set using *space_type*. + The unit of space used is set using \ :emphasis:`space\_type`\ . | **required**: False | **type**: int @@ -128,25 +128,25 @@ space_secondary space_type The unit of measurement to use when defining primary and secondary space. - Valid units of size are ``K``, ``M``, ``G``, ``CYL``, and ``TRK``. + Valid units of size are \ :literal:`k`\ , \ :literal:`m`\ , \ :literal:`g`\ , \ :literal:`cyl`\ , and \ :literal:`trk`\ . | **required**: False | **type**: str - | **default**: M - | **choices**: K, M, G, CYL, TRK + | **default**: m + | **choices**: k, m, g, cyl, trk record_format - The format of the data set. (e.g ``FB``) + The format of the data set. (e.g \ :literal:`FB`\ ) Choices are case-sensitive. - When *type=KSDS*, *type=ESDS*, *type=RRDS*, *type=LDS* or *type=ZFS* then *record_format=None*, these types do not have a default *record_format*. + When \ :emphasis:`type=ksds`\ , \ :emphasis:`type=esds`\ , \ :emphasis:`type=rrds`\ , \ :emphasis:`type=lds`\ or \ :emphasis:`type=zfs`\ then \ :emphasis:`record\_format=None`\ , these types do not have a default \ :emphasis:`record\_format`\ . | **required**: False | **type**: str - | **default**: FB - | **choices**: FB, VB, FBA, VBA, U, F + | **default**: fb + | **choices**: fb, vb, fba, vba, u, f sms_storage_class @@ -216,9 +216,9 @@ directory_blocks key_offset The key offset to use when creating a KSDS data set. - *key_offset* is required when *type=KSDS*. + \ :emphasis:`key\_offset`\ is required when \ :emphasis:`type=ksds`\ . - *key_offset* should only be provided when *type=KSDS* + \ :emphasis:`key\_offset`\ should only be provided when \ :emphasis:`type=ksds`\ | **required**: False | **type**: int @@ -227,28 +227,28 @@ key_offset key_length The key length to use when creating a KSDS data set. - *key_length* is required when *type=KSDS*. + \ :emphasis:`key\_length`\ is required when \ :emphasis:`type=ksds`\ . - *key_length* should only be provided when *type=KSDS* + \ :emphasis:`key\_length`\ should only be provided when \ :emphasis:`type=ksds`\ | **required**: False | **type**: int volumes - If cataloging a data set, *volumes* specifies the name of the volume(s) where the data set is located. + If cataloging a data set, \ :emphasis:`volumes`\ specifies the name of the volume(s) where the data set is located. - If creating a data set, *volumes* specifies the volume(s) where the data set should be created. + If creating a data set, \ :emphasis:`volumes`\ specifies the volume(s) where the data set should be created. - If *volumes* is provided when *state=present*, and the data set is not found in the catalog, `zos_data_set <./zos_data_set.html>`_ will check the volume table of contents to see if the data set exists. If the data set does exist, it will be cataloged. + If \ :emphasis:`volumes`\ is provided when \ :emphasis:`state=present`\ , and the data set is not found in the catalog, \ `zos\_data\_set <./zos_data_set.html>`__\ will check the volume table of contents to see if the data set exists. If the data set does exist, it will be cataloged. - If *volumes* is provided when *state=absent* and the data set is not found in the catalog, `zos_data_set <./zos_data_set.html>`_ will check the volume table of contents to see if the data set exists. If the data set does exist, it will be cataloged and promptly removed from the system. + If \ :emphasis:`volumes`\ is provided when \ :emphasis:`state=absent`\ and the data set is not found in the catalog, \ `zos\_data\_set <./zos_data_set.html>`__\ will check the volume table of contents to see if the data set exists. If the data set does exist, it will be cataloged and promptly removed from the system. - *volumes* is required when *state=cataloged*. + \ :emphasis:`volumes`\ is required when \ :emphasis:`state=cataloged`\ . Accepts a string when using a single volume and a list of strings when using multiple. @@ -257,12 +257,12 @@ volumes replace - When *replace=True*, and *state=present*, existing data set matching *name* will be replaced. + When \ :emphasis:`replace=True`\ , and \ :emphasis:`state=present`\ , existing data set matching \ :emphasis:`name`\ will be replaced. Replacement is performed by deleting the existing data set and creating a new data set with the same name and desired attributes. Since the existing data set will be deleted prior to creating the new data set, no data set will exist if creation of the new data set fails. - If *replace=True*, all data in the original data set will be lost. + If \ :emphasis:`replace=True`\ , all data in the original data set will be lost. | **required**: False | **type**: bool @@ -272,7 +272,7 @@ replace tmp_hlq Override the default high level qualifier (HLQ) for temporary and backup datasets. - The default HLQ is the Ansible user used to execute the module and if that is not available, then the value ``TMPHLQ`` is used. + The default HLQ is the Ansible user used to execute the module and if that is not available, then the value \ :literal:`TMPHLQ`\ is used. | **required**: False | **type**: str @@ -283,9 +283,9 @@ force This is helpful when a data set is being used in a long running process such as a started task and you are wanting to delete a member. - The *force=True* option enables sharing of data sets through the disposition *DISP=SHR*. + The \ :emphasis:`force=True`\ option enables sharing of data sets through the disposition \ :emphasis:`DISP=SHR`\ . - The *force=True* only applies to data set members when *state=absent* and *type=MEMBER*. + The \ :emphasis:`force=True`\ only applies to data set members when \ :emphasis:`state=absent`\ and \ :emphasis:`type=member`\ . | **required**: False | **type**: bool @@ -301,11 +301,11 @@ batch name - The name of the data set being managed. (e.g ``USER.TEST``) + The name of the data set being managed. (e.g \ :literal:`USER.TEST`\ ) - If *name* is not provided, a randomized data set name will be generated with the HLQ matching the module-runners username. + If \ :emphasis:`name`\ is not provided, a randomized data set name will be generated with the HLQ matching the module-runners username. - Required if *type=MEMBER* or *state!=present* + Required if \ :emphasis:`type=member`\ or \ :emphasis:`state!=present`\ | **required**: False | **type**: str @@ -314,49 +314,49 @@ batch state The final state desired for specified data set. - If *state=absent* and the data set does not exist on the managed node, no action taken, module completes successfully with *changed=False*. + If \ :emphasis:`state=absent`\ and the data set does not exist on the managed node, no action taken, module completes successfully with \ :emphasis:`changed=False`\ . - If *state=absent* and the data set does exist on the managed node, remove the data set, module completes successfully with *changed=True*. + If \ :emphasis:`state=absent`\ and the data set does exist on the managed node, remove the data set, module completes successfully with \ :emphasis:`changed=True`\ . - If *state=absent* and *type=MEMBER* and *force=True*, the data set will be opened with *DISP=SHR* such that the entire data set can be accessed by other processes while the specified member is deleted. + If \ :emphasis:`state=absent`\ and \ :emphasis:`type=member`\ and \ :emphasis:`force=True`\ , the data set will be opened with \ :emphasis:`DISP=SHR`\ such that the entire data set can be accessed by other processes while the specified member is deleted. - If *state=absent* and *volumes* is provided, and the data set is not found in the catalog, the module attempts to perform catalog using supplied *name* and *volumes*. If the attempt to catalog the data set catalog is successful, then the data set is removed. Module completes successfully with *changed=True*. + If \ :emphasis:`state=absent`\ and \ :emphasis:`volumes`\ is provided, and the data set is not found in the catalog, the module attempts to perform catalog using supplied \ :emphasis:`name`\ and \ :emphasis:`volumes`\ . If the attempt to catalog the data set catalog is successful, then the data set is removed. Module completes successfully with \ :emphasis:`changed=True`\ . - If *state=absent* and *volumes* is provided, and the data set is not found in the catalog, the module attempts to perform catalog using supplied *name* and *volumes*. If the attempt to catalog the data set catalog fails, then no action is taken. Module completes successfully with *changed=False*. + If \ :emphasis:`state=absent`\ and \ :emphasis:`volumes`\ is provided, and the data set is not found in the catalog, the module attempts to perform catalog using supplied \ :emphasis:`name`\ and \ :emphasis:`volumes`\ . If the attempt to catalog the data set catalog fails, then no action is taken. Module completes successfully with \ :emphasis:`changed=False`\ . - If *state=absent* and *volumes* is provided, and the data set is found in the catalog, the module compares the catalog volume attributes to the provided *volumes*. If they volume attributes are different, the cataloged data set will be uncataloged temporarily while the requested data set be deleted is cataloged. The module will catalog the original data set on completion, if the attempts to catalog fail, no action is taken. Module completes successfully with *changed=False*. + If \ :emphasis:`state=absent`\ and \ :emphasis:`volumes`\ is provided, and the data set is found in the catalog, the module compares the catalog volume attributes to the provided \ :emphasis:`volumes`\ . If they volume attributes are different, the cataloged data set will be uncataloged temporarily while the requested data set be deleted is cataloged. The module will catalog the original data set on completion, if the attempts to catalog fail, no action is taken. Module completes successfully with \ :emphasis:`changed=False`\ . - If *state=present* and the data set does not exist on the managed node, create and catalog the data set, module completes successfully with *changed=True*. + If \ :emphasis:`state=present`\ and the data set does not exist on the managed node, create and catalog the data set, module completes successfully with \ :emphasis:`changed=True`\ . - If *state=present* and *replace=True* and the data set is present on the managed node the existing data set is deleted, and a new data set is created and cataloged with the desired attributes, module completes successfully with *changed=True*. + If \ :emphasis:`state=present`\ and \ :emphasis:`replace=True`\ and the data set is present on the managed node the existing data set is deleted, and a new data set is created and cataloged with the desired attributes, module completes successfully with \ :emphasis:`changed=True`\ . - If *state=present* and *replace=False* and the data set is present on the managed node, no action taken, module completes successfully with *changed=False*. + If \ :emphasis:`state=present`\ and \ :emphasis:`replace=False`\ and the data set is present on the managed node, no action taken, module completes successfully with \ :emphasis:`changed=False`\ . - If *state=present* and *type=MEMBER* and the member does not exist in the data set, create a member formatted to store data, module completes successfully with *changed=True*. Note, a PDSE does not allow a mixture of formats such that there is executables (program objects) and data. The member created is formatted to store data, not an executable. + If \ :emphasis:`state=present`\ and \ :emphasis:`type=member`\ and the member does not exist in the data set, create a member formatted to store data, module completes successfully with \ :emphasis:`changed=True`\ . Note, a PDSE does not allow a mixture of formats such that there is executables (program objects) and data. The member created is formatted to store data, not an executable. - If *state=cataloged* and *volumes* is provided and the data set is already cataloged, no action taken, module completes successfully with *changed=False*. + If \ :emphasis:`state=cataloged`\ and \ :emphasis:`volumes`\ is provided and the data set is already cataloged, no action taken, module completes successfully with \ :emphasis:`changed=False`\ . - If *state=cataloged* and *volumes* is provided and the data set is not cataloged, module attempts to perform catalog using supplied *name* and *volumes*. If the attempt to catalog the data set catalog is successful, module completes successfully with *changed=True*. + If \ :emphasis:`state=cataloged`\ and \ :emphasis:`volumes`\ is provided and the data set is not cataloged, module attempts to perform catalog using supplied \ :emphasis:`name`\ and \ :emphasis:`volumes`\ . If the attempt to catalog the data set catalog is successful, module completes successfully with \ :emphasis:`changed=True`\ . - If *state=cataloged* and *volumes* is provided and the data set is not cataloged, module attempts to perform catalog using supplied *name* and *volumes*. If the attempt to catalog the data set catalog fails, returns failure with *changed=False*. + If \ :emphasis:`state=cataloged`\ and \ :emphasis:`volumes`\ is provided and the data set is not cataloged, module attempts to perform catalog using supplied \ :emphasis:`name`\ and \ :emphasis:`volumes`\ . If the attempt to catalog the data set catalog fails, returns failure with \ :emphasis:`changed=False`\ . - If *state=uncataloged* and the data set is not found, no action taken, module completes successfully with *changed=False*. + If \ :emphasis:`state=uncataloged`\ and the data set is not found, no action taken, module completes successfully with \ :emphasis:`changed=False`\ . - If *state=uncataloged* and the data set is found, the data set is uncataloged, module completes successfully with *changed=True*. + If \ :emphasis:`state=uncataloged`\ and the data set is found, the data set is uncataloged, module completes successfully with \ :emphasis:`changed=True`\ . | **required**: False @@ -366,22 +366,22 @@ batch type - The data set type to be used when creating a data set. (e.g ``PDSE``) + The data set type to be used when creating a data set. (e.g \ :literal:`pdse`\ ) - ``MEMBER`` expects to be used with an existing partitioned data set. + \ :literal:`member`\ expects to be used with an existing partitioned data set. Choices are case-sensitive. | **required**: False | **type**: str - | **default**: PDS - | **choices**: KSDS, ESDS, RRDS, LDS, SEQ, PDS, PDSE, LIBRARY, BASIC, LARGE, MEMBER, HFS, ZFS + | **default**: pds + | **choices**: ksds, esds, rrds, lds, seq, pds, pdse, library, basic, large, member, hfs, zfs space_primary The amount of primary space to allocate for the dataset. - The unit of space used is set using *space_type*. + The unit of space used is set using \ :emphasis:`space\_type`\ . | **required**: False | **type**: int @@ -391,7 +391,7 @@ batch space_secondary The amount of secondary space to allocate for the dataset. - The unit of space used is set using *space_type*. + The unit of space used is set using \ :emphasis:`space\_type`\ . | **required**: False | **type**: int @@ -401,25 +401,25 @@ batch space_type The unit of measurement to use when defining primary and secondary space. - Valid units of size are ``K``, ``M``, ``G``, ``CYL``, and ``TRK``. + Valid units of size are \ :literal:`k`\ , \ :literal:`m`\ , \ :literal:`g`\ , \ :literal:`cyl`\ , and \ :literal:`trk`\ . | **required**: False | **type**: str - | **default**: M - | **choices**: K, M, G, CYL, TRK + | **default**: m + | **choices**: k, m, g, cyl, trk record_format - The format of the data set. (e.g ``FB``) + The format of the data set. (e.g \ :literal:`FB`\ ) Choices are case-sensitive. - When *type=KSDS*, *type=ESDS*, *type=RRDS*, *type=LDS* or *type=ZFS* then *record_format=None*, these types do not have a default *record_format*. + When \ :emphasis:`type=ksds`\ , \ :emphasis:`type=esds`\ , \ :emphasis:`type=rrds`\ , \ :emphasis:`type=lds`\ or \ :emphasis:`type=zfs`\ then \ :emphasis:`record\_format=None`\ , these types do not have a default \ :emphasis:`record\_format`\ . | **required**: False | **type**: str - | **default**: FB - | **choices**: FB, VB, FBA, VBA, U, F + | **default**: fb + | **choices**: fb, vb, fba, vba, u, f sms_storage_class @@ -489,9 +489,9 @@ batch key_offset The key offset to use when creating a KSDS data set. - *key_offset* is required when *type=KSDS*. + \ :emphasis:`key\_offset`\ is required when \ :emphasis:`type=ksds`\ . - *key_offset* should only be provided when *type=KSDS* + \ :emphasis:`key\_offset`\ should only be provided when \ :emphasis:`type=ksds`\ | **required**: False | **type**: int @@ -500,28 +500,28 @@ batch key_length The key length to use when creating a KSDS data set. - *key_length* is required when *type=KSDS*. + \ :emphasis:`key\_length`\ is required when \ :emphasis:`type=ksds`\ . - *key_length* should only be provided when *type=KSDS* + \ :emphasis:`key\_length`\ should only be provided when \ :emphasis:`type=ksds`\ | **required**: False | **type**: int volumes - If cataloging a data set, *volumes* specifies the name of the volume(s) where the data set is located. + If cataloging a data set, \ :emphasis:`volumes`\ specifies the name of the volume(s) where the data set is located. - If creating a data set, *volumes* specifies the volume(s) where the data set should be created. + If creating a data set, \ :emphasis:`volumes`\ specifies the volume(s) where the data set should be created. - If *volumes* is provided when *state=present*, and the data set is not found in the catalog, `zos_data_set <./zos_data_set.html>`_ will check the volume table of contents to see if the data set exists. If the data set does exist, it will be cataloged. + If \ :emphasis:`volumes`\ is provided when \ :emphasis:`state=present`\ , and the data set is not found in the catalog, \ `zos\_data\_set <./zos_data_set.html>`__\ will check the volume table of contents to see if the data set exists. If the data set does exist, it will be cataloged. - If *volumes* is provided when *state=absent* and the data set is not found in the catalog, `zos_data_set <./zos_data_set.html>`_ will check the volume table of contents to see if the data set exists. If the data set does exist, it will be cataloged and promptly removed from the system. + If \ :emphasis:`volumes`\ is provided when \ :emphasis:`state=absent`\ and the data set is not found in the catalog, \ `zos\_data\_set <./zos_data_set.html>`__\ will check the volume table of contents to see if the data set exists. If the data set does exist, it will be cataloged and promptly removed from the system. - *volumes* is required when *state=cataloged*. + \ :emphasis:`volumes`\ is required when \ :emphasis:`state=cataloged`\ . Accepts a string when using a single volume and a list of strings when using multiple. @@ -530,12 +530,12 @@ batch replace - When *replace=True*, and *state=present*, existing data set matching *name* will be replaced. + When \ :emphasis:`replace=True`\ , and \ :emphasis:`state=present`\ , existing data set matching \ :emphasis:`name`\ will be replaced. Replacement is performed by deleting the existing data set and creating a new data set with the same name and desired attributes. Since the existing data set will be deleted prior to creating the new data set, no data set will exist if creation of the new data set fails. - If *replace=True*, all data in the original data set will be lost. + If \ :emphasis:`replace=True`\ , all data in the original data set will be lost. | **required**: False | **type**: bool @@ -547,9 +547,9 @@ batch This is helpful when a data set is being used in a long running process such as a started task and you are wanting to delete a member. - The *force=True* option enables sharing of data sets through the disposition *DISP=SHR*. + The \ :emphasis:`force=True`\ option enables sharing of data sets through the disposition \ :emphasis:`DISP=SHR`\ . - The *force=True* only applies to data set members when *state=absent* and *type=MEMBER*. + The \ :emphasis:`force=True`\ only applies to data set members when \ :emphasis:`state=absent`\ and \ :emphasis:`type=member`\ . | **required**: False | **type**: bool @@ -568,7 +568,7 @@ Examples - name: Create a sequential data set if it does not exist zos_data_set: name: someds.name.here - type: SEQ + type: seq state: present - name: Create a PDS data set if it does not exist @@ -576,27 +576,27 @@ Examples name: someds.name.here type: pds space_primary: 5 - space_type: M - record_format: FBA + space_type: m + record_format: fba record_length: 25 - name: Attempt to replace a data set if it exists zos_data_set: name: someds.name.here - type: PDS + type: pds space_primary: 5 - space_type: M - record_format: U + space_type: m + record_format: u record_length: 25 replace: yes - name: Attempt to replace a data set if it exists. If not found in the catalog, check if it is available on volume 222222, and catalog if found. zos_data_set: name: someds.name.here - type: PDS + type: pds space_primary: 5 - space_type: M - record_format: U + space_type: m + record_format: u record_length: 25 volumes: "222222" replace: yes @@ -604,19 +604,19 @@ Examples - name: Create an ESDS data set if it does not exist zos_data_set: name: someds.name.here - type: ESDS + type: esds - name: Create a KSDS data set if it does not exist zos_data_set: name: someds.name.here - type: KSDS + type: ksds key_length: 8 key_offset: 0 - name: Create an RRDS data set with storage class MYDATA if it does not exist zos_data_set: name: someds.name.here - type: RRDS + type: rrds sms_storage_class: mydata - name: Delete a data set if it exists @@ -633,43 +633,43 @@ Examples - name: Write a member to an existing PDS; replace if member exists zos_data_set: name: someds.name.here(mydata) - type: MEMBER + type: member replace: yes - name: Write a member to an existing PDS; do not replace if member exists zos_data_set: name: someds.name.here(mydata) - type: MEMBER + type: member - name: Remove a member from an existing PDS zos_data_set: name: someds.name.here(mydata) state: absent - type: MEMBER + type: member - name: Remove a member from an existing PDS/E by opening with disposition DISP=SHR zos_data_set: name: someds.name.here(mydata) state: absent - type: MEMBER + type: member force: yes - name: Create multiple partitioned data sets and add one or more members to each zos_data_set: batch: - name: someds.name.here1 - type: PDS + type: pds space_primary: 5 - space_type: M - record_format: FB + space_type: m + record_format: fb replace: yes - name: someds.name.here1(member1) - type: MEMBER + type: member - name: someds.name.here2(member1) - type: MEMBER + type: member replace: yes - name: someds.name.here2(member2) - type: MEMBER + type: member - name: Catalog a data set present on volume 222222 if it is uncataloged. zos_data_set: diff --git a/docs/source/modules/zos_encode.rst b/docs/source/modules/zos_encode.rst index 4c2294e24..68089a3a6 100644 --- a/docs/source/modules/zos_encode.rst +++ b/docs/source/modules/zos_encode.rst @@ -37,7 +37,7 @@ encoding from - The character set of the source *src*. + The character set of the source \ :emphasis:`src`\ . | **required**: False | **type**: str @@ -45,7 +45,7 @@ encoding to - The destination *dest* character set for the output to be written as. + The destination \ :emphasis:`dest`\ character set for the output to be written as. | **required**: False | **type**: str @@ -58,7 +58,7 @@ src The USS path or file must be an absolute pathname. - If *src* is a USS directory, all files will be encoded. + If \ :emphasis:`src`\ is a USS directory, all files will be encoded. | **required**: True | **type**: str @@ -67,11 +67,11 @@ src dest The location where the converted characters are output. - The destination *dest* can be a UNIX System Services (USS) file or path, PS (sequential data set), PDS, PDSE, member of a PDS or PDSE, or KSDS (VSAM data set). + The destination \ :emphasis:`dest`\ can be a UNIX System Services (USS) file or path, PS (sequential data set), PDS, PDSE, member of a PDS or PDSE, or KSDS (VSAM data set). - If the length of the PDSE member name used in *dest* is greater than 8 characters, the member name will be truncated when written out. + If the length of the PDSE member name used in \ :emphasis:`dest`\ is greater than 8 characters, the member name will be truncated when written out. - If *dest* is not specified, the *src* will be used as the destination and will overwrite the *src* with the character set in the option *to_encoding*. + If \ :emphasis:`dest`\ is not specified, the \ :emphasis:`src`\ will be used as the destination and will overwrite the \ :emphasis:`src`\ with the character set in the option \ :emphasis:`to\_encoding`\ . The USS file or path must be an absolute pathname. @@ -80,9 +80,9 @@ dest backup - Creates a backup file or backup data set for *dest*, including the timestamp information to ensure that you retrieve the original file. + Creates a backup file or backup data set for \ :emphasis:`dest`\ , including the timestamp information to ensure that you retrieve the original file. - *backup_name* can be used to specify a backup file name if *backup=true*. + \ :emphasis:`backup\_name`\ can be used to specify a backup file name if \ :emphasis:`backup=true`\ . | **required**: False | **type**: bool @@ -92,13 +92,13 @@ backup backup_name Specify the USS file name or data set name for the dest backup. - If dest is a USS file or path, *backup_name* must be a file or path name, and the USS path or file must be an absolute pathname. + If dest is a USS file or path, \ :emphasis:`backup\_name`\ must be a file or path name, and the USS path or file must be an absolute pathname. - If dest is an MVS data set, the *backup_name* must be an MVS data set name. + If dest is an MVS data set, the \ :emphasis:`backup\_name`\ must be an MVS data set name. - If *backup_name* is not provided, the default backup name will be used. The default backup name for a USS file or path will be the destination file or path name appended with a timestamp, e.g. /path/file_name.2020-04-23-08-32-29-bak.tar. If dest is an MVS data set, the default backup name will be a random name generated by IBM Z Open Automation Utilities. + If \ :emphasis:`backup\_name`\ is not provided, the default backup name will be used. The default backup name for a USS file or path will be the destination file or path name appended with a timestamp, e.g. /path/file\_name.2020-04-23-08-32-29-bak.tar. If dest is an MVS data set, the default backup name will be a random name generated by IBM Z Open Automation Utilities. - ``backup_name`` will be returned on either success or failure of module execution such that data can be retrieved. + \ :literal:`backup\_name`\ will be returned on either success or failure of module execution such that data can be retrieved. | **required**: False | **type**: str @@ -107,7 +107,7 @@ backup_name backup_compress Determines if backups to USS files or paths should be compressed. - *backup_compress* is only used when *backup=true*. + \ :emphasis:`backup\_compress`\ is only used when \ :emphasis:`backup=true`\ . | **required**: False | **type**: bool @@ -117,7 +117,7 @@ backup_compress tmp_hlq Override the default high level qualifier (HLQ) for temporary and backup datasets. - The default HLQ is the Ansible user used to execute the module and if that is not available, then the value ``TMPHLQ`` is used. + The default HLQ is the Ansible user used to execute the module and if that is not available, then the value \ :literal:`TMPHLQ`\ is used. | **required**: False | **type**: str @@ -265,7 +265,7 @@ Notes All data sets are always assumed to be cataloged. If an uncataloged data set needs to be encoded, it should be cataloged first. - For supported character sets used to encode data, refer to the `documentation <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/resources/character_set.html>`_. + For supported character sets used to encode data, refer to the \ `documentation <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/resources/character_set.html>`__\ . @@ -278,7 +278,7 @@ Return Values src - The location of the input characters identified in option *src*. + The location of the input characters identified in option \ :emphasis:`src`\ . | **returned**: always | **type**: str diff --git a/docs/source/modules/zos_fetch.rst b/docs/source/modules/zos_fetch.rst index 87a50a65a..7cdcabbd5 100644 --- a/docs/source/modules/zos_fetch.rst +++ b/docs/source/modules/zos_fetch.rst @@ -20,7 +20,7 @@ Synopsis - When fetching a sequential data set, the destination file name will be the same as the data set name. - When fetching a PDS or PDSE, the destination will be a directory with the same name as the PDS or PDSE. - When fetching a PDS/PDSE member, destination will be a file. -- Files that already exist at ``dest`` will be overwritten if they are different than ``src``. +- Files that already exist at \ :literal:`dest`\ will be overwritten if they are different than \ :literal:`src`\ . @@ -96,7 +96,7 @@ encoding from - The character set of the source *src*. + The character set of the source \ :emphasis:`src`\ . Supported character sets rely on the charset conversion utility (iconv) version; the most common character sets are supported. @@ -105,7 +105,7 @@ encoding to - The destination *dest* character set for the output to be written as. + The destination \ :emphasis:`dest`\ character set for the output to be written as. Supported character sets rely on the charset conversion utility (iconv) version; the most common character sets are supported. @@ -117,16 +117,16 @@ encoding tmp_hlq Override the default high level qualifier (HLQ) for temporary and backup datasets. - The default HLQ is the Ansible user used to execute the module and if that is not available, then the value ``TMPHLQ`` is used. + The default HLQ is the Ansible user used to execute the module and if that is not available, then the value \ :literal:`TMPHLQ`\ is used. | **required**: False | **type**: str ignore_sftp_stderr - During data transfer through sftp, the module fails if the sftp command directs any content to stderr. The user is able to override this behavior by setting this parameter to ``true``. By doing so, the module would essentially ignore the stderr stream produced by sftp and continue execution. + During data transfer through sftp, the module fails if the sftp command directs any content to stderr. The user is able to override this behavior by setting this parameter to \ :literal:`true`\ . By doing so, the module would essentially ignore the stderr stream produced by sftp and continue execution. - When Ansible verbosity is set to greater than 3, either through the command line interface (CLI) using **-vvvv** or through environment variables such as **verbosity = 4**, then this parameter will automatically be set to ``true``. + When Ansible verbosity is set to greater than 3, either through the command line interface (CLI) using \ :strong:`-vvvv`\ or through environment variables such as \ :strong:`verbosity = 4`\ , then this parameter will automatically be set to \ :literal:`true`\ . | **required**: False | **type**: bool @@ -196,13 +196,13 @@ Notes .. note:: When fetching PDSE and VSAM data sets, temporary storage will be used on the remote z/OS system. After the PDSE or VSAM data set is successfully transferred, the temporary storage will be deleted. The size of the temporary storage will correspond to the size of PDSE or VSAM data set being fetched. If module execution fails, the temporary storage will be deleted. - To ensure optimal performance, data integrity checks for PDS, PDSE, and members of PDS or PDSE are done through the transfer methods used. As a result, the module response will not include the ``checksum`` parameter. + To ensure optimal performance, data integrity checks for PDS, PDSE, and members of PDS or PDSE are done through the transfer methods used. As a result, the module response will not include the \ :literal:`checksum`\ parameter. All data sets are always assumed to be cataloged. If an uncataloged data set needs to be fetched, it should be cataloged first. Fetching HFS or ZFS type data sets is currently not supported. - For supported character sets used to encode data, refer to the `documentation <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/resources/character_set.html>`_. + For supported character sets used to encode data, refer to the \ `documentation <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/resources/character_set.html>`__\ . This module uses SFTP (Secure File Transfer Protocol) for the underlying transfer protocol; SCP (secure copy protocol) and Co:Z SFTP are not supported. In the case of Co:z SFTP, you can exempt the Ansible user id on z/OS from using Co:Z thus falling back to using standard SFTP. If the module detects SCP, it will temporarily use SFTP for transfers, if not available, the module will fail. @@ -263,7 +263,7 @@ data_set_type | **sample**: PDSE note - Notice of module failure when ``fail_on_missing`` is false. + Notice of module failure when \ :literal:`fail\_on\_missing`\ is false. | **returned**: failure and fail_on_missing=false | **type**: str diff --git a/docs/source/modules/zos_find.rst b/docs/source/modules/zos_find.rst index f195b2c2c..83082b5c0 100644 --- a/docs/source/modules/zos_find.rst +++ b/docs/source/modules/zos_find.rst @@ -18,7 +18,7 @@ Synopsis -------- - Return a list of data sets based on specific criteria. - Multiple criteria can be added (AND'd) together. -- The ``zos_find`` module can only find MVS data sets. Use the `find <https://docs.ansible.com/ansible/latest/modules/find_module.html>`_ module to find USS files. +- The \ :literal:`zos\_find`\ module can only find MVS data sets. Use the \ `find <https://docs.ansible.com/ansible/latest/modules/find_module.html>`__\ module to find USS files. @@ -44,9 +44,9 @@ age age_stamp Choose the age property against which to compare age. - ``creation_date`` is the date the data set was created and ``ref_date`` is the date the data set was last referenced. + \ :literal:`creation\_date`\ is the date the data set was created and \ :literal:`ref\_date`\ is the date the data set was last referenced. - ``ref_date`` is only applicable to sequential and partitioned data sets. + \ :literal:`ref\_date`\ is only applicable to sequential and partitioned data sets. | **required**: False | **type**: str @@ -80,7 +80,7 @@ patterns This parameter expects a list, which can be either comma separated or YAML. - If ``pds_patterns`` is provided, ``patterns`` must be member patterns. + If \ :literal:`pds\_patterns`\ is provided, \ :literal:`patterns`\ must be member patterns. When searching for members within a PDS/PDSE, pattern can be a regular expression. @@ -107,7 +107,7 @@ pds_patterns Required when searching for data set members. - Valid only for ``nonvsam`` resource types. Otherwise ignored. + Valid only for \ :literal:`nonvsam`\ resource types. Otherwise ignored. | **required**: False | **type**: list @@ -117,9 +117,9 @@ pds_patterns resource_type The type of resource to search. - ``nonvsam`` refers to one of SEQ, LIBRARY (PDSE), PDS, LARGE, BASIC, EXTREQ, or EXTPREF. + \ :literal:`nonvsam`\ refers to one of SEQ, LIBRARY (PDSE), PDS, LARGE, BASIC, EXTREQ, or EXTPREF. - ``cluster`` refers to a VSAM cluster. The ``data`` and ``index`` are the data and index components of a VSAM cluster. + \ :literal:`cluster`\ refers to a VSAM cluster. The \ :literal:`data`\ and \ :literal:`index`\ are the data and index components of a VSAM cluster. | **required**: False | **type**: str @@ -192,11 +192,11 @@ Notes ----- .. note:: - Only cataloged data sets will be searched. If an uncataloged data set needs to be searched, it should be cataloged first. The `zos_data_set <./zos_data_set.html>`_ module can be used to catalog uncataloged data sets. + Only cataloged data sets will be searched. If an uncataloged data set needs to be searched, it should be cataloged first. The \ `zos\_data\_set <./zos_data_set.html>`__\ module can be used to catalog uncataloged data sets. - The `zos_find <./zos_find.html>`_ module currently does not support wildcards for high level qualifiers. For example, ``SOME.*.DATA.SET`` is a valid pattern, but ``*.DATA.SET`` is not. + The \ `zos\_find <./zos_find.html>`__\ module currently does not support wildcards for high level qualifiers. For example, \ :literal:`SOME.\*.DATA.SET`\ is a valid pattern, but \ :literal:`\*.DATA.SET`\ is not. - If a data set pattern is specified as ``USER.*``, the matching data sets will have two name segments such as ``USER.ABC``, ``USER.XYZ`` etc. If a wildcard is specified as ``USER.*.ABC``, the matching data sets will have three name segments such as ``USER.XYZ.ABC``, ``USER.TEST.ABC`` etc. + If a data set pattern is specified as \ :literal:`USER.\*`\ , the matching data sets will have two name segments such as \ :literal:`USER.ABC`\ , \ :literal:`USER.XYZ`\ etc. If a wildcard is specified as \ :literal:`USER.\*.ABC`\ , the matching data sets will have three name segments such as \ :literal:`USER.XYZ.ABC`\ , \ :literal:`USER.TEST.ABC`\ etc. The time taken to execute the module is proportional to the number of data sets present on the system and how large the data sets are. diff --git a/docs/source/modules/zos_gather_facts.rst b/docs/source/modules/zos_gather_facts.rst index 0247ffd96..02a56fd23 100644 --- a/docs/source/modules/zos_gather_facts.rst +++ b/docs/source/modules/zos_gather_facts.rst @@ -17,8 +17,8 @@ zos_gather_facts -- Gather z/OS system facts. Synopsis -------- - Retrieve variables from target z/OS systems. -- Variables are added to the *ansible_facts* dictionary, available to playbooks. -- Apply filters on the *gather_subset* list to reduce the variables that are added to the *ansible_facts* dictionary. +- Variables are added to the \ :emphasis:`ansible\_facts`\ dictionary, available to playbooks. +- Apply filters on the \ :emphasis:`gather\_subset`\ list to reduce the variables that are added to the \ :emphasis:`ansible\_facts`\ dictionary. - Note, the module will fail fast if any unsupported options are provided. This is done to raise awareness of a failure in an automation setting. @@ -32,7 +32,7 @@ Parameters gather_subset If specified, it will collect facts that come under the specified subset (eg. ipl will return ipl facts). Specifying subsets is recommended to reduce time in gathering facts when the facts needed are in a specific subset. - The following subsets are available ``ipl``, ``cpu``, ``sys``, and ``iodf``. Depending on the version of ZOAU, additional subsets may be available. + The following subsets are available \ :literal:`ipl`\ , \ :literal:`cpu`\ , \ :literal:`sys`\ , and \ :literal:`iodf`\ . Depending on the version of ZOAU, additional subsets may be available. | **required**: False | **type**: list @@ -41,13 +41,13 @@ gather_subset filter - Filter out facts from the *ansible_facts* dictionary. + Filter out facts from the \ :emphasis:`ansible\_facts`\ dictionary. - Uses shell-style `fnmatch <https://docs.python.org/3/library/fnmatch.html>`_ pattern matching to filter out the collected facts. + Uses shell-style \ `fnmatch <https://docs.python.org/3/library/fnmatch.html>`__\ pattern matching to filter out the collected facts. - An empty list means 'no filter', same as providing '*'. + An empty list means 'no filter', same as providing '\*'. - Filtering is performed after the facts are gathered such that no compute is saved when filtering. Filtering only reduces the number of variables that are added to the *ansible_facts* dictionary. To restrict the facts that are collected, refer to the *gather_subset* parameter. + Filtering is performed after the facts are gathered such that no compute is saved when filtering. Filtering only reduces the number of variables that are added to the \ :emphasis:`ansible\_facts`\ dictionary. To restrict the facts that are collected, refer to the \ :emphasis:`gather\_subset`\ parameter. | **required**: False | **type**: list diff --git a/docs/source/modules/zos_job_output.rst b/docs/source/modules/zos_job_output.rst index efea6ea2a..59e37aeb9 100644 --- a/docs/source/modules/zos_job_output.rst +++ b/docs/source/modules/zos_job_output.rst @@ -18,9 +18,9 @@ Synopsis -------- - Display the z/OS job output for a given criteria (Job id/Job name/owner) with/without a data definition name as a filter. - At least provide a job id/job name/owner. -- The job id can be specific such as "STC02560", or one that uses a pattern such as "STC*" or "*". -- The job name can be specific such as "TCPIP", or one that uses a pattern such as "TCP*" or "*". -- The owner can be specific such as "IBMUSER", or one that uses a pattern like "*". +- The job id can be specific such as "STC02560", or one that uses a pattern such as "STC\*" or "\*". +- The job name can be specific such as "TCPIP", or one that uses a pattern such as "TCP\*" or "\*". +- The owner can be specific such as "IBMUSER", or one that uses a pattern like "\*". - If there is no ddname, or if ddname="?", output of all the ddnames under the given job will be displayed. @@ -32,21 +32,21 @@ Parameters job_id - The z/OS job ID of the job containing the spool file. (e.g "STC02560", "STC*") + The z/OS job ID of the job containing the spool file. (e.g "STC02560", "STC\*") | **required**: False | **type**: str job_name - The name of the batch job. (e.g "TCPIP", "C*") + The name of the batch job. (e.g "TCPIP", "C\*") | **required**: False | **type**: str owner - The owner who ran the job. (e.g "IBMUSER", "*") + The owner who ran the job. (e.g "IBMUSER", "\*") | **required**: False | **type**: str @@ -97,7 +97,7 @@ Return Values jobs - The output information for a list of jobs matching specified criteria. If no job status is found, this will return ret_code dictionary with parameter msg_txt = The job could not be found. + The output information for a list of jobs matching specified criteria. If no job status is found, this will return ret\_code dictionary with parameter msg\_txt = The job could not be found. | **returned**: success | **type**: list @@ -416,7 +416,7 @@ jobs | **sample**: CC 0000 msg_code - Return code extracted from the `msg` so that it can be evaluated. For example, ABEND(S0C4) would yield "S0C4". + Return code extracted from the \`msg\` so that it can be evaluated. For example, ABEND(S0C4) would yield "S0C4". | **type**: str | **sample**: S0C4 diff --git a/docs/source/modules/zos_job_query.rst b/docs/source/modules/zos_job_query.rst index ea320dfc3..e4da71341 100644 --- a/docs/source/modules/zos_job_query.rst +++ b/docs/source/modules/zos_job_query.rst @@ -17,8 +17,8 @@ zos_job_query -- Query job status Synopsis -------- - List z/OS job(s) and the current status of the job(s). -- Uses job_name to filter the jobs by the job name. -- Uses job_id to filter the jobs by the job identifier. +- Uses job\_name to filter the jobs by the job name. +- Uses job\_id to filter the jobs by the job identifier. - Uses owner to filter the jobs by the job owner. - Uses system to filter the jobs by system where the job is running (or ran) on. @@ -35,9 +35,9 @@ job_name A job name can be up to 8 characters long. - The *job_name* can contain include multiple wildcards. + The \ :emphasis:`job\_name`\ can contain include multiple wildcards. - The asterisk (`*`) wildcard will match zero or more specified characters. + The asterisk (\`\*\`) wildcard will match zero or more specified characters. | **required**: False | **type**: str @@ -56,13 +56,13 @@ owner job_id The job id that has been assigned to the job. - A job id must begin with `STC`, `JOB`, `TSU` and are followed by up to 5 digits. + A job id must begin with \`STC\`, \`JOB\`, \`TSU\` and are followed by up to 5 digits. - When a job id is greater than 99,999, the job id format will begin with `S`, `J`, `T` and are followed by 7 digits. + When a job id is greater than 99,999, the job id format will begin with \`S\`, \`J\`, \`T\` and are followed by 7 digits. - The *job_id* can contain include multiple wildcards. + The \ :emphasis:`job\_id`\ can contain include multiple wildcards. - The asterisk (`*`) wildcard will match zero or more specified characters. + The asterisk (\`\*\`) wildcard will match zero or more specified characters. | **required**: False | **type**: str @@ -122,7 +122,7 @@ changed | **type**: bool jobs - The output information for a list of jobs matching specified criteria. If no job status is found, this will return ret_code dictionary with parameter msg_txt = The job could not be found. + The output information for a list of jobs matching specified criteria. If no job status is found, this will return ret\_code dictionary with parameter msg\_txt = The job could not be found. | **returned**: success | **type**: list @@ -211,7 +211,7 @@ jobs | **sample**: CC 0000 msg_code - Return code extracted from the `msg` so that it can be evaluated. For example, ABEND(S0C4) would yield "S0C4". + Return code extracted from the \`msg\` so that it can be evaluated. For example, ABEND(S0C4) would yield "S0C4". | **type**: str | **sample**: S0C4 diff --git a/docs/source/modules/zos_job_submit.rst b/docs/source/modules/zos_job_submit.rst index 8f4dda61b..964ab8f4b 100644 --- a/docs/source/modules/zos_job_submit.rst +++ b/docs/source/modules/zos_job_submit.rst @@ -42,24 +42,24 @@ src location - The JCL location. Supported choices are ``DATA_SET``, ``USS`` or ``LOCAL``. + The JCL location. Supported choices are \ :literal:`data\_set`\ , \ :literal:`uss`\ or \ :literal:`local`\ . - DATA_SET can be a PDS, PDSE, or sequential data set. + \ :literal:`data\_set`\ can be a PDS, PDSE, or sequential data set. - USS means the JCL location is located in UNIX System Services (USS). + \ :literal:`uss`\ means the JCL location is located in UNIX System Services (USS). - LOCAL means locally to the ansible control node. + \ :literal:`local`\ means locally to the ansible control node. | **required**: False | **type**: str - | **default**: DATA_SET - | **choices**: DATA_SET, USS, LOCAL + | **default**: data_set + | **choices**: data_set, uss, local wait_time_s - Option *wait_time_s* is the total time that module `zos_job_submit <./zos_job_submit.html>`_ will wait for a submitted job to complete. The time begins when the module is executed on the managed node. + Option \ :emphasis:`wait\_time\_s`\ is the total time that module \ `zos\_job\_submit <./zos_job_submit.html>`__\ will wait for a submitted job to complete. The time begins when the module is executed on the managed node. - *wait_time_s* is measured in seconds and must be a value greater than 0 and less than 86400. + \ :emphasis:`wait\_time\_s`\ is measured in seconds and must be a value greater than 0 and less than 86400. | **required**: False | **type**: int @@ -84,11 +84,11 @@ return_output volume - The volume serial (VOLSER)is where the data set resides. The option is required only when the data set is not cataloged on the system. + The volume serial (VOLSER) is where the data set resides. The option is required only when the data set is not cataloged on the system. - When configured, the `zos_job_submit <./zos_job_submit.html>`_ will try to catalog the data set for the volume serial. If it is not able to, the module will fail. + When configured, the \ `zos\_job\_submit <./zos_job_submit.html>`__\ will try to catalog the data set for the volume serial. If it is not able to, the module will fail. - Ignored for *location=USS* and *location=LOCAL*. + Ignored for \ :emphasis:`location=uss`\ and \ :emphasis:`location=local`\ . | **required**: False | **type**: str @@ -97,7 +97,7 @@ volume encoding Specifies which encoding the local JCL file should be converted from and to, before submitting the job. - This option is only supported for when *location=LOCAL*. + This option is only supported for when \ :emphasis:`location=local`\ . If this parameter is not provided, and the z/OS systems default encoding can not be identified, the JCL file will be converted from UTF-8 to IBM-1047 by default, otherwise the module will detect the z/OS system encoding. @@ -129,13 +129,13 @@ encoding use_template - Whether the module should treat ``src`` as a Jinja2 template and render it before continuing with the rest of the module. + Whether the module should treat \ :literal:`src`\ as a Jinja2 template and render it before continuing with the rest of the module. - Only valid when ``src`` is a local file or directory. + Only valid when \ :literal:`src`\ is a local file or directory. - All variables defined in inventory files, vars files and the playbook will be passed to the template engine, as well as `Ansible special variables <https://docs.ansible.com/ansible/latest/reference_appendices/special_variables.html#special-variables>`_, such as ``playbook_dir``, ``ansible_version``, etc. + All variables defined in inventory files, vars files and the playbook will be passed to the template engine, as well as \ `Ansible special variables <https://docs.ansible.com/ansible/latest/reference_appendices/special_variables.html#special-variables>`__\ , such as \ :literal:`playbook\_dir`\ , \ :literal:`ansible\_version`\ , etc. - If variables defined in different scopes share the same name, Ansible will apply variable precedence to them. You can see the complete precedence order `in Ansible's documentation <https://docs.ansible.com/ansible/latest/playbook_guide/playbooks_variables.html#understanding-variable-precedence>`_ + If variables defined in different scopes share the same name, Ansible will apply variable precedence to them. You can see the complete precedence order \ `in Ansible's documentation <https://docs.ansible.com/ansible/latest/playbook_guide/playbooks_variables.html#understanding-variable-precedence>`__\ | **required**: False | **type**: bool @@ -145,9 +145,9 @@ use_template template_parameters Options to set the way Jinja2 will process templates. - Jinja2 already sets defaults for the markers it uses, you can find more information at its `official documentation <https://jinja.palletsprojects.com/en/latest/templates/>`_. + Jinja2 already sets defaults for the markers it uses, you can find more information at its \ `official documentation <https://jinja.palletsprojects.com/en/latest/templates/>`__\ . - These options are ignored unless ``use_template`` is true. + These options are ignored unless \ :literal:`use\_template`\ is true. | **required**: False | **type**: dict @@ -226,7 +226,7 @@ template_parameters trim_blocks Whether Jinja2 should remove the first newline after a block is removed. - Setting this option to ``False`` will result in newlines being added to the rendered template. This could create invalid code when working with JCL templates or empty records in destination data sets. + Setting this option to \ :literal:`False`\ will result in newlines being added to the rendered template. This could create invalid code when working with JCL templates or empty records in destination data sets. | **required**: False | **type**: bool @@ -267,22 +267,22 @@ Examples .. code-block:: yaml+jinja - - name: Submit JCL in a PDSE member + - name: Submit JCL in a PDSE member. zos_job_submit: src: HLQ.DATA.LLQ(SAMPLE) - location: DATA_SET + location: data_set register: response - name: Submit JCL in USS with no DDs in the output. zos_job_submit: src: /u/tester/demo/sample.jcl - location: USS + location: uss return_output: false - name: Convert local JCL to IBM-037 and submit the job. zos_job_submit: src: /Users/maxy/ansible-playbooks/provision/sample.jcl - location: LOCAL + location: local encoding: from: ISO8859-1 to: IBM-037 @@ -290,25 +290,25 @@ Examples - name: Submit JCL in an uncataloged PDSE on volume P2SS01. zos_job_submit: src: HLQ.DATA.LLQ(SAMPLE) - location: DATA_SET + location: data_set volume: P2SS01 - name: Submit a long running PDS job and wait up to 30 seconds for completion. zos_job_submit: src: HLQ.DATA.LLQ(LONGRUN) - location: DATA_SET + location: data_set wait_time_s: 30 - name: Submit a long running PDS job and wait up to 30 seconds for completion. zos_job_submit: src: HLQ.DATA.LLQ(LONGRUN) - location: DATA_SET + location: data_set wait_time_s: 30 - name: Submit JCL and set the max return code the module should fail on to 16. zos_job_submit: src: HLQ.DATA.LLQ - location: DATA_SET + location: data_set max_rc: 16 @@ -318,9 +318,9 @@ Notes ----- .. note:: - For supported character sets used to encode data, refer to the `documentation <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/resources/character_set.html>`_. + For supported character sets used to encode data, refer to the \ `documentation <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/resources/character_set.html>`__\ . - This module uses `zos_copy <./zos_copy.html>`_ to copy local scripts to the remote machine which uses SFTP (Secure File Transfer Protocol) for the underlying transfer protocol; SCP (secure copy protocol) and Co:Z SFTP are not supported. In the case of Co:z SFTP, you can exempt the Ansible user id on z/OS from using Co:Z thus falling back to using standard SFTP. If the module detects SCP, it will temporarily use SFTP for transfers, if not available, the module will fail. + This module uses \ `zos\_copy <./zos_copy.html>`__\ to copy local scripts to the remote machine which uses SFTP (Secure File Transfer Protocol) for the underlying transfer protocol; SCP (secure copy protocol) and Co:Z SFTP are not supported. In the case of Co:z SFTP, you can exempt the Ansible user id on z/OS from using Co:Z thus falling back to using standard SFTP. If the module detects SCP, it will temporarily use SFTP for transfers, if not available, the module will fail. @@ -333,7 +333,7 @@ Return Values jobs - List of jobs output. If no job status is found, this will return an empty ret_code with msg_txt explanation. + List of jobs output. If no job status is found, this will return an empty ret\_code with msg\_txt explanation. | **returned**: success | **type**: list @@ -680,25 +680,27 @@ jobs msg Job status resulting from the job submission. - Job status `ABEND` indicates the job ended abnormally. + Job status \`ABEND\` indicates the job ended abnormally. - Job status `AC` indicates the job is active, often a started task or job taking long. + Job status \`AC\` indicates the job is active, often a started task or job taking long. - Job status `CAB` indicates a converter abend. + Job status \`CAB\` indicates a converter abend. - Job status `CANCELED` indicates the job was canceled. + Job status \`CANCELED\` indicates the job was canceled. - Job status `CNV` indicates a converter error. + Job status \`CNV\` indicates a converter error. - Job status `FLU` indicates the job was flushed. + Job status \`FLU\` indicates the job was flushed. - Job status `JCLERR` or `JCL ERROR` indicates the JCL has an error. + Job status \`JCLERR\` or \`JCL ERROR\` indicates the JCL has an error. - Job status `SEC` or `SEC ERROR` indicates the job as encountered a security error. + Job status \`SEC\` or \`SEC ERROR\` indicates the job as encountered a security error. - Job status `SYS` indicates a system failure. + Job status \`SYS\` indicates a system failure. - Job status `?` indicates status can not be determined. + Job status \`?\` indicates status can not be determined. + + Jobs where status can not be determined will result in None (NULL). | **type**: str | **sample**: AC @@ -706,18 +708,22 @@ jobs msg_code The return code from the submitted job as a string. + Jobs which have no return code will result in None (NULL), such is the case of a job that errors or is active. + | **type**: str msg_txt Returns additional information related to the submitted job. + Jobs which have no additional information will result in None (NULL). + | **type**: str | **sample**: The job JOB00551 was run with special job processing TYPRUN=SCAN. This will result in no completion, return code or job steps and changed will be false. code The return code converted to an integer value when available. - Jobs which have no return code will return NULL, such is the case of a job that errors or is active. + Jobs which have no return code will result in None (NULL), such is the case of a job that errors or is active. | **type**: int @@ -788,10 +794,3 @@ jobs | **sample**: IEBGENER -message - This option is being deprecated - - | **returned**: success - | **type**: str - | **sample**: Submit JCL operation succeeded. - diff --git a/docs/source/modules/zos_lineinfile.rst b/docs/source/modules/zos_lineinfile.rst index f7005017e..983e5ca0b 100644 --- a/docs/source/modules/zos_lineinfile.rst +++ b/docs/source/modules/zos_lineinfile.rst @@ -40,13 +40,13 @@ src regexp The regular expression to look for in every line of the USS file or data set. - For ``state=present``, the pattern to replace if found. Only the last line found will be replaced. + For \ :literal:`state=present`\ , the pattern to replace if found. Only the last line found will be replaced. - For ``state=absent``, the pattern of the line(s) to remove. + For \ :literal:`state=absent`\ , the pattern of the line(s) to remove. - If the regular expression is not matched, the line will be added to the USS file or data set in keeping with ``insertbefore`` or ``insertafter`` settings. + If the regular expression is not matched, the line will be added to the USS file or data set in keeping with \ :literal:`insertbefore`\ or \ :literal:`insertafter`\ settings. - When modifying a line the regexp should typically match both the initial state of the line as well as its state after replacement by ``line`` to ensure idempotence. + When modifying a line the regexp should typically match both the initial state of the line as well as its state after replacement by \ :literal:`line`\ to ensure idempotence. | **required**: False | **type**: str @@ -64,22 +64,22 @@ state line The line to insert/replace into the USS file or data set. - Required for ``state=present``. + Required for \ :literal:`state=present`\ . - If ``backrefs`` is set, may contain backreferences that will get expanded with the ``regexp`` capture groups if the regexp matches. + If \ :literal:`backrefs`\ is set, may contain backreferences that will get expanded with the \ :literal:`regexp`\ capture groups if the regexp matches. | **required**: False | **type**: str backrefs - Used with ``state=present``. + Used with \ :literal:`state=present`\ . - If set, ``line`` can contain backreferences (both positional and named) that will get populated if the ``regexp`` matches. + If set, \ :literal:`line`\ can contain backreferences (both positional and named) that will get populated if the \ :literal:`regexp`\ matches. - This parameter changes the operation of the module slightly; ``insertbefore`` and ``insertafter`` will be ignored, and if the ``regexp`` does not match anywhere in the USS file or data set, the USS file or data set will be left unchanged. + This parameter changes the operation of the module slightly; \ :literal:`insertbefore`\ and \ :literal:`insertafter`\ will be ignored, and if the \ :literal:`regexp`\ does not match anywhere in the USS file or data set, the USS file or data set will be left unchanged. - If the ``regexp`` does match, the last matching line will be replaced by the expanded line parameter. + If the \ :literal:`regexp`\ does match, the last matching line will be replaced by the expanded line parameter. | **required**: False | **type**: bool @@ -87,23 +87,23 @@ backrefs insertafter - Used with ``state=present``. + Used with \ :literal:`state=present`\ . If specified, the line will be inserted after the last match of specified regular expression. If the first match is required, use(firstmatch=yes). - A special value is available; ``EOF`` for inserting the line at the end of the USS file or data set. + A special value is available; \ :literal:`EOF`\ for inserting the line at the end of the USS file or data set. If the specified regular expression has no matches, EOF will be used instead. - If ``insertbefore`` is set, default value ``EOF`` will be ignored. + If \ :literal:`insertbefore`\ is set, default value \ :literal:`EOF`\ will be ignored. - If regular expressions are passed to both ``regexp`` and ``insertafter``, ``insertafter`` is only honored if no match for ``regexp`` is found. + If regular expressions are passed to both \ :literal:`regexp`\ and \ :literal:`insertafter`\ , \ :literal:`insertafter`\ is only honored if no match for \ :literal:`regexp`\ is found. - May not be used with ``backrefs`` or ``insertbefore``. + May not be used with \ :literal:`backrefs`\ or \ :literal:`insertbefore`\ . - Choices are EOF or '*regex*' + Choices are EOF or '\*regex\*' Default is EOF @@ -112,30 +112,30 @@ insertafter insertbefore - Used with ``state=present``. + Used with \ :literal:`state=present`\ . If specified, the line will be inserted before the last match of specified regular expression. - If the first match is required, use ``firstmatch=yes``. + If the first match is required, use \ :literal:`firstmatch=yes`\ . - A value is available; ``BOF`` for inserting the line at the beginning of the USS file or data set. + A value is available; \ :literal:`BOF`\ for inserting the line at the beginning of the USS file or data set. If the specified regular expression has no matches, the line will be inserted at the end of the USS file or data set. - If regular expressions are passed to both ``regexp`` and ``insertbefore``, ``insertbefore`` is only honored if no match for ``regexp`` is found. + If regular expressions are passed to both \ :literal:`regexp`\ and \ :literal:`insertbefore`\ , \ :literal:`insertbefore`\ is only honored if no match for \ :literal:`regexp`\ is found. - May not be used with ``backrefs`` or ``insertafter``. + May not be used with \ :literal:`backrefs`\ or \ :literal:`insertafter`\ . - Choices are BOF or '*regex*' + Choices are BOF or '\*regex\*' | **required**: False | **type**: str backup - Creates a backup file or backup data set for *src*, including the timestamp information to ensure that you retrieve the original file. + Creates a backup file or backup data set for \ :emphasis:`src`\ , including the timestamp information to ensure that you retrieve the original file. - *backup_name* can be used to specify a backup file name if *backup=true*. + \ :emphasis:`backup\_name`\ can be used to specify a backup file name if \ :emphasis:`backup=true`\ . The backup file name will be return on either success or failure of module execution such that data can be retrieved. @@ -147,11 +147,11 @@ backup backup_name Specify the USS file name or data set name for the destination backup. - If the source *src* is a USS file or path, the backup_name must be a file or path name, and the USS file or path must be an absolute path name. + If the source \ :emphasis:`src`\ is a USS file or path, the backup\_name must be a file or path name, and the USS file or path must be an absolute path name. - If the source is an MVS data set, the backup_name must be an MVS data set name. + If the source is an MVS data set, the backup\_name must be an MVS data set name. - If the backup_name is not provided, the default backup_name will be used. If the source is a USS file or path, the name of the backup file will be the source file or path name appended with a timestamp, e.g. ``/path/file_name.2020-04-23-08-32-29-bak.tar``. + If the backup\_name is not provided, the default backup\_name will be used. If the source is a USS file or path, the name of the backup file will be the source file or path name appended with a timestamp, e.g. \ :literal:`/path/file\_name.2020-04-23-08-32-29-bak.tar`\ . If the source is an MVS data set, it will be a data set with a random name generated by calling the ZOAU API. The MVS backup data set recovery can be done by renaming it. @@ -162,16 +162,16 @@ backup_name tmp_hlq Override the default high level qualifier (HLQ) for temporary and backup datasets. - The default HLQ is the Ansible user used to execute the module and if that is not available, then the value ``TMPHLQ`` is used. + The default HLQ is the Ansible user used to execute the module and if that is not available, then the value \ :literal:`TMPHLQ`\ is used. | **required**: False | **type**: str firstmatch - Used with ``insertafter`` or ``insertbefore``. + Used with \ :literal:`insertafter`\ or \ :literal:`insertbefore`\ . - If set, ``insertafter`` and ``insertbefore`` will work with the first line that matches the given regular expression. + If set, \ :literal:`insertafter`\ and \ :literal:`insertbefore`\ will work with the first line that matches the given regular expression. | **required**: False | **type**: bool @@ -179,7 +179,7 @@ firstmatch encoding - The character set of the source *src*. `zos_lineinfile <./zos_lineinfile.html>`_ requires to be provided with correct encoding to read the content of USS file or data set. If this parameter is not provided, this module assumes that USS file or data set is encoded in IBM-1047. + The character set of the source \ :emphasis:`src`\ . \ `zos\_lineinfile <./zos_lineinfile.html>`__\ requires to be provided with correct encoding to read the content of USS file or data set. If this parameter is not provided, this module assumes that USS file or data set is encoded in IBM-1047. Supported character sets rely on the charset conversion utility (iconv) version; the most common character sets are supported. @@ -193,7 +193,7 @@ force This is helpful when a data set is being used in a long running process such as a started task and you are wanting to update or read. - The ``force`` option enables sharing of data sets through the disposition *DISP=SHR*. + The \ :literal:`force`\ option enables sharing of data sets through the disposition \ :emphasis:`DISP=SHR`\ . | **required**: False | **type**: bool @@ -260,7 +260,7 @@ Notes All data sets are always assumed to be cataloged. If an uncataloged data set needs to be encoded, it should be cataloged first. - For supported character sets used to encode data, refer to the `documentation <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/resources/character_set.html>`_. + For supported character sets used to encode data, refer to the \ `documentation <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/resources/character_set.html>`__\ . @@ -273,7 +273,7 @@ Return Values changed - Indicates if the source was modified. Value of 1 represents `true`, otherwise `false`. + Indicates if the source was modified. Value of 1 represents \`true\`, otherwise \`false\`. | **returned**: success | **type**: bool diff --git a/docs/source/modules/zos_mount.rst b/docs/source/modules/zos_mount.rst index 42e8a8ea6..9a30c5c91 100644 --- a/docs/source/modules/zos_mount.rst +++ b/docs/source/modules/zos_mount.rst @@ -16,9 +16,9 @@ zos_mount -- Mount a z/OS file system. Synopsis -------- -- The module `zos_mount <./zos_mount.html>`_ can manage mount operations for a z/OS UNIX System Services (USS) file system data set. -- The *src* data set must be unique and a Fully Qualified Name (FQN). -- The *path* will be created if needed. +- The module \ `zos\_mount <./zos_mount.html>`__\ can manage mount operations for a z/OS UNIX System Services (USS) file system data set. +- The \ :emphasis:`src`\ data set must be unique and a Fully Qualified Name (FQN). +- The \ :emphasis:`path`\ will be created if needed. @@ -31,7 +31,7 @@ Parameters path The absolute path name onto which the file system is to be mounted. - The *path* is case sensitive and must be less than or equal 1023 characters long. + The \ :emphasis:`path`\ is case sensitive and must be less than or equal 1023 characters long. | **required**: True | **type**: str @@ -40,9 +40,9 @@ path src The name of the file system to be added to the file system hierarchy. - The file system *src* must be a data set of type *fs_type*. + The file system \ :emphasis:`src`\ must be a data set of type \ :emphasis:`fs\_type`\ . - The file system *src* data set must be cataloged. + The file system \ :emphasis:`src`\ data set must be cataloged. | **required**: True | **type**: str @@ -53,35 +53,35 @@ fs_type The physical file systems data set format to perform the logical mount. - The *fs_type* is required to be uppercase. + The \ :emphasis:`fs\_type`\ is required to be lowercase. | **required**: True | **type**: str - | **choices**: HFS, ZFS, NFS, TFS + | **choices**: hfs, zfs, nfs, tfs state The desired status of the described mount (choice). - If *state=mounted* and *src* are not in use, the module will add the file system entry to the parmlib member *persistent/data_store* if not present. The *path* will be updated, the device will be mounted and the module will complete successfully with *changed=True*. + If \ :emphasis:`state=mounted`\ and \ :emphasis:`src`\ are not in use, the module will add the file system entry to the parmlib member \ :emphasis:`persistent/data\_store`\ if not present. The \ :emphasis:`path`\ will be updated, the device will be mounted and the module will complete successfully with \ :emphasis:`changed=True`\ . - If *state=mounted* and *src* are in use, the module will add the file system entry to the parmlib member *persistent/data_store* if not present. The *path* will not be updated, the device will not be mounted and the module will complete successfully with *changed=False*. + If \ :emphasis:`state=mounted`\ and \ :emphasis:`src`\ are in use, the module will add the file system entry to the parmlib member \ :emphasis:`persistent/data\_store`\ if not present. The \ :emphasis:`path`\ will not be updated, the device will not be mounted and the module will complete successfully with \ :emphasis:`changed=False`\ . - If *state=unmounted* and *src* are in use, the module will **not** add the file system entry to the parmlib member *persistent/data_store*. The device will be unmounted and the module will complete successfully with *changed=True*. + If \ :emphasis:`state=unmounted`\ and \ :emphasis:`src`\ are in use, the module will \ :strong:`not`\ add the file system entry to the parmlib member \ :emphasis:`persistent/data\_store`\ . The device will be unmounted and the module will complete successfully with \ :emphasis:`changed=True`\ . - If *state=unmounted* and *src* are not in use, the module will **not** add the file system entry to parmlib member *persistent/data_store*.The device will remain unchanged and the module will complete with *changed=False*. + If \ :emphasis:`state=unmounted`\ and \ :emphasis:`src`\ are not in use, the module will \ :strong:`not`\ add the file system entry to parmlib member \ :emphasis:`persistent/data\_store`\ .The device will remain unchanged and the module will complete with \ :emphasis:`changed=False`\ . - If *state=present*, the module will add the file system entry to the provided parmlib member *persistent/data_store* if not present. The module will complete successfully with *changed=True*. + If \ :emphasis:`state=present`\ , the module will add the file system entry to the provided parmlib member \ :emphasis:`persistent/data\_store`\ if not present. The module will complete successfully with \ :emphasis:`changed=True`\ . - If *state=absent*, the module will remove the file system entry to the provided parmlib member *persistent/data_store* if present. The module will complete successfully with *changed=True*. + If \ :emphasis:`state=absent`\ , the module will remove the file system entry to the provided parmlib member \ :emphasis:`persistent/data\_store`\ if present. The module will complete successfully with \ :emphasis:`changed=True`\ . - If *state=remounted*, the module will **not** add the file system entry to parmlib member *persistent/data_store*. The device will be unmounted and mounted, the module will complete successfully with *changed=True*. + If \ :emphasis:`state=remounted`\ , the module will \ :strong:`not`\ add the file system entry to parmlib member \ :emphasis:`persistent/data\_store`\ . The device will be unmounted and mounted, the module will complete successfully with \ :emphasis:`changed=True`\ . | **required**: False @@ -91,7 +91,7 @@ state persistent - Add or remove mount command entries to provided *data_store* + Add or remove mount command entries to provided \ :emphasis:`data\_store`\ | **required**: False | **type**: dict @@ -105,9 +105,9 @@ persistent backup - Creates a backup file or backup data set for *data_store*, including the timestamp information to ensure that you retrieve the original parameters defined in *data_store*. + Creates a backup file or backup data set for \ :emphasis:`data\_store`\ , including the timestamp information to ensure that you retrieve the original parameters defined in \ :emphasis:`data\_store`\ . - *backup_name* can be used to specify a backup file name if *backup=true*. + \ :emphasis:`backup\_name`\ can be used to specify a backup file name if \ :emphasis:`backup=true`\ . The backup file name will be returned on either success or failure of module execution such that data can be retrieved. @@ -119,11 +119,11 @@ persistent backup_name Specify the USS file name or data set name for the destination backup. - If the source *data_store* is a USS file or path, the *backup_name* name can be relative or absolute for file or path name. + If the source \ :emphasis:`data\_store`\ is a USS file or path, the \ :emphasis:`backup\_name`\ name can be relative or absolute for file or path name. - If the source is an MVS data set, the backup_name must be an MVS data set name. + If the source is an MVS data set, the backup\_name must be an MVS data set name. - If the backup_name is not provided, the default *backup_name* will be used. If the source is a USS file or path, the name of the backup file will be the source file or path name appended with a timestamp. For example, ``/path/file_name.2020-04-23-08-32-29-bak.tar``. + If the backup\_name is not provided, the default \ :emphasis:`backup\_name`\ will be used. If the source is a USS file or path, the name of the backup file will be the source file or path name appended with a timestamp. For example, \ :literal:`/path/file\_name.2020-04-23-08-32-29-bak.tar`\ . If the source is an MVS data set, it will be a data set with a random name generated by calling the ZOAU API. The MVS backup data set recovery can be done by renaming it. @@ -132,9 +132,9 @@ persistent comment - If provided, this is used as a comment that surrounds the command in the *persistent/data_store* + If provided, this is used as a comment that surrounds the command in the \ :emphasis:`persistent/data\_store`\ - Comments are used to encapsulate the *persistent/data_store* entry such that they can easily be understood and located. + Comments are used to encapsulate the \ :emphasis:`persistent/data\_store`\ entry such that they can easily be understood and located. | **required**: False | **type**: list @@ -145,29 +145,29 @@ persistent unmount_opts Describes how the unmount will be performed. - For more on coded character set identifiers, review the IBM documentation topic **UNMOUNT - Remove a file system from the file hierarchy**. + For more on coded character set identifiers, review the IBM documentation topic \ :strong:`UNMOUNT - Remove a file system from the file hierarchy`\ . | **required**: False | **type**: str - | **default**: NORMAL - | **choices**: DRAIN, FORCE, IMMEDIATE, NORMAL, REMOUNT, RESET + | **default**: normal + | **choices**: drain, force, immediate, normal, remount, reset mount_opts Options available to the mount. - If *mount_opts=RO* on a mounted/remount, mount is performed read-only. + If \ :emphasis:`mount\_opts=ro`\ on a mounted/remount, mount is performed read-only. - If *mount_opts=SAME* and (unmount_opts=REMOUNT), mount is opened in the same mode as previously opened. + If \ :emphasis:`mount\_opts=same`\ and (unmount\_opts=remount), mount is opened in the same mode as previously opened. - If *mount_opts=NOWAIT*, mount is performed asynchronously. + If \ :emphasis:`mount\_opts=nowait`\ , mount is performed asynchronously. - If *mount_opts=NOSECURITY*, security checks are not enforced for files in this file system. + If \ :emphasis:`mount\_opts=nosecurity`\ , security checks are not enforced for files in this file system. | **required**: False | **type**: str - | **default**: RW - | **choices**: RO, RW, SAME, NOWAIT, NOSECURITY + | **default**: rw + | **choices**: ro, rw, same, nowait, nosecurity src_params @@ -184,27 +184,27 @@ tag_untagged When the file system is unmounted, the tags are lost. - If *tag_untagged=NOTEXT* none of the untagged files in the file system are automatically converted during file reading and writing. + If \ :emphasis:`tag\_untagged=notext`\ none of the untagged files in the file system are automatically converted during file reading and writing. - If *tag_untagged=TEXT* each untagged file is implicitly marked as containing pure text data that can be converted. + If \ :emphasis:`tag\_untagged=text`\ each untagged file is implicitly marked as containing pure text data that can be converted. - If this flag is used, use of tag_ccsid is encouraged. + If this flag is used, use of tag\_ccsid is encouraged. | **required**: False | **type**: str - | **choices**: TEXT, NOTEXT + | **choices**: text, notext tag_ccsid Identifies the coded character set identifier (ccsid) to be implicitly set for the untagged file. - For more on coded character set identifiers, review the IBM documentation topic **Coded Character Sets**. + For more on coded character set identifiers, review the IBM documentation topic \ :strong:`Coded Character Sets`\ . Specified as a decimal value from 0 to 65535. However, when TEXT is specified, the value must be between 0 and 65535. The value is not checked as being valid and the corresponding code page is not checked as being installed. - Required when *tag_untagged=TEXT*. + Required when \ :emphasis:`tag\_untagged=TEXT`\ . | **required**: False | **type**: int @@ -214,10 +214,10 @@ allow_uid Specifies whether the SETUID and SETGID mode bits on an executable in this file system are considered. Also determines whether the APF extended attribute or the Program Control extended attribute is honored. - If *allow_uid=True* the SETUID and SETGID mode bits are considered when a program in this file system is run. SETUID is the default. + If \ :emphasis:`allow\_uid=True`\ the SETUID and SETGID mode bits are considered when a program in this file system is run. SETUID is the default. - If *allow_uid=False* the SETUID and SETGID mode bits are ignored when a program in this file system is run. The program runs as though the SETUID and SETGID mode bits were not set. Also, if you specify the NOSETUID option on MOUNT, the APF extended attribute and the Program Control Bit values are ignored. + If \ :emphasis:`allow\_uid=False`\ the SETUID and SETGID mode bits are ignored when a program in this file system is run. The program runs as though the SETUID and SETGID mode bits were not set. Also, if you specify the NOSETUID option on MOUNT, the APF extended attribute and the Program Control Bit values are ignored. | **required**: False @@ -226,10 +226,10 @@ allow_uid sysname - For systems participating in shared file system, *sysname* specifies the particular system on which a mount should be performed. This system will then become the owner of the file system mounted. This system must be IPLed with SYSPLEX(YES). + For systems participating in shared file system, \ :emphasis:`sysname`\ specifies the particular system on which a mount should be performed. This system will then become the owner of the file system mounted. This system must be IPLed with SYSPLEX(YES). - *sysname* is the name of a system participating in shared file system. The name must be 1-8 characters long; the valid characters are A-Z, 0-9, $, @, and #. + \ :emphasis:`sysname`\ is the name of a system participating in shared file system. The name must be 1-8 characters long; the valid characters are A-Z, 0-9, $, @, and #. | **required**: False @@ -240,23 +240,23 @@ automove These parameters apply only in a sysplex where systems are exploiting the shared file system capability. They specify what happens to the ownership of a file system when a shutdown, PFS termination, dead system takeover, or file system move occurs. The default setting is AUTOMOVE where the file system will be randomly moved to another system (no system list used). - *automove=AUTOMOVE* indicates that ownership of the file system can be automatically moved to another system participating in a shared file system. + \ :emphasis:`automove=automove`\ indicates that ownership of the file system can be automatically moved to another system participating in a shared file system. - *automove=NOAUTOMOVE* prevents movement of the file system's ownership in some situations. + \ :emphasis:`automove=noautomove`\ prevents movement of the file system's ownership in some situations. - *automove=UNMOUNT* allows the file system to be unmounted in some situations. + \ :emphasis:`automove=unmount`\ allows the file system to be unmounted in some situations. | **required**: False | **type**: str - | **default**: AUTOMOVE - | **choices**: AUTOMOVE, NOAUTOMOVE, UNMOUNT + | **default**: automove + | **choices**: automove, noautomove, unmount automove_list - If(automove=AUTOMOVE), this option will be checked. + If(automove=automove), this option will be checked. This specifies the list of servers to include or exclude as destinations. @@ -275,7 +275,7 @@ automove_list tmp_hlq Override the default high level qualifier (HLQ) for temporary and backup datasets. - The default HLQ is the Ansible user used to execute the module and if that is not available, then the value ``TMPHLQ`` is used. + The default HLQ is the Ansible user used to execute the module and if that is not available, then the value \ :literal:`TMPHLQ`\ is used. | **required**: False | **type**: str @@ -293,14 +293,14 @@ Examples zos_mount: src: SOMEUSER.VVV.ZFS path: /u/omvsadm/core - fs_type: ZFS + fs_type: zfs state: mounted - name: Unmount a filesystem. zos_mount: src: SOMEUSER.VVV.ZFS path: /u/omvsadm/core - fs_type: ZFS + fs_type: zfs state: unmounted unmount_opts: REMOUNT opts: same @@ -309,7 +309,7 @@ Examples zos_mount: src: SOMEUSER.VVV.ZFS path: /u/omvsadm/core - fs_type: ZFS + fs_type: zfs state: mounted mount_opts: RO @@ -317,7 +317,7 @@ Examples zos_mount: src: SOMEUSER.VVV.ZFS path: /u/omvsadm/core - fs_type: ZFS + fs_type: zfs state: mounted persistent: data_store: SYS1.PARMLIB(BPXPRMAA) @@ -327,7 +327,7 @@ Examples zos_mount: src: SOMEUSER.VVV.ZFS path: /u/omvsadm/core - fs_type: ZFS + fs_type: zfs state: mounted persistent: data_store: SYS1.PARMLIB(BPXPRMAA) @@ -339,7 +339,7 @@ Examples zos_mount: src: SOMEUSER.VVV.ZFS path: /u/omvsadm/core - fs_type: ZFS + fs_type: zfs state: mounted allow_uid: no @@ -347,7 +347,7 @@ Examples zos_mount: src: SOMEUSER.VVV.ZFS path: /u/omvsadm/core - fs_type: ZFS + fs_type: zfs state: mounted opts: nowait @@ -355,7 +355,7 @@ Examples zos_mount: src: SOMEUSER.VVV.ZFS path: /u/omvsadm/core - fs_type: ZFS + fs_type: zfs state: mounted mount_opts: NOSECURITY @@ -363,7 +363,7 @@ Examples zos_mount: src: SOMEUSER.VVV.ZFS path: /u/omvsadm/core - fs_type: ZFS + fs_type: zfs state: mounted automove: AUTOMOVE automove_list: I,DEV1,DEV2,DEV3,DEV9 @@ -372,7 +372,7 @@ Examples zos_mount: src: SOMEUSER.VVV.ZFS path: /u/omvsadm/core - fs_type: ZFS + fs_type: zfs state: mounted automove: AUTOMOVE automove_list: EXCLUDE,DEV4,DEV5,DEV6,DEV7 @@ -389,7 +389,7 @@ Notes If an uncataloged data set needs to be fetched, it should be cataloged first. - Uncataloged data sets can be cataloged using the `zos_data_set <./zos_data_set.html>`_ module. + Uncataloged data sets can be cataloged using the \ `zos\_data\_set <./zos_data_set.html>`__\ module. @@ -467,7 +467,7 @@ persistent | **sample**: SYS1.FILESYS(PRMAABAK) comment - The text that was used in markers around the *Persistent/data_store* entry. + The text that was used in markers around the \ :emphasis:`Persistent/data\_store`\ entry. | **returned**: always | **type**: list @@ -529,7 +529,7 @@ allow_uid true sysname - *sysname* specifies the particular system on which a mount should be performed. + \ :emphasis:`sysname`\ specifies the particular system on which a mount should be performed. | **returned**: if Non-None | **type**: str diff --git a/docs/source/modules/zos_mvs_raw.rst b/docs/source/modules/zos_mvs_raw.rst index 3ebedadd5..c0551786e 100644 --- a/docs/source/modules/zos_mvs_raw.rst +++ b/docs/source/modules/zos_mvs_raw.rst @@ -45,9 +45,9 @@ parm auth Determines whether this program should run with authorized privileges. - If *auth=true*, the program runs as APF authorized. + If \ :emphasis:`auth=true`\ , the program runs as APF authorized. - If *auth=false*, the program runs as unauthorized. + If \ :emphasis:`auth=false`\ , the program runs as unauthorized. | **required**: False | **type**: bool @@ -57,7 +57,7 @@ auth verbose Determines if verbose output should be returned from the underlying utility used by this module. - When *verbose=true* verbose output is returned on module failure. + When \ :emphasis:`verbose=true`\ verbose output is returned on module failure. | **required**: False | **type**: bool @@ -67,19 +67,19 @@ verbose dds The input data source. - *dds* supports 6 types of sources + \ :emphasis:`dds`\ supports 6 types of sources - 1. *dd_data_set* for data set files. + 1. \ :emphasis:`dd\_data\_set`\ for data set files. - 2. *dd_unix* for UNIX files. + 2. \ :emphasis:`dd\_unix`\ for UNIX files. - 3. *dd_input* for in-stream data set. + 3. \ :emphasis:`dd\_input`\ for in-stream data set. - 4. *dd_dummy* for no content input. + 4. \ :emphasis:`dd\_dummy`\ for no content input. - 5. *dd_concat* for a data set concatenation. + 5. \ :emphasis:`dd\_concat`\ for a data set concatenation. - 6. *dds* supports any combination of source types. + 6. \ :emphasis:`dds`\ supports any combination of source types. | **required**: False | **type**: list @@ -89,7 +89,7 @@ dds dd_data_set Specify a data set. - *dd_data_set* can reference an existing data set or be used to define a new data set to be created during execution. + \ :emphasis:`dd\_data\_set`\ can reference an existing data set or be used to define a new data set to be created during execution. | **required**: False | **type**: dict @@ -110,7 +110,7 @@ dds type - The data set type. Only required when *disposition=new*. + The data set type. Only required when \ :emphasis:`disposition=new`\ . Maps to DSNTYPE on z/OS. @@ -120,7 +120,7 @@ dds disposition - *disposition* indicates the status of a data set. + \ :emphasis:`disposition`\ indicates the status of a data set. Defaults to shr. @@ -130,31 +130,31 @@ dds disposition_normal - *disposition_normal* indicates what to do with the data set after a normal termination of the program. + \ :emphasis:`disposition\_normal`\ indicates what to do with the data set after a normal termination of the program. | **required**: False | **type**: str - | **choices**: delete, keep, catlg, catalog, uncatlg, uncatalog + | **choices**: delete, keep, catalog, uncatalog disposition_abnormal - *disposition_abnormal* indicates what to do with the data set after an abnormal termination of the program. + \ :emphasis:`disposition\_abnormal`\ indicates what to do with the data set after an abnormal termination of the program. | **required**: False | **type**: str - | **choices**: delete, keep, catlg, catalog, uncatlg, uncatalog + | **choices**: delete, keep, catalog, uncatalog reuse - Determines if a data set should be reused if *disposition=NEW* and if a data set with a matching name already exists. + Determines if a data set should be reused if \ :emphasis:`disposition=new`\ and if a data set with a matching name already exists. - If *reuse=true*, *disposition* will be automatically switched to ``SHR``. + If \ :emphasis:`reuse=true`\ , \ :emphasis:`disposition`\ will be automatically switched to \ :literal:`SHR`\ . - If *reuse=false*, and a data set with a matching name already exists, allocation will fail. + If \ :emphasis:`reuse=false`\ , and a data set with a matching name already exists, allocation will fail. - Mutually exclusive with *replace*. + Mutually exclusive with \ :emphasis:`replace`\ . - *reuse* is only considered when *disposition=NEW* + \ :emphasis:`reuse`\ is only considered when \ :emphasis:`disposition=new`\ | **required**: False | **type**: bool @@ -162,17 +162,17 @@ dds replace - Determines if a data set should be replaced if *disposition=NEW* and a data set with a matching name already exists. + Determines if a data set should be replaced if \ :emphasis:`disposition=new`\ and a data set with a matching name already exists. - If *replace=true*, the original data set will be deleted, and a new data set created. + If \ :emphasis:`replace=true`\ , the original data set will be deleted, and a new data set created. - If *replace=false*, and a data set with a matching name already exists, allocation will fail. + If \ :emphasis:`replace=false`\ , and a data set with a matching name already exists, allocation will fail. - Mutually exclusive with *reuse*. + Mutually exclusive with \ :emphasis:`reuse`\ . - *replace* is only considered when *disposition=NEW* + \ :emphasis:`replace`\ is only considered when \ :emphasis:`disposition=new`\ - *replace* will result in loss of all data in the original data set unless *backup* is specified. + \ :emphasis:`replace`\ will result in loss of all data in the original data set unless \ :emphasis:`backup`\ is specified. | **required**: False | **type**: bool @@ -180,9 +180,9 @@ dds backup - Determines if a backup should be made of an existing data set when *disposition=NEW*, *replace=true*, and a data set with the desired name is found. + Determines if a backup should be made of an existing data set when \ :emphasis:`disposition=new`\ , \ :emphasis:`replace=true`\ , and a data set with the desired name is found. - *backup* is only used when *replace=true*. + \ :emphasis:`backup`\ is only used when \ :emphasis:`replace=true`\ . | **required**: False | **type**: bool @@ -190,7 +190,7 @@ dds space_type - The unit of measurement to use when allocating space for a new data set using *space_primary* and *space_secondary*. + The unit of measurement to use when allocating space for a new data set using \ :emphasis:`space\_primary`\ and \ :emphasis:`space\_secondary`\ . | **required**: False | **type**: str @@ -200,9 +200,9 @@ dds space_primary The primary amount of space to allocate for a new data set. - The value provided to *space_type* is used as the unit of space for the allocation. + The value provided to \ :emphasis:`space\_type`\ is used as the unit of space for the allocation. - Not applicable when *space_type=blklgth* or *space_type=reclgth*. + Not applicable when \ :emphasis:`space\_type=blklgth`\ or \ :emphasis:`space\_type=reclgth`\ . | **required**: False | **type**: int @@ -211,9 +211,9 @@ dds space_secondary When primary allocation of space is filled, secondary space will be allocated with the provided size as needed. - The value provided to *space_type* is used as the unit of space for the allocation. + The value provided to \ :emphasis:`space\_type`\ is used as the unit of space for the allocation. - Not applicable when *space_type=blklgth* or *space_type=reclgth*. + Not applicable when \ :emphasis:`space\_type=blklgth`\ or \ :emphasis:`space\_type=reclgth`\ . | **required**: False | **type**: int @@ -231,7 +231,7 @@ dds sms_management_class The desired management class for a new SMS-managed data set. - *sms_management_class* is ignored if specified for an existing data set. + \ :emphasis:`sms\_management\_class`\ is ignored if specified for an existing data set. All values must be between 1-8 alpha-numeric characters. @@ -242,7 +242,7 @@ dds sms_storage_class The desired storage class for a new SMS-managed data set. - *sms_storage_class* is ignored if specified for an existing data set. + \ :emphasis:`sms\_storage\_class`\ is ignored if specified for an existing data set. All values must be between 1-8 alpha-numeric characters. @@ -253,7 +253,7 @@ dds sms_data_class The desired data class for a new SMS-managed data set. - *sms_data_class* is ignored if specified for an existing data set. + \ :emphasis:`sms\_data\_class`\ is ignored if specified for an existing data set. All values must be between 1-8 alpha-numeric characters. @@ -264,7 +264,7 @@ dds block_size The maximum length of a block in bytes. - Default is dependent on *record_format* + Default is dependent on \ :emphasis:`record\_format`\ | **required**: False | **type**: int @@ -280,9 +280,9 @@ dds key_label The label for the encryption key used by the system to encrypt the data set. - *key_label* is the public name of a protected encryption key in the ICSF key repository. + \ :emphasis:`key\_label`\ is the public name of a protected encryption key in the ICSF key repository. - *key_label* should only be provided when creating an extended format data set. + \ :emphasis:`key\_label`\ should only be provided when creating an extended format data set. Maps to DSKEYLBL on z/OS. @@ -304,7 +304,7 @@ dds Key label must have a private key associated with it. - *label* can be a maximum of 64 characters. + \ :emphasis:`label`\ can be a maximum of 64 characters. Maps to KEYLAB1 on z/OS. @@ -313,9 +313,9 @@ dds encoding - How the label for the key encrypting key specified by *label* is encoded by the Encryption Key Manager. + How the label for the key encrypting key specified by \ :emphasis:`label`\ is encoded by the Encryption Key Manager. - *encoding* can either be set to ``L`` for label encoding, or ``H`` for hash encoding. + \ :emphasis:`encoding`\ can either be set to \ :literal:`l`\ for label encoding, or \ :literal:`h`\ for hash encoding. Maps to KEYCD1 on z/OS. @@ -339,7 +339,7 @@ dds Key label must have a private key associated with it. - *label* can be a maximum of 64 characters. + \ :emphasis:`label`\ can be a maximum of 64 characters. Maps to KEYLAB2 on z/OS. @@ -348,9 +348,9 @@ dds encoding - How the label for the key encrypting key specified by *label* is encoded by the Encryption Key Manager. + How the label for the key encrypting key specified by \ :emphasis:`label`\ is encoded by the Encryption Key Manager. - *encoding* can either be set to ``L`` for label encoding, or ``H`` for hash encoding. + \ :emphasis:`encoding`\ can either be set to \ :literal:`l`\ for label encoding, or \ :literal:`h`\ for hash encoding. Maps to KEYCD2 on z/OS. @@ -363,7 +363,7 @@ dds key_length The length of the keys used in a new data set. - If using SMS, setting *key_length* overrides the key length defined in the SMS data class of the data set. + If using SMS, setting \ :emphasis:`key\_length`\ overrides the key length defined in the SMS data class of the data set. Valid values are (0-255 non-vsam), (1-255 vsam). @@ -376,20 +376,20 @@ dds The first byte of a logical record is position 0. - Provide *key_offset* only for VSAM key-sequenced data sets. + Provide \ :emphasis:`key\_offset`\ only for VSAM key-sequenced data sets. | **required**: False | **type**: int record_length - The logical record length. (e.g ``80``). + The logical record length. (e.g \ :literal:`80`\ ). For variable data sets, the length must include the 4-byte prefix area. Defaults vary depending on format: If FB/FBA 80, if VB/VBA 137, if U 0. - Valid values are (1-32760 for non-vsam, 1-32761 for vsam). + Valid values are (1-32760 for non-VSAM, 1-32761 for VSAM). Maps to LRECL on z/OS. @@ -417,11 +417,11 @@ dds type The type of the content to be returned. - ``text`` means return content in encoding specified by *response_encoding*. + \ :literal:`text`\ means return content in encoding specified by \ :emphasis:`response\_encoding`\ . - *src_encoding* and *response_encoding* are only used when *type=text*. + \ :emphasis:`src\_encoding`\ and \ :emphasis:`response\_encoding`\ are only used when \ :emphasis:`type=text`\ . - ``base64`` means return content in binary mode. + \ :literal:`base64`\ means return content in binary mode. | **required**: True | **type**: str @@ -463,7 +463,7 @@ dds path The path to an existing UNIX file. - Or provide the path to an new created UNIX file when *status_group=OCREAT*. + Or provide the path to an new created UNIX file when \ :emphasis:`status\_group=OCREAT`\ . The provided path must be absolute. @@ -488,7 +488,7 @@ dds mode - The file access attributes when the UNIX file is created specified in *path*. + The file access attributes when the UNIX file is created specified in \ :emphasis:`path`\ . Specify the mode as an octal number similarly to chmod. @@ -499,47 +499,47 @@ dds status_group - The status for the UNIX file specified in *path*. + The status for the UNIX file specified in \ :emphasis:`path`\ . - If you do not specify a value for the *status_group* parameter, the module assumes that the pathname exists, searches for it, and fails the module if the pathname does not exist. + If you do not specify a value for the \ :emphasis:`status\_group`\ parameter, the module assumes that the pathname exists, searches for it, and fails the module if the pathname does not exist. Maps to PATHOPTS status group file options on z/OS. You can specify up to 6 choices. - *oappend* sets the file offset to the end of the file before each write, so that data is written at the end of the file. + \ :emphasis:`oappend`\ sets the file offset to the end of the file before each write, so that data is written at the end of the file. - *ocreat* specifies that if the file does not exist, the system is to create it. If a directory specified in the pathname does not exist, a new directory and a new file are not created. If the file already exists and *oexcl* was not specified, the system allows the program to use the existing file. If the file already exists and *oexcl* was specified, the system fails the allocation and the job step. + \ :emphasis:`ocreat`\ specifies that if the file does not exist, the system is to create it. If a directory specified in the pathname does not exist, a new directory and a new file are not created. If the file already exists and \ :emphasis:`oexcl`\ was not specified, the system allows the program to use the existing file. If the file already exists and \ :emphasis:`oexcl`\ was specified, the system fails the allocation and the job step. - *oexcl* specifies that if the file does not exist, the system is to create it. If the file already exists, the system fails the allocation and the job step. The system ignores *oexcl* if *ocreat* is not also specified. + \ :emphasis:`oexcl`\ specifies that if the file does not exist, the system is to create it. If the file already exists, the system fails the allocation and the job step. The system ignores \ :emphasis:`oexcl`\ if \ :emphasis:`ocreat`\ is not also specified. - *onoctty* specifies that if the PATH parameter identifies a terminal device, opening of the file does not make the terminal device the controlling terminal for the process. + \ :emphasis:`onoctty`\ specifies that if the PATH parameter identifies a terminal device, opening of the file does not make the terminal device the controlling terminal for the process. - *ononblock* specifies the following, depending on the type of file + \ :emphasis:`ononblock`\ specifies the following, depending on the type of file For a FIFO special file - 1. With *ononblock* specified and *ordonly* access, an open function for reading-only returns without delay. + 1. With \ :emphasis:`ononblock`\ specified and \ :emphasis:`ordonly`\ access, an open function for reading-only returns without delay. - 2. With *ononblock* not specified and *ordonly* access, an open function for reading-only blocks (waits) until a process opens the file for writing. + 2. With \ :emphasis:`ononblock`\ not specified and \ :emphasis:`ordonly`\ access, an open function for reading-only blocks (waits) until a process opens the file for writing. - 3. With *ononblock* specified and *owronly* access, an open function for writing-only returns an error if no process currently has the file open for reading. + 3. With \ :emphasis:`ononblock`\ specified and \ :emphasis:`owronly`\ access, an open function for writing-only returns an error if no process currently has the file open for reading. - 4. With *ononblock* not specified and *owronly* access, an open function for writing-only blocks (waits) until a process opens the file for reading. + 4. With \ :emphasis:`ononblock`\ not specified and \ :emphasis:`owronly`\ access, an open function for writing-only blocks (waits) until a process opens the file for reading. 5. For a character special file that supports nonblocking open - 6. If *ononblock* is specified, an open function returns without blocking (waiting) until the device is ready or available. Device response depends on the type of device. + 6. If \ :emphasis:`ononblock`\ is specified, an open function returns without blocking (waiting) until the device is ready or available. Device response depends on the type of device. - 7. If *ononblock* is not specified, an open function blocks (waits) until the device is ready or available. + 7. If \ :emphasis:`ononblock`\ is not specified, an open function blocks (waits) until the device is ready or available. - *ononblock* has no effect on other file types. + \ :emphasis:`ononblock`\ has no effect on other file types. - *osync* specifies that the system is to move data from buffer storage to permanent storage before returning control from a callable service that performs a write. + \ :emphasis:`osync`\ specifies that the system is to move data from buffer storage to permanent storage before returning control from a callable service that performs a write. - *otrunc* specifies that the system is to truncate the file length to zero if all the following are true: the file specified exists, the file is a regular file, and the file successfully opened with *ordwr* or *owronly*. + \ :emphasis:`otrunc`\ specifies that the system is to truncate the file length to zero if all the following are true: the file specified exists, the file is a regular file, and the file successfully opened with \ :emphasis:`ordwr`\ or \ :emphasis:`owronly`\ . - When *otrunc* is specified, the system does not change the mode and owner. *otrunc* has no effect on FIFO special files or character special files. + When \ :emphasis:`otrunc`\ is specified, the system does not change the mode and owner. \ :emphasis:`otrunc`\ has no effect on FIFO special files or character special files. | **required**: False | **type**: list @@ -548,7 +548,7 @@ dds access_group - The kind of access to request for the UNIX file specified in *path*. + The kind of access to request for the UNIX file specified in \ :emphasis:`path`\ . | **required**: False | **type**: str @@ -556,7 +556,7 @@ dds file_data_type - The type of data that is (or will be) stored in the file specified in *path*. + The type of data that is (or will be) stored in the file specified in \ :emphasis:`path`\ . Maps to FILEDATA on z/OS. @@ -569,7 +569,7 @@ dds block_size The block size, in bytes, for the UNIX file. - Default is dependent on *record_format* + Default is dependent on \ :emphasis:`record\_format`\ | **required**: False | **type**: int @@ -578,7 +578,7 @@ dds record_length The logical record length for the UNIX file. - *record_length* is required in situations where the data will be processed as records and therefore, *record_length*, *block_size* and *record_format* need to be supplied since a UNIX file would normally be treated as a stream of bytes. + \ :emphasis:`record\_length`\ is required in situations where the data will be processed as records and therefore, \ :emphasis:`record\_length`\ , \ :emphasis:`block\_size`\ and \ :emphasis:`record\_format`\ need to be supplied since a UNIX file would normally be treated as a stream of bytes. Maps to LRECL on z/OS. @@ -589,7 +589,7 @@ dds record_format The record format for the UNIX file. - *record_format* is required in situations where the data will be processed as records and therefore, *record_length*, *block_size* and *record_format* need to be supplied since a UNIX file would normally be treated as a stream of bytes. + \ :emphasis:`record\_format`\ is required in situations where the data will be processed as records and therefore, \ :emphasis:`record\_length`\ , \ :emphasis:`block\_size`\ and \ :emphasis:`record\_format`\ need to be supplied since a UNIX file would normally be treated as a stream of bytes. | **required**: False | **type**: str @@ -608,11 +608,11 @@ dds type The type of the content to be returned. - ``text`` means return content in encoding specified by *response_encoding*. + \ :literal:`text`\ means return content in encoding specified by \ :emphasis:`response\_encoding`\ . - *src_encoding* and *response_encoding* are only used when *type=text*. + \ :emphasis:`src\_encoding`\ and \ :emphasis:`response\_encoding`\ are only used when \ :emphasis:`type=text`\ . - ``base64`` means return content in binary mode. + \ :literal:`base64`\ means return content in binary mode. | **required**: True | **type**: str @@ -638,7 +638,7 @@ dds dd_input - *dd_input* is used to specify an in-stream data set. + \ :emphasis:`dd\_input`\ is used to specify an in-stream data set. Input will be saved to a temporary data set with a record length of 80. @@ -656,15 +656,15 @@ dds content The input contents for the DD. - *dd_input* supports single or multiple lines of input. + \ :emphasis:`dd\_input`\ supports single or multiple lines of input. Multi-line input can be provided as a multi-line string or a list of strings with 1 line per list item. If a list of strings is provided, newlines will be added to each of the lines when used as input. - If a multi-line string is provided, use the proper block scalar style. YAML supports both `literal <https://yaml.org/spec/1.2.2/#literal-style>`_ and `folded <https://yaml.org/spec/1.2.2/#line-folding>`_ scalars. It is recommended to use the literal style indicator "|" with a block indentation indicator, for example; *content: | 2* is a literal block style indicator with a 2 space indentation, the entire block will be indented and newlines preserved. The block indentation range is 1 - 9. While generally unnecessary, YAML does support block `chomping <https://yaml.org/spec/1.2.2/#8112-block-chomping-indicator>`_ indicators "+" and "-" as well. + If a multi-line string is provided, use the proper block scalar style. YAML supports both \ `literal <https://yaml.org/spec/1.2.2/#literal-style>`__\ and \ `folded <https://yaml.org/spec/1.2.2/#line-folding>`__\ scalars. It is recommended to use the literal style indicator "|" with a block indentation indicator, for example; \ :emphasis:`content: | 2`\ is a literal block style indicator with a 2 space indentation, the entire block will be indented and newlines preserved. The block indentation range is 1 - 9. While generally unnecessary, YAML does support block \ `chomping <https://yaml.org/spec/1.2.2/#8112-block-chomping-indicator>`__\ indicators "+" and "-" as well. - When using the *content* option for instream-data, the module will ensure that all lines contain a blank in columns 1 and 2 and add blanks when not present while retaining a maximum length of 80 columns for any line. This is true for all *content* types; string, list of strings and when using a YAML block indicator. + When using the \ :emphasis:`content`\ option for instream-data, the module will ensure that all lines contain a blank in columns 1 and 2 and add blanks when not present while retaining a maximum length of 80 columns for any line. This is true for all \ :emphasis:`content`\ types; string, list of strings and when using a YAML block indicator. | **required**: True | **type**: raw @@ -682,11 +682,11 @@ dds type The type of the content to be returned. - ``text`` means return content in encoding specified by *response_encoding*. + \ :literal:`text`\ means return content in encoding specified by \ :emphasis:`response\_encoding`\ . - *src_encoding* and *response_encoding* are only used when *type=text*. + \ :emphasis:`src\_encoding`\ and \ :emphasis:`response\_encoding`\ are only used when \ :emphasis:`type=text`\ . - ``base64`` means return content in binary mode. + \ :literal:`base64`\ means return content in binary mode. | **required**: True | **type**: str @@ -696,7 +696,7 @@ dds src_encoding The encoding of the data set on the z/OS system. - for *dd_input*, *src_encoding* should generally not need to be changed. + for \ :emphasis:`dd\_input`\ , \ :emphasis:`src\_encoding`\ should generally not need to be changed. | **required**: False | **type**: str @@ -714,7 +714,7 @@ dds dd_output - Use *dd_output* to specify - Content sent to the DD should be returned to the user. + Use \ :emphasis:`dd\_output`\ to specify - Content sent to the DD should be returned to the user. | **required**: False | **type**: dict @@ -739,11 +739,11 @@ dds type The type of the content to be returned. - ``text`` means return content in encoding specified by *response_encoding*. + \ :literal:`text`\ means return content in encoding specified by \ :emphasis:`response\_encoding`\ . - *src_encoding* and *response_encoding* are only used when *type=text*. + \ :emphasis:`src\_encoding`\ and \ :emphasis:`response\_encoding`\ are only used when \ :emphasis:`type=text`\ . - ``base64`` means return content in binary mode. + \ :literal:`base64`\ means return content in binary mode. | **required**: True | **type**: str @@ -753,7 +753,7 @@ dds src_encoding The encoding of the data set on the z/OS system. - for *dd_input*, *src_encoding* should generally not need to be changed. + for \ :emphasis:`dd\_input`\ , \ :emphasis:`src\_encoding`\ should generally not need to be changed. | **required**: False | **type**: str @@ -771,9 +771,9 @@ dds dd_dummy - Use *dd_dummy* to specify - No device or external storage space is to be allocated to the data set. - No disposition processing is to be performed on the data set. + Use \ :emphasis:`dd\_dummy`\ to specify - No device or external storage space is to be allocated to the data set. - No disposition processing is to be performed on the data set. - *dd_dummy* accepts no content input. + \ :emphasis:`dd\_dummy`\ accepts no content input. | **required**: False | **type**: dict @@ -788,7 +788,7 @@ dds dd_vio - *dd_vio* is used to handle temporary data sets. + \ :emphasis:`dd\_vio`\ is used to handle temporary data sets. VIO data sets reside in the paging space; but, to the problem program and the access method, the data sets appear to reside on a direct access storage device. @@ -807,7 +807,7 @@ dds dd_concat - *dd_concat* is used to specify a data set concatenation. + \ :emphasis:`dd\_concat`\ is used to specify a data set concatenation. | **required**: False | **type**: dict @@ -821,7 +821,7 @@ dds dds - A list of DD statements, which can contain any of the following types: *dd_data_set*, *dd_unix*, and *dd_input*. + A list of DD statements, which can contain any of the following types: \ :emphasis:`dd\_data\_set`\ , \ :emphasis:`dd\_unix`\ , and \ :emphasis:`dd\_input`\ . | **required**: False | **type**: list @@ -831,7 +831,7 @@ dds dd_data_set Specify a data set. - *dd_data_set* can reference an existing data set. The data set referenced with ``data_set_name`` must be allocated before the module `zos_mvs_raw <./zos_mvs_raw.html>`_ is run, you can use `zos_data_set <./zos_data_set.html>`_ to allocate a data set. + \ :emphasis:`dd\_data\_set`\ can reference an existing data set. The data set referenced with \ :literal:`data\_set\_name`\ must be allocated before the module \ `zos\_mvs\_raw <./zos_mvs_raw.html>`__\ is run, you can use \ `zos\_data\_set <./zos_data_set.html>`__\ to allocate a data set. | **required**: False | **type**: dict @@ -845,7 +845,7 @@ dds type - The data set type. Only required when *disposition=new*. + The data set type. Only required when \ :emphasis:`disposition=new`\ . Maps to DSNTYPE on z/OS. @@ -855,7 +855,7 @@ dds disposition - *disposition* indicates the status of a data set. + \ :emphasis:`disposition`\ indicates the status of a data set. Defaults to shr. @@ -865,31 +865,31 @@ dds disposition_normal - *disposition_normal* indicates what to do with the data set after normal termination of the program. + \ :emphasis:`disposition\_normal`\ indicates what to do with the data set after normal termination of the program. | **required**: False | **type**: str - | **choices**: delete, keep, catlg, catalog, uncatlg, uncatalog + | **choices**: delete, keep, catalog, uncatalog disposition_abnormal - *disposition_abnormal* indicates what to do with the data set after abnormal termination of the program. + \ :emphasis:`disposition\_abnormal`\ indicates what to do with the data set after abnormal termination of the program. | **required**: False | **type**: str - | **choices**: delete, keep, catlg, catalog, uncatlg, uncatalog + | **choices**: delete, keep, catalog, uncatalog reuse - Determines if data set should be reused if *disposition=NEW* and a data set with matching name already exists. + Determines if data set should be reused if \ :emphasis:`disposition=new`\ and a data set with matching name already exists. - If *reuse=true*, *disposition* will be automatically switched to ``SHR``. + If \ :emphasis:`reuse=true`\ , \ :emphasis:`disposition`\ will be automatically switched to \ :literal:`SHR`\ . - If *reuse=false*, and a data set with a matching name already exists, allocation will fail. + If \ :emphasis:`reuse=false`\ , and a data set with a matching name already exists, allocation will fail. - Mutually exclusive with *replace*. + Mutually exclusive with \ :emphasis:`replace`\ . - *reuse* is only considered when *disposition=NEW* + \ :emphasis:`reuse`\ is only considered when \ :emphasis:`disposition=new`\ | **required**: False | **type**: bool @@ -897,17 +897,17 @@ dds replace - Determines if data set should be replaced if *disposition=NEW* and a data set with matching name already exists. + Determines if data set should be replaced if \ :emphasis:`disposition=new`\ and a data set with matching name already exists. - If *replace=true*, the original data set will be deleted, and a new data set created. + If \ :emphasis:`replace=true`\ , the original data set will be deleted, and a new data set created. - If *replace=false*, and a data set with a matching name already exists, allocation will fail. + If \ :emphasis:`replace=false`\ , and a data set with a matching name already exists, allocation will fail. - Mutually exclusive with *reuse*. + Mutually exclusive with \ :emphasis:`reuse`\ . - *replace* is only considered when *disposition=NEW* + \ :emphasis:`replace`\ is only considered when \ :emphasis:`disposition=new`\ - *replace* will result in loss of all data in the original data set unless *backup* is specified. + \ :emphasis:`replace`\ will result in loss of all data in the original data set unless \ :emphasis:`backup`\ is specified. | **required**: False | **type**: bool @@ -915,9 +915,9 @@ dds backup - Determines if a backup should be made of existing data set when *disposition=NEW*, *replace=true*, and a data set with the desired name is found. + Determines if a backup should be made of existing data set when \ :emphasis:`disposition=new`\ , \ :emphasis:`replace=true`\ , and a data set with the desired name is found. - *backup* is only used when *replace=true*. + \ :emphasis:`backup`\ is only used when \ :emphasis:`replace=true`\ . | **required**: False | **type**: bool @@ -925,7 +925,7 @@ dds space_type - The unit of measurement to use when allocating space for a new data set using *space_primary* and *space_secondary*. + The unit of measurement to use when allocating space for a new data set using \ :emphasis:`space\_primary`\ and \ :emphasis:`space\_secondary`\ . | **required**: False | **type**: str @@ -935,9 +935,9 @@ dds space_primary The primary amount of space to allocate for a new data set. - The value provided to *space_type* is used as the unit of space for the allocation. + The value provided to \ :emphasis:`space\_type`\ is used as the unit of space for the allocation. - Not applicable when *space_type=blklgth* or *space_type=reclgth*. + Not applicable when \ :emphasis:`space\_type=blklgth`\ or \ :emphasis:`space\_type=reclgth`\ . | **required**: False | **type**: int @@ -946,9 +946,9 @@ dds space_secondary When primary allocation of space is filled, secondary space will be allocated with the provided size as needed. - The value provided to *space_type* is used as the unit of space for the allocation. + The value provided to \ :emphasis:`space\_type`\ is used as the unit of space for the allocation. - Not applicable when *space_type=blklgth* or *space_type=reclgth*. + Not applicable when \ :emphasis:`space\_type=blklgth`\ or \ :emphasis:`space\_type=reclgth`\ . | **required**: False | **type**: int @@ -966,7 +966,7 @@ dds sms_management_class The desired management class for a new SMS-managed data set. - *sms_management_class* is ignored if specified for an existing data set. + \ :emphasis:`sms\_management\_class`\ is ignored if specified for an existing data set. All values must be between 1-8 alpha-numeric characters. @@ -977,7 +977,7 @@ dds sms_storage_class The desired storage class for a new SMS-managed data set. - *sms_storage_class* is ignored if specified for an existing data set. + \ :emphasis:`sms\_storage\_class`\ is ignored if specified for an existing data set. All values must be between 1-8 alpha-numeric characters. @@ -988,7 +988,7 @@ dds sms_data_class The desired data class for a new SMS-managed data set. - *sms_data_class* is ignored if specified for an existing data set. + \ :emphasis:`sms\_data\_class`\ is ignored if specified for an existing data set. All values must be between 1-8 alpha-numeric characters. @@ -999,7 +999,7 @@ dds block_size The maximum length of a block in bytes. - Default is dependent on *record_format* + Default is dependent on \ :emphasis:`record\_format`\ | **required**: False | **type**: int @@ -1015,9 +1015,9 @@ dds key_label The label for the encryption key used by the system to encrypt the data set. - *key_label* is the public name of a protected encryption key in the ICSF key repository. + \ :emphasis:`key\_label`\ is the public name of a protected encryption key in the ICSF key repository. - *key_label* should only be provided when creating an extended format data set. + \ :emphasis:`key\_label`\ should only be provided when creating an extended format data set. Maps to DSKEYLBL on z/OS. @@ -1039,7 +1039,7 @@ dds Key label must have a private key associated with it. - *label* can be a maximum of 64 characters. + \ :emphasis:`label`\ can be a maximum of 64 characters. Maps to KEYLAB1 on z/OS. @@ -1048,9 +1048,9 @@ dds encoding - How the label for the key encrypting key specified by *label* is encoded by the Encryption Key Manager. + How the label for the key encrypting key specified by \ :emphasis:`label`\ is encoded by the Encryption Key Manager. - *encoding* can either be set to ``L`` for label encoding, or ``H`` for hash encoding. + \ :emphasis:`encoding`\ can either be set to \ :literal:`l`\ for label encoding, or \ :literal:`h`\ for hash encoding. Maps to KEYCD1 on z/OS. @@ -1074,7 +1074,7 @@ dds Key label must have a private key associated with it. - *label* can be a maximum of 64 characters. + \ :emphasis:`label`\ can be a maximum of 64 characters. Maps to KEYLAB2 on z/OS. @@ -1083,9 +1083,9 @@ dds encoding - How the label for the key encrypting key specified by *label* is encoded by the Encryption Key Manager. + How the label for the key encrypting key specified by \ :emphasis:`label`\ is encoded by the Encryption Key Manager. - *encoding* can either be set to ``L`` for label encoding, or ``H`` for hash encoding. + \ :emphasis:`encoding`\ can either be set to \ :literal:`l`\ for label encoding, or \ :literal:`h`\ for hash encoding. Maps to KEYCD2 on z/OS. @@ -1098,7 +1098,7 @@ dds key_length The length of the keys used in a new data set. - If using SMS, setting *key_length* overrides the key length defined in the SMS data class of the data set. + If using SMS, setting \ :emphasis:`key\_length`\ overrides the key length defined in the SMS data class of the data set. Valid values are (0-255 non-vsam), (1-255 vsam). @@ -1111,14 +1111,14 @@ dds The first byte of a logical record is position 0. - Provide *key_offset* only for VSAM key-sequenced data sets. + Provide \ :emphasis:`key\_offset`\ only for VSAM key-sequenced data sets. | **required**: False | **type**: int record_length - The logical record length. (e.g ``80``). + The logical record length. (e.g \ :literal:`80`\ ). For variable data sets, the length must include the 4-byte prefix area. @@ -1152,11 +1152,11 @@ dds type The type of the content to be returned. - ``text`` means return content in encoding specified by *response_encoding*. + \ :literal:`text`\ means return content in encoding specified by \ :emphasis:`response\_encoding`\ . - *src_encoding* and *response_encoding* are only used when *type=text*. + \ :emphasis:`src\_encoding`\ and \ :emphasis:`response\_encoding`\ are only used when \ :emphasis:`type=text`\ . - ``base64`` means return content in binary mode. + \ :literal:`base64`\ means return content in binary mode. | **required**: True | **type**: str @@ -1191,7 +1191,7 @@ dds path The path to an existing UNIX file. - Or provide the path to an new created UNIX file when *status_group=OCREAT*. + Or provide the path to an new created UNIX file when \ :emphasis:`status\_group=ocreat`\ . The provided path must be absolute. @@ -1216,7 +1216,7 @@ dds mode - The file access attributes when the UNIX file is created specified in *path*. + The file access attributes when the UNIX file is created specified in \ :emphasis:`path`\ . Specify the mode as an octal number similar to chmod. @@ -1227,47 +1227,47 @@ dds status_group - The status for the UNIX file specified in *path*. + The status for the UNIX file specified in \ :emphasis:`path`\ . - If you do not specify a value for the *status_group* parameter the module assumes that the pathname exists, searches for it, and fails the module if the pathname does not exist. + If you do not specify a value for the \ :emphasis:`status\_group`\ parameter the module assumes that the pathname exists, searches for it, and fails the module if the pathname does not exist. Maps to PATHOPTS status group file options on z/OS. You can specify up to 6 choices. - *oappend* sets the file offset to the end of the file before each write, so that data is written at the end of the file. + \ :emphasis:`oappend`\ sets the file offset to the end of the file before each write, so that data is written at the end of the file. - *ocreat* specifies that if the file does not exist, the system is to create it. If a directory specified in the pathname does not exist, one is not created, and the new file is not created. If the file already exists and *oexcl* was not specified, the system allows the program to use the existing file. If the file already exists and *oexcl* was specified, the system fails the allocation and the job step. + \ :emphasis:`ocreat`\ specifies that if the file does not exist, the system is to create it. If a directory specified in the pathname does not exist, one is not created, and the new file is not created. If the file already exists and \ :emphasis:`oexcl`\ was not specified, the system allows the program to use the existing file. If the file already exists and \ :emphasis:`oexcl`\ was specified, the system fails the allocation and the job step. - *oexcl* specifies that if the file does not exist, the system is to create it. If the file already exists, the system fails the allocation and the job step. The system ignores *oexcl* if *ocreat* is not also specified. + \ :emphasis:`oexcl`\ specifies that if the file does not exist, the system is to create it. If the file already exists, the system fails the allocation and the job step. The system ignores \ :emphasis:`oexcl`\ if \ :emphasis:`ocreat`\ is not also specified. - *onoctty* specifies that if the PATH parameter identifies a terminal device, opening of the file does not make the terminal device the controlling terminal for the process. + \ :emphasis:`onoctty`\ specifies that if the PATH parameter identifies a terminal device, opening of the file does not make the terminal device the controlling terminal for the process. - *ononblock* specifies the following, depending on the type of file + \ :emphasis:`ononblock`\ specifies the following, depending on the type of file For a FIFO special file - 1. With *ononblock* specified and *ordonly* access, an open function for reading-only returns without delay. + 1. With \ :emphasis:`ononblock`\ specified and \ :emphasis:`ordonly`\ access, an open function for reading-only returns without delay. - 2. With *ononblock* not specified and *ordonly* access, an open function for reading-only blocks (waits) until a process opens the file for writing. + 2. With \ :emphasis:`ononblock`\ not specified and \ :emphasis:`ordonly`\ access, an open function for reading-only blocks (waits) until a process opens the file for writing. - 3. With *ononblock* specified and *owronly* access, an open function for writing-only returns an error if no process currently has the file open for reading. + 3. With \ :emphasis:`ononblock`\ specified and \ :emphasis:`owronly`\ access, an open function for writing-only returns an error if no process currently has the file open for reading. - 4. With *ononblock* not specified and *owronly* access, an open function for writing-only blocks (waits) until a process opens the file for reading. + 4. With \ :emphasis:`ononblock`\ not specified and \ :emphasis:`owronly`\ access, an open function for writing-only blocks (waits) until a process opens the file for reading. 5. For a character special file that supports nonblocking open - 6. If *ononblock* is specified, an open function returns without blocking (waiting) until the device is ready or available. Device response depends on the type of device. + 6. If \ :emphasis:`ononblock`\ is specified, an open function returns without blocking (waiting) until the device is ready or available. Device response depends on the type of device. - 7. If *ononblock* is not specified, an open function blocks (waits) until the device is ready or available. + 7. If \ :emphasis:`ononblock`\ is not specified, an open function blocks (waits) until the device is ready or available. - *ononblock* has no effect on other file types. + \ :emphasis:`ononblock`\ has no effect on other file types. - *osync* specifies that the system is to move data from buffer storage to permanent storage before returning control from a callable service that performs a write. + \ :emphasis:`osync`\ specifies that the system is to move data from buffer storage to permanent storage before returning control from a callable service that performs a write. - *otrunc* specifies that the system is to truncate the file length to zero if all the following are true: the file specified exists, the file is a regular file, and the file successfully opened with *ordwr* or *owronly*. + \ :emphasis:`otrunc`\ specifies that the system is to truncate the file length to zero if all the following are true: the file specified exists, the file is a regular file, and the file successfully opened with \ :emphasis:`ordwr`\ or \ :emphasis:`owronly`\ . - When *otrunc* is specified, the system does not change the mode and owner. *otrunc* has no effect on FIFO special files or character special files. + When \ :emphasis:`otrunc`\ is specified, the system does not change the mode and owner. \ :emphasis:`otrunc`\ has no effect on FIFO special files or character special files. | **required**: False | **type**: list @@ -1276,7 +1276,7 @@ dds access_group - The kind of access to request for the UNIX file specified in *path*. + The kind of access to request for the UNIX file specified in \ :emphasis:`path`\ . | **required**: False | **type**: str @@ -1284,7 +1284,7 @@ dds file_data_type - The type of data that is (or will be) stored in the file specified in *path*. + The type of data that is (or will be) stored in the file specified in \ :emphasis:`path`\ . Maps to FILEDATA on z/OS. @@ -1297,7 +1297,7 @@ dds block_size The block size, in bytes, for the UNIX file. - Default is dependent on *record_format* + Default is dependent on \ :emphasis:`record\_format`\ | **required**: False | **type**: int @@ -1306,7 +1306,7 @@ dds record_length The logical record length for the UNIX file. - *record_length* is required in situations where the data will be processed as records and therefore, *record_length*, *block_size* and *record_format* need to be supplied since a UNIX file would normally be treated as a stream of bytes. + \ :emphasis:`record\_length`\ is required in situations where the data will be processed as records and therefore, \ :emphasis:`record\_length`\ , \ :emphasis:`block\_size`\ and \ :emphasis:`record\_format`\ need to be supplied since a UNIX file would normally be treated as a stream of bytes. Maps to LRECL on z/OS. @@ -1317,7 +1317,7 @@ dds record_format The record format for the UNIX file. - *record_format* is required in situations where the data will be processed as records and therefore, *record_length*, *block_size* and *record_format* need to be supplied since a UNIX file would normally be treated as a stream of bytes. + \ :emphasis:`record\_format`\ is required in situations where the data will be processed as records and therefore, \ :emphasis:`record\_length`\ , \ :emphasis:`block\_size`\ and \ :emphasis:`record\_format`\ need to be supplied since a UNIX file would normally be treated as a stream of bytes. | **required**: False | **type**: str @@ -1336,11 +1336,11 @@ dds type The type of the content to be returned. - ``text`` means return content in encoding specified by *response_encoding*. + \ :literal:`text`\ means return content in encoding specified by \ :emphasis:`response\_encoding`\ . - *src_encoding* and *response_encoding* are only used when *type=text*. + \ :emphasis:`src\_encoding`\ and \ :emphasis:`response\_encoding`\ are only used when \ :emphasis:`type=text`\ . - ``base64`` means return content in binary mode. + \ :literal:`base64`\ means return content in binary mode. | **required**: True | **type**: str @@ -1366,7 +1366,7 @@ dds dd_input - *dd_input* is used to specify an in-stream data set. + \ :emphasis:`dd\_input`\ is used to specify an in-stream data set. Input will be saved to a temporary data set with a record length of 80. @@ -1377,15 +1377,15 @@ dds content The input contents for the DD. - *dd_input* supports single or multiple lines of input. + \ :emphasis:`dd\_input`\ supports single or multiple lines of input. Multi-line input can be provided as a multi-line string or a list of strings with 1 line per list item. If a list of strings is provided, newlines will be added to each of the lines when used as input. - If a multi-line string is provided, use the proper block scalar style. YAML supports both `literal <https://yaml.org/spec/1.2.2/#literal-style>`_ and `folded <https://yaml.org/spec/1.2.2/#line-folding>`_ scalars. It is recommended to use the literal style indicator "|" with a block indentation indicator, for example; *content: | 2* is a literal block style indicator with a 2 space indentation, the entire block will be indented and newlines preserved. The block indentation range is 1 - 9. While generally unnecessary, YAML does support block `chomping <https://yaml.org/spec/1.2.2/#8112-block-chomping-indicator>`_ indicators "+" and "-" as well. + If a multi-line string is provided, use the proper block scalar style. YAML supports both \ `literal <https://yaml.org/spec/1.2.2/#literal-style>`__\ and \ `folded <https://yaml.org/spec/1.2.2/#line-folding>`__\ scalars. It is recommended to use the literal style indicator "|" with a block indentation indicator, for example; \ :emphasis:`content: | 2`\ is a literal block style indicator with a 2 space indentation, the entire block will be indented and newlines preserved. The block indentation range is 1 - 9. While generally unnecessary, YAML does support block \ `chomping <https://yaml.org/spec/1.2.2/#8112-block-chomping-indicator>`__\ indicators "+" and "-" as well. - When using the *content* option for instream-data, the module will ensure that all lines contain a blank in columns 1 and 2 and add blanks when not present while retaining a maximum length of 80 columns for any line. This is true for all *content* types; string, list of strings and when using a YAML block indicator. + When using the \ :emphasis:`content`\ option for instream-data, the module will ensure that all lines contain a blank in columns 1 and 2 and add blanks when not present while retaining a maximum length of 80 columns for any line. This is true for all \ :emphasis:`content`\ types; string, list of strings and when using a YAML block indicator. | **required**: True | **type**: raw @@ -1403,11 +1403,11 @@ dds type The type of the content to be returned. - ``text`` means return content in encoding specified by *response_encoding*. + \ :literal:`text`\ means return content in encoding specified by \ :emphasis:`response\_encoding`\ . - *src_encoding* and *response_encoding* are only used when *type=text*. + \ :emphasis:`src\_encoding`\ and \ :emphasis:`response\_encoding`\ are only used when \ :emphasis:`type=text`\ . - ``base64`` means return content in binary mode. + \ :literal:`base64`\ means return content in binary mode. | **required**: True | **type**: str @@ -1417,7 +1417,7 @@ dds src_encoding The encoding of the data set on the z/OS system. - for *dd_input*, *src_encoding* should generally not need to be changed. + for \ :emphasis:`dd\_input`\ , \ :emphasis:`src\_encoding`\ should generally not need to be changed. | **required**: False | **type**: str @@ -1440,7 +1440,7 @@ dds tmp_hlq Override the default high level qualifier (HLQ) for temporary and backup datasets. - The default HLQ is the Ansible user used to execute the module and if that is not available, then the value ``TMPHLQ`` is used. + The default HLQ is the Ansible user used to execute the module and if that is not available, then the value \ :literal:`TMPHLQ`\ is used. | **required**: False | **type**: str @@ -1756,11 +1756,11 @@ Notes ----- .. note:: - When executing programs using `zos_mvs_raw <./zos_mvs_raw.html>`_, you may encounter errors that originate in the programs implementation. Two such known issues are noted below of which one has been addressed with an APAR. + When executing programs using \ `zos\_mvs\_raw <./zos_mvs_raw.html>`__\ , you may encounter errors that originate in the programs implementation. Two such known issues are noted below of which one has been addressed with an APAR. - 1. `zos_mvs_raw <./zos_mvs_raw.html>`_ module execution fails when invoking Database Image Copy 2 Utility or Database Recovery Utility in conjunction with FlashCopy or Fast Replication. + 1. \ `zos\_mvs\_raw <./zos_mvs_raw.html>`__\ module execution fails when invoking Database Image Copy 2 Utility or Database Recovery Utility in conjunction with FlashCopy or Fast Replication. - 2. `zos_mvs_raw <./zos_mvs_raw.html>`_ module execution fails when invoking DFSRRC00 with parm "UPB,PRECOMP", "UPB, POSTCOMP" or "UPB,PRECOMP,POSTCOMP". This issue is addressed by APAR PH28089. + 2. \ `zos\_mvs\_raw <./zos_mvs_raw.html>`__\ module execution fails when invoking DFSRRC00 with parm "UPB,PRECOMP", "UPB, POSTCOMP" or "UPB,PRECOMP,POSTCOMP". This issue is addressed by APAR PH28089. 3. When executing a program, refer to the programs documentation as each programs requirments can vary fom DDs, instream-data indentation and continuation characters. @@ -1838,7 +1838,7 @@ backups | **type**: str backup_name - The name of the data set containing the backup of content from data set in original_name. + The name of the data set containing the backup of content from data set in original\_name. | **type**: str diff --git a/docs/source/modules/zos_operator.rst b/docs/source/modules/zos_operator.rst index 9ad26d64c..ff1e5fe87 100644 --- a/docs/source/modules/zos_operator.rst +++ b/docs/source/modules/zos_operator.rst @@ -52,7 +52,7 @@ wait_time_s This option is helpful on a busy system requiring more time to execute commands. - Setting *wait* can instruct if execution should wait the full *wait_time_s*. + Setting \ :emphasis:`wait`\ can instruct if execution should wait the full \ :emphasis:`wait\_time\_s`\ . | **required**: False | **type**: int diff --git a/docs/source/modules/zos_operator_action_query.rst b/docs/source/modules/zos_operator_action_query.rst index b2e99d399..a03a17fdc 100644 --- a/docs/source/modules/zos_operator_action_query.rst +++ b/docs/source/modules/zos_operator_action_query.rst @@ -31,7 +31,7 @@ system If the system name is not specified, all outstanding messages for that system and for the local systems attached to it are returned. - A trailing asterisk, (*) wildcard is supported. + A trailing asterisk, (\*) wildcard is supported. | **required**: False | **type**: str @@ -42,7 +42,7 @@ message_id If the message identifier is not specified, all outstanding messages for all message identifiers are returned. - A trailing asterisk, (*) wildcard is supported. + A trailing asterisk, (\*) wildcard is supported. | **required**: False | **type**: str @@ -53,7 +53,7 @@ job_name If the message job name is not specified, all outstanding messages for all job names are returned. - A trailing asterisk, (*) wildcard is supported. + A trailing asterisk, (\*) wildcard is supported. | **required**: False | **type**: str @@ -69,24 +69,24 @@ message_filter filter - Specifies the substring or regex to match to the outstanding messages, see *use_regex*. + Specifies the substring or regex to match to the outstanding messages, see \ :emphasis:`use\_regex`\ . All special characters in a filter string that are not a regex are escaped. - Valid Python regular expressions are supported. See `the official documentation <https://docs.python.org/library/re.html>`_ for more information. + Valid Python regular expressions are supported. See \ `the official documentation <https://docs.python.org/library/re.html>`__\ for more information. - Regular expressions are compiled with the flag **re.DOTALL** which makes the **'.'** special character match any character including a newline." + Regular expressions are compiled with the flag \ :strong:`re.DOTALL`\ which makes the \ :strong:`'.'`\ special character match any character including a newline." | **required**: True | **type**: str use_regex - Indicates that the value for *filter* is a regex or a string to match. + Indicates that the value for \ :emphasis:`filter`\ is a regex or a string to match. - If False, the module assumes that *filter* is not a regex and matches the *filter* substring on the outstanding messages. + If False, the module assumes that \ :emphasis:`filter`\ is not a regex and matches the \ :emphasis:`filter`\ substring on the outstanding messages. - If True, the module creates a regex from the *filter* string and matches it to the outstanding messages. + If True, the module creates a regex from the \ :emphasis:`filter`\ string and matches it to the outstanding messages. | **required**: False | **type**: bool @@ -222,7 +222,7 @@ actions | **sample**: STC01537 message_text - Content of the outstanding message requiring operator action awaiting a reply. If *message_filter* is set, *message_text* will be filtered accordingly. + Content of the outstanding message requiring operator action awaiting a reply. If \ :emphasis:`message\_filter`\ is set, \ :emphasis:`message\_text`\ will be filtered accordingly. | **returned**: success | **type**: str diff --git a/docs/source/modules/zos_ping.rst b/docs/source/modules/zos_ping.rst index a4405b473..acb901790 100644 --- a/docs/source/modules/zos_ping.rst +++ b/docs/source/modules/zos_ping.rst @@ -16,9 +16,9 @@ zos_ping -- Ping z/OS and check dependencies. Synopsis -------- -- `zos_ping <./zos_ping.html>`_ verifies the presence of z/OS Web Client Enablement Toolkit, iconv, and Python. -- `zos_ping <./zos_ping.html>`_ returns ``pong`` when the target host is not missing any required dependencies. -- If the target host is missing optional dependencies, the `zos_ping <./zos_ping.html>`_ will return one or more warning messages. +- \ `zos\_ping <./zos_ping.html>`__\ verifies the presence of z/OS Web Client Enablement Toolkit, iconv, and Python. +- \ `zos\_ping <./zos_ping.html>`__\ returns \ :literal:`pong`\ when the target host is not missing any required dependencies. +- If the target host is missing optional dependencies, the \ `zos\_ping <./zos_ping.html>`__\ will return one or more warning messages. - If a required dependency is missing from the target host, an explanatory message will be returned with the module failure. @@ -44,7 +44,7 @@ Notes ----- .. note:: - This module is written in REXX and relies on the SCP protocol to transfer the source to the managed z/OS node and encode it in the managed nodes default encoding, eg IBM-1047. Starting with OpenSSH 9.0, it switches from SCP to use SFTP by default, meaning transfers are no longer treated as text and are transferred as binary preserving the source files encoding resulting in a module failure. If you are using OpenSSH 9.0 (ssh -V) or later, you can instruct SSH to use SCP by adding the entry ``scp_extra_args="-O"`` into the ini file named ``ansible.cfg``. + This module is written in REXX and relies on the SCP protocol to transfer the source to the managed z/OS node and encode it in the managed nodes default encoding, eg IBM-1047. Starting with OpenSSH 9.0, it switches from SCP to use SFTP by default, meaning transfers are no longer treated as text and are transferred as binary preserving the source files encoding resulting in a module failure. If you are using OpenSSH 9.0 (ssh -V) or later, you can instruct SSH to use SCP by adding the entry \ :literal:`scp\_extra\_args="-O"`\ into the ini file named \ :literal:`ansible.cfg`\ . diff --git a/docs/source/modules/zos_script.rst b/docs/source/modules/zos_script.rst index 31b237588..6f36e05e2 100644 --- a/docs/source/modules/zos_script.rst +++ b/docs/source/modules/zos_script.rst @@ -16,7 +16,7 @@ zos_script -- Run scripts in z/OS Synopsis -------- -- The `zos_script <./zos_script.html>`_ module runs a local or remote script in the remote machine. +- The \ `zos\_script <./zos_script.html>`__\ module runs a local or remote script in the remote machine. @@ -56,7 +56,7 @@ creates encoding Specifies which encodings the script should be converted from and to. - If ``encoding`` is not provided, the module determines which local and remote charsets to convert the data from and to. + If \ :literal:`encoding`\ is not provided, the module determines which local and remote charsets to convert the data from and to. | **required**: False | **type**: dict @@ -87,9 +87,9 @@ executable remote_src - If set to ``false``, the module will search the script in the controller. + If set to \ :literal:`false`\ , the module will search the script in the controller. - If set to ``true``, the module will search the script in the remote machine. + If set to \ :literal:`true`\ , the module will search the script in the remote machine. | **required**: False | **type**: bool @@ -103,13 +103,13 @@ removes use_template - Whether the module should treat ``src`` as a Jinja2 template and render it before continuing with the rest of the module. + Whether the module should treat \ :literal:`src`\ as a Jinja2 template and render it before continuing with the rest of the module. - Only valid when ``src`` is a local file or directory. + Only valid when \ :literal:`src`\ is a local file or directory. - All variables defined in inventory files, vars files and the playbook will be passed to the template engine, as well as `Ansible special variables <https://docs.ansible.com/ansible/latest/reference_appendices/special_variables.html#special-variables>`_, such as ``playbook_dir``, ``ansible_version``, etc. + All variables defined in inventory files, vars files and the playbook will be passed to the template engine, as well as \ `Ansible special variables <https://docs.ansible.com/ansible/latest/reference_appendices/special_variables.html#special-variables>`__\ , such as \ :literal:`playbook\_dir`\ , \ :literal:`ansible\_version`\ , etc. - If variables defined in different scopes share the same name, Ansible will apply variable precedence to them. You can see the complete precedence order `in Ansible's documentation <https://docs.ansible.com/ansible/latest/playbook_guide/playbooks_variables.html#understanding-variable-precedence>`_ + If variables defined in different scopes share the same name, Ansible will apply variable precedence to them. You can see the complete precedence order \ `in Ansible's documentation <https://docs.ansible.com/ansible/latest/playbook_guide/playbooks_variables.html#understanding-variable-precedence>`__\ | **required**: False | **type**: bool @@ -119,9 +119,9 @@ use_template template_parameters Options to set the way Jinja2 will process templates. - Jinja2 already sets defaults for the markers it uses, you can find more information at its `official documentation <https://jinja.palletsprojects.com/en/latest/templates/>`_. + Jinja2 already sets defaults for the markers it uses, you can find more information at its \ `official documentation <https://jinja.palletsprojects.com/en/latest/templates/>`__\ . - These options are ignored unless ``use_template`` is true. + These options are ignored unless \ :literal:`use\_template`\ is true. | **required**: False | **type**: dict @@ -200,7 +200,7 @@ template_parameters trim_blocks Whether Jinja2 should remove the first newline after a block is removed. - Setting this option to ``False`` will result in newlines being added to the rendered template. This could create invalid code when working with JCL templates or empty records in destination data sets. + Setting this option to \ :literal:`False`\ will result in newlines being added to the rendered template. This could create invalid code when working with JCL templates or empty records in destination data sets. | **required**: False | **type**: bool @@ -284,7 +284,7 @@ Notes .. note:: When executing local scripts, temporary storage will be used on the remote z/OS system. The size of the temporary storage will correspond to the size of the file being copied. - The location in the z/OS system where local scripts will be copied to can be configured through Ansible's ``remote_tmp`` option. Refer to `Ansible's documentation <https://docs.ansible.com/ansible/latest/collections/ansible/builtin/sh_shell.html#parameter-remote_tmp>`_ for more information. + The location in the z/OS system where local scripts will be copied to can be configured through Ansible's \ :literal:`remote\_tmp`\ option. Refer to \ `Ansible's documentation <https://docs.ansible.com/ansible/latest/collections/ansible/builtin/sh_shell.html#parameter-remote_tmp>`__\ for more information. All local scripts copied to a remote z/OS system will be removed from the managed node before the module finishes executing. @@ -292,13 +292,13 @@ Notes The module will only add execution permissions for the file owner. - If executing REXX scripts, make sure to include a newline character on each line of the file. Otherwise, the interpreter may fail and return error ``BPXW0003I``. + If executing REXX scripts, make sure to include a newline character on each line of the file. Otherwise, the interpreter may fail and return error \ :literal:`BPXW0003I`\ . - For supported character sets used to encode data, refer to the `documentation <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/resources/character_set.html>`_. + For supported character sets used to encode data, refer to the \ `documentation <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/resources/character_set.html>`__\ . - This module uses `zos_copy <./zos_copy.html>`_ to copy local scripts to the remote machine which uses SFTP (Secure File Transfer Protocol) for the underlying transfer protocol; SCP (secure copy protocol) and Co:Z SFTP are not supported. In the case of Co:z SFTP, you can exempt the Ansible user id on z/OS from using Co:Z thus falling back to using standard SFTP. If the module detects SCP, it will temporarily use SFTP for transfers, if not available, the module will fail. + This module uses \ `zos\_copy <./zos_copy.html>`__\ to copy local scripts to the remote machine which uses SFTP (Secure File Transfer Protocol) for the underlying transfer protocol; SCP (secure copy protocol) and Co:Z SFTP are not supported. In the case of Co:z SFTP, you can exempt the Ansible user id on z/OS from using Co:Z thus falling back to using standard SFTP. If the module detects SCP, it will temporarily use SFTP for transfers, if not available, the module will fail. - This module executes scripts inside z/OS UNIX System Services. For running REXX scripts contained in data sets or CLISTs, consider issuing a TSO command with `zos_tso_command <./zos_tso_command.html>`_. + This module executes scripts inside z/OS UNIX System Services. For running REXX scripts contained in data sets or CLISTs, consider issuing a TSO command with \ `zos\_tso\_command <./zos_tso_command.html>`__\ . The community script module does not rely on Python to execute scripts on a managed node, while this module does. Python must be present on the remote machine. diff --git a/docs/source/modules/zos_tso_command.rst b/docs/source/modules/zos_tso_command.rst index 4af6b1b52..b35c13a1b 100644 --- a/docs/source/modules/zos_tso_command.rst +++ b/docs/source/modules/zos_tso_command.rst @@ -40,7 +40,7 @@ commands max_rc Specifies the maximum return code allowed for a TSO command. - If more than one TSO command is submitted, the *max_rc* applies to all TSO commands. + If more than one TSO command is submitted, the \ :emphasis:`max\_rc`\ applies to all TSO commands. | **required**: False | **type**: int @@ -119,7 +119,7 @@ output max_rc Specifies the maximum return code allowed for a TSO command. - If more than one TSO command is submitted, the *max_rc* applies to all TSO commands. + If more than one TSO command is submitted, the \ :emphasis:`max\_rc`\ applies to all TSO commands. | **returned**: always | **type**: int diff --git a/docs/source/modules/zos_unarchive.rst b/docs/source/modules/zos_unarchive.rst index 91fa597ee..a53747d6c 100644 --- a/docs/source/modules/zos_unarchive.rst +++ b/docs/source/modules/zos_unarchive.rst @@ -16,8 +16,8 @@ zos_unarchive -- Unarchive files and data sets in z/OS. Synopsis -------- -- The ``zos_unarchive`` module unpacks an archive after optionally transferring it to the remote system. -- For supported archive formats, see option ``format``. +- The \ :literal:`zos\_unarchive`\ module unpacks an archive after optionally transferring it to the remote system. +- For supported archive formats, see option \ :literal:`format`\ . - Supported sources are USS (UNIX System Services) or z/OS data sets. - Mixing MVS data sets with USS files for unarchiving is not supported. - The archive is sent to the remote as binary, so no encoding is performed. @@ -33,11 +33,11 @@ Parameters src The remote absolute path or data set of the archive to be uncompressed. - *src* can be a USS file or MVS data set name. + \ :emphasis:`src`\ can be a USS file or MVS data set name. USS file paths should be absolute paths. - MVS data sets supported types are ``SEQ``, ``PDS``, ``PDSE``. + MVS data sets supported types are \ :literal:`SEQ`\ , \ :literal:`PDS`\ , \ :literal:`PDSE`\ . | **required**: True | **type**: str @@ -72,14 +72,14 @@ format If the data set provided exists, the data set must have the following attributes: LRECL=255, BLKSIZE=3120, and RECFM=VB - When providing the *xmit_log_data_set* name, ensure there is adequate space. + When providing the \ :emphasis:`xmit\_log\_data\_set`\ name, ensure there is adequate space. | **required**: False | **type**: str use_adrdssu - If set to true, the ``zos_archive`` module will use Data Facility Storage Management Subsystem data set services (DFSMSdss) program ADRDSSU to uncompress data sets from a portable format after using ``xmit`` or ``terse``. + If set to true, the \ :literal:`zos\_archive`\ module will use Data Facility Storage Management Subsystem data set services (DFSMSdss) program ADRDSSU to uncompress data sets from a portable format after using \ :literal:`xmit`\ or \ :literal:`terse`\ . | **required**: False | **type**: bool @@ -87,7 +87,7 @@ format dest_volumes - When *use_adrdssu=True*, specify the volume the data sets will be written to. + When \ :emphasis:`use\_adrdssu=True`\ , specify the volume the data sets will be written to. If no volume is specified, storage management rules will be used to determine the volume where the file will be unarchived. @@ -103,7 +103,7 @@ format dest The remote absolute path or data set where the content should be unarchived to. - *dest* can be a USS file, directory or MVS data set name. + \ :emphasis:`dest`\ can be a USS file, directory or MVS data set name. If dest has missing parent directories, they will not be created. @@ -116,7 +116,7 @@ group When left unspecified, it uses the current group of the current user unless you are root, in which case it can preserve the previous ownership. - This option is only applicable if ``dest`` is USS, otherwise ignored. + This option is only applicable if \ :literal:`dest`\ is USS, otherwise ignored. | **required**: False | **type**: str @@ -125,13 +125,13 @@ group mode The permission of the uncompressed files. - If ``dest`` is USS, this will act as Unix file mode, otherwise ignored. + If \ :literal:`dest`\ is USS, this will act as Unix file mode, otherwise ignored. - It should be noted that modes are octal numbers. The user must either add a leading zero so that Ansible's YAML parser knows it is an octal number (like ``0644`` or ``01777``)or quote it (like ``'644'`` or ``'1777'``) so Ansible receives a string and can do its own conversion from string into number. Giving Ansible a number without following one of these rules will end up with a decimal number which will have unexpected results. + It should be noted that modes are octal numbers. The user must either add a leading zero so that Ansible's YAML parser knows it is an octal number (like \ :literal:`0644`\ or \ :literal:`01777`\ )or quote it (like \ :literal:`'644'`\ or \ :literal:`'1777'`\ ) so Ansible receives a string and can do its own conversion from string into number. Giving Ansible a number without following one of these rules will end up with a decimal number which will have unexpected results. - The mode may also be specified as a symbolic mode (for example, ``u+rwx`` or ``u=rw,g=r,o=r``) or a special string `preserve`. + The mode may also be specified as a symbolic mode (for example, \`\`u+rwx\`\` or \`\`u=rw,g=r,o=r\`\`) or a special string \`preserve\`. - *mode=preserve* means that the file will be given the same permissions as the source file. + \ :emphasis:`mode=preserve`\ means that the file will be given the same permissions as the source file. | **required**: False | **type**: str @@ -149,7 +149,7 @@ owner include A list of directories, files or data set names to extract from the archive. - When ``include`` is set, only those files will we be extracted leaving the remaining files in the archive. + When \ :literal:`include`\ is set, only those files will we be extracted leaving the remaining files in the archive. Mutually exclusive with exclude. @@ -177,7 +177,7 @@ list dest_data_set - Data set attributes to customize a ``dest`` data set that the archive will be copied into. + Data set attributes to customize a \ :literal:`dest`\ data set that the archive will be copied into. | **required**: False | **type**: dict @@ -195,23 +195,23 @@ dest_data_set | **required**: False | **type**: str - | **default**: SEQ - | **choices**: SEQ, PDS, PDSE + | **default**: seq + | **choices**: seq, pds, pdse space_primary - If the destination *dest* data set does not exist , this sets the primary space allocated for the data set. + If the destination \ :emphasis:`dest`\ data set does not exist , this sets the primary space allocated for the data set. - The unit of space used is set using *space_type*. + The unit of space used is set using \ :emphasis:`space\_type`\ . | **required**: False | **type**: int space_secondary - If the destination *dest* data set does not exist , this sets the secondary space allocated for the data set. + If the destination \ :emphasis:`dest`\ data set does not exist , this sets the secondary space allocated for the data set. - The unit of space used is set using *space_type*. + The unit of space used is set using \ :emphasis:`space\_type`\ . | **required**: False | **type**: int @@ -220,21 +220,21 @@ dest_data_set space_type If the destination data set does not exist, this sets the unit of measurement to use when defining primary and secondary space. - Valid units of size are ``K``, ``M``, ``G``, ``CYL``, and ``TRK``. + Valid units of size are \ :literal:`k`\ , \ :literal:`m`\ , \ :literal:`g`\ , \ :literal:`cyl`\ , and \ :literal:`trk`\ . | **required**: False | **type**: str - | **choices**: K, M, G, CYL, TRK + | **choices**: k, m, g, cyl, trk record_format - If the destination data set does not exist, this sets the format of the data set. (e.g ``FB``) + If the destination data set does not exist, this sets the format of the data set. (e.g \ :literal:`fb`\ ) - Choices are case-insensitive. + Choices are case-sensitive. | **required**: False | **type**: str - | **choices**: FB, VB, FBA, VBA, U + | **choices**: fb, vb, fba, vba, u record_length @@ -265,9 +265,9 @@ dest_data_set key_offset The key offset to use when creating a KSDS data set. - *key_offset* is required when *type=KSDS*. + \ :emphasis:`key\_offset`\ is required when \ :emphasis:`type=ksds`\ . - *key_offset* should only be provided when *type=KSDS* + \ :emphasis:`key\_offset`\ should only be provided when \ :emphasis:`type=ksds`\ | **required**: False | **type**: int @@ -276,9 +276,9 @@ dest_data_set key_length The key length to use when creating a KSDS data set. - *key_length* is required when *type=KSDS*. + \ :emphasis:`key\_length`\ is required when \ :emphasis:`type=ksds`\ . - *key_length* should only be provided when *type=KSDS* + \ :emphasis:`key\_length`\ should only be provided when \ :emphasis:`type=ksds`\ | **required**: False | **type**: int @@ -327,7 +327,7 @@ dest_data_set tmp_hlq Override the default high level qualifier (HLQ) for temporary data sets. - The default HLQ is the Ansible user used to execute the module and if that is not available, then the environment variable value ``TMPHLQ`` is used. + The default HLQ is the Ansible user used to execute the module and if that is not available, then the environment variable value \ :literal:`TMPHLQ`\ is used. | **required**: False | **type**: str @@ -342,9 +342,9 @@ force remote_src - If set to true, ``zos_unarchive`` retrieves the archive from the remote system. + If set to true, \ :literal:`zos\_unarchive`\ retrieves the archive from the remote system. - If set to false, ``zos_unarchive`` searches the local machine (Ansible controller) for the archive. + If set to false, \ :literal:`zos\_unarchive`\ searches the local machine (Ansible controller) for the archive. | **required**: False | **type**: bool @@ -404,7 +404,7 @@ Notes .. note:: VSAMs are not supported. - This module uses `zos_copy <./zos_copy.html>`_ to copy local scripts to the remote machine which uses SFTP (Secure File Transfer Protocol) for the underlying transfer protocol; SCP (secure copy protocol) and Co:Z SFTP are not supported. In the case of Co:z SFTP, you can exempt the Ansible user id on z/OS from using Co:Z thus falling back to using standard SFTP. If the module detects SCP, it will temporarily use SFTP for transfers, if not available, the module will fail. + This module uses \ `zos\_copy <./zos_copy.html>`__\ to copy local scripts to the remote machine which uses SFTP (Secure File Transfer Protocol) for the underlying transfer protocol; SCP (secure copy protocol) and Co:Z SFTP are not supported. In the case of Co:z SFTP, you can exempt the Ansible user id on z/OS from using Co:Z thus falling back to using standard SFTP. If the module detects SCP, it will temporarily use SFTP for transfers, if not available, the module will fail. diff --git a/docs/source/modules/zos_volume_init.rst b/docs/source/modules/zos_volume_init.rst index 195435924..25a0897b9 100644 --- a/docs/source/modules/zos_volume_init.rst +++ b/docs/source/modules/zos_volume_init.rst @@ -17,14 +17,14 @@ zos_volume_init -- Initialize volumes or minidisks. Synopsis -------- - Initialize a volume or minidisk on z/OS. -- *zos_volume_init* will create the volume label and entry into the volume table of contents (VTOC). +- \ :emphasis:`zos\_volume\_init`\ will create the volume label and entry into the volume table of contents (VTOC). - Volumes are used for storing data and executable programs. - A minidisk is a portion of a disk that is linked to your virtual machine. - A VTOC lists the data sets that reside on a volume, their location, size, and other attributes. -- *zos_volume_init* uses the ICKDSF command INIT to initialize a volume. In some cases the command could be protected by facility class `STGADMIN.ICK.INIT`. Protection occurs when the class is active, and the class profile is defined. Ensure the user executing the Ansible task is permitted to execute ICKDSF command INIT, otherwise, any user can use the command. -- ICKDSF is an Authorized Program Facility (APF) program on z/OS, *zos_volume_init* will run in authorized mode but if the program ICKDSF is not APF authorized, the task will end. +- \ :emphasis:`zos\_volume\_init`\ uses the ICKDSF command INIT to initialize a volume. In some cases the command could be protected by facility class \`STGADMIN.ICK.INIT\`. Protection occurs when the class is active, and the class profile is defined. Ensure the user executing the Ansible task is permitted to execute ICKDSF command INIT, otherwise, any user can use the command. +- ICKDSF is an Authorized Program Facility (APF) program on z/OS, \ :emphasis:`zos\_volume\_init`\ will run in authorized mode but if the program ICKDSF is not APF authorized, the task will end. - Note that defaults set on target z/OS systems may override ICKDSF parameters. -- If is recommended that data on the volume is backed up as the *zos_volume_init* module will not perform any backups. You can use the `zos_backup_restore <./zos_backup_restore.html>`_ module to backup a volume. +- If is recommended that data on the volume is backed up as the \ :emphasis:`zos\_volume\_init`\ module will not perform any backups. You can use the \ `zos\_backup\_restore <./zos_backup_restore.html>`__\ module to backup a volume. @@ -35,9 +35,9 @@ Parameters address - *address* is a 3 or 4 digit hexadecimal number that specifies the address of the volume or minidisk. + \ :emphasis:`address`\ is a 3 or 4 digit hexadecimal number that specifies the address of the volume or minidisk. - *address* can be the number assigned to the device (device number) when it is installed or the virtual address. + \ :emphasis:`address`\ can be the number assigned to the device (device number) when it is installed or the virtual address. | **required**: True | **type**: str @@ -46,15 +46,15 @@ address verify_volid Verify that the volume serial matches what is on the existing volume or minidisk. - *verify_volid* must be 1 to 6 alphanumeric characters or ``*NONE*``. + \ :emphasis:`verify\_volid`\ must be 1 to 6 alphanumeric characters or \ :literal:`\*NONE\*`\ . - To verify that a volume serial number does not exist, use *verify_volid=*NONE**. + To verify that a volume serial number does not exist, use \ :emphasis:`verify\_volid=\*NONE\*`\ . - If *verify_volid* is specified and the volume serial number does not match that found on the volume or minidisk, initialization does not complete. + If \ :emphasis:`verify\_volid`\ is specified and the volume serial number does not match that found on the volume or minidisk, initialization does not complete. - If *verify_volid=*NONE** is specified and a volume serial is found on the volume or minidisk, initialization does not complete. + If \ :emphasis:`verify\_volid=\*NONE\*`\ is specified and a volume serial is found on the volume or minidisk, initialization does not complete. - Note, this option is **not** a boolean, leave it blank to skip the verification. + Note, this option is \ :strong:`not`\ a boolean, leave it blank to skip the verification. | **required**: False | **type**: str @@ -73,11 +73,11 @@ volid Expects 1-6 alphanumeric, national ($,#,@) or special characters. - A *volid* with less than 6 characters will be padded with spaces. + A \ :emphasis:`volid`\ with less than 6 characters will be padded with spaces. - A *volid* can also be referred to as volser or volume serial number. + A \ :emphasis:`volid`\ can also be referred to as volser or volume serial number. - When *volid* is not specified for a previously initialized volume or minidisk, the volume serial number will remain unchanged. + When \ :emphasis:`volid`\ is not specified for a previously initialized volume or minidisk, the volume serial number will remain unchanged. | **required**: False | **type**: str @@ -99,7 +99,7 @@ index The VTOC index enhances the performance of VTOC access. - When set to *false*, no index will be created. + When set to \ :emphasis:`false`\ , no index will be created. | **required**: False | **type**: bool @@ -109,7 +109,7 @@ index sms_managed Specifies that the volume be managed by Storage Management System (SMS). - If *sms_managed* is *true* then *index* must also be *true*. + If \ :emphasis:`sms\_managed`\ is \ :emphasis:`true`\ then \ :emphasis:`index`\ must also be \ :emphasis:`true`\ . | **required**: False | **type**: bool @@ -127,7 +127,7 @@ verify_volume_empty tmp_hlq Override the default high level qualifier (HLQ) for temporary and backup datasets. - The default HLQ is the Ansible user used to execute the module and if that is not available, then the value ``TMPHLQ`` is used. + The default HLQ is the Ansible user used to execute the module and if that is not available, then the value \ :literal:`TMPHLQ`\ is used. | **required**: False | **type**: str diff --git a/plugins/action/zos_copy.py b/plugins/action/zos_copy.py index e9c238b87..e3ea36dc8 100644 --- a/plugins/action/zos_copy.py +++ b/plugins/action/zos_copy.py @@ -403,7 +403,7 @@ def _remote_cleanup(self, dest, dest_exists, task_vars): else: module_args = dict(name=dest, state="absent") if is_member(dest): - module_args["type"] = "MEMBER" + module_args["type"] = "member" self._execute_module( module_name="ibm.ibm_zos_core.zos_data_set", module_args=module_args, @@ -466,6 +466,16 @@ def _update_result(is_binary, copy_res, original_args, original_src): updated_result["dest_created"] = True updated_result["destination_attributes"] = dest_data_set_attrs + # Setting attributes to lower case to conform to docs. + # Part of the change to lowercase choices in the collection involves having + # a consistent interface that also returns the same values in lowercase. + if "record_format" in updated_result["destination_attributes"]: + updated_result["destination_attributes"]["record_format"] = updated_result["destination_attributes"]["record_format"].lower() + if "space_type" in updated_result["destination_attributes"]: + updated_result["destination_attributes"]["space_type"] = updated_result["destination_attributes"]["space_type"].lower() + if "type" in updated_result["destination_attributes"]: + updated_result["destination_attributes"]["type"] = updated_result["destination_attributes"]["type"].lower() + return updated_result diff --git a/plugins/action/zos_job_submit.py b/plugins/action/zos_job_submit.py index 6bbd0f9d9..8e06c340b 100644 --- a/plugins/action/zos_job_submit.py +++ b/plugins/action/zos_job_submit.py @@ -44,15 +44,15 @@ def run(self, tmp=None, task_vars=None): use_template = _process_boolean(module_args.get("use_template")) location = module_args.get("location") - if use_template and location != "LOCAL": + if use_template and location != "local": result.update(dict( failed=True, changed=False, - msg="Use of Jinja2 templates is only valid for local files. Location is set to '{0}' but should be 'LOCAL'".format(location) + msg="Use of Jinja2 templates is only valid for local files. Location is set to '{0}' but should be 'local'".format(location) )) return result - if location == "LOCAL": + if location == "local": source = self._task.args.get("src", None) diff --git a/plugins/action/zos_unarchive.py b/plugins/action/zos_unarchive.py index 6e679d62d..ed508bcf0 100644 --- a/plugins/action/zos_unarchive.py +++ b/plugins/action/zos_unarchive.py @@ -87,11 +87,11 @@ def run(self, tmp=None, task_vars=None): ) dest = cmd_res.get("stdout") if dest_data_set.get("space_primary") is None: - dest_data_set.update(space_primary=5, space_type="M") + dest_data_set.update(space_primary=5, space_type="m") if format_name == 'terse': - dest_data_set.update(type='SEQ', record_format='FB', record_length=1024) + dest_data_set.update(type='seq', record_format='fb', record_length=1024) if format_name == 'xmit': - dest_data_set.update(type='SEQ', record_format='FB', record_length=80) + dest_data_set.update(type='seq', record_format='fb', record_length=80) copy_module_args.update( dict( diff --git a/plugins/module_utils/data_set.py b/plugins/module_utils/data_set.py index 3bd502858..40c1a4047 100644 --- a/plugins/module_utils/data_set.py +++ b/plugins/module_utils/data_set.py @@ -919,7 +919,7 @@ def _build_zoau_args(**kwargs): secondary += space_type type = kwargs.get("type") - if type and type == "ZFS": + if type and type.upper() == "ZFS": type = "LDS" volumes = ",".join(volumes) if volumes else None diff --git a/plugins/modules/zos_archive.py b/plugins/modules/zos_archive.py index 951b6bc87..cbe96b65d 100644 --- a/plugins/modules/zos_archive.py +++ b/plugins/modules/zos_archive.py @@ -81,8 +81,8 @@ type: str required: false choices: - - PACK - - SPACK + - pack + - spack xmit_log_data_set: description: - Provide the name of a data set to store xmit log output. @@ -193,9 +193,9 @@ - Organization of the destination type: str required: false - default: SEQ + default: seq choices: - - SEQ + - seq space_primary: description: - If the destination I(dest) data set does not exist , this sets the @@ -214,28 +214,28 @@ description: - If the destination data set does not exist, this sets the unit of measurement to use when defining primary and secondary space. - - Valid units of size are C(K), C(M), C(G), C(CYL), and C(TRK). + - Valid units of size are C(k), C(m), C(g), C(cyl), and C(trk). type: str choices: - - K - - M - - G - - CYL - - TRK + - k + - m + - g + - cyl + - trk required: false record_format: description: - If the destination data set does not exist, this sets the format of the data set. (e.g C(FB)) - - Choices are case-insensitive. + - Choices are case-sensitive. required: false choices: - - FB - - VB - - FBA - - VBA - - U + - fb + - vb + - fba + - vba + - u type: str record_length: description: @@ -356,7 +356,7 @@ format: name: terse format_options: - terse_pack: "SPACK" + terse_pack: "spack" use_adrdssu: True # Use a pattern to store @@ -795,17 +795,17 @@ def _create_dest_data_set( arguments.update(name=temp_ds) if record_format is None: - arguments.update(record_format="FB") + arguments.update(record_format="fb") if record_length is None: arguments.update(record_length=80) if type is None: - arguments.update(type="SEQ") + arguments.update(type="seq") if space_primary is None: arguments.update(space_primary=5) if space_secondary is None: arguments.update(space_secondary=3) if space_type is None: - arguments.update(space_type="M") + arguments.update(space_type="m") arguments.pop("self") changed = data_set.DataSet.ensure_present(**arguments) return arguments["name"], changed @@ -819,8 +819,8 @@ def create_dest_ds(self, name): name {str} - name of the newly created data set. """ record_length = XMIT_RECORD_LENGTH if self.format == "xmit" else AMATERSE_RECORD_LENGTH - data_set.DataSet.ensure_present(name=name, replace=True, type='SEQ', record_format='FB', record_length=record_length) - # changed = data_set.DataSet.ensure_present(name=name, replace=True, type='SEQ', record_format='FB', record_length=record_length) + data_set.DataSet.ensure_present(name=name, replace=True, type='seq', record_format='fb', record_length=record_length) + # changed = data_set.DataSet.ensure_present(name=name, replace=True, type='seq', record_format='fb', record_length=record_length) # cmd = "dtouch -rfb -tseq -l{0} {1}".format(record_length, name) # rc, out, err = self.module.run_command(cmd) @@ -952,15 +952,19 @@ def compute_dest_size(self): dest_space += int(ds.total_space) # space unit returned from listings is bytes dest_space = math.ceil(dest_space / 1024) - self.dest_data_set.update(space_primary=dest_space, space_type="K") + self.dest_data_set.update(space_primary=dest_space, space_type="k") class AMATerseArchive(MVSArchive): def __init__(self, module): super(AMATerseArchive, self).__init__(module) self.pack_arg = module.params.get("format").get("format_options").get("terse_pack") + # We store pack_ard in uppercase because the AMATerse command requires + # it in uppercase. if self.pack_arg is None: self.pack_arg = "SPACK" + else: + self.pack_arg = self.pack_arg.upper() def add(self, src, archive): """ @@ -987,8 +991,8 @@ def archive_targets(self): """ if self.use_adrdssu: source, changed = self._create_dest_data_set( - type="SEQ", - record_format="U", + type="seq", + record_format="u", record_length=0, tmp_hlq=self.tmphlq, replace=True, @@ -1006,8 +1010,8 @@ def archive_targets(self): dest, changed = self._create_dest_data_set( name=self.dest, replace=True, - type='SEQ', - record_format='FB', + type='seq', + record_format='fb', record_length=AMATERSE_RECORD_LENGTH, space_primary=self.dest_data_set.get("space_primary"), space_type=self.dest_data_set.get("space_type")) @@ -1056,8 +1060,8 @@ def archive_targets(self): """ if self.use_adrdssu: source, changed = self._create_dest_data_set( - type="SEQ", - record_format="U", + type="seq", + record_format="u", record_length=0, tmp_hlq=self.tmphlq, replace=True, @@ -1075,8 +1079,8 @@ def archive_targets(self): dest, changed = self._create_dest_data_set( name=self.dest, replace=True, - type='SEQ', - record_format='FB', + type='seq', + record_format='fb', record_length=XMIT_RECORD_LENGTH, space_primary=self.dest_data_set.get("space_primary"), space_type=self.dest_data_set.get("space_type")) @@ -1137,7 +1141,7 @@ def run_module(): options=dict( terse_pack=dict( type='str', - choices=['PACK', 'SPACK'], + choices=['pack', 'spack'], ), xmit_log_data_set=dict( type='str', @@ -1163,9 +1167,9 @@ def run_module(): ), type=dict( type='str', - choices=['SEQ'], + choices=['seq'], required=False, - default="SEQ", + default="seq", ), space_primary=dict( type='int', required=False), @@ -1173,12 +1177,12 @@ def run_module(): type='int', required=False), space_type=dict( type='str', - choices=['K', 'M', 'G', 'CYL', 'TRK'], + choices=['k', 'm', 'g', 'cyl', 'trk'], required=False, ), record_format=dict( type='str', - choices=["FB", "VB", "FBA", "VBA", "U"], + choices=["fb", "vb", "fba", "vba", "u"], required=False ), record_length=dict(type='int', required=False), @@ -1214,7 +1218,7 @@ def run_module(): terse_pack=dict( type='str', required=False, - choices=['PACK', 'SPACK'], + choices=['pack', 'spack'], ), xmit_log_data_set=dict( type='str', @@ -1226,7 +1230,7 @@ def run_module(): ) ), default=dict( - terse_pack="SPACK", + terse_pack="spack", xmit_log_data_set="", use_adrdssu=False), ), @@ -1234,7 +1238,7 @@ def run_module(): default=dict( name="", format_options=dict( - terse_pack="SPACK", + terse_pack="spack", xmit_log_data_set="", use_adrdssu=False ) @@ -1249,7 +1253,7 @@ def run_module(): required=False, options=dict( name=dict(arg_type='str', required=False), - type=dict(arg_type='str', required=False, default="SEQ"), + type=dict(arg_type='str', required=False, default="seq"), space_primary=dict(arg_type='int', required=False), space_secondary=dict( arg_type='int', required=False), diff --git a/plugins/modules/zos_backup_restore.py b/plugins/modules/zos_backup_restore.py index 3185652e1..a112da247 100644 --- a/plugins/modules/zos_backup_restore.py +++ b/plugins/modules/zos_backup_restore.py @@ -168,15 +168,15 @@ space_type: description: - The unit of measurement to use when defining data set space. - - Valid units of size are C(K), C(M), C(G), C(CYL), and C(TRK). - - When I(full_volume=True), I(space_type) defaults to C(G), otherwise default is C(M) + - Valid units of size are C(k), C(m), C(g), C(cyl), and C(trk). + - When I(full_volume=True), I(space_type) defaults to C(g), otherwise default is C(m) type: str choices: - - K - - M - - G - - CYL - - TRK + - k + - m + - g + - cyl + - trk required: false aliases: - unit @@ -233,7 +233,7 @@ include: user.** backup_name: MY.BACKUP.DZP space: 100 - space_type: M + space_type: m - name: Backup all datasets matching the pattern USER.** that are present on the volume MYVOL1 to data set MY.BACKUP.DZP, @@ -245,7 +245,7 @@ volume: MYVOL1 backup_name: MY.BACKUP.DZP space: 100 - space_type: M + space_type: m - name: Backup an entire volume, MYVOL1, to the UNIX file /tmp/temp_backup.dzp, allocate 1GB for data sets used in backup process. @@ -255,7 +255,7 @@ volume: MYVOL1 full_volume: yes space: 1 - space_type: G + space_type: g - name: Restore data sets from backup stored in the UNIX file /tmp/temp_backup.dzp. Use z/OS username as new HLQ. @@ -299,7 +299,7 @@ full_volume: yes backup_name: MY.BACKUP.DZP space: 1 - space_type: G + space_type: g - name: Restore data sets from backup stored in the UNIX file /tmp/temp_backup.dzp. Specify DB2SMS10 for the SMS storage and management classes to use for the restored @@ -346,7 +346,7 @@ def main(): ), ), space=dict(type="int", required=False, aliases=["size"]), - space_type=dict(type="str", required=False, aliases=["unit"], choices=["K", "M", "G", "CYL", "TRK"]), + space_type=dict(type="str", required=False, aliases=["unit"], choices=["k", "m", "g", "cyl", "trk"]), volume=dict(type="str", required=False), full_volume=dict(type="bool", default=False), temp_volume=dict(type="str", required=False, aliases=["dest_volume"]), @@ -709,12 +709,12 @@ def space_type_type(contents, dependencies): """ if contents is None: if dependencies.get("full_volume"): - return "G" + return "g" else: - return "M" - if not match(r"^(M|G|K|TRK|CYL)$", contents, IGNORECASE): + return "m" + if not match(r"^(m|g|k|trk|cyl)$", contents, IGNORECASE): raise ValueError( - 'Value {0} is invalid for space_type argument. Valid space types are "K", "M", "G", "TRK" or "CYL".'.format( + 'Value {0} is invalid for space_type argument. Valid space types are "k", "m", "g", "trk" or "cyl".'.format( contents ) ) diff --git a/plugins/modules/zos_copy.py b/plugins/modules/zos_copy.py index 9acb3c1c6..da29f688a 100644 --- a/plugins/modules/zos_copy.py +++ b/plugins/modules/zos_copy.py @@ -347,16 +347,16 @@ type: str required: true choices: - - KSDS - - ESDS - - RRDS - - LDS - - SEQ - - PDS - - PDSE - - MEMBER - - BASIC - - LIBRARY + - ksds + - esds + - rrds + - lds + - seq + - pds + - pdse + - member + - basic + - library space_primary: description: - If the destination I(dest) data set does not exist , this sets the @@ -375,27 +375,27 @@ description: - If the destination data set does not exist, this sets the unit of measurement to use when defining primary and secondary space. - - Valid units of size are C(K), C(M), C(G), C(CYL), and C(TRK). + - Valid units of size are C(k), C(m), C(g), C(cyl), and C(trk). type: str choices: - - K - - M - - G - - CYL - - TRK + - k + - m + - g + - cyl + - trk required: false record_format: description: - If the destination data set does not exist, this sets the format of the - data set. (e.g C(FB)) - - Choices are case-insensitive. + data set. (e.g C(fb)) + - Choices are case-sensitive. required: false choices: - - FB - - VB - - FBA - - VBA - - U + - fb + - vb + - fba + - vba + - u type: str record_length: description: @@ -417,15 +417,15 @@ key_offset: description: - The key offset to use when creating a KSDS data set. - - I(key_offset) is required when I(type=KSDS). - - I(key_offset) should only be provided when I(type=KSDS) + - I(key_offset) is required when I(type=ksds). + - I(key_offset) should only be provided when I(type=ksds) type: int required: false key_length: description: - The key length to use when creating a KSDS data set. - - I(key_length) is required when I(type=KSDS). - - I(key_length) should only be provided when I(type=KSDS) + - I(key_length) is required when I(type=ksds). + - I(key_length) should only be provided when I(type=ksds) type: int required: false sms_storage_class: @@ -642,11 +642,11 @@ remote_src: true volume: '222222' dest_data_set: - type: SEQ + type: seq space_primary: 10 space_secondary: 3 - space_type: K - record_format: VB + space_type: k + record_format: vb record_length: 150 - name: Copy a Program Object and its aliases on a remote system to a new PDSE member MYCOBOL @@ -702,7 +702,7 @@ description: Record format of the dataset. type: str - sample: FB + sample: fb record_length: description: Record length of the dataset. @@ -722,21 +722,21 @@ description: Unit of measurement for space. type: str - sample: K + sample: k type: description: Type of dataset allocated. type: str - sample: PDSE + sample: pdse sample: { "block_size": 32760, - "record_format": "FB", + "record_format": "fb", "record_length": 45, "space_primary": 2, "space_secondary": 1, - "space_type": "K", - "type": "PDSE" + "space_type": "k", + "type": "pdse" } checksum: description: SHA256 checksum of the file after running zos_copy. @@ -2802,7 +2802,7 @@ def run_module(module, arg_def): # dest_data_set.type overrides `dest_ds_type` given precedence rules if dest_data_set and dest_data_set.get("type"): - dest_ds_type = dest_data_set.get("type") + dest_ds_type = dest_data_set.get("type").upper() elif executable: """ When executable is selected and dest_exists is false means an executable PDSE was copied to remote, so we need to provide the correct dest_ds_type that will later be transformed into LIBRARY. @@ -2810,16 +2810,7 @@ def run_module(module, arg_def): and LIBRARY is not in MVS_PARTITIONED frozen set.""" dest_ds_type = "PDSE" - if dest_data_set and (dest_data_set.get('record_format', '') == 'FBA' or dest_data_set.get('record_format', '') == 'VBA'): - dest_has_asa_chars = True - elif not dest_exists and asa_text: - dest_has_asa_chars = True - elif dest_exists and dest_ds_type not in data_set.DataSet.MVS_VSAM: - dest_attributes = datasets.list_datasets(dest_name)[0] - if dest_attributes.record_format == 'FBA' or dest_attributes.record_format == 'VBA': - dest_has_asa_chars = True - - if dest_data_set and (dest_data_set.get('record_format', '') == 'FBA' or dest_data_set.get('record_format', '') == 'VBA'): + if dest_data_set and (dest_data_set.get('record_format', '') == 'fba' or dest_data_set.get('record_format', '') == 'vba'): dest_has_asa_chars = True elif not dest_exists and asa_text: dest_has_asa_chars = True @@ -3177,8 +3168,8 @@ def main(): options=dict( type=dict( type='str', - choices=['BASIC', 'KSDS', 'ESDS', 'RRDS', - 'LDS', 'SEQ', 'PDS', 'PDSE', 'MEMBER', 'LIBRARY'], + choices=['basic', 'ksds', 'esds', 'rrds', + 'lds', 'seq', 'pds', 'pdse', 'member', 'library'], required=True, ), space_primary=dict( @@ -3187,12 +3178,12 @@ def main(): type='int', required=False), space_type=dict( type='str', - choices=['K', 'M', 'G', 'CYL', 'TRK'], + choices=['k', 'm', 'g', 'cyl', 'trk'], required=False, ), record_format=dict( type='str', - choices=["FB", "VB", "FBA", "VBA", "U"], + choices=["fb", "vb", "fba", "vba", "u"], required=False ), record_length=dict(type='int', required=False), diff --git a/plugins/modules/zos_data_set.py b/plugins/modules/zos_data_set.py index 1969462c3..446fd6fe7 100644 --- a/plugins/modules/zos_data_set.py +++ b/plugins/modules/zos_data_set.py @@ -33,7 +33,7 @@ - The name of the data set being managed. (e.g C(USER.TEST)) - If I(name) is not provided, a randomized data set name will be generated with the HLQ matching the module-runners username. - - Required if I(type=MEMBER) or I(state!=present) and not using I(batch). + - Required if I(type=member) or I(state!=present) and not using I(batch). type: str required: false state: @@ -46,7 +46,7 @@ If I(state=absent) and the data set does exist on the managed node, remove the data set, module completes successfully with I(changed=True). - > - If I(state=absent) and I(type=MEMBER) and I(force=True), the data set + If I(state=absent) and I(type=member) and I(force=True), the data set will be opened with I(DISP=SHR) such that the entire data set can be accessed by other processes while the specified member is deleted. - > @@ -77,7 +77,7 @@ If I(state=present) and I(replace=False) and the data set is present on the managed node, no action taken, module completes successfully with I(changed=False). - > - If I(state=present) and I(type=MEMBER) and the member does not exist in the data set, + If I(state=present) and I(type=member) and the member does not exist in the data set, create a member formatted to store data, module completes successfully with I(changed=True). Note, a PDSE does not allow a mixture of formats such that there is executables (program objects) and data. The member created is formatted to store data, @@ -109,26 +109,26 @@ - uncataloged type: description: - - The data set type to be used when creating a data set. (e.g C(pdse)) - - C(MEMBER) expects to be used with an existing partitioned data set. + - The data set type to be used when creating a data set. (e.g C(pdse)). + - C(member) expects to be used with an existing partitioned data set. - Choices are case-sensitive. required: false type: str choices: - - KSDS - - ESDS - - RRDS - - LDS - - SEQ - - PDS - - PDSE - - LIBRARY - - BASIC - - LARGE - - MEMBER - - HFS - - ZFS - default: PDS + - ksds + - esds + - rrds + - lds + - seq + - pds + - pdse + - library + - basic + - large + - member + - hfs + - zfs + default: pds space_primary: description: - The amount of primary space to allocate for the dataset. @@ -146,33 +146,33 @@ space_type: description: - The unit of measurement to use when defining primary and secondary space. - - Valid units of size are C(K), C(M), C(G), C(CYL), and C(TRK). + - Valid units of size are C(k), C(m), C(g), C(cyl), and C(trk). type: str choices: - - K - - M - - G - - CYL - - TRK + - k + - m + - g + - cyl + - trk required: false - default: M + default: m record_format: description: - The format of the data set. (e.g C(FB)) - Choices are case-sensitive. - - When I(type=KSDS), I(type=ESDS), I(type=RRDS), I(type=LDS) or I(type=ZFS) + - When I(type=ksds), I(type=esds), I(type=rrds), I(type=lds) or I(type=zfs) then I(record_format=None), these types do not have a default I(record_format). required: false choices: - - FB - - VB - - FBA - - VBA - - U - - F + - fb + - vb + - fba + - vba + - u + - f type: str - default: FB + default: fb aliases: - format sms_storage_class: @@ -221,15 +221,15 @@ key_offset: description: - The key offset to use when creating a KSDS data set. - - I(key_offset) is required when I(type=KSDS). - - I(key_offset) should only be provided when I(type=KSDS) + - I(key_offset) is required when I(type=ksds). + - I(key_offset) should only be provided when I(type=ksds) type: int required: false key_length: description: - The key length to use when creating a KSDS data set. - - I(key_length) is required when I(type=KSDS). - - I(key_length) should only be provided when I(type=KSDS) + - I(key_length) is required when I(type=ksds). + - I(key_length) should only be provided when I(type=ksds) type: int required: false volumes: @@ -281,7 +281,7 @@ - The I(force=True) option enables sharing of data sets through the disposition I(DISP=SHR). - The I(force=True) only applies to data set members when I(state=absent) - and I(type=MEMBER). + and I(type=member). type: bool required: false default: false @@ -297,7 +297,7 @@ - The name of the data set being managed. (e.g C(USER.TEST)) - If I(name) is not provided, a randomized data set name will be generated with the HLQ matching the module-runners username. - - Required if I(type=MEMBER) or I(state!=present) + - Required if I(type=member) or I(state!=present) type: str required: false state: @@ -310,7 +310,7 @@ If I(state=absent) and the data set does exist on the managed node, remove the data set, module completes successfully with I(changed=True). - > - If I(state=absent) and I(type=MEMBER) and I(force=True), the data + If I(state=absent) and I(type=member) and I(force=True), the data set will be opened with I(DISP=SHR) such that the entire data set can be accessed by other processes while the specified member is deleted. @@ -342,7 +342,7 @@ If I(state=present) and I(replace=False) and the data set is present on the managed node, no action taken, module completes successfully with I(changed=False). - > - If I(state=present) and I(type=MEMBER) and the member does not exist in the data set, + If I(state=present) and I(type=member) and the member does not exist in the data set, create a member formatted to store data, module completes successfully with I(changed=True). Note, a PDSE does not allow a mixture of formats such that there is executables (program objects) and data. The member created is formatted to store data, @@ -374,26 +374,26 @@ - uncataloged type: description: - - The data set type to be used when creating a data set. (e.g C(PDSE)) - - C(MEMBER) expects to be used with an existing partitioned data set. + - The data set type to be used when creating a data set. (e.g C(pdse)) + - C(member) expects to be used with an existing partitioned data set. - Choices are case-sensitive. required: false type: str choices: - - KSDS - - ESDS - - RRDS - - LDS - - SEQ - - PDS - - PDSE - - LIBRARY - - BASIC - - LARGE - - MEMBER - - HFS - - ZFS - default: PDS + - ksds + - esds + - rrds + - lds + - seq + - pds + - pdse + - library + - basic + - large + - member + - hfs + - zfs + default: pds space_primary: description: - The amount of primary space to allocate for the dataset. @@ -411,33 +411,33 @@ space_type: description: - The unit of measurement to use when defining primary and secondary space. - - Valid units of size are C(K), C(M), C(G), C(CYL), and C(TRK). + - Valid units of size are C(k), C(m), C(g), C(cyl), and C(trk). type: str choices: - - K - - M - - G - - CYL - - TRK + - k + - m + - g + - cyl + - trk required: false - default: M + default: m record_format: description: - The format of the data set. (e.g C(FB)) - Choices are case-sensitive. - - When I(type=KSDS), I(type=ESDS), I(type=RRDS), I(type=LDS) or - I(type=ZFS) then I(record_format=None), these types do not have a + - When I(type=ksds), I(type=esds), I(type=rrds), I(type=lds) or + I(type=zfs) then I(record_format=None), these types do not have a default I(record_format). required: false choices: - - FB - - VB - - FBA - - VBA - - U - - F + - fb + - vb + - fba + - vba + - u + - f type: str - default: FB + default: fb aliases: - format sms_storage_class: @@ -486,15 +486,15 @@ key_offset: description: - The key offset to use when creating a KSDS data set. - - I(key_offset) is required when I(type=KSDS). - - I(key_offset) should only be provided when I(type=KSDS) + - I(key_offset) is required when I(type=ksds). + - I(key_offset) should only be provided when I(type=ksds) type: int required: false key_length: description: - The key length to use when creating a KSDS data set. - - I(key_length) is required when I(type=KSDS). - - I(key_length) should only be provided when I(type=KSDS) + - I(key_length) is required when I(type=ksds). + - I(key_length) should only be provided when I(type=ksds) type: int required: false volumes: @@ -539,7 +539,7 @@ - The I(force=True) option enables sharing of data sets through the disposition I(DISP=SHR). - The I(force=True) only applies to data set members when - I(state=absent) and I(type=MEMBER). + I(state=absent) and I(type=member). type: bool required: false default: false @@ -549,7 +549,7 @@ - name: Create a sequential data set if it does not exist zos_data_set: name: someds.name.here - type: SEQ + type: seq state: present - name: Create a PDS data set if it does not exist @@ -557,27 +557,27 @@ name: someds.name.here type: pds space_primary: 5 - space_type: M - record_format: FBA + space_type: m + record_format: fba record_length: 25 - name: Attempt to replace a data set if it exists zos_data_set: name: someds.name.here - type: PDS + type: pds space_primary: 5 - space_type: M - record_format: U + space_type: m + record_format: u record_length: 25 replace: yes - name: Attempt to replace a data set if it exists. If not found in the catalog, check if it is available on volume 222222, and catalog if found. zos_data_set: name: someds.name.here - type: PDS + type: pds space_primary: 5 - space_type: M - record_format: U + space_type: m + record_format: u record_length: 25 volumes: "222222" replace: yes @@ -585,19 +585,19 @@ - name: Create an ESDS data set if it does not exist zos_data_set: name: someds.name.here - type: ESDS + type: esds - name: Create a KSDS data set if it does not exist zos_data_set: name: someds.name.here - type: KSDS + type: ksds key_length: 8 key_offset: 0 - name: Create an RRDS data set with storage class MYDATA if it does not exist zos_data_set: name: someds.name.here - type: RRDS + type: rrds sms_storage_class: mydata - name: Delete a data set if it exists @@ -614,43 +614,43 @@ - name: Write a member to an existing PDS; replace if member exists zos_data_set: name: someds.name.here(mydata) - type: MEMBER + type: member replace: yes - name: Write a member to an existing PDS; do not replace if member exists zos_data_set: name: someds.name.here(mydata) - type: MEMBER + type: member - name: Remove a member from an existing PDS zos_data_set: name: someds.name.here(mydata) state: absent - type: MEMBER + type: member - name: Remove a member from an existing PDS/E by opening with disposition DISP=SHR zos_data_set: name: someds.name.here(mydata) state: absent - type: MEMBER + type: member force: yes - name: Create multiple partitioned data sets and add one or more members to each zos_data_set: batch: - name: someds.name.here1 - type: PDS + type: pds space_primary: 5 - space_type: M - record_format: FB + space_type: m + record_format: fb replace: yes - name: someds.name.here1(member1) - type: MEMBER + type: member - name: someds.name.here2(member1) - type: MEMBER + type: member replace: yes - name: someds.name.here2(member2) - type: MEMBER + type: member - name: Catalog a data set present on volume 222222 if it is uncataloged. zos_data_set: @@ -689,44 +689,44 @@ # CONSTANTS DATA_SET_TYPES = [ - "KSDS", - "ESDS", - "RRDS", - "LDS", - "SEQ", - "PDS", - "PDSE", - "BASIC", - "LARGE", - "LIBRARY", - "MEMBER", - "HFS", - "ZFS", + "ksds", + "esds", + "rrds", + "lds", + "seq", + "pds", + "pdse", + "basic", + "large", + "library", + "member", + "hfs", + "zfs", ] DATA_SET_FORMATS = [ - "FB", - "VB", - "FBA", - "VBA", - "U", - "F", + "fb", + "vb", + "fba", + "vba", + "u", + "f", ] DEFAULT_RECORD_LENGTHS = { - "FB": 80, - "FBA": 80, - "VB": 137, - "VBA": 137, - "U": 0, + "fb": 80, + "fba": 80, + "vb": 137, + "vba": 137, + "u": 0, } DATA_SET_TYPES_VSAM = [ - "KSDS", - "ESDS", - "RRDS", - "LDS", - "ZFS", + "ksds", + "esds", + "rrds", + "lds", + "zfs", ] # ------------- Functions to validate arguments ------------- # @@ -775,14 +775,14 @@ def data_set_name(contents, dependencies): if contents is None: if dependencies.get("state") != "present": raise ValueError('Data set name must be provided when "state!=present"') - if dependencies.get("type") != "MEMBER": + if dependencies.get("type") != "member": tmphlq = dependencies.get("tmp_hlq") if tmphlq is None: tmphlq = "" contents = DataSet.temp_name(tmphlq) else: raise ValueError( - 'Data set and member name must be provided when "type=MEMBER"' + 'Data set and member name must be provided when "type=member"' ) dsname = str(contents) if not re.fullmatch( @@ -796,7 +796,7 @@ def data_set_name(contents, dependencies): dsname, re.IGNORECASE, ) - and dependencies.get("type") == "MEMBER" + and dependencies.get("type") == "member" ): raise ValueError( "Value {0} is invalid for data set argument.".format(dsname) @@ -809,13 +809,13 @@ def space_type(contents, dependencies): """Validates provided data set unit of space is valid. Returns the unit of space.""" if dependencies.get("state") == "absent": - return "M" + return "m" if contents is None: return None - match = re.fullmatch(r"(M|G|K|TRK|CYL)", contents, re.IGNORECASE) + match = re.fullmatch(r"(m|g|k|trk|cyl)", contents, re.IGNORECASE) if not match: raise ValueError( - 'Value {0} is invalid for space_type argument. Valid space types are "K", "M", "G", "TRK" or "CYL".'.format( + 'Value {0} is invalid for space_type argument. Valid space types are "k", "m", "g", "trk" or "cyl".'.format( contents ) ) @@ -872,12 +872,11 @@ def record_length(contents, dependencies): # * dependent on state # * dependent on record_length def record_format(contents, dependencies): - """Validates data set format is valid. - Returns uppercase data set format.""" + """Validates data set format is valid.""" if dependencies.get("state") == "absent": - return "FB" + return "fb" if contents is None: - return "FB" + return "fb" formats = "|".join(DATA_SET_FORMATS) if not re.fullmatch(formats, contents, re.IGNORECASE): raise ValueError( @@ -885,17 +884,16 @@ def record_format(contents, dependencies): contents, ", ".join(DATA_SET_FORMATS) ) ) - return contents.upper() + return contents # * dependent on state def data_set_type(contents, dependencies): - """Validates data set type is valid. - Returns uppercase data set type.""" - # if dependencies.get("state") == "absent" and contents != "MEMBER": + """Validates data set type is valid.""" + # if dependencies.get("state") == "absent" and contents != "member": # return None if contents is None: - return "PDS" + return "pds" types = "|".join(DATA_SET_TYPES) if not re.fullmatch(types, contents, re.IGNORECASE): raise ValueError( @@ -903,7 +901,7 @@ def data_set_type(contents, dependencies): contents, ", ".join(DATA_SET_TYPES) ) ) - return contents.upper() + return contents # * dependent on state @@ -936,10 +934,10 @@ def key_length(contents, dependencies): Returns data set key length as integer.""" if dependencies.get("state") == "absent": return None - if dependencies.get("type") == "KSDS" and contents is None: + if dependencies.get("type") == "ksds" and contents is None: raise ValueError("key_length is required when requesting KSDS data set.") - if dependencies.get("type") != "KSDS" and contents is not None: - raise ValueError("key_length is only valid when type=KSDS.") + if dependencies.get("type") != "ksds" and contents is not None: + raise ValueError("key_length is only valid when type=ksds.") if contents is None: return None contents = int(contents) @@ -958,10 +956,10 @@ def key_offset(contents, dependencies): Returns data set key offset as integer.""" if dependencies.get("state") == "absent": return None - if dependencies.get("type") == "KSDS" and contents is None: + if dependencies.get("type") == "ksds" and contents is None: raise ValueError("key_offset is required when requesting KSDS data set.") - if dependencies.get("type") != "KSDS" and contents is not None: - raise ValueError("key_offset is only valid when type=KSDS.") + if dependencies.get("type") != "ksds" and contents is not None: + raise ValueError("key_offset is only valid when type=ksds.") if contents is None: return None contents = int(contents) @@ -981,13 +979,13 @@ def perform_data_set_operations(name, state, **extra_args): # passing in **extra_args forced me to modify the acceptable parameters # for multiple functions in data_set.py including ensure_present, replace # and create where the force parameter has no bearing. - if state == "present" and extra_args.get("type") != "MEMBER": + if state == "present" and extra_args.get("type") != "member": changed = DataSet.ensure_present(name, **extra_args) - elif state == "present" and extra_args.get("type") == "MEMBER": + elif state == "present" and extra_args.get("type") == "member": changed = DataSet.ensure_member_present(name, extra_args.get("replace")) - elif state == "absent" and extra_args.get("type") != "MEMBER": + elif state == "absent" and extra_args.get("type") != "member": changed = DataSet.ensure_absent(name, extra_args.get("volumes")) - elif state == "absent" and extra_args.get("type") == "MEMBER": + elif state == "absent" and extra_args.get("type") == "member": changed = DataSet.ensure_member_absent(name, extra_args.get("force")) elif state == "cataloged": changed = DataSet.ensure_cataloged(name, extra_args.get("volumes")) @@ -1024,8 +1022,8 @@ def parse_and_validate_args(params): type=space_type, required=False, dependencies=["state"], - choices=["K", "M", "G", "CYL", "TRK"], - default="M", + choices=["k", "m", "g", "cyl", "trk"], + default="m", ), space_primary=dict(type="int", required=False, dependencies=["state"]), space_secondary=dict( @@ -1035,9 +1033,9 @@ def parse_and_validate_args(params): type=record_format, required=False, dependencies=["state"], - choices=["FB", "VB", "FBA", "VBA", "U", "F"], + choices=["fb", "vb", "fba", "vba", "u", "f"], aliases=["format"], - default="FB", + default="fb", ), sms_management_class=dict( type=sms_class, required=False, dependencies=["state"] @@ -1113,8 +1111,8 @@ def parse_and_validate_args(params): type=space_type, required=False, dependencies=["state"], - choices=["K", "M", "G", "CYL", "TRK"], - default="M", + choices=["k", "m", "g", "cyl", "trk"], + default="m", ), space_primary=dict(type="int", required=False, dependencies=["state"]), space_secondary=dict(type="int", required=False, dependencies=["state"]), @@ -1122,9 +1120,9 @@ def parse_and_validate_args(params): type=record_format, required=False, dependencies=["state"], - choices=["FB", "VB", "FBA", "VBA", "U", "F"], + choices=["fb", "vb", "fba", "vba", "u", "f"], aliases=["format"], - default="FB", + default="fb", ), sms_management_class=dict( type=sms_class, required=False, dependencies=["state"] @@ -1224,14 +1222,14 @@ def run_module(): type=dict( type="str", required=False, - default="PDS", + default="pds", choices=DATA_SET_TYPES, ), space_type=dict( type="str", required=False, - default="M", - choices=["K", "M", "G", "CYL", "TRK"], + default="m", + choices=["k", "m", "g", "cyl", "trk"], ), space_primary=dict(type="int", required=False, default=5), space_secondary=dict(type="int", required=False, default=3), @@ -1239,8 +1237,8 @@ def run_module(): type="str", required=False, aliases=["format"], - default="FB", - choices=["FB", "VB", "FBA", "VBA", "U", "F"], + default="fb", + choices=["fb", "vb", "fba", "vba", "u", "f"], ), sms_management_class=dict(type="str", required=False), # I know this alias is odd, ZOAU used to document they supported @@ -1289,14 +1287,14 @@ def run_module(): type=dict( type="str", required=False, - default="PDS", + default="pds", choices=DATA_SET_TYPES, ), space_type=dict( type="str", required=False, - default="M", - choices=["K", "M", "G", "CYL", "TRK"], + default="m", + choices=["k", "m", "g", "cyl", "trk"], ), space_primary=dict(type="int", required=False, default=5), space_secondary=dict(type="int", required=False, default=3), @@ -1304,8 +1302,8 @@ def run_module(): type="str", required=False, aliases=["format"], - choices=["FB", "VB", "FBA", "VBA", "U", "F"], - default="FB" + choices=["fb", "vb", "fba", "vba", "u", "f"], + default="fb" ), sms_management_class=dict(type="str", required=False), # I know this alias is odd, ZOAU used to document they supported @@ -1357,7 +1355,7 @@ def run_module(): # This section is copied down inside if/check_mode false, so it modifies after the arg parser if module.params.get("batch") is not None: for entry in module.params.get("batch"): - if entry.get('type') is not None and entry.get("type").upper() in DATA_SET_TYPES_VSAM: + if entry.get('type') is not None and entry.get("type") in DATA_SET_TYPES_VSAM: entry["record_format"] = None if module.params.get("type") is not None: module.params["type"] = None @@ -1374,7 +1372,7 @@ def run_module(): if module.params.get("record_format") is not None: module.params["record_format"] = None elif module.params.get("type") is not None: - if module.params.get("type").upper() in DATA_SET_TYPES_VSAM: + if module.params.get("type") in DATA_SET_TYPES_VSAM: # For VSAM types set the value to nothing and let the code manage it # module.params["record_format"] = None if module.params.get("record_format") is not None: @@ -1394,7 +1392,7 @@ def run_module(): # This *appears* redundant, bit the parse_and_validate reinforces the default value for record_type if data_set_params.get("batch") is not None: for entry in data_set_params.get("batch"): - if entry.get('type') is not None and entry.get("type").upper() in DATA_SET_TYPES_VSAM: + if entry.get('type') is not None and entry.get("type") in DATA_SET_TYPES_VSAM: entry["record_format"] = None if data_set_params.get("type") is not None: data_set_params["type"] = None @@ -1411,7 +1409,7 @@ def run_module(): if data_set_params.get("record_format") is not None: data_set_params["record_format"] = None else: - if data_set_params.get("type").upper() in DATA_SET_TYPES_VSAM: + if data_set_params.get("type") in DATA_SET_TYPES_VSAM: if data_set_params.get("record_format") is not None: data_set_params["record_format"] = None diff --git a/plugins/modules/zos_job_submit.py b/plugins/modules/zos_job_submit.py index 7c66c2543..1b56f459d 100644 --- a/plugins/modules/zos_job_submit.py +++ b/plugins/modules/zos_job_submit.py @@ -42,17 +42,17 @@ (e.g "/User/tester/ansible-playbook/sample.jcl") location: required: false - default: DATA_SET + default: data_set type: str choices: - - DATA_SET - - USS - - LOCAL + - data_set + - uss + - local description: - - The JCL location. Supported choices are ``DATA_SET``, ``USS`` or ``LOCAL``. - - DATA_SET can be a PDS, PDSE, or sequential data set. - - USS means the JCL location is located in UNIX System Services (USS). - - LOCAL means locally to the ansible control node. + - The JCL location. Supported choices are C(data_set), C(uss) or C(local). + - C(data_set) can be a PDS, PDSE, or sequential data set. + - C(uss) means the JCL location is located in UNIX System Services (USS). + - C(local) means locally to the ansible control node. wait_time_s: required: false default: 10 @@ -80,17 +80,17 @@ required: false type: str description: - - The volume serial (VOLSER)is where the data set resides. The option + - The volume serial (VOLSER) is where the data set resides. The option is required only when the data set is not cataloged on the system. - When configured, the L(zos_job_submit,./zos_job_submit.html) will try to catalog the data set for the volume serial. If it is not able to, the module will fail. - - Ignored for I(location=USS) and I(location=LOCAL). + - Ignored for I(location=uss) and I(location=local). encoding: description: - Specifies which encoding the local JCL file should be converted from and to, before submitting the job. - - This option is only supported for when I(location=LOCAL). + - This option is only supported for when I(location=local). - If this parameter is not provided, and the z/OS systems default encoding can not be identified, the JCL file will be converted from UTF-8 to IBM-1047 by default, otherwise the module will detect the z/OS system @@ -561,19 +561,19 @@ - name: Submit JCL in a PDSE member. zos_job_submit: src: HLQ.DATA.LLQ(SAMPLE) - location: DATA_SET + location: data_set register: response - name: Submit JCL in USS with no DDs in the output. zos_job_submit: src: /u/tester/demo/sample.jcl - location: USS + location: uss return_output: false - name: Convert local JCL to IBM-037 and submit the job. zos_job_submit: src: /Users/maxy/ansible-playbooks/provision/sample.jcl - location: LOCAL + location: local encoding: from: ISO8859-1 to: IBM-037 @@ -581,25 +581,25 @@ - name: Submit JCL in an uncataloged PDSE on volume P2SS01. zos_job_submit: src: HLQ.DATA.LLQ(SAMPLE) - location: DATA_SET + location: data_set volume: P2SS01 - name: Submit a long running PDS job and wait up to 30 seconds for completion. zos_job_submit: src: HLQ.DATA.LLQ(LONGRUN) - location: DATA_SET + location: data_set wait_time_s: 30 - name: Submit a long running PDS job and wait up to 30 seconds for completion. zos_job_submit: src: HLQ.DATA.LLQ(LONGRUN) - location: DATA_SET + location: data_set wait_time_s: 30 - name: Submit JCL and set the max return code the module should fail on to 16. zos_job_submit: src: HLQ.DATA.LLQ - location: DATA_SET + location: data_set max_rc: 16 """ @@ -805,8 +805,8 @@ def run_module(): src=dict(type="str", required=True), location=dict( type="str", - default="DATA_SET", - choices=["DATA_SET", "USS", "LOCAL"], + default="data_set", + choices=["data_set", "uss", "local"], ), encoding=dict( type="dict", @@ -875,8 +875,8 @@ def run_module(): src=dict(arg_type="data_set_or_path", required=True), location=dict( arg_type="str", - default="DATA_SET", - choices=["DATA_SET", "USS", "LOCAL"], + default="data_set", + choices=["data_set", "uss", "local"], ), from_encoding=dict( arg_type="encoding", default=Defaults.DEFAULT_ASCII_CHARSET, required=False), @@ -907,7 +907,7 @@ def run_module(): return_output = parsed_args.get("return_output") wait_time_s = parsed_args.get("wait_time_s") max_rc = parsed_args.get("max_rc") - temp_file = parsed_args.get("src") if location == "LOCAL" else None + temp_file = parsed_args.get("src") if location == "local" else None # Default 'changed' is False in case the module is not able to execute result = dict(changed=False) @@ -921,13 +921,13 @@ def run_module(): job_submitted_id = None duration = 0 start_time = timer() - if location == "DATA_SET": + if location == "data_set": job_submitted_id, duration = submit_src_jcl( module, src, src_name=src, timeout=wait_time_s, is_unix=False, volume=volume, start_time=start_time) - elif location == "USS": + elif location == "uss": job_submitted_id, duration = submit_src_jcl( module, src, src_name=src, timeout=wait_time_s, is_unix=True) - elif location == "LOCAL": + elif location == "local": job_submitted_id, duration = submit_src_jcl( module, src, src_name=src, timeout=wait_time_s, is_unix=True) diff --git a/plugins/modules/zos_mount.py b/plugins/modules/zos_mount.py index 3f4c642f3..61ca20b9f 100644 --- a/plugins/modules/zos_mount.py +++ b/plugins/modules/zos_mount.py @@ -48,13 +48,13 @@ description: - The type of file system that will be mounted. - The physical file systems data set format to perform the logical mount. - - The I(fs_type) is required to be uppercase. + - The I(fs_type) is required to be lowercase. type: str choices: - - HFS - - ZFS - - NFS - - TFS + - hfs + - zfs + - nfs + - tfs required: True state: description: @@ -168,33 +168,33 @@ file hierarchy). type: str choices: - - DRAIN - - FORCE - - IMMEDIATE - - NORMAL - - REMOUNT - - RESET + - drain + - force + - immediate + - normal + - remount + - reset required: False - default: NORMAL + default: normal mount_opts: description: - Options available to the mount. - - If I(mount_opts=RO) on a mounted/remount, mount is performed + - If I(mount_opts=ro) on a mounted/remount, mount is performed read-only. - - If I(mount_opts=SAME) and (unmount_opts=REMOUNT), mount is opened + - If I(mount_opts=same) and (unmount_opts=remount), mount is opened in the same mode as previously opened. - - If I(mount_opts=NOWAIT), mount is performed asynchronously. - - If I(mount_opts=NOSECURITY), security checks are not enforced for + - If I(mount_opts=nowait), mount is performed asynchronously. + - If I(mount_opts=nosecurity), security checks are not enforced for files in this file system. type: str choices: - - RO - - RW - - SAME - - NOWAIT - - NOSECURITY + - ro + - rw + - same + - nowait + - nosecurity required: False - default: RW + default: rw src_params: description: - Specifies a parameter string to be passed to the file system type. @@ -206,15 +206,15 @@ description: - If present, tags get written to any untagged file. - When the file system is unmounted, the tags are lost. - - If I(tag_untagged=NOTEXT) none of the untagged files in the file system are + - If I(tag_untagged=notext) none of the untagged files in the file system are automatically converted during file reading and writing. - - If I(tag_untagged=TEXT) each untagged file is implicitly marked as + - If I(tag_untagged=text) each untagged file is implicitly marked as containing pure text data that can be converted. - If this flag is used, use of tag_ccsid is encouraged. type: str choices: - - TEXT - - NOTEXT + - text + - notext required: False tag_ccsid: description: @@ -271,23 +271,23 @@ AUTOMOVE where the file system will be randomly moved to another system (no system list used). - > - I(automove=AUTOMOVE) indicates that ownership of the file system can be + I(automove=automove) indicates that ownership of the file system can be automatically moved to another system participating in a shared file system. - > - I(automove=NOAUTOMOVE) prevents movement of the file system's ownership in some situations. + I(automove=noautomove) prevents movement of the file system's ownership in some situations. - > - I(automove=UNMOUNT) allows the file system to be unmounted in some situations. + I(automove=unmount) allows the file system to be unmounted in some situations. type: str choices: - - AUTOMOVE - - NOAUTOMOVE - - UNMOUNT + - automove + - noautomove + - unmount required: False - default: AUTOMOVE + default: automove automove_list: description: - > - If(automove=AUTOMOVE), this option will be checked. + If(automove=automove), this option will be checked. - > This specifies the list of servers to include or exclude as destinations. - > @@ -317,14 +317,14 @@ zos_mount: src: SOMEUSER.VVV.ZFS path: /u/omvsadm/core - fs_type: ZFS + fs_type: zfs state: mounted - name: Unmount a filesystem. zos_mount: src: SOMEUSER.VVV.ZFS path: /u/omvsadm/core - fs_type: ZFS + fs_type: zfs state: unmounted unmount_opts: REMOUNT opts: same @@ -333,7 +333,7 @@ zos_mount: src: SOMEUSER.VVV.ZFS path: /u/omvsadm/core - fs_type: ZFS + fs_type: zfs state: mounted mount_opts: RO @@ -341,7 +341,7 @@ zos_mount: src: SOMEUSER.VVV.ZFS path: /u/omvsadm/core - fs_type: ZFS + fs_type: zfs state: mounted persistent: data_store: SYS1.PARMLIB(BPXPRMAA) @@ -351,7 +351,7 @@ zos_mount: src: SOMEUSER.VVV.ZFS path: /u/omvsadm/core - fs_type: ZFS + fs_type: zfs state: mounted persistent: data_store: SYS1.PARMLIB(BPXPRMAA) @@ -363,7 +363,7 @@ zos_mount: src: SOMEUSER.VVV.ZFS path: /u/omvsadm/core - fs_type: ZFS + fs_type: zfs state: mounted allow_uid: no @@ -371,7 +371,7 @@ zos_mount: src: SOMEUSER.VVV.ZFS path: /u/omvsadm/core - fs_type: ZFS + fs_type: zfs state: mounted opts: nowait @@ -379,7 +379,7 @@ zos_mount: src: SOMEUSER.VVV.ZFS path: /u/omvsadm/core - fs_type: ZFS + fs_type: zfs state: mounted mount_opts: NOSECURITY @@ -387,7 +387,7 @@ zos_mount: src: SOMEUSER.VVV.ZFS path: /u/omvsadm/core - fs_type: ZFS + fs_type: zfs state: mounted automove: AUTOMOVE automove_list: I,DEV1,DEV2,DEV3,DEV9 @@ -396,7 +396,7 @@ zos_mount: src: SOMEUSER.VVV.ZFS path: /u/omvsadm/core - fs_type: ZFS + fs_type: zfs state: mounted automove: AUTOMOVE automove_list: EXCLUDE,DEV4,DEV5,DEV6,DEV7 @@ -854,7 +854,7 @@ def run_module(module, arg_def): src, path, fs_type ) ) - if "RO" in mount_opts: + if "ro" in mount_opts: subcmd = "READ" else: subcmd = "RDWR" @@ -882,14 +882,14 @@ def run_module(module, arg_def): fullcmd = fullcmd + " NOSETUID" parmtext = parmtext + "\n NOSETUID" - if "NOWAIT" in mount_opts: + if "nowait" in mount_opts: fullcmd = fullcmd + " NOWAIT" parmtext = parmtext + "\n NOWAIT" else: fullcmd = fullcmd + " WAIT" parmtext = parmtext + "\n WAIT" - if "NOSECURITY" in mount_opts: + if "nosecurity" in mount_opts: fullcmd = fullcmd + " NOSECURITY" parmtext = parmtext + "\n NOSECURITY" else: @@ -1051,10 +1051,10 @@ def main(): fs_type=dict( type="str", choices=[ - "HFS", - "ZFS", - "NFS", - "TFS", + "hfs", + "zfs", + "nfs", + "tfs", ], required=True, ), @@ -1079,27 +1079,27 @@ def main(): ), unmount_opts=dict( type="str", - default="NORMAL", - choices=["DRAIN", "FORCE", "IMMEDIATE", "NORMAL", "REMOUNT", "RESET"], + default="normal", + choices=["drain", "force", "immediate", "normal", "remount", "reset"], required=False, ), mount_opts=dict( type="str", - default="RW", - choices=["RO", "RW", "SAME", "NOWAIT", "NOSECURITY"], + default="rw", + choices=["ro", "rw", "same", "nowait", "nosecurity"], required=False, ), src_params=dict(type="str", required=False), tag_untagged=dict( - type="str", choices=["TEXT", "NOTEXT"], required=False + type="str", choices=["text", "notext"], required=False ), tag_ccsid=dict(type="int", required=False), allow_uid=dict(type="bool", default=True, required=False), sysname=dict(type="str", required=False), automove=dict( type="str", - default="AUTOMOVE", - choices=["AUTOMOVE", "NOAUTOMOVE", "UNMOUNT"], + default="automove", + choices=["automove", "noautomove", "unmount"], required=False, ), automove_list=dict(type="str", required=False), @@ -1114,10 +1114,10 @@ def main(): fs_type=dict( arg_type="str", choices=[ - "HFS", - "ZFS", - "NFS", - "TFS", + "hfs", + "zfs", + "nfs", + "tfs", ], required=True, ), @@ -1139,27 +1139,27 @@ def main(): ), unmount_opts=dict( arg_type="str", - default="NORMAL", - choices=["DRAIN", "FORCE", "IMMEDIATE", "NORMAL", "REMOUNT", "RESET"], + default="normal", + choices=["drain", "force", "immediate", "normal", "remount", "reset"], required=False, ), mount_opts=dict( arg_type="str", - default="RW", - choices=["RO", "RW", "SAME", "NOWAIT", "NOSECURITY"], + default="rw", + choices=["ro", "rw", "same", "nowait", "nosecurity"], required=False, ), src_params=dict(arg_type="str", default="", required=False), tag_untagged=dict( - arg_type="str", choices=["TEXT", "NOTEXT"], required=False + arg_type="str", choices=["text", "notext"], required=False ), tag_ccsid=dict(arg_type="int", required=False), allow_uid=dict(arg_type="bool", default=True, required=False), sysname=dict(arg_type="str", default="", required=False), automove=dict( arg_type="str", - default="AUTOMOVE", - choices=["AUTOMOVE", "NOAUTOMOVE", "UNMOUNT"], + default="automove", + choices=["automove", "noautomove", "unmount"], required=False, ), automove_list=dict(arg_type="str", default="", required=False), diff --git a/plugins/modules/zos_mvs_raw.py b/plugins/modules/zos_mvs_raw.py index a440c31c6..bcac50a63 100644 --- a/plugins/modules/zos_mvs_raw.py +++ b/plugins/modules/zos_mvs_raw.py @@ -96,16 +96,16 @@ - Maps to DSNTYPE on z/OS. type: str choices: - - LIBRARY - - PDS - - PDSE - - LARGE - - BASIC - - SEQ - - RRDS - - ESDS - - LDS - - KSDS + - library + - pds + - pdse + - large + - basic + - seq + - rrds + - esds + - lds + - ksds disposition: description: - I(disposition) indicates the status of a data set. @@ -125,9 +125,7 @@ choices: - delete - keep - - catlg - catalog - - uncatlg - uncatalog disposition_abnormal: description: @@ -138,32 +136,30 @@ choices: - delete - keep - - catlg - catalog - - uncatlg - uncatalog reuse: description: - - Determines if a data set should be reused if I(disposition=NEW) and if a data set with a matching name already exists. + - Determines if a data set should be reused if I(disposition=new) and if a data set with a matching name already exists. - If I(reuse=true), I(disposition) will be automatically switched to C(SHR). - If I(reuse=false), and a data set with a matching name already exists, allocation will fail. - Mutually exclusive with I(replace). - - I(reuse) is only considered when I(disposition=NEW) + - I(reuse) is only considered when I(disposition=new) type: bool default: false replace: description: - - Determines if a data set should be replaced if I(disposition=NEW) and a data set with a matching name already exists. + - Determines if a data set should be replaced if I(disposition=new) and a data set with a matching name already exists. - If I(replace=true), the original data set will be deleted, and a new data set created. - If I(replace=false), and a data set with a matching name already exists, allocation will fail. - Mutually exclusive with I(reuse). - - I(replace) is only considered when I(disposition=NEW) + - I(replace) is only considered when I(disposition=new) - I(replace) will result in loss of all data in the original data set unless I(backup) is specified. type: bool default: false backup: description: - - Determines if a backup should be made of an existing data set when I(disposition=NEW), I(replace=true), + - Determines if a backup should be made of an existing data set when I(disposition=new), I(replace=true), and a data set with the desired name is found. - I(backup) is only used when I(replace=true). type: bool @@ -174,12 +170,12 @@ using I(space_primary) and I(space_secondary). type: str choices: - - TRK - - CYL - - B - - K - - M - - G + - trk + - cyl + - b + - k + - m + - g space_primary: description: - The primary amount of space to allocate for a new data set. @@ -260,8 +256,8 @@ description: - How the label for the key encrypting key specified by I(label) is encoded by the Encryption Key Manager. - - I(encoding) can either be set to C(L) for label encoding, - or C(H) for hash encoding. + - I(encoding) can either be set to C(l) for label encoding, + or C(h) for hash encoding. - Maps to KEYCD1 on z/OS. type: str required: true @@ -289,8 +285,8 @@ description: - How the label for the key encrypting key specified by I(label) is encoded by the Encryption Key Manager. - - I(encoding) can either be set to C(L) for label encoding, - or C(H) for hash encoding. + - I(encoding) can either be set to C(l) for label encoding, + or C(h) for hash encoding. - Maps to KEYCD2 on z/OS. type: str required: true @@ -316,7 +312,7 @@ - The logical record length. (e.g C(80)). - For variable data sets, the length must include the 4-byte prefix area. - "Defaults vary depending on format: If FB/FBA 80, if VB/VBA 137, if U 0." - - Valid values are (1-32760 for non-vsam, 1-32761 for vsam). + - Valid values are (1-32760 for non-VSAM, 1-32761 for VSAM). - Maps to LRECL on z/OS. type: int required: false @@ -325,11 +321,11 @@ - The format and characteristics of the records for new data set. type: str choices: - - U - - VB - - VBA - - FB - - FBA + - u + - vb + - vba + - fb + - fba return_content: description: - Determines how content should be returned to the user. @@ -505,11 +501,11 @@ a UNIX file would normally be treated as a stream of bytes. type: str choices: - - U - - VB - - VBA - - FB - - FBA + - u + - vb + - vba + - fb + - fba return_content: description: - Determines how content should be returned to the user. @@ -717,16 +713,16 @@ - Maps to DSNTYPE on z/OS. type: str choices: - - LIBRARY - - PDS - - PDSE - - LARGE - - BASIC - - SEQ - - RRDS - - ESDS - - LDS - - KSDS + - library + - pds + - pdse + - large + - basic + - seq + - rrds + - esds + - lds + - ksds disposition: description: - I(disposition) indicates the status of a data set. @@ -746,9 +742,7 @@ choices: - delete - keep - - catlg - catalog - - uncatlg - uncatalog disposition_abnormal: description: @@ -759,32 +753,30 @@ choices: - delete - keep - - catlg - catalog - - uncatlg - uncatalog reuse: description: - - Determines if data set should be reused if I(disposition=NEW) and a data set with matching name already exists. + - Determines if data set should be reused if I(disposition=new) and a data set with matching name already exists. - If I(reuse=true), I(disposition) will be automatically switched to C(SHR). - If I(reuse=false), and a data set with a matching name already exists, allocation will fail. - Mutually exclusive with I(replace). - - I(reuse) is only considered when I(disposition=NEW) + - I(reuse) is only considered when I(disposition=new) type: bool default: false replace: description: - - Determines if data set should be replaced if I(disposition=NEW) and a data set with matching name already exists. + - Determines if data set should be replaced if I(disposition=new) and a data set with matching name already exists. - If I(replace=true), the original data set will be deleted, and a new data set created. - If I(replace=false), and a data set with a matching name already exists, allocation will fail. - Mutually exclusive with I(reuse). - - I(replace) is only considered when I(disposition=NEW) + - I(replace) is only considered when I(disposition=new) - I(replace) will result in loss of all data in the original data set unless I(backup) is specified. type: bool default: false backup: description: - - Determines if a backup should be made of existing data set when I(disposition=NEW), I(replace=true), + - Determines if a backup should be made of existing data set when I(disposition=new), I(replace=true), and a data set with the desired name is found. - I(backup) is only used when I(replace=true). type: bool @@ -795,12 +787,12 @@ using I(space_primary) and I(space_secondary). type: str choices: - - TRK - - CYL - - B - - K - - M - - G + - trk + - cyl + - b + - k + - m + - g space_primary: description: - The primary amount of space to allocate for a new data set. @@ -881,8 +873,8 @@ description: - How the label for the key encrypting key specified by I(label) is encoded by the Encryption Key Manager. - - I(encoding) can either be set to C(L) for label encoding, - or C(H) for hash encoding. + - I(encoding) can either be set to C(l) for label encoding, + or C(h) for hash encoding. - Maps to KEYCD1 on z/OS. type: str required: true @@ -910,8 +902,8 @@ description: - How the label for the key encrypting key specified by I(label) is encoded by the Encryption Key Manager. - - I(encoding) can either be set to C(L) for label encoding, - or C(H) for hash encoding. + - I(encoding) can either be set to C(l) for label encoding, + or C(h) for hash encoding. - Maps to KEYCD2 on z/OS. type: str required: true @@ -946,11 +938,11 @@ - The format and characteristics of the records for new data set. type: str choices: - - U - - VB - - VBA - - FB - - FBA + - u + - vb + - vba + - fb + - fba return_content: description: - Determines how content should be returned to the user. @@ -988,7 +980,7 @@ path: description: - The path to an existing UNIX file. - - Or provide the path to an new created UNIX file when I(status_group=OCREAT). + - Or provide the path to an new created UNIX file when I(status_group=ocreat). - The provided path must be absolute. required: true type: str @@ -1124,11 +1116,11 @@ a UNIX file would normally be treated as a stream of bytes. type: str choices: - - U - - VB - - VBA - - FB - - FBA + - u + - vb + - vba + - fb + - fba return_content: description: - Determines how content should be returned to the user. @@ -1300,13 +1292,13 @@ data_set_name: mypgm.output.ds disposition: new reuse: yes - type: SEQ + type: seq space_primary: 5 space_secondary: 1 - space_type: M + space_type: m volumes: - "000000" - record_format: FB + record_format: fb return_content: type: text - dd_input: @@ -1324,13 +1316,13 @@ data_set_name: mypgm.output.ds disposition: new reuse: yes - type: SEQ + type: seq space_primary: 5 space_secondary: 1 - space_type: M + space_type: m volumes: - "000000" - record_format: FB + record_format: fb return_content: type: text - dd_input: @@ -1369,13 +1361,13 @@ data_set_name: mypgm.output.ds disposition: new reuse: yes - type: SEQ + type: seq space_primary: 5 space_secondary: 1 - space_type: M + space_type: m volumes: - "000000" - record_format: FB + record_format: fb return_content: type: text - dd_input: @@ -1398,15 +1390,15 @@ disposition: new replace: yes backup: yes - type: SEQ + type: seq space_primary: 5 space_secondary: 1 - space_type: M + space_type: m volumes: - "000000" - "111111" - "SCR002" - record_format: FB + record_format: fb return_content: type: text - dd_input: @@ -1641,13 +1633,13 @@ def run_module(): disposition=dict(type="str", choices=["new", "shr", "mod", "old"]), disposition_normal=dict( type="str", - choices=["delete", "keep", "catalog", "uncatalog", "catlg", "uncatlg"], + choices=["delete", "keep", "catalog", "uncatalog"], ), disposition_abnormal=dict( type="str", - choices=["delete", "keep", "catalog", "uncatalog", "catlg", "uncatlg"], + choices=["delete", "keep", "catalog", "uncatalog"], ), - space_type=dict(type="str", choices=["TRK", "CYL", "B", "K", "M", "G"]), + space_type=dict(type="str", choices=["trk", "cyl", "b", "k", "m", "g"]), space_primary=dict(type="int"), space_secondary=dict(type="int"), volumes=dict(type="raw"), @@ -1660,16 +1652,16 @@ def run_module(): type=dict( type="str", choices=[ - "LIBRARY", - "PDS", - "PDSE", - "SEQ", - "BASIC", - "LARGE", - "KSDS", - "RRDS", - "LDS", - "ESDS", + "library", + "pds", + "pdse", + "seq", + "basic", + "large", + "ksds", + "rrds", + "lds", + "esds", ], ), encryption_key_1=dict( @@ -1691,7 +1683,7 @@ def run_module(): key_length=dict(type="int", no_log=False), key_offset=dict(type="int", no_log=False), record_length=dict(type="int"), - record_format=dict(type="str", choices=["U", "VB", "VBA", "FB", "FBA"]), + record_format=dict(type="str", choices=["u", "vb", "vba", "fb", "fba"]), return_content=dict( type="dict", options=dict( @@ -1766,7 +1758,7 @@ def run_module(): ), block_size=dict(type="int"), record_length=dict(type="int"), - record_format=dict(type="str", choices=["U", "VB", "VBA", "FB", "FBA"]), + record_format=dict(type="str", choices=["u", "vb", "vba", "fb", "fba"]), return_content=dict( type="dict", options=dict( @@ -1884,13 +1876,13 @@ def parse_and_validate_args(params): disposition=dict(type="str", choices=["new", "shr", "mod", "old"]), disposition_normal=dict( type="str", - choices=["delete", "keep", "catalog", "uncatalog", "catlg", "uncatlg"], + choices=["delete", "keep", "catalog", "uncatalog"], ), disposition_abnormal=dict( type="str", - choices=["delete", "keep", "catalog", "uncatalog", "catlg", "uncatlg"], + choices=["delete", "keep", "catalog", "uncatalog"], ), - space_type=dict(type="str", choices=["TRK", "CYL", "B", "K", "M", "G"]), + space_type=dict(type="str", choices=["trk", "cyl", "b", "k", "m", "g"]), space_primary=dict(type="int"), space_secondary=dict(type="int"), volumes=dict(type=volumes), @@ -1903,16 +1895,16 @@ def parse_and_validate_args(params): type=dict( type="str", choices=[ - "LIBRARY", - "PDS", - "PDSE", - "SEQ", - "BASIC", - "LARGE", - "KSDS", - "RRDS", - "LDS", - "ESDS", + "library", + "pds", + "pdse", + "seq", + "basic", + "large", + "ksds", + "rrds", + "lds", + "esds", ], ), encryption_key_1=dict( @@ -1936,7 +1928,7 @@ def parse_and_validate_args(params): type=key_offset, default=key_offset_default, dependencies=["type"] ), record_length=dict(type="int"), - record_format=dict(type="str", choices=["U", "VB", "VBA", "FB", "FBA"]), + record_format=dict(type="str", choices=["u", "vb", "vba", "fb", "fba"]), return_content=dict( type="dict", options=dict( @@ -1992,7 +1984,7 @@ def parse_and_validate_args(params): ), block_size=dict(type="int"), record_length=dict(type="int"), - record_format=dict(type="str", choices=["U", "VB", "VBA", "FB", "FBA"]), + record_format=dict(type="str", choices=["u", "vb", "vba", "fb", "fba"]), return_content=dict( type="dict", options=dict( @@ -2084,8 +2076,8 @@ def key_length(contents, dependencies): """ if contents is None: return contents - if contents is not None and dependencies.get("type") != "KSDS": - raise ValueError('key_length is only valid when "type=KSDS".') + if contents is not None and dependencies.get("type") != "ksds": + raise ValueError('key_length is only valid when "type=ksds".') if not re.fullmatch(r"[0-9]+", str(contents)): raise ValueError( 'Invalid argument "{0}" for type "key_length".'.format(str(contents)) @@ -2105,8 +2097,8 @@ def key_offset(contents, dependencies): """ if contents is None: return contents - if contents is not None and dependencies.get("type") != "KSDS": - raise ValueError('key_offset is only valid when "type=KSDS".') + if contents is not None and dependencies.get("type") != "ksds": + raise ValueError('key_offset is only valid when "type=ksds".') if not re.fullmatch(r"[0-9]+", str(contents)): raise ValueError( @@ -2127,9 +2119,9 @@ def key_length_default(contents, dependencies): """ KEY_LENGTH = 5 length = None - if contents is None and dependencies.get("type") == "KSDS": + if contents is None and dependencies.get("type") == "ksds": length = KEY_LENGTH - elif dependencies.get("type") == "KSDS": + elif dependencies.get("type") == "ksds": length = contents return length @@ -2145,9 +2137,9 @@ def key_offset_default(contents, dependencies): """ KEY_OFFSET = 0 offset = None - if contents is None and dependencies.get("type") == "KSDS": + if contents is None and dependencies.get("type") == "ksds": offset = KEY_OFFSET - elif dependencies.get("type") == "KSDS": + elif dependencies.get("type") == "ksds": offset = contents return offset diff --git a/plugins/modules/zos_unarchive.py b/plugins/modules/zos_unarchive.py index aa315b3fb..31d709a3a 100644 --- a/plugins/modules/zos_unarchive.py +++ b/plugins/modules/zos_unarchive.py @@ -181,11 +181,11 @@ - Organization of the destination type: str required: false - default: SEQ + default: seq choices: - - SEQ - - PDS - - PDSE + - seq + - pds + - pdse space_primary: description: - If the destination I(dest) data set does not exist , this sets the @@ -204,28 +204,28 @@ description: - If the destination data set does not exist, this sets the unit of measurement to use when defining primary and secondary space. - - Valid units of size are C(K), C(M), C(G), C(CYL), and C(TRK). + - Valid units of size are C(k), C(m), C(g), C(cyl), and C(trk). type: str choices: - - K - - M - - G - - CYL - - TRK + - k + - m + - g + - cyl + - trk required: false record_format: description: - If the destination data set does not exist, this sets the format of the - data set. (e.g C(FB)) - - Choices are case-insensitive. + data set. (e.g C(fb)) + - Choices are case-sensitive. required: false choices: - - FB - - VB - - FBA - - VBA - - U + - fb + - vb + - fba + - vba + - u type: str record_length: description: @@ -249,15 +249,15 @@ key_offset: description: - The key offset to use when creating a KSDS data set. - - I(key_offset) is required when I(type=KSDS). - - I(key_offset) should only be provided when I(type=KSDS) + - I(key_offset) is required when I(type=ksds). + - I(key_offset) should only be provided when I(type=ksds) type: int required: false key_length: description: - The key length to use when creating a KSDS data set. - - I(key_length) is required when I(type=KSDS). - - I(key_length) should only be provided when I(type=KSDS) + - I(key_length) is required when I(type=ksds). + - I(key_length) should only be provided when I(type=ksds) type: int required: false sms_storage_class: @@ -695,11 +695,11 @@ def _create_dest_data_set( temp_ds = datasets.tmp_name(high_level_qualifier=hlq) arguments.update(name=temp_ds) if record_format is None: - arguments.update(record_format="FB") + arguments.update(record_format="fb") if record_length is None: arguments.update(record_length=80) if type is None: - arguments.update(type="SEQ") + arguments.update(type="seq") if space_primary is None: arguments.update(space_primary=self._compute_dest_data_set_size()) arguments.pop("self") @@ -802,8 +802,8 @@ def extract_src(self): temp_ds, rc = self._create_dest_data_set(**self.dest_data_set) rc = self.unpack(self.src, temp_ds) else: - temp_ds, rc = self._create_dest_data_set(type="SEQ", - record_format="U", + temp_ds, rc = self._create_dest_data_set(type="seq", + record_format="u", record_length=0, tmp_hlq=self.tmphlq, replace=True) @@ -823,7 +823,7 @@ def _list_content(self, source): self._get_restored_datasets(out) def list_archive_content(self): - temp_ds, rc = self._create_dest_data_set(type="SEQ", record_format="U", record_length=0, tmp_hlq=self.tmphlq, replace=True) + temp_ds, rc = self._create_dest_data_set(type="seq", record_format="u", record_length=0, tmp_hlq=self.tmphlq, replace=True) self.unpack(self.src, temp_ds) self._list_content(temp_ds) datasets.delete(temp_ds) @@ -1026,9 +1026,9 @@ def run_module(): ), type=dict( type='str', - choices=['SEQ', 'PDS', 'PDSE'], + choices=['seq', 'pds', 'pdse'], required=False, - default='SEQ', + default='seq', ), space_primary=dict( type='int', required=False), @@ -1036,12 +1036,12 @@ def run_module(): type='int', required=False), space_type=dict( type='str', - choices=['K', 'M', 'G', 'CYL', 'TRK'], + choices=['k', 'm', 'g', 'cyl', 'trk'], required=False, ), record_format=dict( type='str', - choices=["FB", "VB", "FBA", "VBA", "U"], + choices=["fb", "vb", "fba", "vba", "u"], required=False ), record_length=dict(type='int', required=False), @@ -1107,7 +1107,7 @@ def run_module(): required=False, options=dict( name=dict(arg_type='str', required=False), - type=dict(arg_type='str', required=False, default="SEQ"), + type=dict(arg_type='str', required=False, default="seq"), space_primary=dict(arg_type='int', required=False), space_secondary=dict( arg_type='int', required=False), diff --git a/tests/functional/modules/test_zos_archive_func.py b/tests/functional/modules/test_zos_archive_func.py index a9bfd658c..f6b1140fa 100644 --- a/tests/functional/modules/test_zos_archive_func.py +++ b/tests/functional/modules/test_zos_archive_func.py @@ -336,16 +336,16 @@ def test_uss_archive_remove_targets(ansible_zos_module, format): ]) @pytest.mark.parametrize( "data_set", [ - dict(dstype="SEQ", members=[""]), - dict(dstype="PDS", members=["MEM1", "MEM2", "MEM3"]), - dict(dstype="PDSE", members=["MEM1", "MEM2", "MEM3"]), + dict(dstype="seq", members=[""]), + dict(dstype="pds", members=["MEM1", "MEM2", "MEM3"]), + dict(dstype="pdse", members=["MEM1", "MEM2", "MEM3"]), ] ) @pytest.mark.parametrize( "record_length", [80, 120] ) @pytest.mark.parametrize( - "record_format", ["FB", "VB"], + "record_format", ["fb", "vb"], ) def test_mvs_archive_single_dataset(ansible_zos_module, format, data_set, record_length, record_format): try: @@ -366,7 +366,7 @@ def test_mvs_archive_single_dataset(ansible_zos_module, format, data_set, record replace=True, ) # Create members if needed - if data_set.get("dstype") in ["PDS", "PDSE"]: + if data_set.get("dstype") in ["pds", "pdse"]: for member in data_set.get("members"): hosts.all.zos_data_set( name=f"{src_data_set}({member})", @@ -375,7 +375,7 @@ def test_mvs_archive_single_dataset(ansible_zos_module, format, data_set, record ) # Write some content into src the same size of the record, # need to reduce 4 from V and VB due to RDW - if record_format in ["V", "VB"]: + if record_format in ["v", "vb"]: test_line = "a" * (record_length - 4) else: test_line = "a" * record_length @@ -388,7 +388,7 @@ def test_mvs_archive_single_dataset(ansible_zos_module, format, data_set, record format_dict = dict(name=format) if format == "terse": - format_dict["format_options"] = dict(terse_pack="SPACK") + format_dict["format_options"] = dict(terse_pack="spack") archive_result = hosts.all.zos_archive( src=src_data_set, dest=archive_data_set, @@ -415,16 +415,16 @@ def test_mvs_archive_single_dataset(ansible_zos_module, format, data_set, record ]) @pytest.mark.parametrize( "data_set", [ - dict(dstype="SEQ", members=[""]), - dict(dstype="PDS", members=["MEM1", "MEM2", "MEM3"]), - dict(dstype="PDSE", members=["MEM1", "MEM2", "MEM3"]), + dict(dstype="seq", members=[""]), + dict(dstype="pds", members=["MEM1", "MEM2", "MEM3"]), + dict(dstype="pdse", members=["MEM1", "MEM2", "MEM3"]), ] ) @pytest.mark.parametrize( "record_length", [80, 120] ) @pytest.mark.parametrize( - "record_format", ["FB", "VB"], + "record_format", ["fb", "vb"], ) def test_mvs_archive_single_dataset_use_adrdssu(ansible_zos_module, format, data_set, record_length, record_format): try: @@ -445,7 +445,7 @@ def test_mvs_archive_single_dataset_use_adrdssu(ansible_zos_module, format, data replace=True, ) # Create members if needed - if data_set.get("dstype") in ["PDS", "PDSE"]: + if data_set.get("dstype") in ["pds", "pdse"]: for member in data_set.get("members"): hosts.all.zos_data_set( name=f"{src_data_set}({member})", @@ -454,7 +454,7 @@ def test_mvs_archive_single_dataset_use_adrdssu(ansible_zos_module, format, data ) # Write some content into src the same size of the record, # need to reduce 4 from V and VB due to RDW - if record_format in ["V", "VB"]: + if record_format in ["v", "vb"]: test_line = "a" * (record_length - 4) else: test_line = "a" * record_length @@ -468,7 +468,7 @@ def test_mvs_archive_single_dataset_use_adrdssu(ansible_zos_module, format, data format_dict = dict(name=format) format_dict["format_options"] = dict(use_adrdssu=True) if format == "terse": - format_dict["format_options"].update(terse_pack="SPACK") + format_dict["format_options"].update(terse_pack="spack") archive_result = hosts.all.zos_archive( src=src_data_set, dest=archive_data_set, @@ -495,9 +495,9 @@ def test_mvs_archive_single_dataset_use_adrdssu(ansible_zos_module, format, data ]) @pytest.mark.parametrize( "data_set", [ - dict(dstype="SEQ", members=[""]), - dict(dstype="PDS", members=["MEM1", "MEM2", "MEM3"]), - dict(dstype="PDSE", members=["MEM1", "MEM2", "MEM3"]), + dict(dstype="seq", members=[""]), + dict(dstype="pds", members=["MEM1", "MEM2", "MEM3"]), + dict(dstype="pdse", members=["MEM1", "MEM2", "MEM3"]), ] ) def test_mvs_archive_single_data_set_remove_target(ansible_zos_module, format, data_set): @@ -514,11 +514,11 @@ def test_mvs_archive_single_data_set_remove_target(ansible_zos_module, format, d name=src_data_set, type=data_set.get("dstype"), state="present", - record_format="FB", + record_format="fb", replace=True, ) # Create members if needed - if data_set.get("dstype") in ["PDS", "PDSE"]: + if data_set.get("dstype") in ["pds", "pdse"]: for member in data_set.get("members"): hosts.all.zos_data_set( name=f"{src_data_set}({member})", @@ -536,7 +536,7 @@ def test_mvs_archive_single_data_set_remove_target(ansible_zos_module, format, d format_dict = dict(name=format) if format == "terse": - format_dict["format_options"] = dict(terse_pack="SPACK") + format_dict["format_options"] = dict(terse_pack="spack") archive_result = hosts.all.zos_archive( src=src_data_set, dest=archive_data_set, @@ -566,9 +566,9 @@ def test_mvs_archive_single_data_set_remove_target(ansible_zos_module, format, d ]) @pytest.mark.parametrize( "data_set", [ - dict(dstype="SEQ"), - dict(dstype="PDS"), - dict(dstype="PDSE"), + dict(dstype="seq"), + dict(dstype="pds"), + dict(dstype="pdse"), ] ) def test_mvs_archive_multiple_data_sets(ansible_zos_module, format, data_set): @@ -582,7 +582,7 @@ def test_mvs_archive_multiple_data_sets(ansible_zos_module, format, data_set): n=3, type=data_set.get("dstype")) ds_to_write = target_ds_list - if data_set.get("dstype") in ["PDS", "PDSE"]: + if data_set.get("dstype") in ["pds", "pdse"]: target_member_list = [] for ds in target_ds_list: target_member_list.extend( @@ -600,7 +600,7 @@ def test_mvs_archive_multiple_data_sets(ansible_zos_module, format, data_set): format_dict = dict(name=format, format_options=dict()) if format == "terse": - format_dict["format_options"].update(terse_pack="SPACK") + format_dict["format_options"].update(terse_pack="spack") format_dict["format_options"].update(use_adrdssu=True) archive_result = hosts.all.zos_archive( src="{0}*".format(src_data_set), @@ -629,9 +629,9 @@ def test_mvs_archive_multiple_data_sets(ansible_zos_module, format, data_set): ]) @pytest.mark.parametrize( "data_set", [ - dict(dstype="SEQ"), - dict(dstype="PDS"), - dict(dstype="PDSE"), + dict(dstype="seq"), + dict(dstype="pds"), + dict(dstype="pdse"), ] ) def test_mvs_archive_multiple_data_sets_with_exclusion(ansible_zos_module, format, data_set): @@ -645,7 +645,7 @@ def test_mvs_archive_multiple_data_sets_with_exclusion(ansible_zos_module, forma n=3, type=data_set.get("dstype")) ds_to_write = target_ds_list - if data_set.get("dstype") in ["PDS", "PDSE"]: + if data_set.get("dstype") in ["pds", "pdse"]: target_member_list = [] for ds in target_ds_list: target_member_list.extend( @@ -663,7 +663,7 @@ def test_mvs_archive_multiple_data_sets_with_exclusion(ansible_zos_module, forma format_dict = dict(name=format, format_options=dict()) if format == "terse": - format_dict["format_options"].update(terse_pack="SPACK") + format_dict["format_options"].update(terse_pack="spack") format_dict["format_options"].update(use_adrdssu=True) exclude = "{0}1".format(src_data_set) archive_result = hosts.all.zos_archive( @@ -697,9 +697,9 @@ def test_mvs_archive_multiple_data_sets_with_exclusion(ansible_zos_module, forma ]) @pytest.mark.parametrize( "data_set", [ - dict(dstype="SEQ"), - dict(dstype="PDS"), - dict(dstype="PDSE"), + dict(dstype="seq"), + dict(dstype="pds"), + dict(dstype="pdse"), ] ) def test_mvs_archive_multiple_data_sets_and_remove(ansible_zos_module, format, data_set): @@ -713,7 +713,7 @@ def test_mvs_archive_multiple_data_sets_and_remove(ansible_zos_module, format, d n=3, type=data_set.get("dstype")) ds_to_write = target_ds_list - if data_set.get("dstype") in ["PDS", "PDSE"]: + if data_set.get("dstype") in ["pds", "pdse"]: target_member_list = [] for ds in target_ds_list: target_member_list.extend( @@ -731,7 +731,7 @@ def test_mvs_archive_multiple_data_sets_and_remove(ansible_zos_module, format, d format_dict = dict(name=format, format_options=dict()) if format == "terse": - format_dict["format_options"].update(terse_pack="SPACK") + format_dict["format_options"].update(terse_pack="spack") format_dict["format_options"].update(use_adrdssu=True) archive_result = hosts.all.zos_archive( src="{0}*".format(src_data_set), @@ -762,9 +762,9 @@ def test_mvs_archive_multiple_data_sets_and_remove(ansible_zos_module, format, d ]) @pytest.mark.parametrize( "data_set", [ - dict(dstype="SEQ"), - dict(dstype="PDS"), - dict(dstype="PDSE"), + dict(dstype="seq"), + dict(dstype="pds"), + dict(dstype="pdse"), ] ) def test_mvs_archive_multiple_data_sets_with_missing(ansible_zos_module, format, data_set): @@ -778,7 +778,7 @@ def test_mvs_archive_multiple_data_sets_with_missing(ansible_zos_module, format, n=3, type=data_set.get("dstype")) ds_to_write = target_ds_list - if data_set.get("dstype") in ["PDS", "PDSE"]: + if data_set.get("dstype") in ["pds", "pdse"]: target_member_list = [] for ds in target_ds_list: target_member_list.extend( @@ -801,7 +801,7 @@ def test_mvs_archive_multiple_data_sets_with_missing(ansible_zos_module, format, format_dict = dict(name=format, format_options=dict()) if format == "terse": - format_dict["format_options"].update(terse_pack="SPACK") + format_dict["format_options"].update(terse_pack="spack") format_dict["format_options"].update(use_adrdssu=True) archive_result = hosts.all.zos_archive( src=path_list, @@ -836,9 +836,9 @@ def test_mvs_archive_multiple_data_sets_with_missing(ansible_zos_module, format, ]) @pytest.mark.parametrize( "data_set", [ - dict(dstype="SEQ", members=[""]), - dict(dstype="PDS", members=["MEM1", "MEM2"]), - dict(dstype="PDSE", members=["MEM1", "MEM2"]), + dict(dstype="seq", members=[""]), + dict(dstype="pds", members=["MEM1", "MEM2"]), + dict(dstype="pdse", members=["MEM1", "MEM2"]), ] ) def test_mvs_archive_single_dataset_force_lock(ansible_zos_module, format, data_set): @@ -858,7 +858,7 @@ def test_mvs_archive_single_dataset_force_lock(ansible_zos_module, format, data_ replace=True, ) # Create members if needed - if data_set.get("dstype") in ["PDS", "PDSE"]: + if data_set.get("dstype") in ["pds", "pdse"]: for member in data_set.get("members"): hosts.all.zos_data_set( name=f"{src_data_set}({member})", @@ -876,7 +876,7 @@ def test_mvs_archive_single_dataset_force_lock(ansible_zos_module, format, data_ format_dict = dict(name=format) if format == "terse": - format_dict["format_options"] = dict(terse_pack="SPACK") + format_dict["format_options"] = dict(terse_pack="spack") # copy/compile c program and copy jcl to hold data set lock for n seconds in background(&) hosts.all.shell(cmd="echo \"{0}\" > {1}".format(c_pgm, '/tmp/disp_shr/pdse-lock.c')) diff --git a/tests/functional/modules/test_zos_backup_restore.py b/tests/functional/modules/test_zos_backup_restore.py index a35750b63..ca7ef740a 100644 --- a/tests/functional/modules/test_zos_backup_restore.py +++ b/tests/functional/modules/test_zos_backup_restore.py @@ -307,16 +307,16 @@ def test_backup_and_restore_of_data_set( @pytest.mark.parametrize( "backup_name,space,space_type", [ - (DATA_SET_BACKUP_LOCATION, 10, "M"), - (DATA_SET_BACKUP_LOCATION, 10000, "K"), + (DATA_SET_BACKUP_LOCATION, 10, "m"), + (DATA_SET_BACKUP_LOCATION, 10000, "k"), (DATA_SET_BACKUP_LOCATION, 10, None), - (DATA_SET_BACKUP_LOCATION, 2, "CYL"), - (DATA_SET_BACKUP_LOCATION, 10, "TRK"), - (UNIX_BACKUP_LOCATION, 10, "M"), - (UNIX_BACKUP_LOCATION, 10000, "K"), + (DATA_SET_BACKUP_LOCATION, 2, "cyl"), + (DATA_SET_BACKUP_LOCATION, 10, "trk"), + (UNIX_BACKUP_LOCATION, 10, "m"), + (UNIX_BACKUP_LOCATION, 10000, "k"), (UNIX_BACKUP_LOCATION, 10, None), - (UNIX_BACKUP_LOCATION, 2, "CYL"), - (UNIX_BACKUP_LOCATION, 10, "TRK"), + (UNIX_BACKUP_LOCATION, 2, "cyl"), + (UNIX_BACKUP_LOCATION, 10, "trk"), ], ) def test_backup_and_restore_of_data_set_various_space_measurements( @@ -693,7 +693,7 @@ def test_restore_of_data_set_when_volume_does_not_exist(ansible_zos_module): # backup_name=DATA_SET_BACKUP_LOCATION, # overwrite=True, # space=500, -# space_type="M", +# space_type="m", # ) # assert_module_did_not_fail(results) # assert_data_set_or_file_exists(hosts, DATA_SET_BACKUP_LOCATION) @@ -706,7 +706,7 @@ def test_restore_of_data_set_when_volume_does_not_exist(ansible_zos_module): # full_volume=True, # sms_storage_class="DB2SMS10", # space=500, -# space_type="M", +# space_type="m", # ) # assert_module_did_not_fail(results) # assert_data_set_exists_on_volume(hosts, data_set_name, VOLUME) diff --git a/tests/functional/modules/test_zos_blockinfile_func.py b/tests/functional/modules/test_zos_blockinfile_func.py index 197bc9fa3..508a2ce8d 100644 --- a/tests/functional/modules/test_zos_blockinfile_func.py +++ b/tests/functional/modules/test_zos_blockinfile_func.py @@ -428,10 +428,10 @@ ENCODING = ['IBM-1047', 'ISO8859-1', 'UTF-8'] # supported data set types -DS_TYPE = ['SEQ', 'PDS', 'PDSE'] +DS_TYPE = ['seq', 'pds', 'pdse'] # not supported data set types -NS_DS_TYPE = ['ESDS', 'RRDS', 'LDS'] +NS_DS_TYPE = ['esds', 'rrds', 'lds'] USS_BACKUP_FILE = "/tmp/backup.tmp" BACKUP_OPTIONS = [None, "BLOCKIF.TEST.BACKUP", "BLOCKIF.TEST.BACKUP(BACKUP)"] @@ -450,7 +450,7 @@ def set_ds_environment(ansible_zos_module, TEMP_FILE, DS_NAME, DS_TYPE, CONTENT) hosts = ansible_zos_module hosts.all.shell(cmd="echo \"{0}\" > {1}".format(CONTENT, TEMP_FILE)) hosts.all.zos_data_set(name=DS_NAME, type=DS_TYPE) - if DS_TYPE in ["PDS", "PDSE"]: + if DS_TYPE in ["pds", "pdse"]: DS_FULL_NAME = DS_NAME + "(MEM)" hosts.all.zos_data_set(name=DS_FULL_NAME, state="present", type="member") cmdStr = "cp -CM {0} \"//'{1}'\"".format(quote(TEMP_FILE), DS_FULL_NAME) @@ -1138,7 +1138,7 @@ def test_ds_block_absent(ansible_zos_module, dstype): def test_ds_tmp_hlq_option(ansible_zos_module): # This TMPHLQ only works with sequential datasets hosts = ansible_zos_module - ds_type = "SEQ" + ds_type = "seq" params=dict(insertafter="EOF", block="export ZOAU_ROOT\n", state="present", backup=True, tmp_hlq="TMPHLQ") kwargs = dict(backup_name=r"TMPHLQ\..") content = TEST_CONTENT @@ -1228,7 +1228,7 @@ def test_ds_block_insertafter_regex_force(ansible_zos_module, dstype): MEMBER_1, MEMBER_2 = "MEM1", "MEM2" TEMP_FILE = "/tmp/{0}".format(MEMBER_2) content = TEST_CONTENT - if ds_type == "SEQ": + if ds_type == "seq": params["path"] = default_data_set_name+".{0}".format(MEMBER_2) else: params["path"] = default_data_set_name+"({0})".format(MEMBER_2) @@ -1245,7 +1245,7 @@ def test_ds_block_insertafter_regex_force(ansible_zos_module, dstype): ] ) # write memeber to verify cases - if ds_type in ["PDS", "PDSE"]: + if ds_type in ["pds", "pdse"]: cmdStr = "cp -CM {0} \"//'{1}'\"".format(quote(TEMP_FILE), params["path"]) else: cmdStr = "cp {0} \"//'{1}'\" ".format(quote(TEMP_FILE), params["path"]) @@ -1321,7 +1321,7 @@ def test_ds_encoding(ansible_zos_module, encoding, dstype): hosts.all.shell(cmd="echo \"{0}\" > {1}".format(content, temp_file)) hosts.all.zos_encode(src=temp_file, dest=temp_file, from_encoding="IBM-1047", to_encoding=params["encoding"]) hosts.all.zos_data_set(name=ds_name, type=ds_type) - if ds_type in ["PDS", "PDSE"]: + if ds_type in ["pds", "pdse"]: ds_full_name = ds_name + "(MEM)" hosts.all.zos_data_set(name=ds_full_name, state="present", type="member") cmdStr = "cp -CM {0} \"//'{1}'\"".format(quote(temp_file), ds_full_name) @@ -1360,7 +1360,7 @@ def test_not_exist_ds_block_insertafter_regex(ansible_zos_module): @pytest.mark.ds def test_ds_block_insertafter_nomatch_eof_insert(ansible_zos_module): hosts = ansible_zos_module - ds_type = 'SEQ' + ds_type = 'seq' params=dict(insertafter="EOF", block="export ZOAU_ROOT\nexport ZOAU_HOME\nexport ZOAU_DIR", state="present") params["insertafter"] = 'SOME_NON_EXISTING_PATTERN' ds_name = get_tmp_ds_name() @@ -1413,7 +1413,7 @@ def test_ds_not_supported(ansible_zos_module, dstype): @pytest.mark.ds -@pytest.mark.parametrize("dstype", ["PDS","PDSE"]) +@pytest.mark.parametrize("dstype", ["pds","pdse"]) def test_ds_block_insertafter_regex_fail(ansible_zos_module, dstype): hosts = ansible_zos_module ds_type = dstype diff --git a/tests/functional/modules/test_zos_copy_func.py b/tests/functional/modules/test_zos_copy_func.py index 6e6a9a073..13e6d367b 100644 --- a/tests/functional/modules/test_zos_copy_func.py +++ b/tests/functional/modules/test_zos_copy_func.py @@ -260,7 +260,7 @@ def populate_partitioned_data_set(hosts, name, ds_type, members=None): Arguments: hosts (object) -- Ansible instance(s) that can call modules. name (str) -- Name of the data set. - ds_type (str) -- Type of the data set (either PDS or PDSE). + ds_type (str) -- Type of the data set (either pds or pdse). members (list, optional) -- List of member names to create. """ if not members: @@ -282,9 +282,9 @@ def get_listcat_information(hosts, name, ds_type): Arguments: hosts (object) -- Ansible instance(s) that can call modules. name (str) -- Name of the data set. - ds_type (str) -- Type of data set ("SEQ", "PDS", "PDSE", "KSDS"). + ds_type (str) -- Type of data set ("seq", "pds", "pdse", "ksds"). """ - if ds_type.upper() == "KSDS": + if ds_type == "ksds": idcams_input = " LISTCAT ENT('{0}') DATA ALL".format(name) else: idcams_input = " LISTCAT ENTRIES('{0}')".format(name) @@ -311,7 +311,7 @@ def create_vsam_data_set(hosts, name, ds_type, add_data=False, key_length=None, Arguments: hosts (object) -- Ansible instance(s) that can call modules. name (str) -- Name of the VSAM data set. - type (str) -- Type of the VSAM (KSDS, ESDS, RRDS, LDS) + type (str) -- Type of the VSAM (ksds, esds, rrds, lds) add_data (bool, optional) -- Whether to add records to the VSAM. key_length (int, optional) -- Key length (only for KSDS data sets). key_offset (int, optional) -- Key offset (only for KSDS data sets). @@ -321,7 +321,7 @@ def create_vsam_data_set(hosts, name, ds_type, add_data=False, key_length=None, type=ds_type, state="present" ) - if ds_type == "KSDS": + if ds_type == "ksds": params["key_length"] = key_length params["key_offset"] = key_offset @@ -370,7 +370,7 @@ def link_loadlib_from_cobol(hosts, cobol_src_pds, cobol_src_mem, loadlib_pds, lo # Submit link JCL. job_result = hosts.all.zos_job_submit( src="/tmp/link.jcl", - location="USS", + location="uss", wait_time_s=60 ) for result in job_result.contacted.values(): @@ -1690,7 +1690,7 @@ def test_copy_seq_data_set_to_seq_asa(ansible_zos_module): hosts.all.zos_data_set( name=src, state="present", - type="SEQ", + type="seq", replace=True ) @@ -1739,7 +1739,7 @@ def test_copy_seq_data_set_to_partitioned_asa(ansible_zos_module): hosts.all.zos_data_set( name=src, state="present", - type="SEQ", + type="seq", replace=True ) @@ -1790,7 +1790,7 @@ def test_copy_partitioned_data_set_to_seq_asa(ansible_zos_module): hosts.all.zos_data_set( name=src, state="present", - type="PDSE", + type="pdse", replace=True ) @@ -1840,7 +1840,7 @@ def test_copy_partitioned_data_set_to_partitioned_asa(ansible_zos_module): hosts.all.zos_data_set( name=src, state="present", - type="PDSE", + type="pdse", replace=True ) @@ -1890,8 +1890,8 @@ def test_copy_asa_data_set_to_text_file(ansible_zos_module): hosts.all.zos_data_set( name=src, state="present", - type="SEQ", - record_format="FBA", + type="seq", + record_format="fba", record_length=80, block_size=27920, replace=True @@ -1966,13 +1966,13 @@ def test_ensure_copy_file_does_not_change_permission_on_dest(ansible_zos_module, @pytest.mark.seq -@pytest.mark.parametrize("ds_type", [ "PDS", "PDSE", "SEQ"]) +@pytest.mark.parametrize("ds_type", [ "pds", "pdse", "seq"]) def test_copy_dest_lock(ansible_zos_module, ds_type): hosts = ansible_zos_module data_set_1 = get_tmp_ds_name() data_set_2 = get_tmp_ds_name() member_1 = "MEM1" - if ds_type == "PDS" or ds_type == "PDSE": + if ds_type == "pds" or ds_type == "pdse": src_data_set = data_set_1 + "({0})".format(member_1) dest_data_set = data_set_2 + "({0})".format(member_1) else: @@ -1982,9 +1982,9 @@ def test_copy_dest_lock(ansible_zos_module, ds_type): hosts = ansible_zos_module hosts.all.zos_data_set(name=data_set_1, state="present", type=ds_type, replace=True) hosts.all.zos_data_set(name=data_set_2, state="present", type=ds_type, replace=True) - if ds_type == "PDS" or ds_type == "PDSE": - hosts.all.zos_data_set(name=src_data_set, state="present", type="MEMBER", replace=True) - hosts.all.zos_data_set(name=dest_data_set, state="present", type="MEMBER", replace=True) + if ds_type == "pds" or ds_type == "pdse": + hosts.all.zos_data_set(name=src_data_set, state="present", type="member", replace=True) + hosts.all.zos_data_set(name=dest_data_set, state="present", type="member", replace=True) # copy text_in source hosts.all.shell(cmd="decho \"{0}\" \"{1}\"".format(DUMMY_DATA, src_data_set)) # copy/compile c program and copy jcl to hold data set lock for n seconds in background(&) @@ -2272,7 +2272,7 @@ def test_copy_file_to_empty_sequential_data_set(ansible_zos_module, src): dest = get_tmp_ds_name() try: - hosts.all.zos_data_set(name=dest, type="SEQ", state="present") + hosts.all.zos_data_set(name=dest, type="seq", state="present") if src["is_file"]: copy_result = hosts.all.zos_copy(src=src["src"], dest=dest, remote_src=src["is_remote"], force=src["force"]) @@ -2300,7 +2300,7 @@ def test_copy_file_to_non_empty_sequential_data_set(ansible_zos_module, src): dest = get_tmp_ds_name() try: - hosts.all.zos_data_set(name=dest, type="SEQ", state="absent") + hosts.all.zos_data_set(name=dest, type="seq", state="absent") hosts.all.zos_copy(content="Inline content", dest=dest) copy_result = hosts.all.zos_copy(src=src["src"], dest=dest, remote_src=src["is_remote"], force=src["force"]) @@ -2438,7 +2438,7 @@ def test_copy_ps_to_empty_ps(ansible_zos_module, force): dest = get_tmp_ds_name() try: - hosts.all.zos_data_set(name=dest, type="SEQ", state="present") + hosts.all.zos_data_set(name=dest, type="seq", state="present") copy_res = hosts.all.zos_copy(src=src_ds, dest=dest, remote_src=True, force=force) verify_copy = hosts.all.shell( @@ -2464,7 +2464,7 @@ def test_copy_ps_to_non_empty_ps(ansible_zos_module, force): dest = get_tmp_ds_name() try: - hosts.all.zos_data_set(name=dest, type="SEQ", state="absent") + hosts.all.zos_data_set(name=dest, type="seq", state="absent") hosts.all.zos_copy(content="Inline content", dest=dest) copy_res = hosts.all.zos_copy(src=src_ds, dest=dest, remote_src=True, force=force) @@ -2495,7 +2495,7 @@ def test_copy_ps_to_non_empty_ps_with_special_chars(ansible_zos_module, force): dest = get_tmp_ds_name() try: - hosts.all.zos_data_set(name=dest, type="SEQ", state="absent") + hosts.all.zos_data_set(name=dest, type="seq", state="absent") hosts.all.zos_copy(content=DUMMY_DATA_SPECIAL_CHARS, dest=dest) copy_res = hosts.all.zos_copy(src=src_ds, dest=dest, remote_src=True, force=force) @@ -2526,7 +2526,7 @@ def test_backup_sequential_data_set(ansible_zos_module, backup): dest = get_tmp_ds_name() try: - hosts.all.zos_data_set(name=dest, type="SEQ", state="present") + hosts.all.zos_data_set(name=dest, type="seq", state="present") if backup: copy_res = hosts.all.zos_copy(src=src, dest=dest, force=True, backup=True, backup_name=backup) @@ -2571,10 +2571,10 @@ def test_copy_file_to_non_existing_member(ansible_zos_module, src): try: hosts.all.zos_data_set( name=data_set, - type="PDSE", + type="pdse", space_primary=5, - space_type="M", - record_format="FBA", + space_type="m", + record_format="fba", record_length=80, replace=True ) @@ -2617,14 +2617,14 @@ def test_copy_file_to_existing_member(ansible_zos_module, src): try: hosts.all.zos_data_set( name=data_set, - type="PDSE", + type="pdse", space_primary=5, - space_type="M", - record_format="FBA", + space_type="m", + record_format="fba", record_length=80, replace=True ) - hosts.all.zos_data_set(name=dest, type="MEMBER", state="present") + hosts.all.zos_data_set(name=dest, type="member", state="present") if src["is_file"]: copy_result = hosts.all.zos_copy(src=src["src"], dest=dest, force=src["force"], remote_src=src["is_remote"]) @@ -2653,31 +2653,31 @@ def test_copy_file_to_existing_member(ansible_zos_module, src): @pytest.mark.seq @pytest.mark.pdse @pytest.mark.parametrize("args", [ - dict(type="SEQ", is_binary=False), - dict(type="SEQ", is_binary=True), - dict(type="PDS", is_binary=False), - dict(type="PDS", is_binary=True), - dict(type="PDSE", is_binary=False), - dict(type="PDSE", is_binary=True) + dict(type="seq", is_binary=False), + dict(type="seq", is_binary=True), + dict(type="pds", is_binary=False), + dict(type="pds", is_binary=True), + dict(type="pdse", is_binary=False), + dict(type="pdse", is_binary=True) ]) def test_copy_data_set_to_non_existing_member(ansible_zos_module, args): hosts = ansible_zos_module src_data_set = get_tmp_ds_name() - src = src_data_set if args["type"] == "SEQ" else "{0}(TEST)".format(src_data_set) + src = src_data_set if args["type"] == "seq" else "{0}(TEST)".format(src_data_set) dest_data_set = get_tmp_ds_name() dest = "{0}(MEMBER)".format(dest_data_set) try: hosts.all.zos_data_set(name=src_data_set, type=args["type"]) - if args["type"] != "SEQ": - hosts.all.zos_data_set(name=src, type="MEMBER") + if args["type"] != "seq": + hosts.all.zos_data_set(name=src, type="member") hosts.all.shell( "decho 'Records for test' '{0}'".format(src), executable=SHELL_EXECUTABLE ) - hosts.all.zos_data_set(name=dest_data_set, type="PDSE", replace=True) + hosts.all.zos_data_set(name=dest_data_set, type="pdse", replace=True) copy_result = hosts.all.zos_copy(src=src, dest=dest, is_binary=args["is_binary"], remote_src=True) verify_copy = hosts.all.shell( @@ -2700,32 +2700,32 @@ def test_copy_data_set_to_non_existing_member(ansible_zos_module, args): @pytest.mark.seq @pytest.mark.pdse @pytest.mark.parametrize("args", [ - dict(type="SEQ", force=False), - dict(type="SEQ", force=True), - dict(type="PDS", force=False), - dict(type="PDS", force=True), - dict(type="PDSE", force=False), - dict(type="PDSE", force=True) + dict(type="seq", force=False), + dict(type="seq", force=True), + dict(type="pds", force=False), + dict(type="pds", force=True), + dict(type="pdse", force=False), + dict(type="pdse", force=True) ]) def test_copy_data_set_to_existing_member(ansible_zos_module, args): hosts = ansible_zos_module src_data_set = get_tmp_ds_name() - src = src_data_set if args["type"] == "SEQ" else "{0}(TEST)".format(src_data_set) + src = src_data_set if args["type"] == "seq" else "{0}(TEST)".format(src_data_set) dest_data_set = get_tmp_ds_name() dest = "{0}(MEMBER)".format(dest_data_set) try: hosts.all.zos_data_set(name=src_data_set, type=args["type"]) - if args["type"] != "SEQ": - hosts.all.zos_data_set(name=src, type="MEMBER") + if args["type"] != "seq": + hosts.all.zos_data_set(name=src, type="member") hosts.all.shell( "decho 'Records for test' '{0}'".format(src), executable=SHELL_EXECUTABLE ) - hosts.all.zos_data_set(name=dest_data_set, type="PDSE", replace=True) - hosts.all.zos_data_set(name=dest, type="MEMBER") + hosts.all.zos_data_set(name=dest_data_set, type="pdse", replace=True) + hosts.all.zos_data_set(name=dest, type="member") copy_result = hosts.all.zos_copy(src=src, dest=dest, force=args["force"], remote_src=True) verify_copy = hosts.all.shell( @@ -2844,7 +2844,7 @@ def test_copy_dir_crlf_endings_to_non_existing_pdse(ansible_zos_module): @pytest.mark.uss @pytest.mark.pdse -@pytest.mark.parametrize("src_type", ["PDS", "PDSE"]) +@pytest.mark.parametrize("src_type", ["pds", "pdse"]) def test_copy_dir_to_existing_pdse(ansible_zos_module, src_type): hosts = ansible_zos_module src_dir = "/tmp/testdir" @@ -2859,8 +2859,8 @@ def test_copy_dir_to_existing_pdse(ansible_zos_module, src_type): name=dest, type=src_type, space_primary=5, - space_type="M", - record_format="FBA", + space_type="m", + record_format="fba", record_length=80, ) @@ -2883,18 +2883,18 @@ def test_copy_dir_to_existing_pdse(ansible_zos_module, src_type): @pytest.mark.seq @pytest.mark.pdse -@pytest.mark.parametrize("src_type", ["SEQ", "PDS", "PDSE"]) +@pytest.mark.parametrize("src_type", ["seq", "pds", "pdse"]) def test_copy_data_set_to_non_existing_pdse(ansible_zos_module, src_type): hosts = ansible_zos_module src_data_set = get_tmp_ds_name() - src = src_data_set if src_type == "SEQ" else "{0}(TEST)".format(src_data_set) + src = src_data_set if src_type == "seq" else "{0}(TEST)".format(src_data_set) dest_data_set = get_tmp_ds_name() dest = "{0}(MEMBER)".format(dest_data_set) try: hosts.all.zos_data_set(name=src_data_set, type=src_type) - if src_type != "SEQ": - hosts.all.zos_data_set(name=src, type="MEMBER") + if src_type != "seq": + hosts.all.zos_data_set(name=src, type="member") hosts.all.shell( "decho 'Records for test' '{0}'".format(src), @@ -2924,10 +2924,10 @@ def test_copy_data_set_to_non_existing_pdse(ansible_zos_module, src_type): @pytest.mark.pdse @pytest.mark.parametrize("args", [ - dict(src_type="PDS", dest_type="PDS"), - dict(src_type="PDS", dest_type="PDSE"), - dict(src_type="PDSE", dest_type="PDS"), - dict(src_type="PDSE", dest_type="PDSE"), + dict(src_type="pds", dest_type="pds"), + dict(src_type="pds", dest_type="pdse"), + dict(src_type="pdse", dest_type="pds"), + dict(src_type="pdse", dest_type="pdse"), ]) def test_copy_pds_to_existing_pds(ansible_zos_module, args): hosts = ansible_zos_module @@ -2979,9 +2979,9 @@ def test_copy_pds_loadlib_member_to_pds_loadlib_member(ansible_zos_module, is_cr hosts.all.zos_data_set( name=cobol_src_pds, state="present", - type="PDS", + type="pds", space_primary=2, - record_format="FB", + record_format="fb", record_length=80, block_size=3120, replace=True, @@ -2990,12 +2990,12 @@ def test_copy_pds_loadlib_member_to_pds_loadlib_member(ansible_zos_module, is_cr hosts.all.zos_data_set( name=src_lib, state="present", - type="PDSE", - record_format="U", + type="pdse", + record_format="u", record_length=0, block_size=32760, space_primary=2, - space_type="M", + space_type="m", replace=True ) @@ -3012,24 +3012,24 @@ def test_copy_pds_loadlib_member_to_pds_loadlib_member(ansible_zos_module, is_cr hosts.all.zos_data_set( name=dest_lib, state="present", - type="PDSE", - record_format="U", + type="pdse", + record_format="u", record_length=0, block_size=32760, space_primary=2, - space_type="M", + space_type="m", replace=True ) # pre-allocate dest loadlib to copy over with an alias. hosts.all.zos_data_set( name=dest_lib_aliases, state="present", - type="PDSE", - record_format="U", + type="pdse", + record_format="u", record_length=0, block_size=32760, space_primary=2, - space_type="M", + space_type="m", replace=True ) @@ -3117,20 +3117,20 @@ def test_copy_pds_loadlib_member_to_uss_to_loadlib(ansible_zos_module): hosts.all.zos_data_set( name=src_lib, state="present", - type="PDSE", - record_format="U", + type="pdse", + record_format="u", record_length=0, block_size=32760, space_primary=2, - space_type="M", + space_type="m", replace=True ) hosts.all.zos_data_set( name=cobol_src_pds, state="present", - type="PDS", + type="pds", space_primary=2, - record_format="FB", + record_format="fb", record_length=80, block_size=3120, replace=True, @@ -3138,23 +3138,23 @@ def test_copy_pds_loadlib_member_to_uss_to_loadlib(ansible_zos_module): hosts.all.zos_data_set( name=dest_lib, state="present", - type="PDSE", - record_format="U", + type="pdse", + record_format="u", record_length=0, block_size=32760, space_primary=2, - space_type="M", + space_type="m", replace=True ) hosts.all.zos_data_set( name=dest_lib_aliases, state="present", - type="PDSE", - record_format="U", + type="pdse", + record_format="u", record_length=0, block_size=32760, space_primary=2, - space_type="M", + space_type="m", replace=True ) @@ -3267,9 +3267,9 @@ def test_copy_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): hosts.all.zos_data_set( name=cobol_src_pds, state="present", - type="PDS", + type="pds", space_primary=2, - record_format="FB", + record_format="fb", record_length=80, block_size=3120, replace=True, @@ -3278,12 +3278,12 @@ def test_copy_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): hosts.all.zos_data_set( name=src_lib, state="present", - type="PDSE", - record_format="U", + type="pdse", + record_format="u", record_length=0, block_size=32760, space_primary=2, - space_type="M", + space_type="m", replace=True ) @@ -3306,24 +3306,24 @@ def test_copy_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): hosts.all.zos_data_set( name=dest_lib, state="present", - type="PDSE", - record_format="U", + type="pdse", + record_format="u", record_length=0, block_size=32760, space_primary=2, - space_type="M", + space_type="m", replace=True ) # allocate dest loadlib to copy over with an alias. hosts.all.zos_data_set( name=dest_lib_aliases, state="present", - type="PDSE", - record_format="U", + type="pdse", + record_format="u", record_length=0, block_size=32760, space_primary=2, - space_type="M", + space_type="m", replace=True ) @@ -3337,12 +3337,12 @@ def test_copy_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): executable=True, aliases=False, dest_data_set={ - 'type': "LIBRARY", - 'record_format': "U", + 'type': "library", + 'record_format': "u", 'record_length': 0, 'block_size': 32760, 'space_primary': 2, - 'space_type': "M", + 'space_type': "m", } ) # copy src loadlib to dest library pds w aliases @@ -3353,12 +3353,12 @@ def test_copy_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): executable=True, aliases=True, dest_data_set={ - 'type': "LIBRARY", - 'record_format': "U", + 'type': "library", + 'record_format': "u", 'record_length': 0, 'block_size': 32760, 'space_primary': 2, - 'space_type': "M", + 'space_type': "m", } ) @@ -3459,9 +3459,9 @@ def test_copy_local_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): hosts.all.zos_data_set( name=cobol_src_pds, state="present", - type="PDS", + type="pds", space_primary=2, - record_format="FB", + record_format="fb", record_length=80, block_size=3120, replace=True, @@ -3470,12 +3470,12 @@ def test_copy_local_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): hosts.all.zos_data_set( name=src_lib, state="present", - type="PDSE", - record_format="U", + type="pdse", + record_format="u", record_length=0, block_size=32760, space_primary=2, - space_type="M", + space_type="m", replace=True ) @@ -3530,12 +3530,12 @@ def test_copy_local_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): hosts.all.zos_data_set( name=dest_lib, state="present", - type="PDSE", - record_format="U", + type="pdse", + record_format="u", record_length=0, block_size=32760, space_primary=2, - space_type="M", + space_type="m", replace=True ) @@ -3548,12 +3548,12 @@ def test_copy_local_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): executable=True, aliases=False, dest_data_set={ - 'type': "PDSE", - 'record_format': "U", + 'type': "pdse", + 'record_format': "u", 'record_length': 0, 'block_size': 32760, 'space_primary': 2, - 'space_type': "M", + 'space_type': "m", } ) else: @@ -3621,9 +3621,9 @@ def test_copy_pds_loadlib_to_uss_to_pds_loadlib(ansible_zos_module): hosts.all.zos_data_set( name=cobol_src_pds, state="present", - type="PDS", + type="pds", space_primary=2, - record_format="FB", + record_format="fb", record_length=80, block_size=3120, replace=True, @@ -3632,12 +3632,12 @@ def test_copy_pds_loadlib_to_uss_to_pds_loadlib(ansible_zos_module): hosts.all.zos_data_set( name=src_lib, state="present", - type="PDSE", - record_format="U", + type="pdse", + record_format="u", record_length=0, block_size=32760, space_primary=2, - space_type="M", + space_type="m", replace=True ) @@ -3657,24 +3657,24 @@ def test_copy_pds_loadlib_to_uss_to_pds_loadlib(ansible_zos_module): hosts.all.zos_data_set( name=dest_lib, state="present", - type="PDSE", - record_format="U", + type="pdse", + record_format="u", record_length=0, block_size=32760, space_primary=2, - space_type="M", + space_type="m", replace=True ) # allocate dest loadlib to copy over with an alias. hosts.all.zos_data_set( name=dest_lib_aliases, state="present", - type="PDSE", - record_format="U", + type="pdse", + record_format="u", record_length=0, block_size=32760, space_primary=2, - space_type="M", + space_type="m", replace=True ) @@ -3833,12 +3833,12 @@ def test_copy_executables_uss_to_member(ansible_zos_module, is_created): hosts.all.zos_data_set( name=dest, state="present", - type="PDSE", - record_format="U", + type="pdse", + record_format="u", record_length=0, block_size=32760, space_primary=2, - space_type="M", + space_type="m", replace=True ) copy_uss_to_mvs_res = hosts.all.zos_copy( @@ -3884,7 +3884,7 @@ def test_copy_pds_member_with_system_symbol(ansible_zos_module): hosts.all.zos_data_set( name=dest, state="present", - type="PDSE", + type="pdse", replace=True ) @@ -3920,8 +3920,8 @@ def test_copy_multiple_data_set_members(ansible_zos_module): ds_list = ["{0}({1})".format(src, member) for member in member_list] try: - hosts.all.zos_data_set(name=src, type="PDS") - hosts.all.zos_data_set(name=dest, type="PDS") + hosts.all.zos_data_set(name=src, type="pds") + hosts.all.zos_data_set(name=dest, type="pds") for member in ds_list: hosts.all.shell( @@ -3966,8 +3966,8 @@ def test_copy_multiple_data_set_members_in_loop(ansible_zos_module): dest_ds_list = ["{0}({1})".format(dest, member) for member in member_list] try: - hosts.all.zos_data_set(name=src, type="PDS") - hosts.all.zos_data_set(name=dest, type="PDS") + hosts.all.zos_data_set(name=src, type="pds") + hosts.all.zos_data_set(name=dest, type="pds") for src_member in src_ds_list: hosts.all.shell( @@ -4000,7 +4000,7 @@ def test_copy_multiple_data_set_members_in_loop(ansible_zos_module): @pytest.mark.uss @pytest.mark.pdse -@pytest.mark.parametrize("ds_type", ["PDS", "PDSE"]) +@pytest.mark.parametrize("ds_type", ["pds", "pdse"]) def test_copy_member_to_non_existing_uss_file(ansible_zos_module, ds_type): hosts = ansible_zos_module data_set = get_tmp_ds_name() @@ -4038,10 +4038,10 @@ def test_copy_member_to_non_existing_uss_file(ansible_zos_module, ds_type): @pytest.mark.uss @pytest.mark.pdse @pytest.mark.parametrize("args", [ - dict(ds_type="PDS", force=False), - dict(ds_type="PDS", force=True), - dict(ds_type="PDSE", force=False), - dict(ds_type="PDSE", force=True) + dict(ds_type="pds", force=False), + dict(ds_type="pds", force=True), + dict(ds_type="pdse", force=False), + dict(ds_type="pdse", force=True) ]) def test_copy_member_to_existing_uss_file(ansible_zos_module, args): hosts = ansible_zos_module @@ -4085,7 +4085,7 @@ def test_copy_member_to_existing_uss_file(ansible_zos_module, args): @pytest.mark.uss @pytest.mark.pdse @pytest.mark.aliases -@pytest.mark.parametrize("src_type", ["PDS", "PDSE"]) +@pytest.mark.parametrize("src_type", ["pds", "pdse"]) def test_copy_pdse_to_uss_dir(ansible_zos_module, src_type): hosts = ansible_zos_module src_ds = get_tmp_ds_name() @@ -4130,7 +4130,7 @@ def test_copy_pdse_to_uss_dir(ansible_zos_module, src_type): @pytest.mark.uss @pytest.mark.pdse @pytest.mark.aliases -@pytest.mark.parametrize("src_type", ["PDS", "PDSE"]) +@pytest.mark.parametrize("src_type", ["pds", "pdse"]) def test_copy_member_to_uss_dir(ansible_zos_module, src_type): hosts = ansible_zos_module src_ds = get_tmp_ds_name() @@ -4176,7 +4176,7 @@ def test_copy_member_to_uss_dir(ansible_zos_module, src_type): @pytest.mark.seq @pytest.mark.pdse -@pytest.mark.parametrize("src_type", ["PDS", "PDSE"]) +@pytest.mark.parametrize("src_type", ["pds", "pdse"]) def test_copy_member_to_non_existing_seq_data_set(ansible_zos_module, src_type): hosts = ansible_zos_module src_ds = get_tmp_ds_name() @@ -4212,10 +4212,10 @@ def test_copy_member_to_non_existing_seq_data_set(ansible_zos_module, src_type): @pytest.mark.seq @pytest.mark.pdse @pytest.mark.parametrize("args", [ - dict(type="PDS", force=False), - dict(type="PDS", force=True), - dict(type="PDSE", force=False), - dict(type="PDSE", force=True), + dict(type="pds", force=False), + dict(type="pds", force=True), + dict(type="pdse", force=False), + dict(type="pdse", force=True), ]) def test_copy_member_to_existing_seq_data_set(ansible_zos_module, args): hosts = ansible_zos_module @@ -4224,7 +4224,7 @@ def test_copy_member_to_existing_seq_data_set(ansible_zos_module, args): dest = get_tmp_ds_name() try: - hosts.all.zos_data_set(name=dest, type="SEQ", state="present", replace=True) + hosts.all.zos_data_set(name=dest, type="seq", state="present", replace=True) hosts.all.zos_data_set(name=src_ds, type=args["type"], state="present") for data_set in [src, dest]: @@ -4257,7 +4257,7 @@ def test_copy_member_to_existing_seq_data_set(ansible_zos_module, args): @pytest.mark.uss @pytest.mark.pdse -@pytest.mark.parametrize("dest_type", ["PDS", "PDSE"]) +@pytest.mark.parametrize("dest_type", ["pds", "pdse"]) def test_copy_file_to_member_convert_encoding(ansible_zos_module, dest_type): hosts = ansible_zos_module src = "/etc/profile" @@ -4267,8 +4267,8 @@ def test_copy_file_to_member_convert_encoding(ansible_zos_module, dest_type): hosts.all.zos_data_set( type=dest_type, space_primary=5, - space_type="M", - record_format="FBA", + space_type="m", + record_format="fba", record_length=25, ) @@ -4300,10 +4300,10 @@ def test_copy_file_to_member_convert_encoding(ansible_zos_module, dest_type): @pytest.mark.pdse @pytest.mark.parametrize("args", [ - dict(type="PDS", backup=None), - dict(type="PDS", backup="USER.TEST.PDS.BACKUP"), - dict(type="PDSE", backup=None), - dict(type="PDSE", backup="USER.TEST.PDSE.BACKUP"), + dict(type="pds", backup=None), + dict(type="pds", backup="USER.TEST.PDS.BACKUP"), + dict(type="pdse", backup=None), + dict(type="pdse", backup="USER.TEST.PDSE.BACKUP"), ]) def test_backup_pds(ansible_zos_module, args): hosts = ansible_zos_module @@ -4349,7 +4349,7 @@ def test_backup_pds(ansible_zos_module, args): @pytest.mark.seq @pytest.mark.pdse -@pytest.mark.parametrize("src_type", ["SEQ", "PDS", "PDSE"]) +@pytest.mark.parametrize("src_type", ["seq", "pds", "pdse"]) def test_copy_data_set_to_volume(ansible_zos_module, volumes_on_systems, src_type): hosts = ansible_zos_module source = get_tmp_ds_name() @@ -4365,8 +4365,8 @@ def test_copy_data_set_to_volume(ansible_zos_module, volumes_on_systems, src_typ try: hosts.all.zos_data_set(name=source, type=src_type, state='present') - if src_type != "SEQ": - hosts.all.zos_data_set(name=source_member, type="MEMBER", state='present') + if src_type != "seq": + hosts.all.zos_data_set(name=source_member, type="member", state='present') copy_res = hosts.all.zos_copy( src=source, @@ -4425,8 +4425,8 @@ def test_copy_ksds_to_existing_ksds(ansible_zos_module, force): dest_ds = get_tmp_ds_name() try: - create_vsam_data_set(hosts, src_ds, "KSDS", add_data=True, key_length=12, key_offset=0) - create_vsam_data_set(hosts, dest_ds, "KSDS", add_data=True, key_length=12, key_offset=0) + create_vsam_data_set(hosts, src_ds, "ksds", add_data=True, key_length=12, key_offset=0) + create_vsam_data_set(hosts, dest_ds, "ksds", add_data=True, key_length=12, key_offset=0) copy_res = hosts.all.zos_copy(src=src_ds, dest=dest_ds, remote_src=True, force=force) verify_copy = get_listcat_information(hosts, dest_ds, "ksds") @@ -4461,8 +4461,8 @@ def test_backup_ksds(ansible_zos_module, backup): backup_name = None try: - create_vsam_data_set(hosts, src, "KSDS", add_data=True, key_length=12, key_offset=0) - create_vsam_data_set(hosts, dest, "KSDS", add_data=True, key_length=12, key_offset=0) + create_vsam_data_set(hosts, src, "ksds", add_data=True, key_length=12, key_offset=0) + create_vsam_data_set(hosts, dest, "ksds", add_data=True, key_length=12, key_offset=0) if backup: copy_res = hosts.all.zos_copy(src=src, dest=dest, backup=True, backup_name=backup, remote_src=True, force=True) @@ -4544,8 +4544,8 @@ def test_dest_data_set_parameters(ansible_zos_module, volumes_on_systems): volume = volumes.get_available_vol() space_primary = 3 space_secondary = 2 - space_type = "K" - record_format = "VB" + space_type = "k" + record_format = "vb" record_length = 100 block_size = 21000 @@ -4556,7 +4556,7 @@ def test_dest_data_set_parameters(ansible_zos_module, volumes_on_systems): remote_src=True, volume=volume, dest_data_set=dict( - type="SEQ", + type="seq", space_primary=space_primary, space_secondary=space_secondary, space_type=space_type, @@ -4587,7 +4587,7 @@ def test_dest_data_set_parameters(ansible_zos_module, volumes_on_systems): assert len(output_lines) == 5 data_set_attributes = output_lines[2].strip().split() assert len(data_set_attributes) == 4 - assert data_set_attributes[0] == record_format + assert data_set_attributes[0] == record_format.upper() assert data_set_attributes[1] == str(record_length) assert data_set_attributes[2] == str(block_size) assert data_set_attributes[3] == "PS" @@ -4637,7 +4637,7 @@ def test_copy_uss_file_to_existing_sequential_data_set_twice_with_tmphlq_option( src_file = "/etc/profile" tmphlq = "TMPHLQ" try: - hosts.all.zos_data_set(name=dest, type="SEQ", state="present") + hosts.all.zos_data_set(name=dest, type="seq", state="present") copy_result = hosts.all.zos_copy(src=src_file, dest=dest, remote_src=True, force=force) copy_result = hosts.all.zos_copy(src=src_file, dest=dest, remote_src=True, backup=True, tmp_hlq=tmphlq, force=force) diff --git a/tests/functional/modules/test_zos_data_set_func.py b/tests/functional/modules/test_zos_data_set_func.py index 7ab4685c0..f96bfabdc 100644 --- a/tests/functional/modules/test_zos_data_set_func.py +++ b/tests/functional/modules/test_zos_data_set_func.py @@ -29,12 +29,12 @@ data_set_types = [ - ("PDS"), - ("SEQ"), - ("PDSE"), - ("ESDS"), - ("RRDS"), - ("LDS"), + ("pds"), + ("seq"), + ("pdse"), + ("esds"), + ("rrds"), + ("lds"), ] TEMP_PATH = "/tmp/jcl" @@ -161,7 +161,7 @@ def test_data_set_catalog_and_uncatalog(ansible_zos_module, jcl, volumes_on_syst hosts.all.file(path=TEMP_PATH, state="directory") hosts.all.shell(cmd=ECHO_COMMAND.format(quote(jcl.format(volume_1, dataset)), TEMP_PATH)) results = hosts.all.zos_job_submit( - src=TEMP_PATH + "/SAMPLE", location="USS", wait_time_s=30 + src=TEMP_PATH + "/SAMPLE", location="uss", wait_time_s=30 ) # verify data set creation was successful @@ -220,7 +220,7 @@ def test_data_set_present_when_uncataloged(ansible_zos_module, jcl, volumes_on_s hosts.all.file(path=TEMP_PATH, state="directory") hosts.all.shell(cmd=ECHO_COMMAND.format(quote(jcl.format(volume_1, dataset)), TEMP_PATH)) results = hosts.all.zos_job_submit( - src=TEMP_PATH + "/SAMPLE", location="USS" + src=TEMP_PATH + "/SAMPLE", location="uss" ) # verify data set creation was successful for result in results.contacted.values(): @@ -266,7 +266,7 @@ def test_data_set_replacement_when_uncataloged(ansible_zos_module, jcl, volumes_ hosts.all.file(path=TEMP_PATH, state="directory") hosts.all.shell(cmd=ECHO_COMMAND.format(quote(jcl.format(volume, dataset)), TEMP_PATH)) results = hosts.all.zos_job_submit( - src=TEMP_PATH + "/SAMPLE", location="USS" + src=TEMP_PATH + "/SAMPLE", location="uss" ) # verify data set creation was successful for result in results.contacted.values(): @@ -314,7 +314,7 @@ def test_data_set_absent_when_uncataloged(ansible_zos_module, jcl, volumes_on_sy hosts.all.file(path=TEMP_PATH, state="directory") hosts.all.shell(cmd=ECHO_COMMAND.format(quote(jcl.format(volume_1, dataset)), TEMP_PATH)) results = hosts.all.zos_job_submit( - src=TEMP_PATH + "/SAMPLE", location="USS" + src=TEMP_PATH + "/SAMPLE", location="uss" ) # verify data set creation was successful for result in results.contacted.values(): @@ -351,7 +351,7 @@ def test_data_set_absent_when_uncataloged_and_same_name_cataloged_is_present(ans hosts.all.file(path=TEMP_PATH, state="directory") hosts.all.shell(cmd=ECHO_COMMAND.format(quote(jcl.format(volume_1, dataset)), TEMP_PATH)) - results = hosts.all.zos_job_submit(src=TEMP_PATH + "/SAMPLE", location="USS") + results = hosts.all.zos_job_submit(src=TEMP_PATH + "/SAMPLE", location="uss") # verify data set creation was successful for result in results.contacted.values(): @@ -366,7 +366,7 @@ def test_data_set_absent_when_uncataloged_and_same_name_cataloged_is_present(ans hosts.all.file(path=TEMP_PATH + "/SAMPLE", state="absent") hosts.all.shell(cmd=ECHO_COMMAND.format(quote(jcl.format(volume_2, dataset)), TEMP_PATH)) - results = hosts.all.zos_job_submit(src=TEMP_PATH + "/SAMPLE", location="USS") + results = hosts.all.zos_job_submit(src=TEMP_PATH + "/SAMPLE", location="uss") # verify data set creation was successful for result in results.contacted.values(): @@ -469,7 +469,7 @@ def test_batch_data_set_creation_and_deletion(ansible_zos_module): results = hosts.all.zos_data_set( batch=[ {"name": dataset, "state": "absent"}, - {"name": dataset, "type": "PDS", "state": "present"}, + {"name": dataset, "type": "pds", "state": "present"}, {"name": dataset, "state": "absent"}, ] ) @@ -486,11 +486,11 @@ def test_batch_data_set_and_member_creation(ansible_zos_module): dataset = get_tmp_ds_name(2, 2) results = hosts.all.zos_data_set( batch=[ - {"name": dataset, "type": "PDS", "directory_blocks": 5}, - {"name": dataset + "(newmem1)", "type": "MEMBER"}, + {"name": dataset, "type": "pds", "directory_blocks": 5}, + {"name": dataset + "(newmem1)", "type": "member"}, { "name": dataset + "(newmem2)", - "type": "MEMBER", + "type": "member", "state": "present", }, {"name": dataset, "state": "absent"}, @@ -534,7 +534,7 @@ def test_data_member_force_delete(ansible_zos_module): DEFAULT_DATA_SET_NAME = get_tmp_ds_name(2, 2) # set up: # create pdse - results = hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="present", type="PDSE", replace=True) + results = hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="present", type="pdse", replace=True) for result in results.contacted.values(): assert result.get("changed") is True @@ -543,25 +543,25 @@ def test_data_member_force_delete(ansible_zos_module): batch=[ { "name": DEFAULT_DATA_SET_NAME + "({0})".format(MEMBER_1), - "type": "MEMBER", + "type": "member", "state": "present", "replace": True, }, { "name": DEFAULT_DATA_SET_NAME + "({0})".format(MEMBER_2), - "type": "MEMBER", + "type": "member", "state": "present", "replace": True, }, { "name": DEFAULT_DATA_SET_NAME + "({0})".format(MEMBER_3), - "type": "MEMBER", + "type": "member", "state": "present", "replace": True, }, { "name": DEFAULT_DATA_SET_NAME + "({0})".format(MEMBER_4), - "type": "MEMBER", + "type": "member", "state": "present", "replace": True, }, @@ -590,7 +590,7 @@ def test_data_member_force_delete(ansible_zos_module): results = hosts.all.zos_data_set( name="{0}({1})".format(DEFAULT_DATA_SET_NAME, MEMBER_2), state="absent", - type="MEMBER" + type="member" ) for result in results.contacted.values(): assert result.get("failed") is True @@ -598,7 +598,7 @@ def test_data_member_force_delete(ansible_zos_module): # attempt to delete MEMBER_3 with force option. results = hosts.all.zos_data_set( - name="{0}({1})".format(DEFAULT_DATA_SET_NAME, MEMBER_3), state="absent", type="MEMBER", force=True + name="{0}({1})".format(DEFAULT_DATA_SET_NAME, MEMBER_3), state="absent", type="member", force=True ) for result in results.contacted.values(): assert result.get("changed") is True @@ -610,7 +610,7 @@ def test_data_member_force_delete(ansible_zos_module): { "name": "{0}({1})".format(DEFAULT_DATA_SET_NAME, MEMBER_4), "state": "absent", - "type": "MEMBER", + "type": "member", "force": True } ] @@ -647,9 +647,9 @@ def test_repeated_operations(ansible_zos_module): DEFAULT_DATA_SET_NAME_WITH_MEMBER = DEFAULT_DATA_SET_NAME + "(MEM)" results = hosts.all.zos_data_set( name=DEFAULT_DATA_SET_NAME, - type="PDS", + type="pds", space_primary=5, - space_type="CYL", + space_type="cyl", record_length=15, replace=True, ) @@ -660,7 +660,7 @@ def test_repeated_operations(ansible_zos_module): results = hosts.all.zos_data_set( name=DEFAULT_DATA_SET_NAME, - type="PDS", + type="pds", replace=True, ) @@ -669,7 +669,7 @@ def test_repeated_operations(ansible_zos_module): assert result.get("module_stderr") is None results = hosts.all.zos_data_set( - name=DEFAULT_DATA_SET_NAME_WITH_MEMBER, type="MEMBER", replace=True + name=DEFAULT_DATA_SET_NAME_WITH_MEMBER, type="member", replace=True ) for result in results.contacted.values(): @@ -677,7 +677,7 @@ def test_repeated_operations(ansible_zos_module): assert result.get("module_stderr") is None results = hosts.all.zos_data_set( - name=DEFAULT_DATA_SET_NAME_WITH_MEMBER, type="MEMBER" + name=DEFAULT_DATA_SET_NAME_WITH_MEMBER, type="member" ) for result in results.contacted.values(): @@ -685,7 +685,7 @@ def test_repeated_operations(ansible_zos_module): assert result.get("module_stderr") is None results = hosts.all.zos_data_set( - name=DEFAULT_DATA_SET_NAME_WITH_MEMBER, type="MEMBER", state="absent" + name=DEFAULT_DATA_SET_NAME_WITH_MEMBER, type="member", state="absent" ) for result in results.contacted.values(): @@ -693,7 +693,7 @@ def test_repeated_operations(ansible_zos_module): assert result.get("module_stderr") is None results = hosts.all.zos_data_set( - name=DEFAULT_DATA_SET_NAME_WITH_MEMBER, type="MEMBER", state="absent" + name=DEFAULT_DATA_SET_NAME_WITH_MEMBER, type="member", state="absent" ) for result in results.contacted.values(): @@ -713,9 +713,9 @@ def test_multi_volume_creation_uncatalog_and_catalog_nonvsam(ansible_zos_module, hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") results = hosts.all.zos_data_set( name=DEFAULT_DATA_SET_NAME, - type="SEQ", + type="seq", space_primary=5, - space_type="CYL", + space_type="cyl", record_length=15, volumes=[volume_1, volume_2], ) @@ -750,11 +750,11 @@ def test_multi_volume_creation_uncatalog_and_catalog_vsam(ansible_zos_module, vo hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") results = hosts.all.zos_data_set( name=DEFAULT_DATA_SET_NAME, - type="KSDS", + type="ksds", key_length=5, key_offset=0, space_primary=5, - space_type="CYL", + space_type="cyl", volumes=[volume_1, volume_2], ) for result in results.contacted.values(): @@ -843,7 +843,7 @@ def test_data_set_temp_data_set_name_batch(ansible_zos_module): @pytest.mark.parametrize( "filesystem", - ["HFS", "ZFS"], + ["hfs", "zfs"], ) def test_filesystem_create_and_mount(ansible_zos_module, filesystem): fulltest = True @@ -852,7 +852,7 @@ def test_filesystem_create_and_mount(ansible_zos_module, filesystem): try: hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") - if filesystem == "HFS": + if filesystem == "hfs": result0 = hosts.all.shell(cmd="zinfo -t sys") for result in result0.contacted.values(): sys_info = result.get("stdout_lines") @@ -909,7 +909,7 @@ def test_data_set_creation_zero_values(ansible_zos_module): results = hosts.all.zos_data_set( name=DEFAULT_DATA_SET_NAME, state="present", - type="KSDS", + type="ksds", replace=True, space_primary=5, space_secondary=0, @@ -941,7 +941,7 @@ def test_data_set_creation_with_tmp_hlq(ansible_zos_module): @pytest.mark.parametrize( "formats", - ["F","FB", "VB", "FBA", "VBA", "U"], + ["f","fb", "vb", "fba", "vba", "u"], ) def test_data_set_f_formats(ansible_zos_module, formats, volumes_on_systems): volumes = Volume_Handler(volumes_on_systems) @@ -955,7 +955,7 @@ def test_data_set_f_formats(ansible_zos_module, formats, volumes_on_systems): state="present", format=formats, space_primary="5", - space_type="M", + space_type="m", volume=volume_1, ) for result in results.contacted.values(): diff --git a/tests/functional/modules/test_zos_encode_func.py b/tests/functional/modules/test_zos_encode_func.py index e017450ff..4b74c8834 100644 --- a/tests/functional/modules/test_zos_encode_func.py +++ b/tests/functional/modules/test_zos_encode_func.py @@ -98,7 +98,7 @@ def create_vsam_data_set(hosts, name, ds_type, add_data=False, key_length=None, type=ds_type, state="present" ) - if ds_type == "KSDS": + if ds_type == "ksds": params["key_length"] = key_length params["key_offset"] = key_offset @@ -545,7 +545,7 @@ def test_uss_encoding_conversion_uss_file_to_mvs_vsam(ansible_zos_module): cmd="echo {0} > {1}/SAMPLE".format(quote(KSDS_CREATE_JCL.format(MVS_VS)), TEMP_JCL_PATH) ) results = hosts.all.zos_job_submit( - src="{0}/SAMPLE".format(TEMP_JCL_PATH), location="USS", wait_time_s=30 + src="{0}/SAMPLE".format(TEMP_JCL_PATH), location="uss", wait_time_s=30 ) for result in results.contacted.values(): @@ -576,7 +576,7 @@ def test_uss_encoding_conversion_mvs_vsam_to_uss_file(ansible_zos_module): hosts = ansible_zos_module mlq_size = 3 MVS_VS = get_tmp_ds_name(mlq_size) - create_vsam_data_set(hosts, MVS_VS, "KSDS", add_data=True, key_length=12, key_offset=0) + create_vsam_data_set(hosts, MVS_VS, "ksds", add_data=True, key_length=12, key_offset=0) hosts.all.file(path=USS_DEST_FILE, state="touch") results = hosts.all.zos_encode( src=MVS_VS, @@ -611,7 +611,7 @@ def test_uss_encoding_conversion_mvs_vsam_to_mvs_ps(ansible_zos_module): hosts = ansible_zos_module MVS_PS = get_tmp_ds_name() MVS_VS = get_tmp_ds_name() - create_vsam_data_set(hosts, MVS_VS, "KSDS", add_data=True, key_length=12, key_offset=0) + create_vsam_data_set(hosts, MVS_VS, "ksds", add_data=True, key_length=12, key_offset=0) hosts.all.zos_data_set(name=MVS_PS, state="absent") hosts.all.zos_data_set(name=MVS_PS, state="present", type="seq", record_length=TEST_DATA_RECORD_LENGTH) results = hosts.all.zos_encode( @@ -635,7 +635,7 @@ def test_uss_encoding_conversion_mvs_vsam_to_mvs_pds_member(ansible_zos_module): hosts = ansible_zos_module MVS_VS = get_tmp_ds_name() MVS_PDS = get_tmp_ds_name() - create_vsam_data_set(hosts, MVS_VS, "KSDS", add_data=True, key_length=12, key_offset=0) + create_vsam_data_set(hosts, MVS_VS, "ksds", add_data=True, key_length=12, key_offset=0) MVS_PDS_MEMBER = MVS_PDS + '(MEM)' hosts.all.zos_data_set(name=MVS_PDS, state="present", type="pds", record_length=TEST_DATA_RECORD_LENGTH) hosts.all.zos_data_set( @@ -671,7 +671,7 @@ def test_uss_encoding_conversion_mvs_ps_to_mvs_vsam(ansible_zos_module): cmd="echo {0} > {1}/SAMPLE".format(quote(KSDS_CREATE_JCL.format(MVS_VS)), TEMP_JCL_PATH) ) results = hosts.all.zos_job_submit( - src="{0}/SAMPLE".format(TEMP_JCL_PATH), location="USS", wait_time_s=30 + src="{0}/SAMPLE".format(TEMP_JCL_PATH), location="uss", wait_time_s=30 ) for result in results.contacted.values(): assert result.get("jobs") is not None @@ -803,7 +803,7 @@ def test_vsam_backup(ansible_zos_module): cmd="echo {0} > {1}/SAMPLE".format(quote(KSDS_CREATE_JCL.format(MVS_VS)), TEMP_JCL_PATH) ) hosts.all.zos_job_submit( - src="{0}/SAMPLE".format(TEMP_JCL_PATH), location="USS", wait_time_s=30 + src="{0}/SAMPLE".format(TEMP_JCL_PATH), location="uss", wait_time_s=30 ) hosts.all.file(path=TEMP_JCL_PATH, state="absent") # submit JCL to populate KSDS @@ -814,7 +814,7 @@ def test_vsam_backup(ansible_zos_module): ) ) hosts.all.zos_job_submit( - src="{0}/SAMPLE".format(TEMP_JCL_PATH), location="USS", wait_time_s=30 + src="{0}/SAMPLE".format(TEMP_JCL_PATH), location="uss", wait_time_s=30 ) hosts.all.zos_encode( diff --git a/tests/functional/modules/test_zos_fetch_func.py b/tests/functional/modules/test_zos_fetch_func.py index b239bbbd9..5b8e7f878 100644 --- a/tests/functional/modules/test_zos_fetch_func.py +++ b/tests/functional/modules/test_zos_fetch_func.py @@ -89,8 +89,8 @@ def extract_member_name(data_set): def create_and_populate_test_ps_vb(ansible_zos_module, name): params=dict( name=name, - type='SEQ', - record_format='VB', + type='seq', + record_format='vb', record_length='3180', block_size='3190' ) @@ -112,7 +112,7 @@ def create_vsam_data_set(hosts, name, ds_type, key_length=None, key_offset=None) Arguments: hosts (object) -- Ansible instance(s) that can call modules. name (str) -- Name of the VSAM data set. - type (str) -- Type of the VSAM (KSDS, ESDS, RRDS, LDS) + type (str) -- Type of the VSAM (ksds, esds, rrds, lds) add_data (bool, optional) -- Whether to add records to the VSAM. key_length (int, optional) -- Key length (only for KSDS data sets). key_offset (int, optional) -- Key offset (only for KSDS data sets). @@ -122,7 +122,7 @@ def create_vsam_data_set(hosts, name, ds_type, key_length=None, key_offset=None) type=ds_type, state="present" ) - if ds_type == "KSDS": + if ds_type == "ksds": params["key_length"] = key_length params["key_offset"] = key_offset @@ -188,7 +188,7 @@ def test_fetch_uss_file_present_on_local_machine(ansible_zos_module): def test_fetch_sequential_data_set_fixed_block(ansible_zos_module): hosts = ansible_zos_module TEST_PS = get_tmp_ds_name() - hosts.all.zos_data_set(name=TEST_PS, state="present", type="SEQ", size="5m") + hosts.all.zos_data_set(name=TEST_PS, state="present", type="seq", space_type="m", space_primary=5) hosts.all.shell(cmd="decho \"{0}\" \"{1}\"".format(TEST_DATA, TEST_PS)) params = dict(src=TEST_PS, dest="/tmp/", flat=True) dest_path = "/tmp/" + TEST_PS @@ -229,7 +229,7 @@ def test_fetch_sequential_data_set_variable_block(ansible_zos_module): def test_fetch_partitioned_data_set(ansible_zos_module): hosts = ansible_zos_module TEST_PDS = get_tmp_ds_name() - hosts.all.zos_data_set(name=TEST_PDS, state="present", type="PDSE") + hosts.all.zos_data_set(name=TEST_PDS, state="present", type="pdse") TEST_PDS_MEMBER = TEST_PDS + "(MEM)" hosts.all.zos_data_set(name=TEST_PDS_MEMBER, type="member") hosts.all.shell(cmd="decho \"{0}\" \"{1}\"".format(TEST_DATA, TEST_PDS_MEMBER)) @@ -264,7 +264,7 @@ def test_fetch_vsam_data_set(ansible_zos_module, volumes_on_systems): cmd="echo {0} > {1}/SAMPLE".format(quote(KSDS_CREATE_JCL.format(volume_1, test_vsam)), temp_jcl_path) ) hosts.all.zos_job_submit( - src="{0}/SAMPLE".format(temp_jcl_path), location="USS", wait_time_s=30 + src="{0}/SAMPLE".format(temp_jcl_path), location="uss", wait_time_s=30 ) hosts.all.shell(cmd="echo \"{0}\c\" > {1}".format(TEST_DATA, USS_FILE)) hosts.all.zos_encode( @@ -300,7 +300,7 @@ def test_fetch_vsam_data_set(ansible_zos_module, volumes_on_systems): def test_fetch_vsam_empty_data_set(ansible_zos_module): hosts = ansible_zos_module src_ds = "TEST.VSAM.DATA" - create_vsam_data_set(hosts, src_ds, "KSDS", key_length=12, key_offset=0) + create_vsam_data_set(hosts, src_ds, "ksds", key_length=12, key_offset=0) params = dict(src=src_ds, dest="/tmp/", flat=True) dest_path = "/tmp/" + src_ds try: @@ -347,7 +347,7 @@ def test_fetch_partitioned_data_set_member_in_binary_mode(ansible_zos_module): def test_fetch_sequential_data_set_in_binary_mode(ansible_zos_module): hosts = ansible_zos_module TEST_PS = get_tmp_ds_name() - hosts.all.zos_data_set(name=TEST_PS, state="present", type="SEQ", size="5m") + hosts.all.zos_data_set(name=TEST_PS, state="present", type="seq", space_type="m", space_primary=5) hosts.all.shell(cmd="decho \"{0}\" \"{1}\"".format(TEST_DATA, TEST_PS)) params = dict(src=TEST_PS, dest="/tmp/", flat=True, is_binary=True) dest_path = "/tmp/" + TEST_PS @@ -368,7 +368,7 @@ def test_fetch_sequential_data_set_in_binary_mode(ansible_zos_module): def test_fetch_partitioned_data_set_binary_mode(ansible_zos_module): hosts = ansible_zos_module TEST_PDS = get_tmp_ds_name() - hosts.all.zos_data_set(name=TEST_PDS, state="present", type="PDSE") + hosts.all.zos_data_set(name=TEST_PDS, state="present", type="pdse") TEST_PDS_MEMBER = TEST_PDS + "(MEM)" hosts.all.zos_data_set(name=TEST_PDS_MEMBER, type="member") hosts.all.shell(cmd="decho \"{0}\" \"{1}\"".format(TEST_DATA, TEST_PDS_MEMBER)) @@ -417,7 +417,7 @@ def test_fetch_partitioned_data_set_empty_fails(ansible_zos_module): name=pds_name, type="pds", space_primary=5, - space_type="M", + space_type="m", record_format="fba", record_length=25, ) @@ -438,12 +438,12 @@ def test_fetch_partitioned_data_set_member_empty(ansible_zos_module): name=pds_name, type="pds", space_primary=5, - space_type="M", + space_type="m", record_format="fba", record_length=25, ) hosts.all.zos_data_set(name=pds_name, type="pds") - hosts.all.zos_data_set(name=pds_name + "(MYDATA)", type="MEMBER", replace="yes") + hosts.all.zos_data_set(name=pds_name + "(MYDATA)", type="member", replace="yes") params = dict(src=pds_name + "(MYDATA)", dest="/tmp/", flat=True) dest_path = "/tmp/MYDATA" try: @@ -535,7 +535,7 @@ def test_fetch_mvs_data_set_missing_fails(ansible_zos_module): def test_fetch_sequential_data_set_replace_on_local_machine(ansible_zos_module): hosts = ansible_zos_module TEST_PS = get_tmp_ds_name() - hosts.all.zos_data_set(name=TEST_PS, state="present", type="SEQ", size="5m") + hosts.all.zos_data_set(name=TEST_PS, state="present", type="seq", space_type="m", space_primary=5) ds_name = TEST_PS hosts.all.zos_data_set(name=TEST_PS, state="present") hosts.all.shell(cmd="decho \"{0}\" \"{1}\"".format(TEST_DATA, TEST_PS)) @@ -566,11 +566,11 @@ def test_fetch_partitioned_data_set_replace_on_local_machine(ansible_zos_module) name=pds_name, type="pds", space_primary=5, - space_type="M", + space_type="m", record_format="fba", record_length=25, ) - hosts.all.zos_data_set(name=pds_name + "(MYDATA)", type="MEMBER", replace="yes") + hosts.all.zos_data_set(name=pds_name + "(MYDATA)", type="member", replace="yes") os.mkdir(dest_path) with open(full_path, "w") as infile: infile.write(DUMMY_DATA) diff --git a/tests/functional/modules/test_zos_find_func.py b/tests/functional/modules/test_zos_find_func.py index 3a30d9510..37a67ddbc 100644 --- a/tests/functional/modules/test_zos_find_func.py +++ b/tests/functional/modules/test_zos_find_func.py @@ -124,7 +124,7 @@ def test_find_pds_members_containing_string(ansible_zos_module): batch=[ dict( name=i + "(MEMBER)", - type="MEMBER", + type="member", state='present', replace='yes' ) for i in PDS_NAMES @@ -185,10 +185,10 @@ def test_exclude_members_from_matched_list(ansible_zos_module): batch=[dict(name=i, type='pds', state='present') for i in PDS_NAMES] ) hosts.all.zos_data_set( - batch=[dict(name=i + "(MEMBER)", type="MEMBER") for i in PDS_NAMES] + batch=[dict(name=i + "(MEMBER)", type="member") for i in PDS_NAMES] ) hosts.all.zos_data_set( - batch=[dict(name=i + "(FILE)", type="MEMBER") for i in PDS_NAMES] + batch=[dict(name=i + "(FILE)", type="member") for i in PDS_NAMES] ) find_res = hosts.all.zos_find( pds_paths=['TEST.FIND.PDS.FUNCTEST.*'], excludes=['.*FILE$'], patterns=['.*'] @@ -221,8 +221,8 @@ def test_find_data_sets_larger_than_size(ansible_zos_module): TEST_PS1 = 'TEST.PS.ONE' TEST_PS2 = 'TEST.PS.TWO' try: - res = hosts.all.zos_data_set(name=TEST_PS1, state="present", size="5m") - res = hosts.all.zos_data_set(name=TEST_PS2, state="present", size="5m") + res = hosts.all.zos_data_set(name=TEST_PS1, state="present", space_type="m", space_primary=5) + res = hosts.all.zos_data_set(name=TEST_PS2, state="present", space_type="m", space_primary=5) find_res = hosts.all.zos_find(patterns=['TEST.PS.*'], size="1k") for val in find_res.contacted.values(): assert len(val.get('data_sets')) == 2 @@ -236,7 +236,7 @@ def test_find_data_sets_smaller_than_size(ansible_zos_module): hosts = ansible_zos_module TEST_PS = 'USER.FIND.TEST' try: - hosts.all.zos_data_set(name=TEST_PS, state="present", type="SEQ", size="1k") + hosts.all.zos_data_set(name=TEST_PS, state="present", type="seq", space_type="k", space_primary=1) find_res = hosts.all.zos_find(patterns=['USER.FIND.*'], size='-1m') for val in find_res.contacted.values(): assert len(val.get('data_sets')) == 1 @@ -344,10 +344,10 @@ def test_find_mixed_members_from_pds_paths(ansible_zos_module): batch=[dict(name=i, type='pds', state='present') for i in PDS_NAMES] ) hosts.all.zos_data_set( - batch=[dict(name=i + "(MEMBER)", type="MEMBER") for i in PDS_NAMES] + batch=[dict(name=i + "(MEMBER)", type="member") for i in PDS_NAMES] ) hosts.all.zos_data_set( - batch=[dict(name=i + "(FILE)", type="MEMBER") for i in PDS_NAMES] + batch=[dict(name=i + "(FILE)", type="member") for i in PDS_NAMES] ) find_res = hosts.all.zos_find( pds_paths=['TEST.NONE.PDS.*','TEST.FIND.PDS.FUNCTEST.*'], excludes=['.*FILE$'], patterns=['.*'] diff --git a/tests/functional/modules/test_zos_job_output_func.py b/tests/functional/modules/test_zos_job_output_func.py index 584cd6d6d..e92d377d4 100644 --- a/tests/functional/modules/test_zos_job_output_func.py +++ b/tests/functional/modules/test_zos_job_output_func.py @@ -99,7 +99,7 @@ def test_zos_job_output_job_exists(ansible_zos_module): ) jobs = hosts.all.zos_job_submit( - src="{0}/SAMPLE".format(TEMP_PATH), location="USS", volume=None + src="{0}/SAMPLE".format(TEMP_PATH), location="uss", volume=None ) for job in jobs.contacted.values(): print(job) @@ -127,7 +127,7 @@ def test_zos_job_output_job_exists_with_filtered_ddname(ansible_zos_module): cmd="echo {0} > {1}/SAMPLE".format(quote(JCL_FILE_CONTENTS), TEMP_PATH) ) result = hosts.all.zos_job_submit( - src="{0}/SAMPLE".format(TEMP_PATH), location="USS", volume=None + src="{0}/SAMPLE".format(TEMP_PATH), location="uss", volume=None ) hosts.all.file(path=TEMP_PATH, state="absent") dd_name = "JESMSGLG" diff --git a/tests/functional/modules/test_zos_job_query_func.py b/tests/functional/modules/test_zos_job_query_func.py index 8f6c6e072..11680ab57 100644 --- a/tests/functional/modules/test_zos_job_query_func.py +++ b/tests/functional/modules/test_zos_job_query_func.py @@ -57,13 +57,13 @@ def test_zos_job_id_query_multi_wildcards_func(ansible_zos_module): cmd="echo {0} > {1}/SAMPLE".format(quote(JCLQ_FILE_CONTENTS), TEMP_PATH) ) hosts.all.zos_data_set( - name=JDATA_SET_NAME, state="present", type="PDS", replace=True + name=JDATA_SET_NAME, state="present", type="pds", replace=True ) hosts.all.shell( cmd="cp {0}/SAMPLE \"//'{1}(SAMPLE)'\"".format(TEMP_PATH, JDATA_SET_NAME) ) results = hosts.all.zos_job_submit( - src="{0}(SAMPLE)".format(JDATA_SET_NAME), location="DATA_SET", wait_time_s=10 + src="{0}(SAMPLE)".format(JDATA_SET_NAME), location="data_set", wait_time_s=10 ) for result in results.contacted.values(): assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" @@ -90,13 +90,13 @@ def test_zos_job_name_query_multi_wildcards_func(ansible_zos_module): cmd="echo {0} > {1}/SAMPLE".format(quote(JCLQ_FILE_CONTENTS), TEMP_PATH) ) hosts.all.zos_data_set( - name=NDATA_SET_NAME, state="present", type="PDS", replace=True + name=NDATA_SET_NAME, state="present", type="pds", replace=True ) hosts.all.shell( cmd="cp {0}/SAMPLE \"//'{1}(SAMPLE)'\"".format(TEMP_PATH, NDATA_SET_NAME) ) results = hosts.all.zos_job_submit( - src="{0}(SAMPLE)".format(NDATA_SET_NAME), location="DATA_SET", wait_time_s=10 + src="{0}(SAMPLE)".format(NDATA_SET_NAME), location="data_set", wait_time_s=10 ) for result in results.contacted.values(): assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" diff --git a/tests/functional/modules/test_zos_job_submit_func.py b/tests/functional/modules/test_zos_job_submit_func.py index c148b6223..f2f1582fa 100644 --- a/tests/functional/modules/test_zos_job_submit_func.py +++ b/tests/functional/modules/test_zos_job_submit_func.py @@ -411,8 +411,8 @@ def test_job_submit_PDS(ansible_zos_module, location): """ Test zos_job_submit with a PDS(MEMBER), also test the default value for 'location', ensure it works with and without the - value "DATA_SET". If default_location is True, then don't - pass a 'location:DATA_SET' allow its default to come through. + value "data_set". If default_location is True, then don't + pass a 'location:data_set' allow its default to come through. """ try: results = None @@ -424,7 +424,7 @@ def test_job_submit_PDS(ansible_zos_module, location): ) hosts.all.zos_data_set( - name=data_set_name, state="present", type="PDS", replace=True + name=data_set_name, state="present", type="pds", replace=True ) hosts.all.shell( @@ -436,7 +436,7 @@ def test_job_submit_PDS(ansible_zos_module, location): ) else: results = hosts.all.zos_job_submit( - src="{0}(SAMPLE)".format(data_set_name), location="DATA_SET", wait_time_s=30 + src="{0}(SAMPLE)".format(data_set_name), location="data_set", wait_time_s=30 ) for result in results.contacted.values(): @@ -456,7 +456,7 @@ def test_job_submit_PDS_special_characters(ansible_zos_module): cmd="echo {0} > {1}/SAMPLE".format(quote(JCL_FILE_CONTENTS), TEMP_PATH) ) hosts.all.zos_data_set( - name=DATA_SET_NAME_SPECIAL_CHARS, state="present", type="PDS", replace=True + name=DATA_SET_NAME_SPECIAL_CHARS, state="present", type="pds", replace=True ) hosts.all.shell( cmd="cp {0}/SAMPLE \"//'{1}(SAMPLE)'\"".format( @@ -465,7 +465,7 @@ def test_job_submit_PDS_special_characters(ansible_zos_module): ) results = hosts.all.zos_job_submit( src="{0}(SAMPLE)".format(DATA_SET_NAME_SPECIAL_CHARS), - location="DATA_SET", + location="data_set", ) for result in results.contacted.values(): assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" @@ -484,7 +484,7 @@ def test_job_submit_USS(ansible_zos_module): cmd="echo {0} > {1}/SAMPLE".format(quote(JCL_FILE_CONTENTS), TEMP_PATH) ) results = hosts.all.zos_job_submit( - src="{0}/SAMPLE".format(TEMP_PATH), location="USS", volume=None + src="{0}/SAMPLE".format(TEMP_PATH), location="uss", volume=None ) for result in results.contacted.values(): assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" @@ -499,7 +499,7 @@ def test_job_submit_LOCAL(ansible_zos_module): with open(tmp_file.name, "w") as f: f.write(JCL_FILE_CONTENTS) hosts = ansible_zos_module - results = hosts.all.zos_job_submit(src=tmp_file.name, location="LOCAL", wait_time_s=10) + results = hosts.all.zos_job_submit(src=tmp_file.name, location="local", wait_time_s=10) for result in results.contacted.values(): print(result) @@ -513,7 +513,7 @@ def test_job_submit_LOCAL_extraR(ansible_zos_module): with open(tmp_file.name, "w") as f: f.write(JCL_FILE_CONTENTS_BACKSLASH_R) hosts = ansible_zos_module - results = hosts.all.zos_job_submit(src=tmp_file.name, location="LOCAL", wait_time_s=10) + results = hosts.all.zos_job_submit(src=tmp_file.name, location="local", wait_time_s=10) for result in results.contacted.values(): assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" @@ -526,7 +526,7 @@ def test_job_submit_LOCAL_BADJCL(ansible_zos_module): with open(tmp_file.name, "w") as f: f.write(JCL_FILE_CONTENTS_BAD) hosts = ansible_zos_module - results = hosts.all.zos_job_submit(src=tmp_file.name, location="LOCAL", wait_time_s=10) + results = hosts.all.zos_job_submit(src=tmp_file.name, location="local", wait_time_s=10) for result in results.contacted.values(): # Expecting: The job completion code (CC) was not in the job log....." @@ -547,7 +547,7 @@ def test_job_submit_PDS_volume(ansible_zos_module, volumes_on_systems): ) hosts.all.zos_data_set( - name=data_set_name, state="present", type="PDS", replace=True, volumes=volume_1 + name=data_set_name, state="present", type="pds", replace=True, volumes=volume_1 ) hosts.all.shell( @@ -555,10 +555,10 @@ def test_job_submit_PDS_volume(ansible_zos_module, volumes_on_systems): ) hosts.all.zos_data_set( - name=data_set_name, state="uncataloged", type="PDS" + name=data_set_name, state="uncataloged", type="pds" ) - results = hosts.all.zos_job_submit(src=data_set_name+"(SAMPLE)", location="DATA_SET", volume=volume_1) + results = hosts.all.zos_job_submit(src=data_set_name+"(SAMPLE)", location="data_set", volume=volume_1) for result in results.contacted.values(): assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" assert result.get("jobs")[0].get("ret_code").get("code") == 0 @@ -580,7 +580,7 @@ def test_job_submit_PDS_5_SEC_JOB_WAIT_15(ansible_zos_module): ) hosts.all.zos_data_set( - name=data_set_name, state="present", type="PDS", replace=True + name=data_set_name, state="present", type="pds", replace=True ) hosts.all.shell( @@ -589,7 +589,7 @@ def test_job_submit_PDS_5_SEC_JOB_WAIT_15(ansible_zos_module): hosts = ansible_zos_module results = hosts.all.zos_job_submit(src=data_set_name+"(BPXSLEEP)", - location="DATA_SET", wait_time_s=wait_time_s) + location="data_set", wait_time_s=wait_time_s) for result in results.contacted.values(): assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" @@ -613,7 +613,7 @@ def test_job_submit_PDS_30_SEC_JOB_WAIT_60(ansible_zos_module): ) hosts.all.zos_data_set( - name=data_set_name, state="present", type="PDS", replace=True + name=data_set_name, state="present", type="pds", replace=True ) hosts.all.shell( @@ -622,7 +622,7 @@ def test_job_submit_PDS_30_SEC_JOB_WAIT_60(ansible_zos_module): hosts = ansible_zos_module results = hosts.all.zos_job_submit(src=data_set_name+"(BPXSLEEP)", - location="DATA_SET", wait_time_s=wait_time_s) + location="data_set", wait_time_s=wait_time_s) for result in results.contacted.values(): assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" @@ -646,7 +646,7 @@ def test_job_submit_PDS_30_SEC_JOB_WAIT_10_negative(ansible_zos_module): ) hosts.all.zos_data_set( - name=data_set_name, state="present", type="PDS", replace=True + name=data_set_name, state="present", type="pds", replace=True ) hosts.all.shell( @@ -655,7 +655,7 @@ def test_job_submit_PDS_30_SEC_JOB_WAIT_10_negative(ansible_zos_module): hosts = ansible_zos_module results = hosts.all.zos_job_submit(src=data_set_name+"(BPXSLEEP)", - location="DATA_SET", wait_time_s=wait_time_s) + location="data_set", wait_time_s=wait_time_s) for result in results.contacted.values(): assert result.get("msg") is not None @@ -682,7 +682,7 @@ def test_job_submit_max_rc(ansible_zos_module, args): f.write(JCL_FILE_CONTENTS_RC_8) results = hosts.all.zos_job_submit( - src=tmp_file.name, location="LOCAL", max_rc=args["max_rc"], wait_time_s=args["wait_time_s"] + src=tmp_file.name, location="local", max_rc=args["max_rc"], wait_time_s=args["wait_time_s"] ) for result in results.contacted.values(): @@ -771,7 +771,7 @@ def test_job_submit_jinja_template(ansible_zos_module, args): results = hosts.all.zos_job_submit( src=tmp_file.name, - location="LOCAL", + location="local", use_template=True, template_parameters=args["options"] ) @@ -794,7 +794,7 @@ def test_job_submit_full_input(ansible_zos_module): ) results = hosts.all.zos_job_submit( src="{0}/SAMPLE".format(TEMP_PATH), - location="USS", + location="uss", volume=None, # This job used to set wait=True, but since it has been deprecated # and removed, it now waits up to 30 seconds. @@ -814,7 +814,7 @@ def test_negative_job_submit_local_jcl_no_dsn(ansible_zos_module): with open(tmp_file.name, "w") as f: f.write(JCL_FILE_CONTENTS_NO_DSN) hosts = ansible_zos_module - results = hosts.all.zos_job_submit(src=tmp_file.name, wait_time_s=20, location="LOCAL") + results = hosts.all.zos_job_submit(src=tmp_file.name, wait_time_s=20, location="local") import pprint for result in results.contacted.values(): assert result.get("changed") is False @@ -827,7 +827,7 @@ def test_negative_job_submit_local_jcl_invalid_user(ansible_zos_module): with open(tmp_file.name, "w") as f: f.write(JCL_FILE_CONTENTS_INVALID_USER) hosts = ansible_zos_module - results = hosts.all.zos_job_submit(src=tmp_file.name, location="LOCAL") + results = hosts.all.zos_job_submit(src=tmp_file.name, location="local") for result in results.contacted.values(): assert result.get("changed") is False @@ -843,7 +843,7 @@ def test_job_submit_local_jcl_typrun_scan(ansible_zos_module): f.write(JCL_FILE_CONTENTS_TYPRUN_SCAN) hosts = ansible_zos_module results = hosts.all.zos_job_submit(src=tmp_file.name, - location="LOCAL", + location="local", wait_time_s=20, encoding={ "from": "UTF-8", @@ -864,7 +864,7 @@ def test_job_submit_local_jcl_typrun_copy(ansible_zos_module): f.write(JCL_FILE_CONTENTS_TYPRUN_COPY) hosts = ansible_zos_module results = hosts.all.zos_job_submit(src=tmp_file.name, - location="LOCAL", + location="local", wait_time_s=20, encoding={ "from": "UTF-8", @@ -887,7 +887,7 @@ def test_job_submit_local_jcl_typrun_hold(ansible_zos_module): f.write(JCL_FILE_CONTENTS_TYPRUN_HOLD) hosts = ansible_zos_module results = hosts.all.zos_job_submit(src=tmp_file.name, - location="LOCAL", + location="local", wait_time_s=20, encoding={ "from": "UTF-8", @@ -908,7 +908,7 @@ def test_job_submit_local_jcl_typrun_jclhold(ansible_zos_module): f.write(JCL_FILE_CONTENTS_TYPRUN_JCLHOLD) hosts = ansible_zos_module results = hosts.all.zos_job_submit(src=tmp_file.name, - location="LOCAL", + location="local", wait_time_s=20, encoding={ "from": "UTF-8", @@ -946,7 +946,7 @@ def test_zoau_bugfix_invalid_utf8_chars(ansible_zos_module): results = hosts.all.zos_job_submit( src=tmp_file.name, - location="LOCAL", + location="local", wait_time_s=15 ) diff --git a/tests/functional/modules/test_zos_lineinfile_func.py b/tests/functional/modules/test_zos_lineinfile_func.py index 445c0edfe..cd1421f41 100644 --- a/tests/functional/modules/test_zos_lineinfile_func.py +++ b/tests/functional/modules/test_zos_lineinfile_func.py @@ -224,7 +224,7 @@ def set_ds_environment(ansible_zos_module, TEMP_FILE, DS_NAME, DS_TYPE, CONTENT) hosts = ansible_zos_module hosts.all.shell(cmd="echo \"{0}\" > {1}".format(CONTENT, TEMP_FILE)) hosts.all.zos_data_set(name=DS_NAME, type=DS_TYPE) - if DS_TYPE in ["PDS", "PDSE"]: + if DS_TYPE in ["pds", "pdse"]: DS_FULL_NAME = DS_NAME + "(MEM)" hosts.all.zos_data_set(name=DS_FULL_NAME, state="present", type="member") cmdStr = "cp -CM {0} \"//'{1}'\"".format(quote(TEMP_FILE), DS_FULL_NAME) @@ -238,10 +238,11 @@ def set_ds_environment(ansible_zos_module, TEMP_FILE, DS_NAME, DS_TYPE, CONTENT) def remove_ds_environment(ansible_zos_module, DS_NAME): hosts = ansible_zos_module hosts.all.zos_data_set(name=DS_NAME, state="absent") + # supported data set types -DS_TYPE = ['SEQ', 'PDS', 'PDSE'] +DS_TYPE = ['seq', 'pds', 'pdse'] # not supported data set types -NS_DS_TYPE = ['ESDS', 'RRDS', 'LDS'] +NS_DS_TYPE = ['esds', 'rrds', 'lds'] # The encoding will be only use on a few test ENCODING = [ 'ISO8859-1', 'UTF-8'] @@ -793,7 +794,7 @@ def test_ds_line_absent(ansible_zos_module, dstype): def test_ds_tmp_hlq_option(ansible_zos_module): # This TMPHLQ only works with sequential datasets hosts = ansible_zos_module - ds_type = "SEQ" + ds_type = "seq" kwargs = dict(backup_name=r"TMPHLQ\..") params = dict(insertafter="EOF", line="export ZOAU_ROOT", state="present", backup=True, tmp_hlq="TMPHLQ") content = TEST_CONTENT @@ -848,7 +849,7 @@ def test_ds_line_force(ansible_zos_module, dstype): MEMBER_1, MEMBER_2 = "MEM1", "MEM2" TEMP_FILE = "/tmp/{0}".format(MEMBER_2) content = TEST_CONTENT - if ds_type == "SEQ": + if ds_type == "seq": params["path"] = default_data_set_name+".{0}".format(MEMBER_2) else: params["path"] = default_data_set_name+"({0})".format(MEMBER_2) @@ -865,7 +866,7 @@ def test_ds_line_force(ansible_zos_module, dstype): ] ) # write memeber to verify cases - if ds_type in ["PDS", "PDSE"]: + if ds_type in ["pds", "pdse"]: cmdStr = "cp -CM {0} \"//'{1}'\"".format(quote(TEMP_FILE), params["path"]) else: cmdStr = "cp {0} \"//'{1}'\" ".format(quote(TEMP_FILE), params["path"]) @@ -900,7 +901,7 @@ def test_ds_line_force(ansible_zos_module, dstype): @pytest.mark.ds -@pytest.mark.parametrize("dstype", ["PDS","PDSE"]) +@pytest.mark.parametrize("dstype", ["pds","pdse"]) def test_ds_line_force_fail(ansible_zos_module, dstype): hosts = ansible_zos_module ds_type = dstype @@ -1022,7 +1023,7 @@ def test_ds_encoding(ansible_zos_module, encoding, dstype): hosts.all.shell(cmd="echo \"{0}\" > {1}".format(content, temp_file)) hosts.all.shell(cmd=f"iconv -f IBM-1047 -t {params['encoding']} temp_file > temp_file ") hosts.all.zos_data_set(name=ds_name, type=ds_type) - if ds_type in ["PDS", "PDSE"]: + if ds_type in ["pds", "pdse"]: ds_full_name = ds_name + "(MEM)" hosts.all.zos_data_set(name=ds_full_name, state="present", type="member") cmdStr = "cp -CM {0} \"//'{1}'\"".format(quote(temp_file), ds_full_name) diff --git a/tests/functional/modules/test_zos_mount_func.py b/tests/functional/modules/test_zos_mount_func.py index 1ec7c03f5..39fdd26dd 100644 --- a/tests/functional/modules/test_zos_mount_func.py +++ b/tests/functional/modules/test_zos_mount_func.py @@ -89,7 +89,7 @@ def test_basic_mount(ansible_zos_module, volumes_on_systems): srcfn = create_sourcefile(hosts, volume_1) try: mount_result = hosts.all.zos_mount( - src=srcfn, path="/pythonx", fs_type="ZFS", state="mounted" + src=srcfn, path="/pythonx", fs_type="zfs", state="mounted" ) for result in mount_result.values(): assert result.get("rc") == 0 @@ -99,7 +99,7 @@ def test_basic_mount(ansible_zos_module, volumes_on_systems): hosts.all.zos_mount( src=srcfn, path="/pythonx", - fs_type="ZFS", + fs_type="zfs", state="absent", ) hosts.all.file(path="/pythonx/", state="absent") @@ -112,10 +112,10 @@ def test_double_mount(ansible_zos_module, volumes_on_systems): volume_1 = volumes.get_available_vol() srcfn = create_sourcefile(hosts, volume_1) try: - hosts.all.zos_mount(src=srcfn, path="/pythonx", fs_type="ZFS", state="mounted") + hosts.all.zos_mount(src=srcfn, path="/pythonx", fs_type="zfs", state="mounted") # The duplication here is intentional... want to make sure it is seen mount_result = hosts.all.zos_mount( - src=srcfn, path="/pythonx", fs_type="ZFS", state="mounted" + src=srcfn, path="/pythonx", fs_type="zfs", state="mounted" ) for result in mount_result.values(): assert result.get("rc") == 0 @@ -125,7 +125,7 @@ def test_double_mount(ansible_zos_module, volumes_on_systems): hosts.all.zos_mount( src=srcfn, path="/pythonx", - fs_type="ZFS", + fs_type="zfs", state="absent", ) hosts.all.file(path="/pythonx/", state="absent") @@ -137,9 +137,9 @@ def test_remount(ansible_zos_module, volumes_on_systems): volume_1 = volumes.get_available_vol() srcfn = create_sourcefile(hosts, volume_1) try: - hosts.all.zos_mount(src=srcfn, path="/pythonx", fs_type="ZFS", state="mounted") + hosts.all.zos_mount(src=srcfn, path="/pythonx", fs_type="zfs", state="mounted") mount_result = hosts.all.zos_mount( - src=srcfn, path="/pythonx", fs_type="ZFS", state="remounted" + src=srcfn, path="/pythonx", fs_type="zfs", state="remounted" ) for result in mount_result.values(): assert result.get("rc") == 0 @@ -148,7 +148,7 @@ def test_remount(ansible_zos_module, volumes_on_systems): hosts.all.zos_mount( src=srcfn, path="/pythonx", - fs_type="ZFS", + fs_type="zfs", state="absent", ) hosts.all.file(path="/pythonx/", state="absent") @@ -180,7 +180,7 @@ def test_basic_mount_with_bpx_nocomment_nobackup(ansible_zos_module, volumes_on_ name=dest, type="pdse", space_primary=5, - space_type="M", + space_type="m", record_format="fba", record_length=80, ) @@ -196,7 +196,7 @@ def test_basic_mount_with_bpx_nocomment_nobackup(ansible_zos_module, volumes_on_ mount_result = hosts.all.zos_mount( src=srcfn, path="/pythonx", - fs_type="ZFS", + fs_type="zfs", state="mounted", persistent=dict(data_store=dest_path), ) @@ -209,7 +209,7 @@ def test_basic_mount_with_bpx_nocomment_nobackup(ansible_zos_module, volumes_on_ hosts.all.zos_mount( src=srcfn, path="/pythonx", - fs_type="ZFS", + fs_type="zfs", state="absent", ) hosts.all.file(path=tmp_file_filename, state="absent") @@ -219,7 +219,7 @@ def test_basic_mount_with_bpx_nocomment_nobackup(ansible_zos_module, volumes_on_ state="absent", type="pdse", space_primary=5, - space_type="M", + space_type="m", record_format="fba", record_length=80, ) @@ -264,7 +264,7 @@ def test_basic_mount_with_bpx_comment_backup(ansible_zos_module, volumes_on_syst name=dest, type="pdse", space_primary=5, - space_type="M", + space_type="m", record_format="fba", record_length=80, ) @@ -283,7 +283,7 @@ def test_basic_mount_with_bpx_comment_backup(ansible_zos_module, volumes_on_syst mount_result = hosts.all.zos_mount( src=srcfn, path="/pythonx", - fs_type="ZFS", + fs_type="zfs", state="mounted", persistent=dict( data_store=dest_path, @@ -326,7 +326,7 @@ def test_basic_mount_with_bpx_comment_backup(ansible_zos_module, volumes_on_syst hosts.all.zos_mount( src=srcfn, path="/pythonx", - fs_type="ZFS", + fs_type="zfs", state="absent", ) hosts.all.file(path=tmp_file_filename, state="absent") @@ -337,7 +337,7 @@ def test_basic_mount_with_bpx_comment_backup(ansible_zos_module, volumes_on_syst state="absent", type="pdse", space_primary=5, - space_type="M", + space_type="m", record_format="fba", record_length=80, ) @@ -349,7 +349,7 @@ def test_basic_mount_with_tmp_hlq_option(ansible_zos_module, volumes_on_systems) srcfn = create_sourcefile(hosts, volume_1) try: mount_result = hosts.all.zos_mount( - src=srcfn, path="/pythonx", fs_type="ZFS", state="mounted" + src=srcfn, path="/pythonx", fs_type="zfs", state="mounted" ) for result in mount_result.values(): assert result.get("rc") == 0 @@ -358,11 +358,11 @@ def test_basic_mount_with_tmp_hlq_option(ansible_zos_module, volumes_on_systems) finally: tmphlq = "TMPHLQ" persist_data_set = get_tmp_ds_name() - hosts.all.zos_data_set(name=persist_data_set, state="present", type="SEQ") + hosts.all.zos_data_set(name=persist_data_set, state="present", type="seq") unmount_result = hosts.all.zos_mount( src=srcfn, path="/pythonx", - fs_type="ZFS", + fs_type="zfs", state="absent", tmp_hlq=tmphlq, persistent=dict(data_store=persist_data_set, backup=True) diff --git a/tests/functional/modules/test_zos_mvs_raw_func.py b/tests/functional/modules/test_zos_mvs_raw_func.py index ca5b6384d..cbddd4419 100644 --- a/tests/functional/modules/test_zos_mvs_raw_func.py +++ b/tests/functional/modules/test_zos_mvs_raw_func.py @@ -62,7 +62,7 @@ def test_disposition_new(ansible_zos_module): dd_name=SYSPRINT_DD, data_set_name=default_data_set, disposition="new", - type="SEQ", + type="seq", return_content=dict(type="text"), ), ), @@ -86,7 +86,7 @@ def test_dispositions_for_existing_data_set(ansible_zos_module, disposition): hosts = ansible_zos_module default_data_set = get_tmp_ds_name() hosts.all.zos_data_set( - name=default_data_set, type="SEQ", state="present", replace=True + name=default_data_set, type="seq", state="present", replace=True ) results = hosts.all.zos_mvs_raw( program_name="idcams", @@ -118,7 +118,7 @@ def test_list_cat_for_existing_data_set_with_tmp_hlq_option(ansible_zos_module, default_volume = volumes.get_available_vol() default_data_set = get_tmp_ds_name()[:25] hosts.all.zos_data_set( - name=default_data_set, type="SEQ", state="present", replace=True + name=default_data_set, type="seq", state="present", replace=True ) results = hosts.all.zos_mvs_raw( program_name="idcams", @@ -133,12 +133,12 @@ def test_list_cat_for_existing_data_set_with_tmp_hlq_option(ansible_zos_module, return_content=dict(type="text"), replace=True, backup=True, - type="SEQ", + type="seq", space_primary=5, space_secondary=1, - space_type="M", + space_type="m", volumes=default_volume, - record_format="FB" + record_format="fb" ), ), dict(dd_input=dict(dd_name=SYSIN_DD, content=IDCAMS_STDIN)), @@ -172,7 +172,7 @@ def test_new_disposition_for_data_set_members(ansible_zos_module): dd_name=SYSPRINT_DD, data_set_name=DEFAULT_DATA_SET_WITH_MEMBER, disposition="new", - type="PDS", + type="pds", directory_blocks=15, return_content=dict(type="text"), ), @@ -197,7 +197,7 @@ def test_dispositions_for_existing_data_set_members(ansible_zos_module, disposit default_data_set = get_tmp_ds_name() DEFAULT_DATA_SET_WITH_MEMBER = default_data_set + '(MEM)' hosts.all.zos_data_set( - name=default_data_set, type="PDS", state="present", replace=True + name=default_data_set, type="pds", state="present", replace=True ) results = hosts.all.zos_mvs_raw( program_name="idcams", @@ -234,7 +234,7 @@ def test_normal_dispositions_data_set(ansible_zos_module, normal_disposition, ch default_data_set = get_tmp_ds_name() results = hosts.all.zos_data_set( name=default_data_set, - type="SEQ", + type="seq", state="present", replace=True, volumes=[volume_1], @@ -267,11 +267,11 @@ def test_normal_dispositions_data_set(ansible_zos_module, normal_disposition, ch @pytest.mark.parametrize( "space_type,primary,secondary,expected", [ - ("TRK", 3, 1, 169992), - ("CYL", 3, 1, 2549880), - ("B", 3, 1, 56664), - ("K", 3, 1, 56664), - ("M", 3, 1, 3003192), + ("trk", 3, 1, 169992), + ("cyl", 3, 1, 2549880), + ("b", 3, 1, 56664), + ("k", 3, 1, 56664), + ("m", 3, 1, 3003192), ], ) def test_space_types(ansible_zos_module, space_type, primary, secondary, expected): @@ -288,7 +288,7 @@ def test_space_types(ansible_zos_module, space_type, primary, secondary, expecte dd_name=SYSPRINT_DD, data_set_name=default_data_set, disposition="new", - type="SEQ", + type="seq", space_primary=primary, space_secondary=secondary, space_type=space_type, @@ -315,7 +315,7 @@ def test_space_types(ansible_zos_module, space_type, primary, secondary, expecte @pytest.mark.parametrize( "data_set_type", - ["PDS", "PDSE", "LARGE", "BASIC", "SEQ"], + ["pds", "pdse", "large", "basic", "seq"], ) def test_data_set_types_non_vsam(ansible_zos_module, data_set_type, volumes_on_systems): try: @@ -351,7 +351,7 @@ def test_data_set_types_non_vsam(ansible_zos_module, data_set_type, volumes_on_s @pytest.mark.parametrize( "data_set_type", - ["KSDS", "RRDS", "LDS", "ESDS"], + ["ksds", "rrds", "lds", "esds"], ) def test_data_set_types_vsam(ansible_zos_module, data_set_type, volumes_on_systems): try: @@ -374,7 +374,7 @@ def test_data_set_types_vsam(ansible_zos_module, data_set_type, volumes_on_syste volumes=[volume_1], ), ) - if data_set_type != "KSDS" + if data_set_type != "ksds" else dict( dd_data_set=dict( dd_name=SYSPRINT_DD, @@ -400,7 +400,7 @@ def test_data_set_types_vsam(ansible_zos_module, data_set_type, volumes_on_syste @pytest.mark.parametrize( "record_format", - ["U", "VB", "VBA", "FB", "FBA"], + ["u", "vb", "vba", "fb", "fba"], ) def test_record_formats(ansible_zos_module, record_format, volumes_on_systems): try: @@ -453,7 +453,7 @@ def test_return_content_type(ansible_zos_module, return_content_type, expected, default_data_set = get_tmp_ds_name() results = hosts.all.zos_data_set( name=default_data_set, - type="SEQ", + type="seq", state="present", replace=True, volumes=[volume_1], @@ -505,7 +505,7 @@ def test_return_text_content_encodings( default_data_set = get_tmp_ds_name() results = hosts.all.zos_data_set( name=default_data_set, - type="SEQ", + type="seq", state="present", replace=True, volumes=[volume_1], @@ -544,7 +544,7 @@ def test_reuse_existing_data_set(ansible_zos_module): hosts = ansible_zos_module default_data_set = get_tmp_ds_name() hosts.all.zos_data_set( - name=default_data_set, type="SEQ", state="present", replace=True + name=default_data_set, type="seq", state="present", replace=True ) results = hosts.all.zos_mvs_raw( program_name="IDCAMS", @@ -555,7 +555,7 @@ def test_reuse_existing_data_set(ansible_zos_module): dd_name=SYSPRINT_DD, data_set_name=default_data_set, disposition="new", - type="SEQ", + type="seq", reuse=True, return_content=dict(type="text"), ), @@ -577,7 +577,7 @@ def test_replace_existing_data_set(ansible_zos_module): hosts = ansible_zos_module default_data_set = get_tmp_ds_name() hosts.all.zos_data_set( - name=default_data_set, type="SEQ", state="present", replace=True + name=default_data_set, type="seq", state="present", replace=True ) results = hosts.all.zos_mvs_raw( program_name="IDCAMS", @@ -588,7 +588,7 @@ def test_replace_existing_data_set(ansible_zos_module): dd_name=SYSPRINT_DD, data_set_name=default_data_set, disposition="new", - type="SEQ", + type="seq", replace=True, return_content=dict(type="text"), ), @@ -619,7 +619,7 @@ def test_replace_existing_data_set_make_backup(ansible_zos_module): dd_name=SYSPRINT_DD, data_set_name=default_data_set, disposition="new", - type="SEQ", + type="seq", replace=True, return_content=dict(type="text"), ), @@ -636,7 +636,7 @@ def test_replace_existing_data_set_make_backup(ansible_zos_module): dd_name=SYSPRINT_DD, data_set_name=default_data_set, disposition="new", - type="SEQ", + type="seq", replace=True, backup=True, return_content=dict(type="text"), @@ -687,7 +687,7 @@ def test_input_empty(ansible_zos_module): dd_name=SYSPRINT_DD, data_set_name=default_data_set, disposition="new", - type="SEQ", + type="seq", return_content=dict(type="text"), ), ), @@ -719,7 +719,7 @@ def test_input_large(ansible_zos_module): dd_name=SYSPRINT_DD, data_set_name=default_data_set, disposition="new", - type="SEQ", + type="seq", return_content=dict(type="text"), ), ), @@ -752,7 +752,7 @@ def test_input_provided_as_list(ansible_zos_module): dd_name=SYSPRINT_DD, data_set_name=default_data_set, disposition="new", - type="SEQ", + type="seq", return_content=dict(type="text"), ), ), @@ -792,7 +792,7 @@ def test_input_return_content_types(ansible_zos_module, return_content_type, exp dd_name=SYSPRINT_DD, data_set_name=default_data_set, disposition="new", - type="SEQ", + type="seq", ), ), dict( @@ -844,7 +844,7 @@ def test_input_return_text_content_encodings( dd_name=SYSPRINT_DD, data_set_name=default_data_set, disposition="new", - type="SEQ", + type="seq", ), ), dict( @@ -1164,7 +1164,7 @@ def test_file_record_length(ansible_zos_module, record_length): @pytest.mark.parametrize( "record_format", - ["U", "VB", "VBA", "FB", "FBA"], + ["u", "vb", "vba", "fb", "fba"], ) def test_file_record_format(ansible_zos_module, record_format): try: @@ -1353,7 +1353,7 @@ def test_concatenation_with_data_set_dd_and_response(ansible_zos_module): dd_data_set=dict( data_set_name=default_data_set, disposition="new", - type="SEQ", + type="seq", return_content=dict(type="text"), ) ), @@ -1361,7 +1361,7 @@ def test_concatenation_with_data_set_dd_and_response(ansible_zos_module): dd_data_set=dict( data_set_name=DEFAULT_DATA_SET_2, disposition="new", - type="SEQ", + type="seq", ) ), ], @@ -1391,8 +1391,8 @@ def test_concatenation_with_data_set_dd_with_replace_and_backup(ansible_zos_modu hosts = ansible_zos_module default_data_set = get_tmp_ds_name() DEFAULT_DATA_SET_2 = get_tmp_ds_name() - hosts.all.zos_data_set(name=default_data_set, state="present", type="SEQ") - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_2, state="present", type="SEQ") + hosts.all.zos_data_set(name=default_data_set, state="present", type="seq") + hosts.all.zos_data_set(name=DEFAULT_DATA_SET_2, state="present", type="seq") results = hosts.all.zos_mvs_raw( program_name="idcams", auth=True, @@ -1405,7 +1405,7 @@ def test_concatenation_with_data_set_dd_with_replace_and_backup(ansible_zos_modu dd_data_set=dict( data_set_name=default_data_set, disposition="new", - type="SEQ", + type="seq", replace=True, backup=True, return_content=dict(type="text"), @@ -1415,7 +1415,7 @@ def test_concatenation_with_data_set_dd_with_replace_and_backup(ansible_zos_modu dd_data_set=dict( data_set_name=DEFAULT_DATA_SET_2, disposition="new", - type="SEQ", + type="seq", replace=True, backup=True, ) @@ -1462,7 +1462,7 @@ def test_concatenation_with_data_set_member(ansible_zos_module): default_data_set = get_tmp_ds_name() DEFAULT_DATA_SET_2 = get_tmp_ds_name() DEFAULT_DATA_SET_WITH_MEMBER = default_data_set + '(MEM)' - hosts.all.zos_data_set(name=default_data_set, state="present", type="PDS") + hosts.all.zos_data_set(name=default_data_set, state="present", type="pds") hosts.all.zos_data_set(name=DEFAULT_DATA_SET_2, state="absent") results = hosts.all.zos_mvs_raw( program_name="idcams", @@ -1482,7 +1482,7 @@ def test_concatenation_with_data_set_member(ansible_zos_module): dd_data_set=dict( data_set_name=DEFAULT_DATA_SET_2, disposition="new", - type="SEQ", + type="seq", ) ), ], @@ -1538,7 +1538,7 @@ def test_concatenation_with_unix_dd_and_response_datasets(ansible_zos_module): dd_data_set=dict( data_set_name=DEFAULT_DATA_SET_2, disposition="new", - type="SEQ", + type="seq", ) ), ], @@ -1766,7 +1766,7 @@ def test_concatenation_all_dd_types(ansible_zos_module, dds, input_pos, input_co try: hosts = ansible_zos_module default_data_set = "ANSIBLE.USER.PRIVATE.TEST" - hosts.all.zos_data_set(name=default_data_set, state="present", type="SEQ") + hosts.all.zos_data_set(name=default_data_set, state="present", type="seq") hosts.all.file(path=DEFAULT_PATH, state="directory") hosts.all.file(path=DEFAULT_PATH_WITH_FILE, state="absent") results = hosts.all.zos_mvs_raw(program_name="idcams", auth=True, dds=dds) diff --git a/tests/functional/modules/test_zos_unarchive_func.py b/tests/functional/modules/test_zos_unarchive_func.py index 28cc0d77d..790f5b3ef 100644 --- a/tests/functional/modules/test_zos_unarchive_func.py +++ b/tests/functional/modules/test_zos_unarchive_func.py @@ -353,16 +353,16 @@ def test_uss_unarchive_copy_to_remote(ansible_zos_module): ]) @pytest.mark.parametrize( "data_set", [ - dict(dstype="SEQ", members=[""]), - dict(dstype="PDS", members=["MEM1", "MEM2"]), - dict(dstype="PDSE", members=["MEM1", "MEM2"]), + dict(dstype="seq", members=[""]), + dict(dstype="pds", members=["MEM1", "MEM2"]), + dict(dstype="pdse", members=["MEM1", "MEM2"]), ] ) @pytest.mark.parametrize( "record_length", [80, 120] ) @pytest.mark.parametrize( - "record_format", ["FB", "VB",], + "record_format", ["fb", "vb",], ) def test_mvs_unarchive_single_data_set(ansible_zos_module, format, data_set, record_length, record_format): try: @@ -382,7 +382,7 @@ def test_mvs_unarchive_single_data_set(ansible_zos_module, format, data_set, rec replace=True ) # Create members if needed - if data_set.get("dstype") in ["PDS", "PDSE"]: + if data_set.get("dstype") in ["pds", "pdse"]: for member in data_set.get("members"): hosts.all.zos_data_set( name=f"{DATASET}({member})", @@ -392,7 +392,7 @@ def test_mvs_unarchive_single_data_set(ansible_zos_module, format, data_set, rec ) # Write some content into src the same size of the record, # need to reduce 4 from V and VB due to RDW - if record_format in ["V", "VB"]: + if record_format in ["v", "vb"]: test_line = "a" * (record_length - 4) else: test_line = "a" * record_length @@ -405,13 +405,13 @@ def test_mvs_unarchive_single_data_set(ansible_zos_module, format, data_set, rec format_dict = dict(name=format) if format == "terse": - format_dict["format_options"] = dict(terse_pack="SPACK") + format_dict["format_options"] = dict(terse_pack="spack") archive_result = hosts.all.zos_archive( src=DATASET, dest=MVS_DEST_ARCHIVE, format=format_dict, dest_data_set=dict(name=DATASET, - type="SEQ", + type="seq", record_format=record_format, record_length=record_length), ) @@ -464,16 +464,16 @@ def test_mvs_unarchive_single_data_set(ansible_zos_module, format, data_set, rec ]) @pytest.mark.parametrize( "data_set", [ - dict(dstype="SEQ", members=[""]), - dict(dstype="PDS", members=["MEM1", "MEM2"]), - dict(dstype="PDSE", members=["MEM1", "MEM2"]), + dict(dstype="seq", members=[""]), + dict(dstype="pds", members=["MEM1", "MEM2"]), + dict(dstype="pdse", members=["MEM1", "MEM2"]), ] ) @pytest.mark.parametrize( "record_length", [80, 120] ) @pytest.mark.parametrize( - "record_format", ["FB", "VB",], + "record_format", ["fb", "vb",], ) def test_mvs_unarchive_single_data_set_use_adrdssu(ansible_zos_module, format, data_set, record_length, record_format): try: @@ -493,7 +493,7 @@ def test_mvs_unarchive_single_data_set_use_adrdssu(ansible_zos_module, format, d replace=True ) # Create members if needed - if data_set.get("dstype") in ["PDS", "PDSE"]: + if data_set.get("dstype") in ["pds", "pdse"]: for member in data_set.get("members"): hosts.all.zos_data_set( name=f"{DATASET}({member})", @@ -503,7 +503,7 @@ def test_mvs_unarchive_single_data_set_use_adrdssu(ansible_zos_module, format, d ) # Write some content into src the same size of the record, # need to reduce 4 from V and VB due to RDW - if record_format in ["V", "VB"]: + if record_format in ["v", "vb"]: test_line = "a" * (record_length - 4) else: test_line = "a" * record_length @@ -517,7 +517,7 @@ def test_mvs_unarchive_single_data_set_use_adrdssu(ansible_zos_module, format, d format_dict = dict(name=format) format_dict["format_options"] = dict(use_adrdssu=True) if format == "terse": - format_dict["format_options"].update(terse_pack="SPACK") + format_dict["format_options"].update(terse_pack="spack") archive_result = hosts.all.zos_archive( src=DATASET, dest=MVS_DEST_ARCHIVE, @@ -564,9 +564,9 @@ def test_mvs_unarchive_single_data_set_use_adrdssu(ansible_zos_module, format, d ]) @pytest.mark.parametrize( "data_set", [ - dict(dstype="SEQ"), - dict(dstype="PDS"), - dict(dstype="PDSE"), + dict(dstype="seq"), + dict(dstype="pds"), + dict(dstype="pdse"), ] ) def test_mvs_unarchive_multiple_data_set_use_adrdssu(ansible_zos_module, format, data_set): @@ -580,7 +580,7 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu(ansible_zos_module, format, n=1, type=data_set.get("dstype")) ds_to_write = target_ds_list - if data_set.get("dstype") in ["PDS", "PDSE"]: + if data_set.get("dstype") in ["pds", "pdse"]: target_member_list = [] for ds in target_ds_list: target_member_list.extend( @@ -598,10 +598,10 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu(ansible_zos_module, format, format_dict = dict(name=format, format_options=dict()) if format == "terse": - format_dict["format_options"].update(terse_pack="SPACK") + format_dict["format_options"].update(terse_pack="spack") format_dict["format_options"].update(use_adrdssu=True) archive_result = hosts.all.zos_archive( - src=""" "{0}*" """.format(DATASET), + src="{0}*".format(DATASET), dest=MVS_DEST_ARCHIVE, format=format_dict, ) @@ -640,9 +640,9 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu(ansible_zos_module, format, ]) @pytest.mark.parametrize( "data_set", [ - dict(dstype="SEQ"), - dict(dstype="PDS"), - dict(dstype="PDSE"), + dict(dstype="seq"), + dict(dstype="pds"), + dict(dstype="pdse"), ] ) def test_mvs_unarchive_multiple_data_set_use_adrdssu_include(ansible_zos_module, format, data_set): @@ -656,7 +656,7 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_include(ansible_zos_module, n=2, type=data_set.get("dstype")) ds_to_write = target_ds_list - if data_set.get("dstype") in ["PDS", "PDSE"]: + if data_set.get("dstype") in ["pds", "pdse"]: target_member_list = [] for ds in target_ds_list: target_member_list.extend( @@ -674,10 +674,10 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_include(ansible_zos_module, format_dict = dict(name=format, format_options=dict()) if format == "terse": - format_dict["format_options"].update(terse_pack="SPACK") + format_dict["format_options"].update(terse_pack="spack") format_dict["format_options"].update(use_adrdssu=True) archive_result = hosts.all.zos_archive( - src=""" "{0}*" """.format(DATASET), + src="{0}*".format(DATASET), dest=MVS_DEST_ARCHIVE, format=format_dict, ) @@ -726,9 +726,9 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_include(ansible_zos_module, ]) @pytest.mark.parametrize( "data_set", [ - dict(dstype="SEQ"), - dict(dstype="PDS"), - dict(dstype="PDSE"), + dict(dstype="seq"), + dict(dstype="pds"), + dict(dstype="pdse"), ] ) def test_mvs_unarchive_multiple_data_set_use_adrdssu_exclude(ansible_zos_module, format, data_set): @@ -742,7 +742,7 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_exclude(ansible_zos_module, n=2, type=data_set.get("dstype")) ds_to_write = target_ds_list - if data_set.get("dstype") in ["PDS", "PDSE"]: + if data_set.get("dstype") in ["pds", "pdse"]: target_member_list = [] for ds in target_ds_list: target_member_list.extend( @@ -760,10 +760,10 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_exclude(ansible_zos_module, format_dict = dict(name=format, format_options=dict()) if format == "terse": - format_dict["format_options"].update(terse_pack="SPACK") + format_dict["format_options"].update(terse_pack="spack") format_dict["format_options"].update(use_adrdssu=True) archive_result = hosts.all.zos_archive( - src=""" "{0}*" """.format(DATASET), + src="{0}*".format(DATASET), dest=MVS_DEST_ARCHIVE, format=format_dict, ) @@ -808,9 +808,9 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_exclude(ansible_zos_module, ]) @pytest.mark.parametrize( "data_set", [ - dict(dstype="SEQ"), - dict(dstype="PDS"), - dict(dstype="PDSE"), + dict(dstype="seq"), + dict(dstype="pds"), + dict(dstype="pdse"), ] ) def test_mvs_unarchive_multiple_data_set_list(ansible_zos_module, format, data_set): @@ -824,7 +824,7 @@ def test_mvs_unarchive_multiple_data_set_list(ansible_zos_module, format, data_s n=2, type=data_set.get("dstype")) ds_to_write = target_ds_list - if data_set.get("dstype") in ["PDS", "PDSE"]: + if data_set.get("dstype") in ["pds", "pdse"]: target_member_list = [] for ds in target_ds_list: target_member_list.extend( @@ -842,10 +842,10 @@ def test_mvs_unarchive_multiple_data_set_list(ansible_zos_module, format, data_s format_dict = dict(name=format, format_options=dict()) if format == "terse": - format_dict["format_options"].update(terse_pack="SPACK") + format_dict["format_options"].update(terse_pack="spack") format_dict["format_options"].update(use_adrdssu=True) archive_result = hosts.all.zos_archive( - src=""" "{0}*" """.format(DATASET), + src="{0}*".format(DATASET), dest=MVS_DEST_ARCHIVE, format=format_dict, ) @@ -885,9 +885,9 @@ def test_mvs_unarchive_multiple_data_set_list(ansible_zos_module, format, data_s ]) @pytest.mark.parametrize( "data_set", [ - dict(dstype="SEQ"), - dict(dstype="PDS"), - dict(dstype="PDSE"), + dict(dstype="seq"), + dict(dstype="pds"), + dict(dstype="pdse"), ] ) @pytest.mark.parametrize( @@ -911,7 +911,7 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_force(ansible_zos_module, f n=1, type=data_set.get("dstype")) ds_to_write = target_ds_list - if data_set.get("dstype") in ["PDS", "PDSE"]: + if data_set.get("dstype") in ["pds", "pdse"]: target_member_list = [] for ds in target_ds_list: target_member_list.extend( @@ -929,10 +929,10 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_force(ansible_zos_module, f format_dict = dict(name=format, format_options=dict()) if format == "terse": - format_dict["format_options"].update(terse_pack="SPACK") + format_dict["format_options"].update(terse_pack="spack") format_dict["format_options"].update(use_adrdssu=True) hosts.all.zos_archive( - src=""" "{0}*" """.format(DATASET), + src="{0}*".format(DATASET), dest=MVS_DEST_ARCHIVE, format=format_dict, ) @@ -974,16 +974,16 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_force(ansible_zos_module, f ]) @pytest.mark.parametrize( "data_set", [ - dict(dstype="SEQ", members=[""]), - dict(dstype="PDS", members=["MEM1", "MEM2"]), - dict(dstype="PDSE", members=["MEM1", "MEM2"]), + dict(dstype="seq", members=[""]), + dict(dstype="pds", members=["MEM1", "MEM2"]), + dict(dstype="pdse", members=["MEM1", "MEM2"]), ] ) @pytest.mark.parametrize( "record_length", [80, 120] ) @pytest.mark.parametrize( - "record_format", ["FB", "VB",], + "record_format", ["fb", "vb",], ) def test_mvs_unarchive_single_data_set_remote_src(ansible_zos_module, format, data_set, record_length, record_format): try: @@ -1004,7 +1004,7 @@ def test_mvs_unarchive_single_data_set_remote_src(ansible_zos_module, format, da record_format=record_format, ) # Create members if needed - if data_set.get("dstype") in ["PDS", "PDSE"]: + if data_set.get("dstype") in ["pds", "pdse"]: for member in data_set.get("members"): hosts.all.zos_data_set( name=f"{DATASET}({member})", @@ -1013,7 +1013,7 @@ def test_mvs_unarchive_single_data_set_remote_src(ansible_zos_module, format, da ) # Write some content into src the same size of the record, # need to reduce 4 from V and VB due to RDW - if record_format in ["V", "VB"]: + if record_format in ["v", "vb"]: test_line = "a" * (record_length - 4) else: test_line = "a" * record_length @@ -1027,7 +1027,7 @@ def test_mvs_unarchive_single_data_set_remote_src(ansible_zos_module, format, da format_dict = dict(name=format) format_dict["format_options"] = dict(use_adrdssu=True) if format == "terse": - format_dict["format_options"].update(terse_pack="SPACK") + format_dict["format_options"].update(terse_pack="spack") archive_result = hosts.all.zos_archive( src=DATASET, dest=MVS_DEST_ARCHIVE, diff --git a/tests/unit/test_zos_backup_restore_unit.py b/tests/unit/test_zos_backup_restore_unit.py index a751a7599..5920febdb 100644 --- a/tests/unit/test_zos_backup_restore_unit.py +++ b/tests/unit/test_zos_backup_restore_unit.py @@ -93,7 +93,7 @@ def assert_args_invalid(zos_backup_restore, arguments): @pytest.mark.parametrize( - "space_type", ["K", "M", "G", "TRK", "CYL", "k", "m", "g", "trk", "cyl"] + "space_type", ["k", "m", "g", "trk", "cyl"] ) def test_valid_space_types(zos_backup_restore_mocker, space_type): valid_args = dict( diff --git a/tests/unit/test_zos_mvs_raw_unit.py b/tests/unit/test_zos_mvs_raw_unit.py index f528412da..e50734756 100644 --- a/tests/unit/test_zos_mvs_raw_unit.py +++ b/tests/unit/test_zos_mvs_raw_unit.py @@ -59,7 +59,7 @@ def run_command(self, *args, **kwargs): "new", "keep", "keep", - "CYL", + "cyl", 5, 1, "smsclas1", @@ -67,17 +67,17 @@ def run_command(self, *args, **kwargs): "smsclas1", 80, "SOMEKEYLAB100", - "LIBRARY", + "library", {"label": "keyforme", "encoding": "h"}, {"label": "keyforme2", "encoding": "h"}, - "U", + "u", ), ( "data.set.name(mem1)", "shr", "delete", "keep", - "TRK", + "trk", "5", 1, "smsclas1", @@ -85,17 +85,17 @@ def run_command(self, *args, **kwargs): "smsclas3", 120, "somekeylab1", - "BASIC", + "basic", {"label": "keyforme", "encoding": "l"}, {"label": "keyforme2", "encoding": "h"}, - "FB", + "fb", ), ( "DATA.NAME.HERE.NOW", "old", "catalog", "uncatalog", - "B", + "b", 55, "100", "SMSCLASS", @@ -103,17 +103,17 @@ def run_command(self, *args, **kwargs): "smscD@s3", 120, "keyfor342fdsme", - "LARGE", + "large", {"label": "keyforME", "encoding": "l"}, {"label": "KEY4me", "encoding": "h"}, - "FBA", + "fba", ), ( "DAT@.now", "mod", "delete", "uncatalog", - "G", + "g", 1, "9", "SMSCLASS", @@ -121,17 +121,17 @@ def run_command(self, *args, **kwargs): "", 120, "keyfor342fdsme", - "PDSE", + "pdse", {"label": "keyforME", "encoding": "l"}, {"label": "KEY4me", "encoding": "h"}, - "VB", + "vb", ), ( "DAT$.now", "new", "delete", "keep", - "M", + "m", 1, 9, "SMSCLASS", @@ -139,10 +139,10 @@ def run_command(self, *args, **kwargs): "", 0, "", - "LDS", + "lds", {"label": "keyforME", "encoding": "l"}, {"label": "keyyyyy343asdfasfsdfa", "encoding": "l"}, - "VBA", + "vba", ), ], ) @@ -237,7 +237,7 @@ def test_argument_parsing_data_set( "delete", 0, 100, - "FB", + "fb", "record", "r", ["ocreat", "oappend", "onoctty"], @@ -248,14 +248,14 @@ def test_argument_parsing_data_set( "delete", 200, "100", - "FBA", + "fba", "record", "w", ["oappend", "osync"], ), - ("/u/OEUSR01", "keep", "delete", 0, 100, "VB", "binary", "rw", ["ononblock"]), - ("/u/testmeee", "keep", "delete", 0, 100, "VBA", "record", "read_only", []), - ("/u/hellow/d/or4ld", "keep", "keep", 0, 100, "U", "text", "write_only", []), + ("/u/OEUSR01", "keep", "delete", 0, 100, "vb", "binary", "rw", ["ononblock"]), + ("/u/testmeee", "keep", "delete", 0, 100, "vba", "record", "read_only", []), + ("/u/hellow/d/or4ld", "keep", "keep", 0, 100, "u", "text", "write_only", []), ], ) def test_argument_parsing_unix( @@ -338,7 +338,7 @@ def test_argument_parsing_unix( "old", "keep", "keep", - "CYL", + "cyl", 5, 1, "smsclas1", @@ -346,17 +346,17 @@ def test_argument_parsing_unix( "smsclas1", 80, "SOMEKEYLAB100", - "LIBRARY", + "library", {"label": "keyforme", "encoding": "h"}, {"label": "keyforme2", "encoding": "h"}, - "U", + "u", ), ( "data.set.name(mem1waytoolong)", "excl", "delete", "keep", - "TRK", + "trk", "5", 1, "smsclas1", @@ -364,10 +364,10 @@ def test_argument_parsing_unix( "smsclas3", 120, "somekeylab1", - "BASIC", + "basic", {"label": "keyforme", "encoding": "l"}, {"label": "keyforme2", "encoding": "h"}, - "FB", + "fb", ), ( "DATA.NAME.HERE.NOW", @@ -382,17 +382,17 @@ def test_argument_parsing_unix( "smscD@s3", 120, "keyfor342fdsme", - "LARGE", + "large", {"label": "keyforME", "encoding": "l"}, {"label": "KEY4me", "encoding": "h"}, - "FBA", + "fba", ), ( "DAT@.now", "mod", "delete", "uncatalog", - "G", + "g", 1, "9", "SMSCLASSsss", @@ -400,17 +400,17 @@ def test_argument_parsing_unix( "", 120, "keyfor342fdsme", - "PDSE", + "pdse", {"label": "keyforME", "encoding": "l"}, {"label": "KEY4me", "encoding": "h"}, - "VB", + "vb", ), ( "DAT$.now", "new", "delete", "meep", - "M", + "m", 1, 9, "SMSCLASS", @@ -418,10 +418,10 @@ def test_argument_parsing_unix( "", 0, "", - "KSDSS", + "ksdss", {"label": "keyforME", "encoding": "l"}, {"label": "keyyyyy343asdfasfsdfa", "encoding": "l"}, - "VBA", + "vba", ), ], ) @@ -525,7 +525,7 @@ def test_argument_parsing_data_set_failure_path( "delete", 200, "100", - "FBA", + "fba", "record", "w", ["append", "osync"], @@ -537,12 +537,12 @@ def test_argument_parsing_data_set_failure_path( "delete", 0, 100, - "VBA", + "vba", "record", "read_only", ["hello"], ), - ("/u/hellow/d/or4ld", "meep", "keep", 0, 100, "U", "text", None, []), + ("/u/hellow/d/or4ld", "meep", "keep", 0, 100, "u", "text", None, []), ], ) def test_argument_parsing_unix_failure_path( @@ -620,7 +620,7 @@ def test_ksds_defaults( "dd_name": "MYDD1", "data_set_name": "my.ds", "disposition": "new", - "type": "KSDS", + "type": "ksds", } }, ], @@ -663,7 +663,7 @@ def test_ksds_exception_key_length( "dd_name": "MYDD1", "data_set_name": "my.ds", "disposition": "new", - "type": "ESDS", + "type": "esds", "key_length": 5, } }, @@ -693,7 +693,7 @@ def test_ksds_exception_key_offset( "dd_name": "MYDD1", "data_set_name": "my.ds", "disposition": "new", - "type": "ESDS", + "type": "esds", "key_offset": 5, } }, From 18486dfee3a4f3705f3a4013637a3751cdf326a8 Mon Sep 17 00:00:00 2001 From: IsaacVRey <isaac.vega.rey@gmail.com> Date: Tue, 16 Apr 2024 10:08:02 -0600 Subject: [PATCH 341/495] [Documentation][zos_data_set] Add and standarize docstrings on modules/zos_data_set.py (#1347) * First advance to docstrings on modules/zos_data_set.py * Add and standarize docstrings on modules/zos_data_set.py * Create changelog fragment * Modify google style to numpy * Standarize numpy style * Modified docstrings --------- Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> --- .../1347-update-docstring-zos_data_set.yml | 3 + plugins/modules/zos_data_set.py | 292 ++++++++++++++++-- 2 files changed, 272 insertions(+), 23 deletions(-) create mode 100644 changelogs/fragments/1347-update-docstring-zos_data_set.yml diff --git a/changelogs/fragments/1347-update-docstring-zos_data_set.yml b/changelogs/fragments/1347-update-docstring-zos_data_set.yml new file mode 100644 index 000000000..581ab1aa9 --- /dev/null +++ b/changelogs/fragments/1347-update-docstring-zos_data_set.yml @@ -0,0 +1,3 @@ +trivial: + - zos_data_set - Updated docstrings to numpy style for visual aid to developers. + (https://github.com/ansible-collections/ibm_zos_core/pull/1347). \ No newline at end of file diff --git a/plugins/modules/zos_data_set.py b/plugins/modules/zos_data_set.py index 446fd6fe7..b500eb84a 100644 --- a/plugins/modules/zos_data_set.py +++ b/plugins/modules/zos_data_set.py @@ -736,20 +736,27 @@ def get_individual_data_set_parameters(params): """Builds a list of data set parameters to be used in future operations. - Arguments: - params {dict} -- The parameters from + Parameters + ---------- + params : dict + The parameters from Ansible's AnsibleModule object module.params. - Raises: - ValueError: Raised if top-level parameters "name" - and "batch" are both provided. - ValueError: Raised if neither top-level parameters "name" - or "batch" are provided. - - Returns: - [list] -- A list of dicts where each list item + Returns + ------- + Union[dict] + A list of dicts where each list item represents one data set. Each dictionary holds the parameters (passed to the zos_data_set module) for the data set which it represents. + + Raises + ------ + ValueError + Raised if top-level parameters "name" + and "batch" are both provided. + ValueError + Raised if neither top-level parameters "name" + or "batch" are provided. """ if params.get("name") and params.get("batch"): raise ValueError( @@ -769,7 +776,31 @@ def get_individual_data_set_parameters(params): # * can be replaced by built-in def data_set_name(contents, dependencies): """Validates provided data set name(s) are valid. - Returns a list containing the name(s) of data sets.""" + Returns a list containing the name(s) of data sets. + + Parameters + ---------- + contents : str + Name of the dataset. + dependencies : dict + Any dependencies needed for contents argument to be validated. + + Returns + ------- + None + If the dependencies have a batch. + str + The data set name. + + Raises + ------ + ValueError + Data set name must be provided. + ValueError + Data set and member name must be provided. + ValueError + A value is invalid. + """ if dependencies.get("batch"): return None if contents is None: @@ -807,7 +838,25 @@ def data_set_name(contents, dependencies): # * dependent on state def space_type(contents, dependencies): """Validates provided data set unit of space is valid. - Returns the unit of space.""" + Returns the unit of space. + + Parameters + ---------- + contents : str + Unit of space of the dataset. + dependencies : dict + Any dependencies needed for contents argument to be validated. + + Returns + ------- + str + The data set unit of space. + + Raises + ------ + ValueError + Value provided is invalid. +""" if dependencies.get("state") == "absent": return "m" if contents is None: @@ -825,7 +874,27 @@ def space_type(contents, dependencies): # * dependent on state def sms_class(contents, dependencies): """Validates provided sms class is of valid length. - Returns the sms class.""" + Returns the sms class. + + Parameters + ---------- + contents : str + Name of the sms class. + dependencies : dict + Any dependencies needed for contents argument to be validated. + + Returns + ------- + None + If the state is absent or contents is none. + str + The sms class set name. + + Raises + ------ + ValueError + Value is invalid. + """ if dependencies.get("state") == "absent" or contents is None: return None if len(contents) < 1 or len(contents) > 8: @@ -840,7 +909,22 @@ def sms_class(contents, dependencies): def valid_when_state_present(contents, dependencies): """Ensures no arguments that are invalid when state!=present - are allowed.""" + are allowed. + + Parameters + ---------- + contents : str + Arguments to be validated. + dependencies : dict + Any dependencies needed for contents argument to be validated. + + Returns + ------- + None + If the state is absent or contents is none. + str + Valid arguments. + """ if dependencies.get("state") == "absent" or contents is None: return None return contents @@ -850,7 +934,27 @@ def valid_when_state_present(contents, dependencies): # * dependent on format def record_length(contents, dependencies): """Validates provided record length is valid. - Returns the record length as integer.""" + Returns the record length as integer. + + Parameters + ---------- + contents : str + Length of the dataset. + dependencies : dict + Any dependencies needed for contents argument to be validated. + + Returns + ------- + None + If the state is absent or contents is none. + str + The data set length. + + Raises + ------ + ValueError + Value is invalid. + """ if dependencies.get("state") == "absent": return None contents = ( @@ -872,7 +976,26 @@ def record_length(contents, dependencies): # * dependent on state # * dependent on record_length def record_format(contents, dependencies): - """Validates data set format is valid.""" + """Validates data set format is valid. + Returns uppercase data set format. + + Parameters + ---------- + contents : str + Format of the dataset. + dependencies : dict + Any dependencies needed for contents argument to be validated. + + Returns + ------- + str + The data set format in uppercase. Default is 'FB'. + + Raises + ------ + ValueError + Value is invalid. + """ if dependencies.get("state") == "absent": return "fb" if contents is None: @@ -880,7 +1003,7 @@ def record_format(contents, dependencies): formats = "|".join(DATA_SET_FORMATS) if not re.fullmatch(formats, contents, re.IGNORECASE): raise ValueError( - "Value {0} is invalid for format argument. format must be of of the following: {1}.".format( + "Value {0} is invalid for format argument. format must be one of the following: {1}.".format( contents, ", ".join(DATA_SET_FORMATS) ) ) @@ -889,8 +1012,27 @@ def record_format(contents, dependencies): # * dependent on state def data_set_type(contents, dependencies): - """Validates data set type is valid.""" - # if dependencies.get("state") == "absent" and contents != "member": + """Validates data set type is valid. + Returns uppercase data set type. + + Parameters + ---------- + contents : str + Type of the dataset. + dependencies : dict + Any dependencies needed for contents argument to be validated. + + Returns + ------- + str + The data set type in uppercase. Default is PDS. + + Raises + ------ + ValueError + Value is invalid. + """ + # if dependencies.get("state") == "absent" and contents != "MEMBER": # return None if contents is None: return "pds" @@ -907,7 +1049,29 @@ def data_set_type(contents, dependencies): # * dependent on state def volumes(contents, dependencies): """Validates volume is valid. - Returns uppercase volume.""" + Returns uppercase volume. + + Parameters + ---------- + contents : str + Name of the volume. + dependencies : dict + Any dependencies needed for contents argument to be validated. + + Returns + ------- + None + If the state is absent or contents is none. + str + The volume name. + + Raises + ------ + ValueError + Argument is invalid. + ValueError + Volume is required when state is cataloged. + """ if contents is None: if dependencies.get("state") == "cataloged": raise ValueError("Volume is required when state==cataloged.") @@ -931,7 +1095,31 @@ def volumes(contents, dependencies): # * dependent on type def key_length(contents, dependencies): """Validates data set key length is valid. - Returns data set key length as integer.""" + Returns data set key length as integer. + + Parameters + ---------- + contents : str + key_length. + dependencies : dict + Any dependencies needed for contents argument to be validated. + + Returns + ------- + None + If the state is absent or contents is none. + int + key_length. + + Raises + ------ + ValueError + Argument is invalid. + ValueError + key_length was not provided when requesting KSDS data set. + ValueError + key_length can not be provided when type is not KSDS. + """ if dependencies.get("state") == "absent": return None if dependencies.get("type") == "ksds" and contents is None: @@ -953,7 +1141,31 @@ def key_length(contents, dependencies): # * dependent on key_length def key_offset(contents, dependencies): """Validates data set key offset is valid. - Returns data set key offset as integer.""" + Returns data set key offset as integer. + + Parameters + ---------- + contents : str + Key offset of the data set. + dependencies : dict + Any dependencies needed for contents argument to be validated. + + Returns + ------- + None + If the state is absent or contents is none. + int + Key offset of the data set. + + Raises + ------ + ValueError + Argument is invalid. + ValueError + key_offset was not provided when requesting KSDS data set. + ValueError + key_offset can not be provided when type is not KSDS. + """ if dependencies.get("state") == "absent": return None if dependencies.get("type") == "ksds" and contents is None: @@ -974,7 +1186,22 @@ def key_offset(contents, dependencies): def perform_data_set_operations(name, state, **extra_args): """Calls functions to perform desired operations on - one or more data sets. Returns boolean indicating if changes were made.""" + one or more data sets. Returns boolean indicating if changes were made. + + Parameters + ---------- + name : str + Name of the dataset. + state : str + State of the data sets. + **extra_args : dict + Properties of the data sets. + + Returns + ------- + bool + If changes were made. + """ changed = False # passing in **extra_args forced me to modify the acceptable parameters # for multiple functions in data_set.py including ensure_present, replace @@ -995,6 +1222,18 @@ def perform_data_set_operations(name, state, **extra_args): def parse_and_validate_args(params): + """Parse and validate args. + + Parameters + ---------- + params : dict + Params to validated and parsed. + + Returns + ------- + dict + Parsed args. + """ arg_defs = dict( # Used for batch data set args @@ -1202,6 +1441,13 @@ def parse_and_validate_args(params): def run_module(): + """Runs the module. + + Raises + ------ + fail_json + Any exception during processing of data set params. + """ # TODO: add logic to handle aliases during parsing module_args = dict( From ae2495657f545f1d890390e17b6cd26e962c418b Mon Sep 17 00:00:00 2001 From: IsaacVRey <isaac.vega.rey@gmail.com> Date: Tue, 16 Apr 2024 10:08:29 -0600 Subject: [PATCH 342/495] [Documentation][zos_encode] Add and standarize docstrings on modules/zos_encode.py (#1348) * Add and standarize docstrings on modules/zos_encode.py * Create changelog fragment * Modify google style to numpy * Standarize numpy style * Updated docstrings --------- Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> --- .../1348-update-docstring-zos_encode.yml | 3 + plugins/modules/zos_encode.py | 88 ++++++++++++++++++- 2 files changed, 89 insertions(+), 2 deletions(-) create mode 100644 changelogs/fragments/1348-update-docstring-zos_encode.yml diff --git a/changelogs/fragments/1348-update-docstring-zos_encode.yml b/changelogs/fragments/1348-update-docstring-zos_encode.yml new file mode 100644 index 000000000..de9c11c17 --- /dev/null +++ b/changelogs/fragments/1348-update-docstring-zos_encode.yml @@ -0,0 +1,3 @@ +trivial: + - zos_encode - Updated docstrings to numpy style for visual aid to developers. + (https://github.com/ansible-collections/ibm_zos_core/pull/1348). \ No newline at end of file diff --git a/plugins/modules/zos_encode.py b/plugins/modules/zos_encode.py index 1adc08c01..243abb2d9 100644 --- a/plugins/modules/zos_encode.py +++ b/plugins/modules/zos_encode.py @@ -295,6 +295,25 @@ def check_pds_member(ds, mem): + """Check if a member exists in a PDS. + + Parameters + ---------- + ds : str + PDS data set name. + mem : str + Member name to check if is under PDS. + + Returns + ------- + bool + If it is a member of the data set. + + Raises + ------ + EncodeError + Can not find member in provided dataset. + """ check_rc = False if mem in datasets.list_members(ds): check_rc = True @@ -304,7 +323,25 @@ def check_pds_member(ds, mem): def check_mvs_dataset(ds): - """ To call data_set utils to check if the MVS data set exists or not """ + """To call data_set utils to check if the MVS data set exists or not. + + Parameters + ---------- + ds : str + Data set name. + + Returns + ------- + tuple(bool,str) + If the data set exists and it's type. + + Raises + ------ + EncodeError + If data set is not cataloged. + EncodeError + Unable to determine data set type. + """ check_rc = False ds_type = None if not data_set.DataSet.data_set_exists(ds): @@ -321,7 +358,23 @@ def check_mvs_dataset(ds): def check_file(file): - """ check file is a USS file or an MVS data set """ + """Check file is a USS file or an MVS data set. + + Parameters + ---------- + file : str + File to check. + + Returns + ------- + tuple(bool,bool,str) + If is USS file, MVS dataset, and the dataset type. + + Raises + ------ + EncodeError + The data set is not partitioned. + """ is_uss = False is_mvs = False ds_type = None @@ -347,6 +400,18 @@ def check_file(file): def verify_uss_path_exists(file): + """Verify if USS path exists. + + Parameters + ---------- + file : str + Path of the file. + + Raises + ------ + EncodeError + File does not exist in the directory. + """ if not path.exists(file): mypath = "/" + file.split("/")[0] + "/*" ld = listdir(mypath) @@ -359,6 +424,13 @@ def verify_uss_path_exists(file): def run_module(): + """Runs the module. + + Raises + ------ + fail_json + Exception during execution. + """ module_args = dict( src=dict(type="str", required=True), dest=dict(type="str"), @@ -530,6 +602,18 @@ def run_module(): class EncodeError(Exception): def __init__(self, message): + """Error during encoding. + + Parameters + ---------- + message : str + Human readable string describing the exception. + + Attributes + ---------- + msg : str + Human readable string describing the exception. + """ self.msg = 'An error occurred during encoding: "{0}"'.format(message) super(EncodeError, self).__init__(self.msg) From 581fdb277d5add77c40807fb4695c7387ddb0e68 Mon Sep 17 00:00:00 2001 From: IsaacVRey <isaac.vega.rey@gmail.com> Date: Tue, 16 Apr 2024 10:09:00 -0600 Subject: [PATCH 343/495] [Documentation][zos_fetch] Add and standarize docstrings on modules/zos_fetch.py (#1349) * Add and standarize docstrings on modules/zos_fetch.py * Create changelog fragment * Modify google style to numpy * Standarize numpy style * Updated docstrings --------- Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> --- .../1349-update-docstring-zos_fetch.yml | 3 + plugins/modules/zos_fetch.py | 159 +++++++++++++++++- 2 files changed, 158 insertions(+), 4 deletions(-) create mode 100644 changelogs/fragments/1349-update-docstring-zos_fetch.yml diff --git a/changelogs/fragments/1349-update-docstring-zos_fetch.yml b/changelogs/fragments/1349-update-docstring-zos_fetch.yml new file mode 100644 index 000000000..a38504c36 --- /dev/null +++ b/changelogs/fragments/1349-update-docstring-zos_fetch.yml @@ -0,0 +1,3 @@ +trivial: + - zos_fetch - Updated docstrings to numpy style for visual aid to developers. + (https://github.com/ansible-collections/ibm_zos_core/pull/1349). \ No newline at end of file diff --git a/plugins/modules/zos_fetch.py b/plugins/modules/zos_fetch.py index cc26b622b..fda237768 100644 --- a/plugins/modules/zos_fetch.py +++ b/plugins/modules/zos_fetch.py @@ -303,16 +303,50 @@ def __init__(self, module): self.module = module def _fail_json(self, **kwargs): - """ Wrapper for AnsibleModule.fail_json """ + """Wrapper for AnsibleModule.fail_json. + + Parameters + ---------- + **kwargs : dict + Arguments to pass to fail_json(). + """ self.module.fail_json(**kwargs) def _run_command(self, cmd, **kwargs): - """ Wrapper for AnsibleModule.run_command """ + """Wrapper for AnsibleModule.run_command. + + Parameters + ---------- + cmd : str + Command to run. + **kwargs : dict + Arguments to pass to run_command(). + + Returns + ------- + tuple(int,str,str) + Return code, standard output and standard error. + """ return self.module.run_command(cmd, **kwargs) def _get_vsam_size(self, vsam): """Invoke IDCAMS LISTCAT command to get the record length and space used. Then estimate the space used by the VSAM data set. + + Parameters + ---------- + vsam : str + VSAM data set name. + + Returns + ------- + tuple(int,int,int) + Total size, max_recl and rec_total. + + Raises + ------ + fail_json + Unable to obtain data set information. """ space_pri = 0 total_size = 0 @@ -350,7 +384,27 @@ def _get_vsam_size(self, vsam): return total_size, max_recl, rec_total def _copy_vsam_to_temp_data_set(self, ds_name): - """ Copy VSAM data set to a temporary sequential data set """ + """Copy VSAM data set to a temporary sequential data set. + + Parameters + ---------- + ds_name : str + VSAM dataset name to be copied into a temp data set. + + Returns + ------- + str + Temporary dataset name. + + Raises + ------ + fail_json + OS error. + fail_json + cmd error while copying dataset. + fail_json + Failed to call IDCAMS. + """ mvs_rc = 0 vsam_size, max_recl, rec_total = self._get_vsam_size(ds_name) # Default in case of max recl being 80 to avoid failures when fetching and empty vsam. @@ -442,6 +496,25 @@ def _copy_vsam_to_temp_data_set(self, ds_name): def _fetch_uss_file(self, src, is_binary, encoding=None): """Convert encoding of a USS file. Return a tuple of temporary file name containing converted data. + + Parameters + ---------- + src : str + Source of the file. + is_binary : bool + If is binary. + encoding : str + The file encoding. + + Returns + ------- + str + File name with the converted data. + + Raises + ------ + fail_json + Any exception ocurred while converting encoding. """ file_path = None if (not is_binary) and encoding: @@ -471,6 +544,25 @@ def _fetch_uss_file(self, src, is_binary, encoding=None): def _fetch_vsam(self, src, is_binary, encoding=None): """Copy the contents of a VSAM to a sequential data set. Afterwards, copy that data set to a USS file. + + Parameters + ---------- + src : str + Source of the file. + is_binary : bool + If is binary. + encoding : str + The file encoding. + + Returns + ------- + str + USS File containing the encoded content of the input data set. + + Raises + ------ + fail_json + Unable to delete temporary dataset. """ temp_ds = self._copy_vsam_to_temp_data_set(src) file_path = self._fetch_mvs_data(temp_ds, is_binary, encoding) @@ -487,6 +579,27 @@ def _fetch_pdse(self, src, is_binary, encoding=None): """Copy a partitioned data set to a USS directory. If the data set is not being fetched in binary mode, encoding for all members inside the data set will be converted. + + Parameters + ---------- + src : str + Source of the dataset. + is_binary : bool + If is binary. + encoding : str + The file encoding. + + Returns + ------- + str + Directory path containing the files of the converted data set members. + + Raises + ------ + fail_json + Error copying partitioned dataset to USS. + fail_json + Error converting encoding of the member. """ dir_path = tempfile.mkdtemp() cmd = "cp -B \"//'{0}'\" {1}" @@ -531,7 +644,28 @@ def _fetch_pdse(self, src, is_binary, encoding=None): def _fetch_mvs_data(self, src, is_binary, encoding=None): """Copy a sequential data set or a partitioned data set member - to a USS file + to a USS file. + + Parameters + ---------- + src : str + Source of the dataset. + is_binary : bool + If is binary. + encoding : str + The file encoding. + + Returns + ------- + str + USS File containing the encoded content of the input data set. + + Raises + ------ + fail_json + Unable to copy to USS. + fail_json + Error converting encoding of the dataset. """ fd, file_path = tempfile.mkstemp() os.close(fd) @@ -571,6 +705,23 @@ def _fetch_mvs_data(self, src, is_binary, encoding=None): def run_module(): + """Runs the module. + + Raises + ------ + fail_json + When parameter verification fails. + fail_json + When the source does not exist or is uncataloged. + fail_json + When it's unable to determine dataset type. + fail_json + While gathering dataset information. + fail_json + When the data set member was not found inside a dataset. + fail_json + When the file does not have appropriate read permissions. + """ # ********************************************************** # # Module initialization # # ********************************************************** # From fdcbf5666c4890aecd9eec4cdeb85038b087fca8 Mon Sep 17 00:00:00 2001 From: IsaacVRey <isaac.vega.rey@gmail.com> Date: Tue, 16 Apr 2024 10:09:27 -0600 Subject: [PATCH 344/495] [Documentation][zos_job_query] Add docstrings to modules/zos_job_query.py (#1353) * Add docstrings to modules/zos_job_query.py * Create changelog fragment * Modify google style to numpy * Standarize numpy style --- .../1353-update-docstring-zos_job_query.yml | 3 ++ plugins/modules/zos_job_query.py | 44 ++++++++++++++++++- 2 files changed, 45 insertions(+), 2 deletions(-) create mode 100644 changelogs/fragments/1353-update-docstring-zos_job_query.yml diff --git a/changelogs/fragments/1353-update-docstring-zos_job_query.yml b/changelogs/fragments/1353-update-docstring-zos_job_query.yml new file mode 100644 index 000000000..550be9107 --- /dev/null +++ b/changelogs/fragments/1353-update-docstring-zos_job_query.yml @@ -0,0 +1,3 @@ +trivial: + - zos_job_query - Updated docstrings to numpy style for visual aid to developers. + (https://github.com/ansible-collections/ibm_zos_core/pull/1353). \ No newline at end of file diff --git a/plugins/modules/zos_job_query.py b/plugins/modules/zos_job_query.py index aaa72d9ab..279a3955f 100644 --- a/plugins/modules/zos_job_query.py +++ b/plugins/modules/zos_job_query.py @@ -266,7 +266,15 @@ def run_module(): - + """Initialize the module. + + Raises + ------ + fail_json + Parameter verification failed. + fail_json + Any exception while getting job params. + """ module_args = dict( job_name=dict(type="str", required=False, default="*"), owner=dict(type="str", required=False), @@ -313,7 +321,27 @@ def run_module(): def query_jobs(job_name, job_id, owner): - + """Returns jobs that coincide with the given arguments. + + Parameters + ---------- + job_name : str + Name of the jobs. + job_id : str + Id of the jobs. + owner : str + Owner of the jobs. + + Returns + ------- + Union[str] + List with the jobs. + + Raises + ------ + RuntimeError + No job with was found. + """ jobs = [] if job_id: jobs = job_status(job_id=job_id) @@ -327,6 +355,18 @@ def query_jobs(job_name, job_id, owner): def parsing_jobs(jobs_raw): + """Parse job into an understandable format. + + Parameters + ---------- + jobs_raw : dict + Raw jobs. + + Returns + ------- + dict + Parsed jobs. + """ jobs = [] ret_code = {} for job in jobs_raw: From a5d6c35d113bc142042a7f04151f9df949e5d315 Mon Sep 17 00:00:00 2001 From: IsaacVRey <isaac.vega.rey@gmail.com> Date: Tue, 16 Apr 2024 10:11:37 -0600 Subject: [PATCH 345/495] [Documentation][zos_lineinfile] Add and standarize docstrings on modules/zos_lineinfile.py (#1355) * Add and standarize docstrings on modules/zos_lineinfile.py * Create changelog fragment * Modify google style to numpy --- .../1355-update-docstring-zos_lineinfile.yml | 3 + plugins/modules/zos_lineinfile.py | 137 ++++++++++++------ 2 files changed, 98 insertions(+), 42 deletions(-) create mode 100644 changelogs/fragments/1355-update-docstring-zos_lineinfile.yml diff --git a/changelogs/fragments/1355-update-docstring-zos_lineinfile.yml b/changelogs/fragments/1355-update-docstring-zos_lineinfile.yml new file mode 100644 index 000000000..3840b2862 --- /dev/null +++ b/changelogs/fragments/1355-update-docstring-zos_lineinfile.yml @@ -0,0 +1,3 @@ +trivial: + - zos_lineinfile - Updated docstrings to numpy style for visual aid to developers. + (https://github.com/ansible-collections/ibm_zos_core/pull/1355). \ No newline at end of file diff --git a/plugins/modules/zos_lineinfile.py b/plugins/modules/zos_lineinfile.py index a6576af12..43e85061b 100644 --- a/plugins/modules/zos_lineinfile.py +++ b/plugins/modules/zos_lineinfile.py @@ -295,33 +295,45 @@ def present(src, line, regexp, ins_aft, ins_bef, encoding, first_match, backrefs, force): - """Replace a line with the matching regex pattern - Insert a line before/after the matching pattern - Insert a line at BOF/EOF - - Arguments: - src: {str} -- The z/OS USS file or data set to modify. - line: {str} -- The line to insert/replace into the src. - regexp: {str} -- The regular expression to look for in every line of the src. - If regexp matches, ins_aft/ins_bef will be ignored. - ins_aft: {str} -- Insert the line after matching '*regex*' pattern or EOF. - choices: - - EOF - - '*regex*' - ins_bef: {str} -- Insert the line before matching '*regex*' pattern or BOF. - choices: - - BOF - - '*regex*' - encoding: {str} -- Encoding of the src. - first_match: {bool} -- Take the first matching regex pattern. - backrefs: {bool} -- Back reference - force: {bool} -- force for modify a member part of a task in execution - - Returns: - str -- Information in JSON format. keys: - cmd: {str} -- dsed shell command - found: {int} -- Number of matching regex pattern - changed: {bool} -- Indicates if the source was modified. + """Replace a line with the matching regex pattern. + Insert a line before/after the matching pattern. + Insert a line at BOF/EOF. + + Parameters + ---------- + src : str + The z/OS USS file or data set to modify. + line : str + The line to insert/replace into the src. + regexp : str + The regular expression to look for in every line of the src. + If regexp matches, ins_aft/ins_bef will be ignored. + ins_aft : str + Insert the line after matching '*regex*' pattern or EOF. + choices: + - EOF + - '*regex*' + ins_bef : str + Insert the line before matching '*regex*' pattern or BOF. + choices: + - BOF + - '*regex*' + encoding : str + Encoding of the src. + first_match : bool + Take the first matching regex pattern. + backrefs : bool + Back reference. + force : bool + force for modify a member part of a task in execution. + + Returns + ------- + str + Information in JSON format. keys: + cmd {str} -- dsed shell command + found {int} -- Number of matching regex pattern + changed {bool} -- Indicates if the source was modified. """ return datasets.lineinfile( src, @@ -339,26 +351,46 @@ def present(src, line, regexp, ins_aft, ins_bef, encoding, first_match, backrefs def absent(src, line, regexp, encoding, force): - """Delete lines with matching regex pattern - - Arguments: - src: {str} -- The z/OS USS file or data set to modify. - line: {str} -- The line to be deleted in the src. If line matches, - regexp will be ignored. - regexp: {str} -- The regular expression to look for in every line of the src. - encoding: {str} -- Encoding of the src. - force: {bool} -- force for modify a member part of a task in execution - - Returns: - str -- Information in JSON format. keys: - cmd: {str} -- dsed shell command - found: {int} -- Number of matching regex pattern - changed: {bool} -- Indicates if the source was modified. + """Delete lines with matching regex pattern. + + Parameters + ---------- + src : str + The z/OS USS file or data set to modify. + line : str + The line to be deleted in the src. If line matches, + regexp will be ignored. + regexp : str + The regular expression to look for in every line of the src. + encoding : str + Encoding of the src. + force : bool + Force for modify a member part of a task in execution. + + Returns + ------- + str + Information in JSON format. keys: + cmd {str} -- dsed shell command + found {int} -- Number of matching regex pattern + changed {bool} -- Indicates if the source was modified. """ return datasets.lineinfile(src, line, regex=regexp, encoding=encoding, state=False, debug=True, force=force) def quotedString(string): + """Add escape if string was quoted. + + Parameters + ---------- + string : str + Given string. + + Returns + ------- + str + The string with the quote marks replaced. + """ # add escape if string was quoted if not isinstance(string, str): return string @@ -366,6 +398,27 @@ def quotedString(string): def main(): + """Initialize the module. + + Raises + ------ + fail_json + Parameter verification failed. + fail_json + regexp is required with backrefs=true. + fail_json + line is required with state=present. + fail_json + One of line or regexp is required with state=absent. + fail_json + Source does not exist. + fail_json + Data set type is NOT supported. + fail_json + Creating backup has failed. + fail_json + dsed return content is NOT in json format. + """ module_args = dict( src=dict( type='str', From 216baa5100fb35ff4f0ebdf11fed29b5c58c3b3b Mon Sep 17 00:00:00 2001 From: IsaacVRey <isaac.vega.rey@gmail.com> Date: Tue, 16 Apr 2024 10:12:04 -0600 Subject: [PATCH 346/495] [Documentation][zos_script] Add and standarize docstrings on modules/zos_script.py (#1390) * Add and standarize docstrings on module-utils/zos_script.py * Add changelog fragment --- .../fragments/1390-update-docstring-zos_script.yml | 3 +++ plugins/modules/zos_script.py | 11 +++++++++++ 2 files changed, 14 insertions(+) create mode 100644 changelogs/fragments/1390-update-docstring-zos_script.yml diff --git a/changelogs/fragments/1390-update-docstring-zos_script.yml b/changelogs/fragments/1390-update-docstring-zos_script.yml new file mode 100644 index 000000000..792bf9698 --- /dev/null +++ b/changelogs/fragments/1390-update-docstring-zos_script.yml @@ -0,0 +1,3 @@ +trivial: + - zos_script - Updated docstrings to numpy style for visual aid to developers. + (https://github.com/ansible-collections/ibm_zos_core/pull/1390). \ No newline at end of file diff --git a/plugins/modules/zos_script.py b/plugins/modules/zos_script.py index 0677d187d..e4f93ef21 100644 --- a/plugins/modules/zos_script.py +++ b/plugins/modules/zos_script.py @@ -229,6 +229,17 @@ def run_module(): + """Initialize module. + + Raises + ------ + fail_json + Parameter verification failed. + fail_json + The given chdir does not exist on the system. + fail_json + The script terminated with an error. + """ module = AnsibleModule( argument_spec=dict( chdir=dict(type='str', required=False), From 54ea6baa1ddd00344b8c5c0b62e22f8f4a744857 Mon Sep 17 00:00:00 2001 From: IsaacVRey <isaac.vega.rey@gmail.com> Date: Tue, 16 Apr 2024 10:12:21 -0600 Subject: [PATCH 347/495] [Documentation][zos_tso_command] Add and standarize docstrings on modules/zos_tso_command.py (#1391) * Add and standarize docstrings on module-utils/zos_tso_command.py * Add changelog fragment * Modified docstrings --------- Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> Co-authored-by: Rich Parker <richp405@gmail.com> --- .../1391-update-docstring-zos_tso_command.yml | 3 + plugins/modules/zos_tso_command.py | 65 +++++++++++++++++++ 2 files changed, 68 insertions(+) create mode 100644 changelogs/fragments/1391-update-docstring-zos_tso_command.yml diff --git a/changelogs/fragments/1391-update-docstring-zos_tso_command.yml b/changelogs/fragments/1391-update-docstring-zos_tso_command.yml new file mode 100644 index 000000000..c435799d4 --- /dev/null +++ b/changelogs/fragments/1391-update-docstring-zos_tso_command.yml @@ -0,0 +1,3 @@ +trivial: + - zos_tso_command - Updated docstrings to numpy style for visual aid to developers. + (https://github.com/ansible-collections/ibm_zos_core/pull/1391). \ No newline at end of file diff --git a/plugins/modules/zos_tso_command.py b/plugins/modules/zos_tso_command.py index 17e190fb2..2ac4a9d32 100644 --- a/plugins/modules/zos_tso_command.py +++ b/plugins/modules/zos_tso_command.py @@ -135,6 +135,23 @@ def run_tso_command(commands, module, max_rc): + """Run tso command. + + Parameters + ---------- + commands : str + Commands to run. + module : AnsibleModule + Ansible module to run the command with. + max_rc : int + Max return code. + + Returns + ------- + Union[dict] + The command result details. + + """ script = """/* REXX */ PARSE ARG cmd address tso @@ -152,6 +169,24 @@ def run_tso_command(commands, module, max_rc): def copy_rexx_and_run_commands(script, commands, module, max_rc): + """Copy rexx into a temporary file and run commands. + + Parameters + ---------- + script : str + Script to run the command. + commands : str + Commands to run. + module : AnsibleModule + Ansible module to run the command with. + max_rc : int + Max return code. + + Returns + ------- + Union[dict] + The command result details. + """ command_detail_json = [] delete_on_close = True tmp_file = NamedTemporaryFile(delete=delete_on_close) @@ -180,6 +215,25 @@ def copy_rexx_and_run_commands(script, commands, module, max_rc): def list_or_str_type(contents, dependencies): + """Checks if a variable contains a string or a list of strings and returns it as a list of strings. + + Parameters + ---------- + contents : str | list[str] + String or list of strings. + dependencies + Unused. + + Returns + ------- + str | Union[str] + The parameter given as a list of strings. + + Raises + ------ + ValueError + Invalid argument type. Expected "string or list of strings". + """ failed = False if isinstance(contents, list): for item in contents: @@ -200,6 +254,17 @@ def list_or_str_type(contents, dependencies): def run_module(): + """Initialize module. + + Raises + ------ + fail_json + ValueError on BetterArgParser. + fail_json + Some command(s) failed. + fail_json + An unexpected error occurred. + """ module_args = dict( commands=dict(type="raw", required=True, aliases=["command"]), max_rc=dict(type="int", required=False, default=0), From ec630df94655042fb5ccb7049c252ebfbd9d746c Mon Sep 17 00:00:00 2001 From: IsaacVRey <isaac.vega.rey@gmail.com> Date: Tue, 16 Apr 2024 10:12:53 -0600 Subject: [PATCH 348/495] [Documentation][zos_volume_init] Add and standarize docstrings on modules/zos_volume_init.py (#1392) * Add and standarize docstrings on module-utils/zos_tso_command.py * Add changelog fragment --- .../fragments/1392-update-docstring-zos_volume_init.yml | 3 +++ plugins/modules/zos_volume_init.py | 6 ++++++ 2 files changed, 9 insertions(+) create mode 100644 changelogs/fragments/1392-update-docstring-zos_volume_init.yml diff --git a/changelogs/fragments/1392-update-docstring-zos_volume_init.yml b/changelogs/fragments/1392-update-docstring-zos_volume_init.yml new file mode 100644 index 000000000..4536f186c --- /dev/null +++ b/changelogs/fragments/1392-update-docstring-zos_volume_init.yml @@ -0,0 +1,3 @@ +trivial: + - zos_volume_init - Updated docstrings to numpy style for visual aid to developers. + (https://github.com/ansible-collections/ibm_zos_core/pull/1392). \ No newline at end of file diff --git a/plugins/modules/zos_volume_init.py b/plugins/modules/zos_volume_init.py index 6dbc9f97e..0be4f2a8f 100644 --- a/plugins/modules/zos_volume_init.py +++ b/plugins/modules/zos_volume_init.py @@ -230,7 +230,13 @@ def run_module(): + """Initialize the module. + Raises + ------ + fail_json + 'Index' cannot be False for SMS managed volumes. + """ module_args = dict( address=dict(type="str", required=True), verify_volid=dict(type="str", required=False), From 87218eabcc8a4b6ddc28f5026b403e8f933cf878 Mon Sep 17 00:00:00 2001 From: IsaacVRey <isaac.vega.rey@gmail.com> Date: Tue, 16 Apr 2024 10:13:16 -0600 Subject: [PATCH 349/495] [Documentation][zos_apf] Add and standarize docstrings on modules/zos_apf.py (#1393) * Add and standarize docstrings on modules/zos_apf.py * Add changelog fragment * Modified docstring --------- Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> --- .../1393-update-docstring-zos_apf.yml | 3 ++ plugins/modules/zos_apf.py | 37 +++++++++++++++++++ 2 files changed, 40 insertions(+) create mode 100644 changelogs/fragments/1393-update-docstring-zos_apf.yml diff --git a/changelogs/fragments/1393-update-docstring-zos_apf.yml b/changelogs/fragments/1393-update-docstring-zos_apf.yml new file mode 100644 index 000000000..8a89b7aa0 --- /dev/null +++ b/changelogs/fragments/1393-update-docstring-zos_apf.yml @@ -0,0 +1,3 @@ +trivial: + - zos_apf - Updated docstrings to numpy style for visual aid to developers. + (https://github.com/ansible-collections/ibm_zos_core/pull/1393). \ No newline at end of file diff --git a/plugins/modules/zos_apf.py b/plugins/modules/zos_apf.py index 117801306..664b2e493 100644 --- a/plugins/modules/zos_apf.py +++ b/plugins/modules/zos_apf.py @@ -312,6 +312,30 @@ def backupOper(module, src, backup, tmphlq=None): + """Create a backup for a specified USS file or MVS data set. + + Parameters + ---------- + module : AnsibleModule + src : str + Source USS file or data set to backup. + backup : str + Name for the backup. + tmphlq : str + The name of the temporary high level qualifier to use. + + Returns + ------- + str + Backup name. + + Raises + ------ + fail_json + Data set type is NOT supported. + fail_json + Creating backup has failed. + """ # analysis the file type ds_utils = data_set.DataSetUtils(src) file_type = ds_utils.ds_type() @@ -336,6 +360,19 @@ def backupOper(module, src, backup, tmphlq=None): def main(): + """Initialize the module. + + Raises + ------ + fail_json + Parameter verification failed. + fail_json + Marker length may not exceed 72 characters. + fail_json + library is required. + fail_json + An exception occurred. + """ module = AnsibleModule( argument_spec=dict( library=dict( From 7abaa3618b3cbf9842ec5de347771356e5790c74 Mon Sep 17 00:00:00 2001 From: IsaacVRey <isaac.vega.rey@gmail.com> Date: Tue, 16 Apr 2024 10:13:47 -0600 Subject: [PATCH 350/495] [Documentation][zos_operator_action_query] Add and standarize docstrings on modules/zos_operator_action_query.py (#1394) * Add and standarize docstrings on modules/zos_operator_action_query.py * Add changelog fragment * Modified docstrings --------- Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> --- ...te_docstring-zos_operator_action_query.yml | 3 + plugins/modules/zos_operator_action_query.py | 281 +++++++++++++++++- 2 files changed, 272 insertions(+), 12 deletions(-) create mode 100644 changelogs/fragments/1394-Update_docstring-zos_operator_action_query.yml diff --git a/changelogs/fragments/1394-Update_docstring-zos_operator_action_query.yml b/changelogs/fragments/1394-Update_docstring-zos_operator_action_query.yml new file mode 100644 index 000000000..25c34fd89 --- /dev/null +++ b/changelogs/fragments/1394-Update_docstring-zos_operator_action_query.yml @@ -0,0 +1,3 @@ +trivial: + - zos_operator_action_query - Updated docstrings to numpy style for visual aid to developers. + (https://github.com/ansible-collections/ibm_zos_core/pull/1394). \ No newline at end of file diff --git a/plugins/modules/zos_operator_action_query.py b/plugins/modules/zos_operator_action_query.py index 55cd7cd00..ba6e4ee77 100644 --- a/plugins/modules/zos_operator_action_query.py +++ b/plugins/modules/zos_operator_action_query.py @@ -238,6 +238,15 @@ def run_module(): + """Initialize module. + + Raises + ------ + fail_json + A non-zero return code was received while querying the operator. + fail_json + An unexpected error occurred. + """ module_args = dict( system=dict(type="str", required=False), message_id=dict(type="str", required=False), @@ -317,6 +326,18 @@ def run_module(): def parse_params(params): + """Parse parameters using BetterArgParser. + + Parameters + ---------- + params : dict + Parameters to parse. + + Returns + ------- + dict + Parsed parameters. + """ arg_defs = dict( system=dict(arg_type=system_type, required=False), message_id=dict(arg_type=message_id_type, required=False), @@ -329,24 +350,85 @@ def parse_params(params): def system_type(arg_val, params): + """System type. + + Parameters + ---------- + arg_val : str + Argument to validate. + params : dict + Not used, but obligatory for BetterArgParser. + + Returns + ------- + str + arg_val validated in uppercase. + """ regex = "^(?:[a-zA-Z0-9]{1,8})|(?:[a-zA-Z0-9]{0,7}[*])$" validate_parameters_based_on_regex(arg_val, regex) return arg_val.upper() def message_id_type(arg_val, params): + """Message id type. + + Parameters + ---------- + arg_val : str + Argument to validate. + params : dict + Not used, but obligatory for BetterArgParser. + + Returns + ------- + str + arg_val validated in uppercase. + """ regex = "^(?:[a-zA-Z0-9]{1,})|(?:[a-zA-Z0-9]{0,}[*])$" validate_parameters_based_on_regex(arg_val, regex) return arg_val.upper() def job_name_type(arg_val, params): + """Job name type. + + Parameters + ---------- + arg_val : str + Argument to validate. + params : dict + Not used, but obligatory for BetterArgParser. + + Returns + ------- + str + arg_val validated in uppercase. + """ regex = "^(?:[a-zA-Z0-9]{1,8})|(?:[a-zA-Z0-9]{0,7}[*])$" validate_parameters_based_on_regex(arg_val, regex) return arg_val.upper() def message_filter_type(arg_val, params): + """Message filter type. + + Parameters + ---------- + arg_val : str + Argument to validate. + params : dict + Not used, but obligatory for BetterArgParser. + + Returns + ------- + str + regex of the given argument. + + Raises + ------ + ValidationError + An error occurred during validate the input parameters. + """ try: filter_text = arg_val.get("filter") use_regex = arg_val.get("use_regex") @@ -364,6 +446,25 @@ def message_filter_type(arg_val, params): def validate_parameters_based_on_regex(value, regex): + """Validate parameters based on regex. + + Parameters + ---------- + value : str + Argument to compare to regex pattern. + regex : str + Regex to get pattern from. + + Returns + ------- + str + The value given. + + Raises + ------ + ValidationError + An error occurred during validate the input parameters. + """ pattern = re.compile(regex) if pattern.fullmatch(value): pass @@ -373,7 +474,20 @@ def validate_parameters_based_on_regex(value, regex): def find_required_request(merged_list, params): - """Find the request given the options provided.""" + """Find the request given the options provided. + + Parameters + ---------- + merged_list : list + Merged list to search. + params : dict + Parameters to get for the function. + + Returns + ------- + Union + Filtered list. + """ requests = filter_requests(merged_list, params) return requests @@ -381,9 +495,24 @@ def find_required_request(merged_list, params): def create_merge_list(message_a, message_b, message_filter): """Merge the return lists that execute both 'd r,a,s' and 'd r,a,jn'. For example, if we have: - 'd r,a,s' response like: "742 R MV28 JOB57578 &742 ARC0055A REPLY 'GO'OR 'CANCEL'" + 'd r,a,s' response like: "742 R MV28 JOB57578 &742 ARC0055A REPLY 'GO' OR 'CANCEL'" 'd r,a,jn' response like:"742 R FVFNT29H &742 ARC0055A REPLY 'GO' OR 'CANCEL'" - the results will be merged so that a full list of information returned on condition""" + the results will be merged so that a full list of information returned on condition. + + Parameters + ---------- + message_a : str + Result coming from command 'd r,a,s'. + message_b : str + Result coming from command 'd r,a,jn'. + message_filter : str + Message filter. + + Returns + ------- + Union + Merge of the result of message_a and the result of message_b. + """ list_a = parse_result_a(message_a, message_filter) list_b = parse_result_b(message_b, message_filter) merged_list = merge_list(list_a, list_b) @@ -391,7 +520,20 @@ def create_merge_list(message_a, message_b, message_filter): def filter_requests(merged_list, params): - """filter the request given the params provided.""" + """Filter the request given the params provided. + + Parameters + ---------- + merged_list : list + Merged list to filter. + params : dict + Parameters to get for the function. + + Returns + ------- + Union + Filtered list. + """ system = params.get("system") message_id = params.get("message_id") job_name = params.get("job_name") @@ -406,6 +548,22 @@ def filter_requests(merged_list, params): def handle_conditions(merged_list, condition_type, value): + """Handle conditions. + + Parameters + ---------- + merged_list : list[dict] + List to check. + condition_type : str + Condition type to check. + value + Value to check for. + + Returns + ------- + Union[dict] + The new list. + """ # regex = re.compile(condition_values) newlist = [] exist = False @@ -422,6 +580,24 @@ def handle_conditions(merged_list, condition_type, value): def execute_command(operator_cmd, timeout_s=1, *args, **kwargs): + """Execute operator command. + + Parameters + ---------- + operator_cmd : str + Operator command. + timeout_s : int + Timeout to wait for the command execution, measured in centiseconds. + *args : dict + Arguments for the command. + **kwargs : dict + More arguments for the command. + + Returns + ------- + OperatorQueryResult + The result of the command. + """ # as of ZOAU v1.3.0, timeout is measured in centiseconds, therefore: timeout_c = 100 * timeout_s response = opercmd.execute(operator_cmd, timeout_c, *args, **kwargs) @@ -433,6 +609,20 @@ def execute_command(operator_cmd, timeout_s=1, *args, **kwargs): def match_raw_message(msg, message_filter): + """Match raw message. + + Parameters + ---------- + msg : str + Message to match. + message_filter : str + Filter for the message. + + Return + ------ + bool + If the pattern matches msg. + """ pattern = re.compile(message_filter, re.DOTALL) return pattern.match(msg) @@ -442,7 +632,20 @@ def parse_result_a(result, message_filter): there are usually two formats: - line with job_id: 810 R MV2D JOB58389 &810 ARC0055A REPLY 'GO' OR 'CANCEL' - line without job_id: 574 R MV28 *574 IXG312E OFFLOAD DELAYED FOR.. - also the request contains multiple lines, we need to handle that as well""" + also the request contains multiple lines, we need to handle that as well. + + Parameters + ---------- + result : str + Result coming from command 'd r,a,s'. + message_filter : str + Message filter. + + Returns + ------- + Union[dict[str,str]] + Resulting list. + """ dict_temp = {} list = [] @@ -474,7 +677,20 @@ def parse_result_a(result, message_filter): def parse_result_b(result, message_filter): """Parse the result that comes from command 'd r,a,jn', the main purpose to use this command is to get the job_name and message id, which is not - included in 'd r,a,s'""" + included in 'd r,a,s' + + Parameters + ---------- + result : str + Result coming from command 'd r,a,jn'. + message_filter : str + Message filter. + + Returns + ------- + Union[dict[str,str]] + Resulting list. + """ dict_temp = {} list = [] @@ -506,6 +722,20 @@ def parse_result_b(result, message_filter): def merge_list(list_a, list_b): + """Merge lists. + + Parameters + ---------- + list_a : list + First list to be merged. + list_b : list + Second list to be merged. + + Returns + ------- + Union + Merged of list_a and list_b. + """ merged_list = [] for dict_a in list_a: for dict_b in list_b: @@ -522,6 +752,18 @@ class Error(Exception): class ValidationError(Error): def __init__(self, message): + """An error occurred during validate the input parameters. + + Parameters + ---------- + message : str + Message of the error that ocurred. + + Attributes + ---------- + msg : str + Human readable string describing the exception. + """ self.msg = ( 'An error occurred during validate the input parameters: "{0}"'.format( message @@ -538,12 +780,27 @@ def __init__( ): """Response object class to manage the result from executing a command to query for actionable messages. Class will also generate a message - by concatenating stdout and stderr - - Arguments: - rc {str} -- The return code - stdout {str} -- The standard out of the command run - stderr {str} -- The standard error of the command run + by concatenating stdout and stderr. + + Parameters + ---------- + rc : str + The return code. + stdout : str + The standard out of the command run. + stderr : str + The standard error of the command run. + + Attributes + ---------- + rc : str + The return code. + stdout : str + The standard out of the command run. + stderr : str + The standard error of the command run. + message : str + The standard out of the command run. """ self.rc = rc self.stdout = stdout From 3d38011f67f1cf41e5a519f2b18bc4b412d8e911 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Tue, 16 Apr 2024 10:52:48 -0600 Subject: [PATCH 351/495] [Bugfix][1239][zos job submit]max_rc_more_than_0_doesn_not_put_change_as_true (#1345) * First iteration of solution * Change dataset * Ensure all cases for false * Remove print * Change behavior for bugfix * Add fragment * Fix latest lower case * Fix uppercase * Remove typo * Remove typo * Fix redundance * Fix test and upper cases * Fix test case * Fix fragment * Return to lower case * Return to lower case --------- Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> --- ...1345-max_rc_more_than_0_doesn_not_put_change_as_true.yml | 5 +++++ plugins/modules/zos_job_submit.py | 4 +++- tests/functional/modules/test_zos_fetch_func.py | 5 +++-- tests/functional/modules/test_zos_job_submit_func.py | 6 +++--- 4 files changed, 14 insertions(+), 6 deletions(-) create mode 100644 changelogs/fragments/1345-max_rc_more_than_0_doesn_not_put_change_as_true.yml diff --git a/changelogs/fragments/1345-max_rc_more_than_0_doesn_not_put_change_as_true.yml b/changelogs/fragments/1345-max_rc_more_than_0_doesn_not_put_change_as_true.yml new file mode 100644 index 000000000..a09b8fa64 --- /dev/null +++ b/changelogs/fragments/1345-max_rc_more_than_0_doesn_not_put_change_as_true.yml @@ -0,0 +1,5 @@ +bugfixes: + - zos_job_submit - when the argument max_rc was different than 0 the changed response returned + as false. Fix now return a changed response as true when the rc is not 0 and max_rc is above + or equal to the value of the job. + (https://github.com/ansible-collections/ibm_zos_core/pull/1345). \ No newline at end of file diff --git a/plugins/modules/zos_job_submit.py b/plugins/modules/zos_job_submit.py index 1b56f459d..204c79217 100644 --- a/plugins/modules/zos_job_submit.py +++ b/plugins/modules/zos_job_submit.py @@ -1108,7 +1108,9 @@ def assert_valid_return_code(max_rc, job_rc, ret_code, result): # should NOT be 'changed=true' even though the user did override the return code, # a non-zero return code means the job did not change anything, so set it as # result["chagned"]=False, - if job_rc != 0: + if max_rc and job_rc > max_rc: + return False + elif job_rc != 0 and max_rc is None: return False return True diff --git a/tests/functional/modules/test_zos_fetch_func.py b/tests/functional/modules/test_zos_fetch_func.py index 5b8e7f878..4d72a6cc5 100644 --- a/tests/functional/modules/test_zos_fetch_func.py +++ b/tests/functional/modules/test_zos_fetch_func.py @@ -539,12 +539,12 @@ def test_fetch_sequential_data_set_replace_on_local_machine(ansible_zos_module): ds_name = TEST_PS hosts.all.zos_data_set(name=TEST_PS, state="present") hosts.all.shell(cmd="decho \"{0}\" \"{1}\"".format(TEST_DATA, TEST_PS)) - dest_path = "/tmp/" + ds_name + dest_path = "/tmp/" + TEST_PS with open(dest_path, "w") as infile: infile.write(DUMMY_DATA) local_checksum = checksum(dest_path, hash_func=sha256) - params = dict(src=ds_name, dest="/tmp/", flat=True) + params = dict(src=TEST_PS, dest="/tmp/", flat=True) try: results = hosts.all.zos_fetch(**params) for result in results.contacted.values(): @@ -562,6 +562,7 @@ def test_fetch_partitioned_data_set_replace_on_local_machine(ansible_zos_module) pds_name = get_tmp_ds_name() dest_path = "/tmp/" + pds_name full_path = dest_path + "/MYDATA" + pds_name_mem = pds_name + "(MYDATA)" hosts.all.zos_data_set( name=pds_name, type="pds", diff --git a/tests/functional/modules/test_zos_job_submit_func.py b/tests/functional/modules/test_zos_job_submit_func.py index f2f1582fa..34fb39d4b 100644 --- a/tests/functional/modules/test_zos_job_submit_func.py +++ b/tests/functional/modules/test_zos_job_submit_func.py @@ -713,11 +713,11 @@ def test_job_submit_max_rc(ansible_zos_module, args): assert re.search(r'the submitted job is greater than the value set for option', repr(result.get("msg"))) elif args["max_rc"] == 12: - # Will not fail but changed will be false for the non-zero RC, there - # are other possibilities like an ABEND or JCL ERROR will fail this even + # Will not fail and as the max_rc is set to 12 and the rc is 8 is a change true + # there are other possibilities like an ABEND or JCL ERROR will fail this even # with a MAX RC assert result.get("msg") is None - assert result.get('changed') is False + assert result.get('changed') is True assert result.get("jobs")[0].get("ret_code").get("code") < 12 finally: hosts.all.file(path=tmp_file.name, state="absent") From b198d02c57b7b2a58f74893c2d56118bb0188f28 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Thu, 18 Apr 2024 12:01:13 -0600 Subject: [PATCH 352/495] [Bug][zos_find] Filter allocated space when using size filter (#1443) * Update zos_archive choices * Update zos_backup_restore choices * Update zos_copy choices * Update zos_data_set choices * Update module docs * Update zos_job_submit choices * Update zos_mount choices * Update zos_unarchive choices * Fix zos_archive and update its tests This also includes major work on zos_data_set since half of the test suite for zos_archive depends on creating data sets. * Update zos_backup_restore tests * Update zos_blockinfile tests * Update more modules * Updated more tests * Update zos_unarchive and zos_mount * Update zos_backup_restore unit tests * Corrected size value to use allocated size instead of utilized size * Added size fix * Updated test * Corrected test * Updated docs * Updated changelog * Added test --------- Co-authored-by: Ivan Moreno <iamorenosoto@gmail.com> --- changelogs/fragments/1443-zos_find-filter-size.yml | 4 ++++ plugins/modules/zos_find.py | 3 ++- tests/functional/modules/test_zos_find_func.py | 14 +++++++++----- 3 files changed, 15 insertions(+), 6 deletions(-) create mode 100644 changelogs/fragments/1443-zos_find-filter-size.yml diff --git a/changelogs/fragments/1443-zos_find-filter-size.yml b/changelogs/fragments/1443-zos_find-filter-size.yml new file mode 100644 index 000000000..a5a8ce029 --- /dev/null +++ b/changelogs/fragments/1443-zos_find-filter-size.yml @@ -0,0 +1,4 @@ +bugfixes: + - zos_find - Filter size failed if a PDS/E matched the pattern. Fix now gets the correct size + for PDS/Es. + (https://github.com/ansible-collections/ibm_zos_core/pull/1443). \ No newline at end of file diff --git a/plugins/modules/zos_find.py b/plugins/modules/zos_find.py index b49d65f04..a12241458 100644 --- a/plugins/modules/zos_find.py +++ b/plugins/modules/zos_find.py @@ -31,6 +31,7 @@ author: - "Asif Mahmud (@asifmahmud)" - "Demetrios Dimatos (@ddimatos)" + - "Fernando Flores (@fernandofloresg)" options: age: description: @@ -479,7 +480,7 @@ def data_set_attribute_filter( age and not size and _age_filter(ds_age, now, age) ) or ( - size and not age and _size_filter(int(out[5]), size) + size and not age and _size_filter(int(out[6]), size) ) ): filtered_data_sets.add(ds) diff --git a/tests/functional/modules/test_zos_find_func.py b/tests/functional/modules/test_zos_find_func.py index 37a67ddbc..42a8db23e 100644 --- a/tests/functional/modules/test_zos_find_func.py +++ b/tests/functional/modules/test_zos_find_func.py @@ -15,6 +15,7 @@ __metaclass__ = type from ibm_zos_core.tests.helpers.volumes import Volume_Handler +import pytest SEQ_NAMES = [ "TEST.FIND.SEQ.FUNCTEST.FIRST", @@ -32,6 +33,8 @@ "TEST.FIND.VSAM.FUNCTEST.FIRST" ] +DATASET_TYPES = ['seq', 'pds', 'pdse'] + def create_vsam_ksds(ds_name, ansible_zos_module, volume="000000"): hosts = ansible_zos_module @@ -118,7 +121,7 @@ def test_find_pds_members_containing_string(ansible_zos_module): search_string = "hello" try: hosts.all.zos_data_set( - batch=[dict(name=i, type='pds') for i in PDS_NAMES] + batch=[dict(name=i, type='pds', space_primary=1, space_type="m") for i in PDS_NAMES] ) hosts.all.zos_data_set( batch=[ @@ -216,13 +219,14 @@ def test_find_data_sets_older_than_age(ansible_zos_module): assert val.get('matched') == 2 -def test_find_data_sets_larger_than_size(ansible_zos_module): +@pytest.mark.parametrize("ds_type", DATASET_TYPES) +def test_find_data_sets_larger_than_size(ansible_zos_module, ds_type): hosts = ansible_zos_module TEST_PS1 = 'TEST.PS.ONE' TEST_PS2 = 'TEST.PS.TWO' try: - res = hosts.all.zos_data_set(name=TEST_PS1, state="present", space_type="m", space_primary=5) - res = hosts.all.zos_data_set(name=TEST_PS2, state="present", space_type="m", space_primary=5) + res = hosts.all.zos_data_set(name=TEST_PS1, state="present", space_primary="1", space_type="m", type=ds_type) + res = hosts.all.zos_data_set(name=TEST_PS2, state="present", space_primary="1", space_type="m", type=ds_type) find_res = hosts.all.zos_find(patterns=['TEST.PS.*'], size="1k") for val in find_res.contacted.values(): assert len(val.get('data_sets')) == 2 @@ -236,7 +240,7 @@ def test_find_data_sets_smaller_than_size(ansible_zos_module): hosts = ansible_zos_module TEST_PS = 'USER.FIND.TEST' try: - hosts.all.zos_data_set(name=TEST_PS, state="present", type="seq", space_type="k", space_primary=1) + hosts.all.zos_data_set(name=TEST_PS, state="present", type="seq", space_primary="1", space_type="k") find_res = hosts.all.zos_find(patterns=['USER.FIND.*'], size='-1m') for val in find_res.contacted.values(): assert len(val.get('data_sets')) == 1 From 9b6b051097836a9d00a73377130f4d5af3f24e34 Mon Sep 17 00:00:00 2001 From: IsaacVRey <isaac.vega.rey@gmail.com> Date: Thu, 18 Apr 2024 12:03:17 -0600 Subject: [PATCH 353/495] [Documentation][zos_find] Add and standarize docstrings on modules/zos_find.py (#1350) * Add and standarize docstrings on modules/zos_find.py * Create changelog fragment * Modify google style to numpy * Updated docstrings --------- Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> --- .../1350-update-docstring-zos_find.yml | 3 + plugins/modules/zos_find.py | 390 +++++++++++++----- 2 files changed, 297 insertions(+), 96 deletions(-) create mode 100644 changelogs/fragments/1350-update-docstring-zos_find.yml diff --git a/changelogs/fragments/1350-update-docstring-zos_find.yml b/changelogs/fragments/1350-update-docstring-zos_find.yml new file mode 100644 index 000000000..48c1fbce1 --- /dev/null +++ b/changelogs/fragments/1350-update-docstring-zos_find.yml @@ -0,0 +1,3 @@ +trivial: + - zos_find - Updated docstrings to numpy style for visual aid to developers. + (https://github.com/ansible-collections/ibm_zos_core/pull/1350). \ No newline at end of file diff --git a/plugins/modules/zos_find.py b/plugins/modules/zos_find.py index a12241458..b269c472d 100644 --- a/plugins/modules/zos_find.py +++ b/plugins/modules/zos_find.py @@ -277,18 +277,28 @@ def content_filter(module, patterns, content): """ Find data sets that match any pattern in a list of patterns and - contains the given content - - Arguments: - module {AnsibleModule} -- The Ansible module object being used in the module - patterns {list[str]} -- A list of data set patterns - content {str} -- The content string to search for within matched data sets - - Returns: - dict[ps=set, pds=dict[str, str], searched=int] -- A dictionary containing + contains the given content. + + Parameters + ---------- + module : AnsibleModule + The Ansible module object being used in the module. + patterns : list[str] + A list of data set patterns. + content : str + The content string to search for within matched data sets. + + Returns + ------- + dict[ps=set, pds=dict[str, str], searched=int] + A dictionary containing a set of matched "PS" data sets, a dictionary containing "PDS" data sets and members corresponding to each PDS, an int representing number of total data sets examined. + + Raises + ------ + fail_json: Non-zero return code received while executing ZOAU shell command 'dgrep'. """ filtered_data_sets = dict(ps=set(), pds=dict(), searched=0) for pattern in patterns: @@ -321,15 +331,25 @@ def content_filter(module, patterns, content): def data_set_filter(module, pds_paths, patterns): """ Find data sets that match any pattern in a list of patterns. - Arguments: - module {AnsibleModule} -- The Ansible module object being used - patterns {list[str]} -- A list of data set patterns - - Returns: - dict[ps=set, pds=dict[str, str], searched=int] -- A dictionary containing + Parameters + ---------- + module : AnsibleModule + The Ansible module object being used. + patterns : list[str] + A list of data set patterns. + + Returns + ------- + dict[ps=set, pds=dict[str, str], searched=int] + A dictionary containing a set of matched "PS" data sets, a dictionary containing "PDS" data sets and members corresponding to each PDS, an int representing number of total data sets examined. + + Raises + ------ + fail_json + Non-zero return code received while executing ZOAU shell command 'dls'. """ filtered_data_sets = dict(ps=set(), pds=dict(), searched=0) patterns = pds_paths or patterns @@ -372,15 +392,21 @@ def pds_filter(module, pds_dict, member_patterns, excludes=None): """ Return all PDS/PDSE data sets whose members match any of the patterns in the given list of member patterns. - Arguments: - module {AnsibleModule} -- The Ansible module object being used in the module - pds_dict {dict[str, str]} -- A dictionary where each key is the name of - of the PDS/PDSE and the value is a list of - members belonging to the PDS/PDSE - member_patterns {list} -- A list of member patterns to search for - - Returns: - dict[str, set[str]] -- Filtered PDS/PDSE with corresponding members + Parameters + ---------- + module : AnsibleModule + The Ansible module object being used in the module. + pds_dict : dict[str, str] + A dictionary where each key is the name of + of the PDS/PDSE and the value is a list of + members belonging to the PDS/PDSE. + member_patterns : list + A list of member patterns to search for. + + Returns + ------- + dict[str, set[str]] + Filtered PDS/PDSE with corresponding members. """ filtered_pds = dict() for pds, members in pds_dict.items(): @@ -412,12 +438,22 @@ def vsam_filter(module, patterns, resource_type, age=None): """ Return all VSAM data sets that match any of the patterns in the given list of patterns. - Arguments: - module {AnsibleModule} -- The Ansible module object being used - patterns {list[str]} -- A list of data set patterns - - Returns: - set[str]-- Matched VSAM data sets + Parameters + ---------- + module : AnsibleModule + The Ansible module object being used. + patterns : list[str] + A list of data set patterns. + + Returns + ------- + set[str] + Matched VSAM data sets. + + Raises + ------ + fail_json + Non-zero return code received while executing ZOAU shell command 'vls'. """ filtered_data_sets = set() now = time.time() @@ -447,14 +483,26 @@ def data_set_attribute_filter( ): """ Filter data sets based on attributes such as age or size. - Arguments: - module {AnsibleModule} -- The Ansible module object being used - data_sets {set[str]} -- A set of data set names - size {int} -- The size, in bytes, that should be used to filter data sets - age {int} -- The age, in days, that should be used to filter data sets - - Returns: - set[str] -- Matched data sets filtered by age and size + Parameters + ---------- + module : AnsibleModule + The Ansible module object being used. + data_sets : set[str] + A set of data set names. + size : int + The size, in bytes, that should be used to filter data sets. + age : int + The age, in days, that should be used to filter data sets. + + Returns + ------- + set[str] + Matched data sets filtered by age and size. + + Raises + ------ + fail_json + Non-zero return code received while executing ZOAU shell command 'dls'. """ filtered_data_sets = set() now = time.time() @@ -494,13 +542,24 @@ def volume_filter(module, data_sets, volumes): """Return only the data sets that are allocated in one of the volumes from the list of input volumes. - Arguments: - module {AnsibleModule} -- The Ansible module object - data_sets {set[str]} -- A set of data sets to be filtered - volumes {list[str]} -- A list of input volumes - - Returns: - set[str] -- The filtered data sets + Parameters + ---------- + module : AnsibleModule + The Ansible module object. + data_sets : set[str] + A set of data sets to be filtered. + volumes : list[str] + A list of input volumes. + + Returns + ------- + set[str] + The filtered data sets. + + Raises + ------ + fail_json + Unable to retrieve VTOC information. """ filtered_data_sets = set() for volume in volumes: @@ -518,15 +577,21 @@ def volume_filter(module, data_sets, volumes): def exclude_data_sets(module, data_set_list, excludes): - """Remove data sets that match any pattern in a list of patterns - - Arguments: - module {AnsibleModule} -- The Ansible module object being used - data_set_list {set[str]} -- A set of data sets to be filtered - excludes {list[str]} -- A list of data set patterns to be excluded - - Returns: - set[str] -- The remaining data sets that have not been excluded + """Remove data sets that match any pattern in a list of patterns. + + Parameters + ---------- + module : AnsibleModule + The Ansible module object being used. + data_set_list : set[str] + A set of data sets to be filtered. + excludes : list[str] + A list of data set patterns to be excluded. + + Returns + ------- + set[str] + The remaining data sets that have not been excluded. """ for ds in set(data_set_list): for ex_pat in excludes: @@ -537,15 +602,21 @@ def exclude_data_sets(module, data_set_list, excludes): def _age_filter(ds_date, now, age): - """Determine whether a given date is older than 'age' - - Arguments: - ds_date {str} -- The input date in the format YYYY/MM/DD - now {float} -- The time elapsed since the last epoch - age {int} -- The age, in days, to compare against - - Returns: - bool -- Whether 'ds_date' is older than 'age' + """Determine whether a given date is older than 'age'. + + Parameters + ---------- + ds_date : str + The input date in the format YYYY/MM/DD. + now : float + The time elapsed since the last epoch. + age : int + The age, in days, to compare against. + + Returns + ------- + bool + Whether 'ds_date' is older than 'age'. """ year, month, day = list(map(int, ds_date.split("/"))) if year == "0000": @@ -561,14 +632,24 @@ def _age_filter(ds_date, now, age): def _get_creation_date(module, ds): - """Retrieve the creation date for a given data set - - Arguments: - module {AnsibleModule} -- The Ansible module object being used - ds {str} -- The name of the data set - - Returns: - str -- The data set creation date in the format "YYYY/MM/DD" + """Retrieve the creation date for a given data set. + + Arguments + --------- + module : AnsibleModule + The Ansible module object being used. + ds : str + The name of the data set. + + Returns + ------- + str + The data set creation date in the format "YYYY/MM/DD". + + Raises + ------ + fail_json + Non-zero return code received while retrieving data set age. """ rc, out, err = mvs_cmd.idcams( " LISTCAT ENT('{0}') HISTORY".format(ds), authorized=True @@ -596,14 +677,19 @@ def _get_creation_date(module, ds): def _size_filter(ds_size, size): - """ Determine whether a given size is greater than the input size - - Arguments: - ds_size {int} -- The input size, in bytes - size {int} -- The size, in bytes, to compare against - - Returns: - bool -- Whether 'ds_size' is greater than 'age' + """Determine whether a given size is greater than the input size. + + Parameters + ---------- + ds_size : int + The input size, in bytes. + size : int + The size, in bytes, to compare against. + + Returns + ------- + bool + Whether 'ds_size' is greater than 'age'. """ if size >= 0 and ds_size >= abs(size): return True @@ -613,15 +699,26 @@ def _size_filter(ds_size, size): def _match_regex(module, pattern, string): - """ Determine whether the input regex pattern matches the string - - Arguments: - module {AnsibleModule} -- The Ansible module object being used - pattern {str} -- The regular expression to match - string {str} -- The string to match - - Returns: - re.Match -- A Match object that matches the pattern to string + """Determine whether the input regex pattern matches the string. + + Parameters + ---------- + module : AnsibleModule + The Ansible module object being used. + pattern : str + The regular expression to match. + string : str + The string to match. + + Returns + ------- + re.Match + A Match object that matches the pattern to string. + + Raises + ------ + fail_json + Invalid regular expression. """ try: return fullmatch(pattern, string, re.IGNORECASE) @@ -640,7 +737,28 @@ def _dgrep_wrapper( verbose=False, context=None ): - """A wrapper for ZOAU 'dgrep' shell command""" + """A wrapper for ZOAU 'dgrep' shell command. + + Parameters + ---------- + data_set_pattern : str + Data set pattern where to search for content. + content : str + Content to search across the data sets specified in data_set_pattern. + ignore_case : bool + Whether to ignore case or not. + line_num : bool + Whether to display line numbers. + verbose : bool + Extra verbosity, prints names of datasets being searched. + context : int + If context lines are requested, then up to <NUM> lines before and after the matching line are also printed. + + Returns + ------- + tuple(int,str,str) + Return code, standard output and standard error. + """ dgrep_cmd = "dgrep" if ignore_case: dgrep_cmd += " -i" @@ -663,7 +781,28 @@ def _dls_wrapper( verbose=False, migrated=False ): - """A wrapper for ZOAU 'dls' shell command""" + """A wrapper for ZOAU 'dls' shell command. + + Parameters + ---------- + data_set_pattern : str + Data set pattern. + list_details : bool + Display detailed information based on the dataset type. + u_time : bool + Display last usage time. + size : bool + Display size in list. + verbose : bool + Display verbose information. + migrated : bool + Display migrated data sets. + + Returns + ------- + tuple(int,str,str) + Return code, standard output and standard error. + """ dls_cmd = "dls" if migrated: dls_cmd += " -m" @@ -682,7 +821,22 @@ def _dls_wrapper( def _vls_wrapper(pattern, details=False, verbose=False): - """A wrapper for ZOAU 'vls' shell command""" + """A wrapper for ZOAU 'vls' shell command. + + Parameters + ---------- + pattern : str + Data set pattern. + details : bool + Display detailed information based on the dataset type. + verbose : bool + Display verbose information. + + Returns + ------- + tuple(int,str,str) + Return code, standard output and standard error. + """ vls_cmd = "vls" if details: vls_cmd += " -l" @@ -694,6 +848,20 @@ def _vls_wrapper(pattern, details=False, verbose=False): def _match_resource_type(type1, type2): + """Compare that the two types match. + + Parameters + ---------- + type1 : str + One of the types that are expected to match. + type2 : str + One of the types that are expected to match. + + Returns + ------- + bool + If the types match. + """ if type1 == type2: return True if type1 == "CLUSTER" and type2 not in ("DATA", "INDEX"): @@ -702,13 +870,17 @@ def _match_resource_type(type1, type2): def _ds_type(ds_name): - """Utility function to determine the DSORG of a data set + """Utility function to determine the DSORG of a data set. - Arguments: - ds_name {str} -- The name of the data set + Parameters + ---------- + ds_name : str + The name of the data set. - Returns: - str -- The DSORG of the data set + Returns + ------- + str + The DSORG of the data set. """ rc, out, err = mvs_cmd.ikjeft01( " LISTDS '{0}'".format(ds_name), @@ -721,6 +893,25 @@ def _ds_type(ds_name): def run_module(module): + """Initialize parameters. + + Parameters + ---------- + module : AnsibleModule + Ansible Module. + + Returns + ------- + dict + Arguments. + + Raises + ------ + fail_json + Failed to process age. + fail_json + Failed to process size. + """ # Parameter initialization age = module.params.get('age') age_stamp = module.params.get('age_stamp') @@ -817,6 +1008,13 @@ def run_module(module): def main(): + """Initialize module when it's run as main. + + Raises + ------ + fail_json + Parameter verification failed. + """ module = AnsibleModule( argument_spec=dict( age=dict(type="str", required=False), From 9acac9f935b0133b5c4d46d45d78118bbc58e994 Mon Sep 17 00:00:00 2001 From: IsaacVRey <isaac.vega.rey@gmail.com> Date: Thu, 18 Apr 2024 12:04:29 -0600 Subject: [PATCH 354/495] [Documentation][zos_gather_facts] Add and standarize docstrings on modules/zos_gather_facts.py (#1351) * Add and standarize docstrings on modules/zos_gather_facts.py * Create changelog fragment * Modify google style to numpy * Standarize numpy style --------- Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> --- ...1351-update-docstring-zos_gather_facts.yml | 3 + plugins/modules/zos_gather_facts.py | 67 ++++++++++++++----- 2 files changed, 53 insertions(+), 17 deletions(-) create mode 100644 changelogs/fragments/1351-update-docstring-zos_gather_facts.yml diff --git a/changelogs/fragments/1351-update-docstring-zos_gather_facts.yml b/changelogs/fragments/1351-update-docstring-zos_gather_facts.yml new file mode 100644 index 000000000..31fe8dfda --- /dev/null +++ b/changelogs/fragments/1351-update-docstring-zos_gather_facts.yml @@ -0,0 +1,3 @@ +trivial: + - zos_gather_facts - Updated docstrings to numpy style for visual aid to developers. + (https://github.com/ansible-collections/ibm_zos_core/pull/1351). \ No newline at end of file diff --git a/plugins/modules/zos_gather_facts.py b/plugins/modules/zos_gather_facts.py index 2ea7b0baf..a9df42a49 100644 --- a/plugins/modules/zos_gather_facts.py +++ b/plugins/modules/zos_gather_facts.py @@ -128,11 +128,18 @@ def zinfo_facts_list_builder(gather_subset): """Builds a list of strings to pass into 'zinfo' based off the gather_subset list. - Arguments: - gather_subset {list} -- A list of subsets to pass in. - Returns: - [list[str]] -- A list of strings that contains sanitized subsets. - [None] -- An invalid value was received for the subsets. + + Parameters + ---------- + gather_subset : list + A list of subsets to pass in. + + Returns + ------- + Union[str] + A list of strings that contains sanitized subsets. + None + An invalid value was received for the subsets. """ if gather_subset is None or 'all' in gather_subset: return ["all"] @@ -157,11 +164,17 @@ def zinfo_facts_list_builder(gather_subset): def flatten_zinfo_json(zinfo_dict): """Removes one layer of mapping in the dictionary. Top-level keys correspond to zinfo subsets and are removed. - Arguments: - zinfo_dict {dict} -- A dictionary that contains the parsed result from - the zinfo json string. - Returns: - [dict] -- A flattened dictionary. + + Parameters + ---------- + zinfo_dict : dict + A dictionary that contains the parsed result from + the zinfo json string. + + Returns + ------- + dict + A flattened dictionary. """ d = {} for subset in list(zinfo_dict): @@ -172,13 +185,20 @@ def flatten_zinfo_json(zinfo_dict): def apply_filter(zinfo_dict, filter_list): """Returns a dictionary that contains only the keys which fit the specified filters. - Arguments: - zinfo_dict {dict} -- A flattened dictionary that contains results from - zinfo. - filter_list {list} -- A string list of shell wildcard patterns (i.e. - 'filters') to apply to the zinfo_dict keys. - Returns: - [dict] -- A dictionary with keys that are filtered out. + + Parameters + ---------- + zinfo_dict : dict + A flattened dictionary that contains results from + zinfo. + filter_list : list + A string list of shell wildcard patterns (i.e. + 'filters') to apply to the zinfo_dict keys. + + Returns + ------- + dict + A dictionary with keys that are filtered out. """ if filter_list is None or filter_list == [] or '*' in filter_list: @@ -193,6 +213,19 @@ def apply_filter(zinfo_dict, filter_list): def run_module(): + """Initialize module. + + Raises + ------ + fail_json + The zos_gather_facts module requires ZOAU >= 1.3.0. + fail_json + An invalid subset was passed to Ansible. + fail_json + An invalid subset was detected. + fail_json + An exception has occurred. Unable to gather facts. + """ # define available arguments/parameters a user can pass to the module module_args = dict( gather_subset=dict( From 3c9eae6592d6cc5cd7e0769e0ac79627a98cd5e4 Mon Sep 17 00:00:00 2001 From: IsaacVRey <isaac.vega.rey@gmail.com> Date: Thu, 18 Apr 2024 12:05:01 -0600 Subject: [PATCH 355/495] [Documentation][zos_job_output] Add and standarize docstrings on modules/zos_job_output.py (#1352) * Add and standarize docstrings on modules/zos_job_output.py * Create changelog fragment * Modify google style to numpy --- .../1352-update-docstring-zos_job_output.yml | 3 +++ plugins/modules/zos_job_output.py | 13 +++++++++++++ 2 files changed, 16 insertions(+) create mode 100644 changelogs/fragments/1352-update-docstring-zos_job_output.yml diff --git a/changelogs/fragments/1352-update-docstring-zos_job_output.yml b/changelogs/fragments/1352-update-docstring-zos_job_output.yml new file mode 100644 index 000000000..78aac0cac --- /dev/null +++ b/changelogs/fragments/1352-update-docstring-zos_job_output.yml @@ -0,0 +1,3 @@ +trivial: + - zos_job_output - Updated docstrings to numpy style for visual aid to developers. + (https://github.com/ansible-collections/ibm_zos_core/pull/1352). \ No newline at end of file diff --git a/plugins/modules/zos_job_output.py b/plugins/modules/zos_job_output.py index ed5a182d3..6a6328e67 100644 --- a/plugins/modules/zos_job_output.py +++ b/plugins/modules/zos_job_output.py @@ -431,6 +431,19 @@ def run_module(): + """Initialize module. + + Raises + ------ + fail_json + Parameter verification failed. + fail_json + job_id or job_name or owner not provided. + fail_json + ZOAU exception. + fail_json + Any exception while fetching jobs. + """ module_args = dict( job_id=dict(type="str", required=False), job_name=dict(type="str", required=False), From f0b5d62855faf2b0531128a130f7bb088d75027b Mon Sep 17 00:00:00 2001 From: IsaacVRey <isaac.vega.rey@gmail.com> Date: Thu, 18 Apr 2024 12:06:50 -0600 Subject: [PATCH 356/495] [Documentation][zos_mount] Add and standarize docstrings on modules/zos_mount.py (#1356) * Add and standarize docstrings on modules/zos_mount.py * Create changelog fragment * Modify google style to numpy * Updated docstring --------- Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> --- .../1356-update-docstring-zos_mount.yml | 3 + plugins/modules/zos_mount.py | 81 +++++++++++++++++-- 2 files changed, 79 insertions(+), 5 deletions(-) create mode 100644 changelogs/fragments/1356-update-docstring-zos_mount.yml diff --git a/changelogs/fragments/1356-update-docstring-zos_mount.yml b/changelogs/fragments/1356-update-docstring-zos_mount.yml new file mode 100644 index 000000000..a2c09caa5 --- /dev/null +++ b/changelogs/fragments/1356-update-docstring-zos_mount.yml @@ -0,0 +1,3 @@ +trivial: + - zos_mount - Updated docstrings to numpy style for visual aid to developers. + (https://github.com/ansible-collections/ibm_zos_core/pull/1356). \ No newline at end of file diff --git a/plugins/modules/zos_mount.py b/plugins/modules/zos_mount.py index 61ca20b9f..8828d9005 100644 --- a/plugins/modules/zos_mount.py +++ b/plugins/modules/zos_mount.py @@ -564,6 +564,31 @@ def mt_backupOper(module, src, backup, tmphlq=None): + """Makes a backup of the source. + + Parameters + ---------- + module : AnsibleModule + AnsibleModule. + src : str + Source USS file or MVS data set. + backup : str + Name for the backup. + tmphlq : str + HLQ to be used for backup dataset. + + Returns + ------- + str + Backup name. + + Raises + ------ + fail_json + Crating backup has failed. + fail_json + Data set type is NOT supported. + """ # analysis the file type ds_utils = data_set.DataSetUtils(src) file_type = ds_utils.ds_type() @@ -590,11 +615,24 @@ def mt_backupOper(module, src, backup, tmphlq=None): def swap_text(original, adding, removing): - """ - swap_text returns original after removing blocks matching removing, - and adding the adding param - original now should be a list of lines without newlines - return is the consolidated file value + """swap_text returns original after removing blocks matching removing, + and adding the adding param. + original now should be a list of lines without newlines. + return is the consolidated file value. + + Parameters + ---------- + original : str + Text to modify. + adding : str + Lines to add. + removing : str + Lines to delete if matched. + + Returns + ------- + str + The consolidated file value. """ content_lines = original @@ -658,6 +696,37 @@ def swap_text(original, adding, removing): def run_module(module, arg_def): + """Initialize module. + + Parameters + ---------- + arg_def : dict + Arguments to use. + + Returns + ------- + dict + Arguments. + + Raises + ------ + fail_json + Parameter verification failed. + fail_json + Mount source either is not cataloged or does not exist. + fail_json + Exception encountered during directory creation. + fail_json + Mount destination doesn't exist. + fail_json + Checking filesystem list failed with error. + fail_json + Exception encountered when running unmount. + fail_json + Exception occurred when running mount. + fail_json + Persistent data set is either not cataloged or does not exist. + """ # ******************************************************************** # Verify the validity of module args. BetterArgParser raises ValueError # when a parameter fails its validation check @@ -1042,6 +1111,8 @@ def run_module(module, arg_def): def main(): + """Initialize module when executed as main. + """ global module module = AnsibleModule( From 46a21d20f1186c7a696f4121b901b347e985ee79 Mon Sep 17 00:00:00 2001 From: IsaacVRey <isaac.vega.rey@gmail.com> Date: Thu, 18 Apr 2024 12:31:02 -0600 Subject: [PATCH 357/495] [Documentation][zos_job_submit] Add and standarize docstrings on modules/zos_job_submit.py (#1354) * Add and standarize docstrings on modules/zos_job_submit.py * Create changelog fragment * Modify google style to numpy * Corrected functino --------- Co-authored-by: Rich Parker <richp405@gmail.com> Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> --- .../1354-update-docstring-zos_job_submit.yml | 3 + plugins/modules/zos_job_submit.py | 99 +++++++++++++++---- 2 files changed, 82 insertions(+), 20 deletions(-) create mode 100644 changelogs/fragments/1354-update-docstring-zos_job_submit.yml diff --git a/changelogs/fragments/1354-update-docstring-zos_job_submit.yml b/changelogs/fragments/1354-update-docstring-zos_job_submit.yml new file mode 100644 index 000000000..c2c0a4b99 --- /dev/null +++ b/changelogs/fragments/1354-update-docstring-zos_job_submit.yml @@ -0,0 +1,3 @@ +trivial: + - zos_job_submit - Updated docstrings to numpy style for visual aid to developers. + (https://github.com/ansible-collections/ibm_zos_core/pull/1354). \ No newline at end of file diff --git a/plugins/modules/zos_job_submit.py b/plugins/modules/zos_job_submit.py index 204c79217..bb3aac1ab 100644 --- a/plugins/modules/zos_job_submit.py +++ b/plugins/modules/zos_job_submit.py @@ -648,26 +648,48 @@ def submit_src_jcl(module, src, src_name=None, timeout=0, is_unix=True, volume=None, start_time=timer()): - """ Submit src JCL whether JCL is local (Ansible Controller), USS or in a data set. - - Arguments: - module - module instnace to access the module api - src (str) - JCL, can be relative or absolute paths either on controller or USS - - Data set, can be PS, PDS, PDSE Member - src_name (str) - the src name that was provided in the module because through - the runtime src could be replace with a temporary file name - timeout (int) - how long to wait in seconds for a job to complete - is_unix (bool) - True if JCL is a file in USS, otherwise False; Note that all - JCL local to a controller is transfered to USS thus would be - True - volume (str) - volume the data set JCL is located on that will be cataloged before - being submitted - start_time - time the JCL started its submission - - Returns: - job_submitted_id - the JCL job ID returned from submitting a job, else if no - job submits, None will be returned - duration - how long the job ran for in this method + """Submit src JCL whether JCL is local (Ansible Controller), USS or in a data set. + + Parameters + ---------- + module: AnsibleModule + module instance to access the module api. + src : str + JCL, can be relative or absolute paths either on controller or USS + - Data set, can be PS, PDS, PDSE Member. + src_name : str + The src name that was provided in the module because through + the runtime src could be replace with a temporary file name. + timeout : int + How long to wait in seconds for a job to complete. + is_unix : bool + True if JCL is a file in USS, otherwise False; Note that all + JCL local to a controller is transfered to USS thus would be + True. + volume : str + volume the data set JCL is located on that will be cataloged before + being submitted. + start_time : int + time the JCL started its submission. + + Returns + ------- + str + the JCL job ID returned from submitting a job, else if no + job submits, None will be returned. + int + how long the job ran for in this method. + + Raises + ------ + fail_json + Unable to submit job because the data set could not be cataloged on the volume. + fail_json + Unable to submit job, the job submission has failed. + fail_json + The JCL has been submitted but there was an error while fetching its status. + fail_json + The job has been submitted and no job id was returned. """ kwargs = { @@ -801,6 +823,15 @@ def submit_src_jcl(module, src, src_name=None, timeout=0, is_unix=True, volume=N def run_module(): + """Initialize module. + + Raises + ------ + fail_json + Parameter verification failed. + fail_json + The value for option 'wait_time_s' is not valid. + """ module_args = dict( src=dict(type="str", required=True), location=dict( @@ -1078,6 +1109,34 @@ def run_module(): def assert_valid_return_code(max_rc, job_rc, ret_code, result): + """Asserts valid return code. + + Parameters + ---------- + max_rc : int + Max return code. + joc_rc : int + Job return code. + ret_code : int + Return code. + result : dict() + Result dictionary. + + Returns + ------- + bool + If job_rc is not 0. + + Raises + ------ + Exception + The job return code was not available in the jobs output. + Exception + The job return code for the submitted job is greater than the value set for option 'max_rc'. + Exception + The step return code for the submitted job is greater than the value set for option 'max_rc'. + """ + if job_rc is None: raise Exception( "The job return code (ret_code[code]) was not available in the jobs output, " From e2a574ffcbfdecb8f5c7df5c2761aff518fe297c Mon Sep 17 00:00:00 2001 From: IsaacVRey <isaac.vega.rey@gmail.com> Date: Fri, 19 Apr 2024 10:58:24 -0600 Subject: [PATCH 358/495] [Documentation][validation] Add docstrings to module_utils/validation.py (#1336) * Add docstrings to module_utils/validation.py * Create changelog fragment * Modify google style to numpy * Standarize numpy style --- .../1336-update-docstring-validation.yml | 3 ++ plugins/module_utils/validation.py | 30 +++++++++++++++++-- 2 files changed, 31 insertions(+), 2 deletions(-) create mode 100644 changelogs/fragments/1336-update-docstring-validation.yml diff --git a/changelogs/fragments/1336-update-docstring-validation.yml b/changelogs/fragments/1336-update-docstring-validation.yml new file mode 100644 index 000000000..547103d46 --- /dev/null +++ b/changelogs/fragments/1336-update-docstring-validation.yml @@ -0,0 +1,3 @@ +trivial: + - validation - Updated docstrings to numpy style for visual aid to developers. + (https://github.com/ansible-collections/ibm_zos_core/pull/1336). \ No newline at end of file diff --git a/plugins/module_utils/validation.py b/plugins/module_utils/validation.py index c08847503..fe41c0a01 100644 --- a/plugins/module_utils/validation.py +++ b/plugins/module_utils/validation.py @@ -22,11 +22,25 @@ def validate_safe_path(path): - """ - This function is implemented to validate against path traversal attack + """This function is implemented to validate against path traversal attack when using os.path.join function. In this action plugin, path is on the controller. + + Parameters + ---------- + path : str + A file's path. + + Returns + ------- + str + The introduced path. + + Raises + ------ + DirectoryTraversalError + User does not have access to a directory. """ if not os.path.isabs(path): real_path = os.path.realpath(path) @@ -39,6 +53,18 @@ def validate_safe_path(path): class DirectoryTraversalError(Exception): + """User does not have access to a directory. + + Parameters + ---------- + path : str + Directory path. + + Attributes + ---------- + msg : str + Human readable string describing the exception. + """ def __init__(self, path): self.msg = "Detected directory traversal, user does not have access to {0}".format(path) super().__init__(self.msg) From b521c3d42b7a06c800ecd3bd348aa52c161cde92 Mon Sep 17 00:00:00 2001 From: IsaacVRey <isaac.vega.rey@gmail.com> Date: Fri, 19 Apr 2024 10:58:59 -0600 Subject: [PATCH 359/495] [Documentation][job] Add docstrings to module_utils/job.py (#1333) * Add docstrings to module_utils/job.py * Create changelog fragment * Modified the google style to numpy * Update changelog fragment * Standarize numpy style --- .../fragments/1333-update-docstring-job.yml | 3 + plugins/module_utils/job.py | 146 +++++++++++++----- 2 files changed, 114 insertions(+), 35 deletions(-) create mode 100644 changelogs/fragments/1333-update-docstring-job.yml diff --git a/changelogs/fragments/1333-update-docstring-job.yml b/changelogs/fragments/1333-update-docstring-job.yml new file mode 100644 index 000000000..124ef2cae --- /dev/null +++ b/changelogs/fragments/1333-update-docstring-job.yml @@ -0,0 +1,3 @@ +trivial: + - job - Updated docstrings to numpy style for visual aid to developers. + (https://github.com/ansible-collections/ibm_zos_core/pull/1333). \ No newline at end of file diff --git a/plugins/module_utils/job.py b/plugins/module_utils/job.py index 25483b45d..72b72a90b 100644 --- a/plugins/module_utils/job.py +++ b/plugins/module_utils/job.py @@ -62,18 +62,29 @@ def job_output(job_id=None, owner=None, job_name=None, dd_name=None, dd_scan=True, duration=0, timeout=0, start_time=timer()): """Get the output from a z/OS job based on various search criteria. - Keyword Arguments: - job_id (str) -- The job ID to search for (default: {None}) - owner (str) -- The owner of the job (default: {None}) - job_name (str) -- The job name search for (default: {None}) - dd_name (str) -- The data definition to retrieve (default: {None}) - dd_scan (bool) - Whether or not to pull information from the dd's for this job {default: {True}} - duration (int) -- The time the submitted job ran for - timeout (int) - how long to wait in seconds for a job to complete - start_time (int) - time the JCL started its submission - - Returns: - list[dict] -- The output information for a list of jobs matching specified criteria. + Keyword Parameters + ------------------ + job_id : str + The job ID to search for (default: {None}). + owner : str + The owner of the job (default: {None}). + job_name : str + The job name search for (default: {None}). + dd_name : str + The data definition to retrieve (default: {None}). + dd_scan : bool + Whether or not to pull information from the dd's for this job {default: {True}}. + duration : int + The time the submitted job ran for. + timeout : int + How long to wait in seconds for a job to complete. + start_time : int + Time the JCL started its submission. + + Returns + ------- + Union[dict] + The output information for a list of jobs matching specified criteria. If no job status is found it will return a ret_code diction with parameter 'msg_txt" = "The job could not be found. """ @@ -127,6 +138,26 @@ def job_output(job_id=None, owner=None, job_name=None, dd_name=None, dd_scan=Tru def _job_not_found(job_id, owner, job_name, dd_name): + """Returns the information of a not founded job. + + Keyword Parameters + ------------------ + job_id : str + The job ID to search for (default: {None}). + owner : str + The owner of the job (default: {None}). + job_name : str + The job name search for (default: {None}). + dd_name : str + The data definition to retrieve (default: {None}). + + Returns + ------- + Union[dict] + The empty job information in a list. + If no job status is found it will return a ret_code diction with + parameter 'msg_txt" = "The job could not be found. + """ # Note that the text in the msg_txt is used in test cases and thus sensitive to change jobs = [] if job_id != '*' and job_name != '*': @@ -170,18 +201,25 @@ def _job_not_found(job_id, owner, job_name, dd_name): def job_status(job_id=None, owner=None, job_name=None, dd_name=None): """Get the status information of a z/OS job based on various search criteria. - Keyword Arguments: - job_id {str} -- The job ID to search for (default: {None}) - owner {str} -- The owner of the job (default: {None}) - job_name {str} -- The job name search for (default: {None}) - dd_name {str} -- If populated, return ONLY this DD in the job list (default: {None}) - note: no routines call job_status with dd_name, so we are speeding this routine with - 'dd_scan=False' - - Returns: - list[dict] -- The status information for a list of jobs matching search criteria. + Keyword Parameters + ------------------ + job_id : str + The job ID to search for (default: {None}). + owner : str + The owner of the job (default: {None}). + job_name : str + The job name search for (default: {None}). + dd_name : str + If populated, return ONLY this DD in the job list (default: {None}) + note: no routines call job_status with dd_name, so we are speeding this routine with + 'dd_scan=False'. + + Returns + ------- + Union[dict] + The status information for a list of jobs matching search criteria. If no job status is found it will return a ret_code diction with - parameter 'msg_txt" = "The job could not be found." + parameter 'msg_txt" = "The job could not be found.". """ arg_defs = dict( @@ -222,13 +260,17 @@ def job_status(job_id=None, owner=None, job_name=None, dd_name=None): def _parse_steps(job_str): - """Parse the dd section of output to retrieve step-wise CC's + """Parse the dd section of output to retrieve step-wise CC's. - Args: - job_str (str): The content for a given dd. + Parameters + ---------- + job_str : str + The content for a given dd. - Returns: - list[dict]: A list of step names listed as "step executed" the related CC. + Returns + ------- + Union[dict] + A list of step names listed as "step executed" the related CC. """ stp = [] if "STEP WAS EXECUTED" in job_str: @@ -245,6 +287,34 @@ def _parse_steps(job_str): def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=True, duration=0, timeout=0, start_time=timer()): + """Get job status. + + Parameters + ---------- + job_id : str + The job ID to search for (default: {None}). + owner : str + The owner of the job (default: {None}). + job_name : str + The job name search for (default: {None}). + dd_name : str + The data definition to retrieve (default: {None}). + dd_scan : bool + Whether or not to pull information from the dd's for this job {default: {True}}. + duration : int + The time the submitted job ran for. + timeout : int + How long to wait in seconds for a job to complete. + start_time : int + Time the JCL started its submission. + + Returns + ------- + Union[dict] + The output information for a list of jobs matching specified criteria. + If no job status is found it will return a ret_code diction with + parameter 'msg_txt" = "The job could not be found. + """ if job_id == "*": job_id_temp = None else: @@ -431,19 +501,25 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=T def _ddname_pattern(contents, resolve_dependencies): - """Resolver for ddname_pattern type arguments + """Resolver for ddname_pattern type arguments. - Arguments: - contents {bool} -- The contents of the argument. + Parameters + ---------- + contents : bool + The contents of the argument. resolved_dependencies {dict} -- Contains all of the dependencies and their contents, which have already been handled, for use during current arguments handling operations. - Raises: - ValueError: When contents is invalid argument type + Returns + ------- + str + The arguments contents after any necessary operations. - Returns: - str -- The arguments contents after any necessary operations. + Raises + ------ + ValueError + When contents is invalid argument type. """ if not re.fullmatch( r"^(?:[A-Z]{1}[A-Z0-9]{0,7})|(?:\?{1})$", From fe42127fc59c2f23d1a9f4f32193e30cffdeb8eb Mon Sep 17 00:00:00 2001 From: IsaacVRey <isaac.vega.rey@gmail.com> Date: Fri, 19 Apr 2024 10:59:24 -0600 Subject: [PATCH 360/495] [Documentation][ickdsf] Add docstrings to module_utils/ickdsf.py (#1331) * Add docstrings to module_utils/ickdsf.py * Create changelog fragment * Modified the google style to numpy * Update changelog fragment * Standarize numpy style --- .../1331-update-docstring-ickdsf.yml | 3 ++ plugins/module_utils/ickdsf.py | 32 +++++++++++++++++++ 2 files changed, 35 insertions(+) create mode 100644 changelogs/fragments/1331-update-docstring-ickdsf.yml diff --git a/changelogs/fragments/1331-update-docstring-ickdsf.yml b/changelogs/fragments/1331-update-docstring-ickdsf.yml new file mode 100644 index 000000000..545ba95c1 --- /dev/null +++ b/changelogs/fragments/1331-update-docstring-ickdsf.yml @@ -0,0 +1,3 @@ +trivial: + - ickdsf - Updated docstrings to numpy style for visual aid to developers. + (https://github.com/ansible-collections/ibm_zos_core/pull/1331). \ No newline at end of file diff --git a/plugins/module_utils/ickdsf.py b/plugins/module_utils/ickdsf.py index 67ddd3d9d..436750c21 100644 --- a/plugins/module_utils/ickdsf.py +++ b/plugins/module_utils/ickdsf.py @@ -26,6 +26,22 @@ def get_init_command(module, result, args): + """Get init command. + + Parameters + ---------- + module : obj + Object from the collection. + result : dic + Results dictionary. + args : dict + Arguments to be formatted. + + Returns + ------- + str + Formatted JCL strings for zos_mvs_raw. + """ # Get parameters from playbooks address = args.get('address') @@ -104,6 +120,22 @@ def get_init_command(module, result, args): def init(module, result, parsed_args): + """Init + + Parameters + ---------- + module : object + The module to give results of. + result : dict + The results of the process. + parsed_args : dict + Parsed arguments to be converted to command. + + Returns + ------- + dict + The dictionary with the results. + """ # Convert args parsed from module to ickdsf INIT command cmd = get_init_command(module, result, parsed_args) From 8da1199bdd717a19cd374f625e8a0c9e8cc3ee41 Mon Sep 17 00:00:00 2001 From: IsaacVRey <isaac.vega.rey@gmail.com> Date: Fri, 19 Apr 2024 13:28:40 -0600 Subject: [PATCH 361/495] [Documentation][import handler] Add docstrings to module_utils/import_handler.py (#1332) * Add docstrings to module_utils/ickdsf.py * Add docstrings to module_utils/import_handler.py * Delete modifications to ickdsf.py * Create changelog fragment * Revert changes in ickdsf.py * Modified the google style to numpy * Update changelog fragment * Standarize numpy style * Update import_handler.py --------- Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> --- .../1332-update-docstring-import_handler.yml | 3 + plugins/module_utils/import_handler.py | 92 ++++++++++++++++--- 2 files changed, 81 insertions(+), 14 deletions(-) create mode 100644 changelogs/fragments/1332-update-docstring-import_handler.yml diff --git a/changelogs/fragments/1332-update-docstring-import_handler.yml b/changelogs/fragments/1332-update-docstring-import_handler.yml new file mode 100644 index 000000000..5b32cd32e --- /dev/null +++ b/changelogs/fragments/1332-update-docstring-import_handler.yml @@ -0,0 +1,3 @@ +trivial: + - import_handler - Updated docstrings to numpy style for visual aid to developers. + (https://github.com/ansible-collections/ibm_zos_core/pull/1332). \ No newline at end of file diff --git a/plugins/module_utils/import_handler.py b/plugins/module_utils/import_handler.py index a7b41a619..507dd2f65 100644 --- a/plugins/module_utils/import_handler.py +++ b/plugins/module_utils/import_handler.py @@ -15,8 +15,24 @@ class MissingZOAUImport(object): + """Error when importing ZOAU. + """ def __getattr__(self, name): def method(*args, **kwargs): + """Raises ImportError as a result of a failed ZOAU import. + + Parameters + ---------- + *args : dict + Arguments ordered in a dictionary. + **kwargs : dict + Arguments ordered in a dictionary. + + Raises + ------ + ImportError + Unable to import a module or library. + """ raise ImportError( ( "ZOAU is not properly configured for Ansible. Unable to import zoautil_py. " @@ -28,26 +44,34 @@ def method(*args, **kwargs): class ZOAUImportError(object): - """This class serves as a wrapper for any kind of error when importing - ZOAU. Since ZOAU is used by both modules and module_utils, we need a way - to alert the user when they're trying to use a function that couldn't be - imported properly. If we only had to deal with this in modules, we could - just validate that imports worked at the start of their main functions, - but on utils, we don't have an entry point where we can validate this. - Just raising an exception when trying the import would be better, but that - introduces a failure on Ansible sanity tests, so we can't do it. - - Instead, we'll replace what would've been a ZOAU library with this class, - and the moment ANY method gets called, we finally raise an exception. - """ - def __init__(self, exception_traceback): - """When creating a new instance of this class, we save the traceback + """This class serves as a wrapper for any kind of error when importing + ZOAU. Since ZOAU is used by both modules and module_utils, we need a way + to alert the user when they're trying to use a function that couldn't be + imported properly. If we only had to deal with this in modules, we could + just validate that imports worked at the start of their main functions, + but on utils, we don't have an entry point where we can validate this. + Just raising an exception when trying the import would be better, but that + introduces a failure on Ansible sanity tests, so we can't do it. + + Instead, we'll replace what would've been a ZOAU library with this class, + and the moment ANY method gets called, we finally raise an exception. + When creating a new instance of this class, we save the traceback from the original exception so that users have more context when their task/code fails. The expected traceback is a string representation of it, not an actual traceback object. By importing `traceback` from the standard library and calling `traceback.format_exc()` we can get this string. + + Parameters + ---------- + exception_traceback : str + The formatted traceback of the exception. + + Attributes + ---------- + exception_traceback : str + The formatted traceback of the exception. """ self.traceback = exception_traceback @@ -58,6 +82,20 @@ def __getattr__(self, name): an error while importing ZOAU. """ def method(*args, **kwargs): + """Raises ImportError as a result of a failed ZOAU import. + + Parameters + ---------- + *args : dict + Arguments ordered in a dictionary. + **kwargs : dict + Arguments ordered in a dictionary. + + Raises + ------ + ImportError + Unable to import a module or library. + """ raise ImportError( ( "ZOAU is not properly configured for Ansible. Unable to import zoautil_py. " @@ -71,10 +109,36 @@ def method(*args, **kwargs): class MissingImport(object): def __init__(self, import_name=""): + """Error when it is unable to import a module due to it being missing. + + Parameters + ---------- + import_name : str + The name of the module to import. + + Attributes + ---------- + import_name : str + The name of the module to import. + """ self.import_name = import_name def __getattr__(self, name): def method(*args, **kwargs): + """Raises ImportError as a result of trying to import a missing module. + + Parameter + --------- + *args : dict + Arguments ordered in a dictionary. + **kwargs : dict + Arguments ordered in a dictionary. + + Raises + ------ + ImportError + Unable to import a module or library. + """ raise ImportError("Import {0} was not available.".format(self.import_name)) return method From 92653c5c50c77c87fa7a7afe4a13a5a19b53878e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Mon, 22 Apr 2024 11:53:43 -0600 Subject: [PATCH 362/495] [Bugfix][1301]Work_around_fix_false_positive (#1340) * First iteration work around * Get the fix stable and return test case * Add clean response * Fix sanity * Add stderr * Fix case sensitive * Comment fail test case * Fix upper case * Add fragment * Retur test case * Change fragment * Add coment and all cases * Add absent double quotes and special cases * Fix ansible sanity --------- Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> --- .../1340-Work_around_fix_false_positive.yml | 4 + plugins/modules/zos_blockinfile.py | 124 +++++++++++++++--- .../modules/test_zos_blockinfile_func.py | 38 +++--- 3 files changed, 129 insertions(+), 37 deletions(-) create mode 100644 changelogs/fragments/1340-Work_around_fix_false_positive.yml diff --git a/changelogs/fragments/1340-Work_around_fix_false_positive.yml b/changelogs/fragments/1340-Work_around_fix_false_positive.yml new file mode 100644 index 000000000..8e8360808 --- /dev/null +++ b/changelogs/fragments/1340-Work_around_fix_false_positive.yml @@ -0,0 +1,4 @@ +bugfixes: + - zos_blockinfile - Using double quotation marks inside a block resulted in a false + positive result with ZOAU 1.3. Fix now handles this special case to avoid false negatives. + (https://github.com/ansible-collections/ibm_zos_core/pull/1340). \ No newline at end of file diff --git a/plugins/modules/zos_blockinfile.py b/plugins/modules/zos_blockinfile.py index 8fd9701da..88f410cdb 100644 --- a/plugins/modules/zos_blockinfile.py +++ b/plugins/modules/zos_blockinfile.py @@ -414,6 +414,73 @@ def quotedString(string): return string.replace('"', "") +def quotedString_double_quotes(string): + # add escape if string was quoted + if not isinstance(string, str): + return string + return string.replace('"', '\\"') + + +def check_double_quotes(marker, ins_bef, ins_aft, block): + if marker: + if '"' in marker: + return True + if ins_bef: + if '"' in ins_bef: + return True + if ins_aft: + if '"' in ins_aft: + return True + if block: + if '"' in block: + return True + return False + + +def execute_dmod(src, block, marker, force, encoding, state, module, ins_bef=None, ins_aft=None): + block = block.replace('"', '\\"') + force = "-f" if force else "" + encoding = "-c {0}".format(encoding) if encoding else "" + marker = "-m \"{0}\"".format(marker) if marker else "" + if state: + if ins_aft: + if ins_aft == "EOF": + opts = f'"$ a\\{block}" "{src}"' + else: + opts = f'-s -e "/{ins_aft}/a\\{block}/$" -e "$ a\\{block}" "{src}"' + elif ins_bef: + if ins_bef == "BOF": + opts = f' "1 i\\{block}" "{src}" ' + else: + opts = f'-s -e "/{ins_bef}/i\\{block}/$" -e "$ a\\{block}" "{src}"' + + cmd = "dmod -b {0} {1} {2} {3}".format(force, encoding, marker, opts) + else: + cmd = """dmod -b {0} {1} {2} "//d" {4}""".format(force, encoding, marker, src) + + rc, stdout, stderr = module.run_command(cmd) + cmd = clean_command(cmd) + return rc, cmd + + +def clean_command(cmd): + cmd = cmd.replace('/c\\\\', '') + cmd = cmd.replace('/a\\\\', '', ) + cmd = cmd.replace('/i\\\\', '', ) + cmd = cmd.replace('$ a\\\\', '', ) + cmd = cmd.replace('1 i\\\\', '', ) + cmd = cmd.replace('/c\\', '') + cmd = cmd.replace('/a\\', '') + cmd = cmd.replace('/i\\', '') + cmd = cmd.replace('$ a\\', '') + cmd = cmd.replace('1 i\\', '') + cmd = cmd.replace('/d', '') + cmd = cmd.replace('\\\\d', '') + cmd = cmd.replace('\\n', '\n') + cmd = cmd.replace('\\"', '"') + return cmd + + def main(): module = AnsibleModule( argument_spec=dict( @@ -553,6 +620,7 @@ def main(): module.fail_json(msg=message) file_type = 0 + return_content = None if backup: # backup can be True(bool) or none-zero length string. string indicates that backup_name was provided. # setting backup to None if backup_name wasn't provided. if backup=None, Backup module will use @@ -566,29 +634,47 @@ def main(): result['backup_name'] = Backup.mvs_file_backup(dsn=src, bk_dsn=backup, tmphlq=tmphlq) except Exception as err: module.fail_json(msg="Unable to allocate backup {0} destination: {1}".format(backup, str(err))) + double_quotes_exists = check_double_quotes(marker, ins_bef, ins_aft, block) # state=present, insert/replace a block with matching regex pattern # state=absent, delete blocks with matching regex pattern if parsed_args.get('state') == 'present': - return_content = present(src, block, marker, ins_aft, ins_bef, encoding, force) + if double_quotes_exists: + rc, cmd = execute_dmod(src, block, quotedString_double_quotes(marker), force, encoding, True, module=module, + ins_bef=quotedString_double_quotes(ins_bef), ins_aft=quotedString_double_quotes(ins_aft)) + result['rc'] = rc + result['cmd'] = cmd + result['changed'] = True if rc == 0 else False + stderr = 'Failed to insert new entry' if rc != 0 else "" + else: + return_content = present(src, block, marker, ins_aft, ins_bef, encoding, force) else: - return_content = absent(src, marker, encoding, force) - stdout = return_content.stdout_response - stderr = return_content.stderr_response - rc = return_content.rc - stdout = stdout.replace('/d', '\\\\d') - try: - # Try to extract information from stdout - # The triple double quotes is required for special characters (/_) been scape - ret = json.loads("""{0}""".format(stdout)) - except Exception: - messageDict = dict(msg="ZOAU dmod return content is NOT in json format", stdout=str(stdout), stderr=str(stderr), rc=rc) - if result.get('backup_name'): - messageDict['backup_name'] = result['backup_name'] - module.fail_json(**messageDict) - - result['cmd'] = ret['data']['commands'] - result['changed'] = ret['data']['changed'] - result['found'] = ret['data']['found'] + if double_quotes_exists: + rc, cmd = execute_dmod(src, block, quotedString_double_quotes(marker), force, encoding, False, module=module) + result['rc'] = rc + result['cmd'] = cmd + result['changed'] = True if rc == 0 else False + stderr = 'Failed to remove entry' if rc != 0 else "" + else: + return_content = absent(src, marker, encoding, force) + # ZOAU 1.3.0 generate false positive working with double quotes (") the call generate distinct return when using and not + if not double_quotes_exists: + stdout = return_content.stdout_response + stderr = return_content.stderr_response + rc = return_content.rc + stdout = stdout.replace('/d', '\\\\d') + try: + # Try to extract information from stdout + # The triple double quotes is required for special characters (/_) been scape + ret = json.loads("""{0}""".format(stdout)) + except Exception: + messageDict = dict(msg="ZOAU dmod return content is NOT in json format", stdout=str(stdout), stderr=str(stderr), rc=rc) + if result.get('backup_name'): + messageDict['backup_name'] = result['backup_name'] + module.fail_json(**messageDict) + + result['cmd'] = ret['data']['commands'] + result['changed'] = ret['data']['changed'] + result['found'] = ret['data']['found'] # Only return 'rc' if stderr is not empty to not fail the playbook run in a nomatch case # That information will be given with 'changed' and 'found' if len(stderr): diff --git a/tests/functional/modules/test_zos_blockinfile_func.py b/tests/functional/modules/test_zos_blockinfile_func.py index 508a2ce8d..635da733e 100644 --- a/tests/functional/modules/test_zos_blockinfile_func.py +++ b/tests/functional/modules/test_zos_blockinfile_func.py @@ -864,24 +864,25 @@ def test_uss_block_insert_with_indentation_level_specified(ansible_zos_module): remove_uss_environment(ansible_zos_module) # Test case base on bug of dataset.blockifile -# GH Issue #1258 -#@pytest.mark.uss -#def test_uss_block_insert_with_doublequotes(ansible_zos_module): -# hosts = ansible_zos_module -# params = dict(insertafter="sleep 30;", block='cat "//OMVSADMI.CAT"\ncat "//OMVSADM.COPYMEM.TESTS" > test.txt', marker="// {mark} ANSIBLE MANAGED BLOCK", state="present") -# full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] -# content = TEST_CONTENT_DOUBLEQUOTES -# try: -# set_uss_environment(ansible_zos_module, content, full_path) -# params["path"] = full_path -# results = hosts.all.zos_blockinfile(**params) -# for result in results.contacted.values(): -# assert result.get("changed") == 1 -# results = hosts.all.shell(cmd="cat {0}".format(params["path"])) -# for result in results.contacted.values(): -# assert result.get("stdout") == EXPECTED_DOUBLE_QUOTES -# finally: -# remove_uss_environment(ansible_zos_module) +# GH Issue #1258 +@pytest.mark.uss +def test_uss_block_insert_with_doublequotes(ansible_zos_module): + hosts = ansible_zos_module + params = dict(insertafter="sleep 30;", block='cat "//OMVSADMI.CAT"\ncat "//OMVSADM.COPYMEM.TESTS" > test.txt', marker="// {mark} ANSIBLE MANAGED BLOCK", state="present") + full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] + content = TEST_CONTENT_DOUBLEQUOTES + try: + set_uss_environment(ansible_zos_module, content, full_path) + params["path"] = full_path + results = hosts.all.zos_blockinfile(**params) + for result in results.contacted.values(): + print(result) + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat {0}".format(params["path"])) + for result in results.contacted.values(): + assert result.get("stdout") == EXPECTED_DOUBLE_QUOTES + finally: + remove_uss_environment(ansible_zos_module) @pytest.mark.uss @@ -1412,6 +1413,7 @@ def test_ds_not_supported(ansible_zos_module, dstype): hosts.all.zos_data_set(name=ds_name, state="absent") +# Enhancemed #1339 @pytest.mark.ds @pytest.mark.parametrize("dstype", ["pds","pdse"]) def test_ds_block_insertafter_regex_fail(ansible_zos_module, dstype): From 9503a19090b1f3ea85148b311f37db5d164fadd6 Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Mon, 22 Apr 2024 16:33:22 -0400 Subject: [PATCH 363/495] Remove stack fault on longer running jobs (#1383) * commit with partial changelog. fix in job.py and correction to test module. * updated pr link in changelog fragment * Update job.py --------- Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> --- .../fragments/971-bug-job_submit-can-stacktrace.yml | 6 ++++++ plugins/module_utils/job.py | 9 +++------ tests/functional/modules/test_zos_job_submit_func.py | 1 - 3 files changed, 9 insertions(+), 7 deletions(-) create mode 100644 changelogs/fragments/971-bug-job_submit-can-stacktrace.yml diff --git a/changelogs/fragments/971-bug-job_submit-can-stacktrace.yml b/changelogs/fragments/971-bug-job_submit-can-stacktrace.yml new file mode 100644 index 000000000..e02daed4c --- /dev/null +++ b/changelogs/fragments/971-bug-job_submit-can-stacktrace.yml @@ -0,0 +1,6 @@ +trivial: + - job.py - generalized resolution of query_exception that may be thrown. + This should prevent the stack trace. + (https://github.com/ansible-collections/ibm_zos_core/pull/1383). + - test_zos_job_submit.py - Removed code that was hiding if a duration was not returned. + (https://github.com/ansible-collections/ibm_zos_core/pull/1383). diff --git a/plugins/module_utils/job.py b/plugins/module_utils/job.py index 72b72a90b..c25789030 100644 --- a/plugins/module_utils/job.py +++ b/plugins/module_utils/job.py @@ -386,9 +386,7 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=T try: list_of_dds = jobs.list_dds(entry.job_id) except exceptions.DDQueryException as err: - if 'BGYSC5201E' in str(err): - is_dd_query_exception = True - pass + is_dd_query_exception = True # Check if the Job has JESJCL, if not, its in the JES INPUT queue, thus wait the full wait_time_s. # Idea here is to force a TYPRUN{HOLD|JCLHOLD|COPY} job to go the full wait duration since we have @@ -409,9 +407,8 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=T is_jesjcl = True if search_dictionaries("dd_name", "JESJCL", list_of_dds) else False is_job_error_status = True if entry.status in JOB_ERROR_STATUSES else False except exceptions.DDQueryException as err: - if 'BGYSC5201E' in str(err): - is_dd_query_exception = True - continue + is_dd_query_exception = True + continue job["duration"] = duration for single_dd in list_of_dds: diff --git a/tests/functional/modules/test_zos_job_submit_func.py b/tests/functional/modules/test_zos_job_submit_func.py index 34fb39d4b..1e231f60d 100644 --- a/tests/functional/modules/test_zos_job_submit_func.py +++ b/tests/functional/modules/test_zos_job_submit_func.py @@ -695,7 +695,6 @@ def test_job_submit_max_rc(ansible_zos_module, args): #Expecting: - "The job return code 8 was non-zero in the job output, this job has failed" # - Consider using module zos_job_query to poll for a long running job or # increase option \\'wait_times_s` to a value greater than 10.", - duration = result.get('duration') if duration >= args["wait_time_s"]: From d450ee54aef25c43cf2b9b6a2c48a48456b1f6de Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Thu, 2 May 2024 11:56:51 -0400 Subject: [PATCH 364/495] Enhancement/1170/make pipeline 217 compatible (#1452) * add ignore 2.17 to remove git license warnings * added changelog fragment * corrected 2 issues in ignore involving ping.rexx * changing pipe import to use shlex * changed header string from : to - to pass frag lint * updated changelog to include PR number * removed old pipe import line --- ...hancememt-make-pipeline-217-compatible.yml | 3 +++ .../modules/test_zos_data_set_func.py | 2 +- tests/sanity/ignore-2.17.txt | 25 +++++++++++++++++++ 3 files changed, 29 insertions(+), 1 deletion(-) create mode 100644 changelogs/fragments/1170-enhancememt-make-pipeline-217-compatible.yml create mode 100644 tests/sanity/ignore-2.17.txt diff --git a/changelogs/fragments/1170-enhancememt-make-pipeline-217-compatible.yml b/changelogs/fragments/1170-enhancememt-make-pipeline-217-compatible.yml new file mode 100644 index 000000000..92f2d99b5 --- /dev/null +++ b/changelogs/fragments/1170-enhancememt-make-pipeline-217-compatible.yml @@ -0,0 +1,3 @@ +trivial: + - test system - added ignore to sanity + (https://github.com/ansible-collections/ibm_zos_core/pull/1452). diff --git a/tests/functional/modules/test_zos_data_set_func.py b/tests/functional/modules/test_zos_data_set_func.py index f96bfabdc..7dc1d9073 100644 --- a/tests/functional/modules/test_zos_data_set_func.py +++ b/tests/functional/modules/test_zos_data_set_func.py @@ -18,7 +18,7 @@ import pytest import time import subprocess -from pipes import quote +from shlex import quote from pprint import pprint from ibm_zos_core.tests.helpers.volumes import Volume_Handler diff --git a/tests/sanity/ignore-2.17.txt b/tests/sanity/ignore-2.17.txt new file mode 100644 index 000000000..7ae119205 --- /dev/null +++ b/tests/sanity/ignore-2.17.txt @@ -0,0 +1,25 @@ +plugins/modules/zos_apf.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_backup_restore.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_blockinfile.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_copy.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_data_set.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_encode.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_fetch.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_find.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_job_output.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_job_query.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_job_submit.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_lineinfile.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_mount.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_mvs_raw.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_operator.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_operator_action_query.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_ping.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_ping.rexx validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_ping.rexx validate-modules:invalid-extension # Opened issue https://github.com/ansible/ansible/issues/79784 +plugins/modules/zos_script.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_tso_command.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_gather_facts.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_volume_init.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_archive.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_unarchive.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 From ff38d5d0c45b8842bb249a99b57627604b9b1977 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Thu, 2 May 2024 12:17:55 -0600 Subject: [PATCH 365/495] [Enabler][module_utils/data_set] Add GDG and GDS name resolve functions. (#1467) * modified DatasetCreatedError message * Added gdg functions * Created unit test for validating gds relative name * Updated to fail when future gen * Update arg parser * Add escaping function for data set names * Add unit tests for name escaping * Remove calls to escape_data_set_name * renamed tests * Added MVSDataset class * Updated escaped symbols * Updated tests * Added utils * Add changelog * Uncommented test * Updated exception * Updated mvsdataset class * Updated class * Updated copyright year and uncommented test case --------- Co-authored-by: Ivan Moreno <iamorenosoto@gmail.com> --- .../fragments/1386-gdg-symbols-support.yml | 3 + plugins/module_utils/better_arg_parser.py | 7 +- plugins/module_utils/data_set.py | 158 +++++++++++++++++- .../module_utils/test_arg_parser.py | 14 +- tests/unit/test_data_set_utils.py | 52 ++++++ 5 files changed, 227 insertions(+), 7 deletions(-) create mode 100644 changelogs/fragments/1386-gdg-symbols-support.yml create mode 100644 tests/unit/test_data_set_utils.py diff --git a/changelogs/fragments/1386-gdg-symbols-support.yml b/changelogs/fragments/1386-gdg-symbols-support.yml new file mode 100644 index 000000000..d920172b8 --- /dev/null +++ b/changelogs/fragments/1386-gdg-symbols-support.yml @@ -0,0 +1,3 @@ +trivial: + - module_utils/data_set.py - Added new functions to support GDG and symbols. + (https://github.com/ansible-collections/ibm_zos_core/pull/1467). \ No newline at end of file diff --git a/plugins/module_utils/better_arg_parser.py b/plugins/module_utils/better_arg_parser.py index 6720f8d10..ef361e7f8 100644 --- a/plugins/module_utils/better_arg_parser.py +++ b/plugins/module_utils/better_arg_parser.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2020 +# Copyright (c) IBM Corporation 2020, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -329,7 +329,7 @@ def _data_set_type(self, contents, resolve_dependencies): str -- The arguments contents after any necessary operations. """ if not fullmatch( - r"^(?:(?:[A-Z$#@]{1}[A-Z0-9$#@-]{0,7})(?:[.]{1})){1,21}[A-Z$#@]{1}[A-Z0-9$#@-]{0,7}(?:\([A-Z$#@]{1}[A-Z0-9$#@]{0,7}\)){0,1}$", + r"^(?:(?:[A-Z$#@]{1}[A-Z0-9$#@-]{0,7})(?:[.]{1})){1,21}[A-Z$#@]{1}[A-Z0-9$#@-]{0,7}(?:\([A-Z$#@]{1}[A-Z0-9$#@]{0,7}\)|\(([-+]?[0-9]+)\)){0,1}$", str(contents), IGNORECASE, ): @@ -541,7 +541,8 @@ def _data_set_or_path_type(self, contents, resolve_dependencies): str -- The arguments contents after any necessary operations. """ if not fullmatch( - r"^(?:(?:[A-Z$#@]{1}[A-Z0-9$#@-]{0,7})(?:[.]{1})){1,21}[A-Z$#@]{1}[A-Z0-9$#@-]{0,7}(?:\([A-Z$#@]{1}[A-Z0-9$#@]{0,7}\)){0,1}$", + # HLQ and all middle level qualifiers. Last qualifier bef members. Normal members. GDS members. + r"^(?:(?:[A-Z$#@]{1}[A-Z0-9$#@-]{0,7})(?:[.]{1})){1,21}[A-Z$#@]{1}[A-Z0-9$#@-]{0,7}(?:\([A-Z$#@]{1}[A-Z0-9$#@]{0,7}\)|\(([-+]?[0-9]+)\)){0,1}$", str(contents), IGNORECASE, ): diff --git a/plugins/module_utils/data_set.py b/plugins/module_utils/data_set.py index 40c1a4047..a4910c6fa 100644 --- a/plugins/module_utils/data_set.py +++ b/plugins/module_utils/data_set.py @@ -1,4 +1,4 @@ -# Copyright (c) IBM Corporation 2020 - 2024 +# Copyright (c) IBM Corporation 2020, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -40,10 +40,12 @@ vtoc = MissingImport("vtoc") try: - from zoautil_py import datasets, exceptions + from zoautil_py import datasets, exceptions, gdgs except ImportError: datasets = ZOAUImportError(traceback.format_exc()) exceptions = ZOAUImportError(traceback.format_exc()) + gdgs = ZOAUImportError(traceback.format_exc()) + Dataset = ZOAUImportError(traceback.format_exc()) class DataSet(object): @@ -184,7 +186,7 @@ def ensure_present( except DatasetCreateError as e: raise_error = True # data set exists on volume - if "DatasetVerificationError" in e.msg or "Error Code: 0x4704" in e.msg: + if "Error Code: 0x4704" in e.msg: present, changed = DataSet.attempt_catalog_if_necessary( name, volumes ) @@ -1322,6 +1324,84 @@ def _is_vsam_from_listcat(name): return True return False + @staticmethod + def is_gds_relative_name(name): + """Determine if name is a gdg relative name based + on the GDS relative name syntax eg. 'USER.GDG(-2)'. + + Parameters + ---------- + name : str + Data set name to determine if is a GDS relative name. + + Returns + ------- + bool + Whether the name is a GDS relative name. + """ + pattern = r'(.+)\(([\\]?[+-]?\d+)\)' + match = re.fullmatch(pattern, name) + return bool(match) + + @staticmethod + def resolve_gds_absolute_name(relative_name): + """Given a GDS relative name, returns its absolute name. + + Parameters + ---------- + relative_name : str + GDS relative name to be resolved. + + Returns + ------- + str + GDS absolute name. + + Raises + ------ + GDSNameResolveError + Error resolving the GDS relative name, either because + the name is not a valid GDS syntax or failure to retrieve + the GDG data based on the gdg base name. + """ + pattern = r'(.+)\(([\\]?[-+]?\d+)\)' + match = re.search(pattern, relative_name) + try: + gdg_base = match.group(1) + rel_generation = int(match.group(2)) + if rel_generation > 0: + # Fail if we are trying to resolve a future generation. + raise Exception + gdg = gdgs.GenerationDataGroupView(name=gdg_base) + generations = gdg.generations() + absolute_name = generations[rel_generation - 1] + except Exception as e: + raise GDSNameResolveError(relative_name) + + return absolute_name + + @staticmethod + def escape_data_set_name(name): + """Escapes special characters ($, @, #) inside a data set name. + + Parameters + ---------- + name : str + Name of the data set. + + Returns + ------- + str + Escaped data set name. + """ + special_chars = ['$', '@', '#', '-'] + escaped_name = name.replace('\\', '') + + for char in special_chars: + escaped_name = escaped_name.replace(char, f"\\{char}") + + return escaped_name + @staticmethod def temp_name(hlq=""): """Get temporary data set name. @@ -1717,6 +1797,69 @@ def _process_listcat_output(self, output): return result +class MVSDataSet(): + """ + This class represents a z/OS data set that can be yet to be created or + already created in the system. It encapsulates the data set attributes + to easy access. + + """ + def __init__( + self, + name, + data_set_type, + state, + organization, + record_format, + volumes, + block_size, + record_length, + space_primary, + space_secondary, + space_type, + directory_blocks, + key_length, + key_offset, + sms_storage_class, + sms_data_class, + sms_management_class, + total_space, + used_space, + last_referenced, + ): + self.name = name + self.organization = organization + self.record_format = record_format + self.volumes = volumes + self.block_size = block_size + self.record_length = record_length + self.total_space = total_space + self.used_space = used_space + self.last_referenced = last_referenced + self.raw_name = name + self.data_set_type = data_set_type + self.state = state + self.space_primary = space_primary + self.space_secondary = space_secondary + self.space_type = space_type + self.directory_blocks = directory_blocks + self.key_length = key_length + self.key_offset = key_offset + self.sms_storage_class = sms_storage_class + self.sms_data_class = sms_data_class + self.sms_management_class = sms_management_class + self.volumes = volumes + self.is_gds_active = False + # If name has escaped chars or is GDS relative name we clean it. + self.name = DataSet.escape_data_set_name(self.name) + if DataSet.is_gds_relative_name(self.name): + try: + self.name = DataSet.resolve_gds_absolute_name(self.name) + except Exception as e: + # This means the generation is a positive version so is only used for creation. + self.is_gds_active = False + + def is_member(data_set): """Determine whether the input string specifies a data set member""" try: @@ -1899,3 +2042,12 @@ def __init__(self, data_set): "Close the dataset and try again".format(data_set) ) super().__init__(self.msg) + + +class GDSNameResolveError(Exception): + def __init__(self, data_set): + self.msg = ( + "Error resolving relative generation data set name. {0} " + "Make sure the generation exists and is active.".format(data_set) + ) + super().__init__(self.msg) diff --git a/tests/functional/module_utils/test_arg_parser.py b/tests/functional/module_utils/test_arg_parser.py index ebd4fbee6..c27885b7a 100644 --- a/tests/functional/module_utils/test_arg_parser.py +++ b/tests/functional/module_utils/test_arg_parser.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2020 +# Copyright (c) IBM Corporation 2020, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -734,6 +734,9 @@ def special_names_get_uppercase(value, dependencies, kwargs): ("data_set", "easy.dat@.s$t"), ("data_set", "e##@y.dat#@.set(h$ll0)"), ("data_set", "easy.da-a.set(######)"), + ("data_set", "easy.data.gdg(+2)"), + ("data_set", "easy.data.gdg(-1)"), + ("data_set", "easy.data.gdg(0)"), ("data_set_base", "easy.data.set"), ("data_set_base", "$asy.d@ta.$et"), ("data_set_base", "easy.dat@.s$t"), @@ -746,6 +749,9 @@ def special_names_get_uppercase(value, dependencies, kwargs): ("data_set_or_path", "easy.data.set(######)"), ("data_set_or_path", "e##@y.dat#@.set(hello)"), ("data_set_or_path", "easy.data.set(helloo)"), + ("data_set_or_path", "easy.data.gdg(+2)"), + ("data_set_or_path", "easy.data.gdg(-1)"), + ("data_set_or_path", "easy.data.gdg(0)"), ("data_set_or_path", "/usr/lpp/rsusr"), ], ) @@ -768,6 +774,9 @@ def test_data_set_type_no_invalid(arg_type, name): ("data_set", "$asy.d@ta.$et(0helo)"), ("data_set", "-##@y.dat#@.set(h$ll0)"), ("data_set", "1asy.da-a.set(######)"), + ("data_set", "easy.data.gdg(+2+)"), + ("data_set", "easy.data.gdg(--1)"), + ("data_set", "easy.data.gdg(-+1)"), ("data_set_base", "-asy.data.set"), ("data_set_base", "$asy.d@ta.$etdafsfsdfad"), ("data_set_member", "e##@y.dat#@.set(h$l-l0)"), @@ -779,6 +788,9 @@ def test_data_set_type_no_invalid(arg_type, name): ("data_set_or_path", "3asy.data.set(######)"), ("data_set_or_path", "e#^#@y.dat#@.set(hello)"), ("data_set_or_path", "easy.5at@@a.set(helloo)"), + ("data_set_or_path", "easy.data.gdg(+2+)"), + ("data_set_or_path", "easy.data.gdg(--1)"), + ("data_set_or_path", "easy.data.gdg(-+1)"), # ("data_set_or_path", "../lpp/rsusr"), ], ) diff --git a/tests/unit/test_data_set_utils.py b/tests/unit/test_data_set_utils.py new file mode 100644 index 000000000..f936b4134 --- /dev/null +++ b/tests/unit/test_data_set_utils.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- + +# Copyright (c) IBM Corporation 2024 +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +from ibm_zos_core.plugins.module_utils.data_set import ( + DataSet +) + +import pytest + + +gds_relative_test_data = [ + {"name": "USER.GDG(+1)", "valid_gds" : True}, + {"name": "USER.GDG(-3)", "valid_gds" : True}, + {"name": "USER.GDG(0)", "valid_gds" : True}, + {"name": "USER.GDG(+22)", "valid_gds" : True}, + {"name": "USER.GDG(-33)", "valid_gds" : True}, + {"name": "USER.GDG(MEMBER)", "valid_gds" : False}, + {"name": "USER.GDG.TEST", "valid_gds" : False}, + {"name": "USER.GDG(\-33)", "valid_gds": True}, + ] + +@pytest.mark.parametrize("gds", gds_relative_test_data) +def test_gds_valid_relative_name(gds): + assert gds["valid_gds"] == DataSet.is_gds_relative_name(gds["name"]) + + +special_chars_test_data = [ + {"name": "USER.SPECIAL.@TEST", "escaped_name" : r"USER.SPECIAL.\@TEST"}, + {"name": "USER.SPECIAL.A$A", "escaped_name" : r"USER.SPECIAL.A\$A"}, + {"name": "USER.SPECIAL.$TEST#@", "escaped_name" : r"USER.SPECIAL.\$TEST\#\@"}, + {"name": "USER.SPECIAL.TEST", "escaped_name" : r"USER.SPECIAL.TEST"}, + ] + +@pytest.mark.parametrize("data_set", special_chars_test_data) +def test_data_set_name_escaping(data_set): + print(data_set) + + assert data_set["escaped_name"] == DataSet.escape_data_set_name(data_set["name"]) From 2bc3854930ccc9e22fb15114e4b6a5ac41207b89 Mon Sep 17 00:00:00 2001 From: Demetri <dimatos@gmail.com> Date: Thu, 2 May 2024 11:29:30 -0700 Subject: [PATCH 366/495] Update ac tool to support ansible-lint 6.22 and expanded beyond RSA support. (#1484) * Update ac tool to support ansible-lint 6.22 and added more than rsa support Signed-off-by: ddimatos <dimatos@gmail.com> * update mount table Signed-off-by: ddimatos <dimatos@gmail.com> * Update ac to support selecting which managed venv to start or stop Signed-off-by: ddimatos <dimatos@gmail.com> * update ac tool Signed-off-by: ddimatos <dimatos@gmail.com> * Added support to create a managed venv from the latst ansible devel soure Signed-off-by: ddimatos <dimatos@gmail.com> * Update ac with spelling corrections Signed-off-by: ddimatos <dimatos@gmail.com> * Update mount table with correct mount point Signed-off-by: ddimatos <dimatos@gmail.com> * scripts/mounts.sh Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> --- ac | 81 +++++++++++++--- .../1484-update-ac-tool-ansible-lint.yml | 4 + scripts/mounts.env | 35 +++---- scripts/mounts.sh | 36 ++++++-- scripts/requirements-2.12.env | 3 +- scripts/requirements-2.13.env | 5 +- scripts/requirements-2.14.env | 5 +- scripts/requirements-2.15.env | 3 +- scripts/requirements-2.16.env | 1 + scripts/requirements-2.17.env | 33 +++++++ scripts/requirements-latest.env | 5 +- scripts/venv.sh | 92 ++++++++++++++----- 12 files changed, 240 insertions(+), 63 deletions(-) create mode 100644 changelogs/fragments/1484-update-ac-tool-ansible-lint.yml mode change 100644 => 100755 scripts/mounts.sh create mode 100644 scripts/requirements-2.17.env diff --git a/ac b/ac index 9aee6a02d..652ba7099 100755 --- a/ac +++ b/ac @@ -34,6 +34,7 @@ normalize_version() { echo "$@" | awk -F. '{ printf("%d%03d%03d%03d\n", $1,$2,$3,$4); }'; } +# Method determines the lastest (highest number) version venv that is managed by ./ac latest_venv(){ dir_version_latest="0" test_for_managed_venv=`ls -d "$VENV_HOME_MANAGED"/venv-[0-9].[0-9]* 2>/dev/null` @@ -48,6 +49,33 @@ latest_venv(){ fi } +# Method will take a venv name such as venv-2.16 and validate that it exists +validate_venv(){ + option_venv=$1 + #test_for_managed_venv=`ls -d "$VENV_HOME_MANAGED"/venv-[0-9].[0-9]* 2>/dev/null` + + if [[ "$option_venv" =~ "latest" ]]; then + test_for_managed_venv=`ls -d "$VENV_HOME_MANAGED"/venv-latest* 2>/dev/null` + if [[ "$test_for_managed_venv" =~ "latest" ]]; then + dir_version_latest=$option_venv + fi + #elif [ ! -z "$test_for_managed_venv" ]; then + else + for dir_version in `ls -d "$VENV_HOME_MANAGED"/venv-[0-9].[0-9]* | rev | cut -d"/" -f1| rev`; do + if [ $dir_version == $option_venv ]; then + dir_version_latest=$dir_version + fi + done + fi + + if [ ! -z "$dir_version_latest" ]; then + echo "${VENV_HOME_MANAGED}"/$dir_version_latest + else + echo "Unable to validate managed venv option $option_venv, exiting." + exit + fi +} + VENV=`latest_venv` file="" @@ -635,11 +663,24 @@ venv_setup(){ # TODO: Allow user to specify which venv they can start # ------------------------------------------------------------------------------ #->venv-start: -## Activate the lastest ansible managed virtual environment. -## Usage: ac [--venv-start] +## Activate the latest ansible managed virtual environment or optionally start +## by its name. +## Usage: ac [--venv-start --name <venv name>] ## Example: +## $ ac --venv-start --name venv-2.16 ## $ ac --venv-start venv_start(){ + option_name=$1 + + if [ "$option_name" ]; then + VENV=`validate_venv $option_name` + + if [ ! -z "$VENV" ]; then + VENV_BIN=$VENV/bin + VENV_BASENAME=`basename $VENV` + fi + fi + message "Starting managed python virtual environment: $VENV_BASENAME" #. $VENV_BIN/activate; exec /bin/sh -i /bin/bash -c ". $VENV_BIN/activate; exec /bin/sh -i" @@ -650,14 +691,27 @@ venv_start(){ # TODO: Allow user to specify which venv they can stop # ------------------------------------------------------------------------------ #->venv-stop: -## Deactivate the lastest ansible managed virtual environment. -## Usage: ac [--venv-stop] +## Deactivate the latest ansible managed virtual environment or optionally deactivate +## by its name. +## Usage: ac [--venv-stop --name <venv name>]] ## Example: +## $ ac --venv-stop --name venv-2.16 ## $ ac --venv-stop venv_stop(){ + option_name=$1 + + if [ "$option_name" ]; then + VENV=`validate_venv $option_name` + + if [ ! -z "$VENV" ]; then + VENV_BIN=$VENV/bin + VENV_BASENAME=`basename $VENV` + fi + fi + message "Stopping managed ansible virtual environment located at: $VENV_BASENAME" message "ac --venv-stop does not actually currently work, use CNTL-D" - . deactivate $VENV_BASENAME; + . deactivate $VENV_BASENAME 2>/dev/null; } # ============================================================================== @@ -691,11 +745,11 @@ while true; do ensure_managed_venv_exists $1 option_submitted="--ac-build" ;; - --ac-galaxy-importer) # Command + --ac-galaxy-importer) # Command ensure_managed_venv_exists $1 option_submitted="--ac-galaxy-importer" ;; - --ac-changelog) # Command + --ac-changelog) # Command ensure_managed_venv_exists $1 option_submitted="--ac-changelog" ;; @@ -762,7 +816,7 @@ while true; do ensure_managed_venv_exists $1 option_submitted="--venv-stop" ;; - --command|--command=?*) # option + --command|--command=?*) # option command=`option_processor $1 $2` option_sanitize $command shift @@ -777,7 +831,7 @@ while true; do option_sanitize $file shift ;; - --host|--host=?*) # option + --host|--host=?*) # option host=`option_processor $1 $2` option_sanitize $host shift @@ -787,6 +841,11 @@ while true; do option_sanitize $level shift ;; + --name|--name=?*) # option + name=`option_processor $1 $2` + option_sanitize $name + shift + ;; --out-file|--out-file=?*) # option out_file=`option_processor $1 $2` option_sanitize $out_file @@ -882,7 +941,7 @@ elif [ "$option_submitted" ] && [ "$option_submitted" = "--host-nodes" ] ; then elif [ "$option_submitted" ] && [ "$option_submitted" = "--venv-setup" ] ; then venv_setup $password elif [ "$option_submitted" ] && [ "$option_submitted" = "--venv-start" ] ; then - venv_start + venv_start $name elif [ "$option_submitted" ] && [ "$option_submitted" = "--venv-stop" ] ; then - venv_stop + venv_stop $name fi diff --git a/changelogs/fragments/1484-update-ac-tool-ansible-lint.yml b/changelogs/fragments/1484-update-ac-tool-ansible-lint.yml new file mode 100644 index 000000000..bb5f247a9 --- /dev/null +++ b/changelogs/fragments/1484-update-ac-tool-ansible-lint.yml @@ -0,0 +1,4 @@ +trivial: + - ac - Update ac tool with ansible-lint 6.22, update mount tables, add + support so any venv can be started. + (https://github.com/ansible-collections/ibm_zos_core/pull/1484). \ No newline at end of file diff --git a/scripts/mounts.env b/scripts/mounts.env index 7240eaaeb..18eae5ce1 100644 --- a/scripts/mounts.env +++ b/scripts/mounts.env @@ -28,21 +28,20 @@ # data_set - the z/OS data set containing the binaries to mount # space - must be a space before the closing quote # ------------------------------------------------------------------------------ -zoau_mount_list_str="1:1.2.0:/zoau/v1.2.0:IMSTESTU.ZOAU.V120.ZFS "\ -"2:1.0.0-ga:/zoau/v1.0.0-ga:IMSTESTU.ZOAU.V100.GA.ZFS "\ -"3:1.0.1-ga:/zoau/v1.0.1-ga:IMSTESTU.ZOAU.V101.GA.ZFS "\ -"6:1.0.2-ga:/zoau/v1.0.2-ga:IMSTESTU.ZOAU.V102.GA.ZFS "\ -"7:1.0.3-ga5:/zoau/v1.0.3-ga5:IMSTESTU.ZOAU.V103.GA5.ZFS "\ -"8:1.0.3-ptf2:/zoau/v1.0.3-ptf2:IMSTESTU.ZOAU.V103.PTF2.ZFS "\ -"9:1.1.0-ga:/zoau/v1.1.0-ga:IMSTESTU.ZOAU.V110.GA.ZFS "\ -"10:1.1.1-ptf1:/zoau/v1.1.1-ptf1:IMSTESTU.ZOAU.V111.PTF1.ZFS "\ -"11:1.2.1:/zoau/v1.2.1:IMSTESTU.ZOAU.V121.ZFS "\ -"12:1.2.2:/zoau/v1.2.2:IMSTESTU.ZOAU.V122.ZFS "\ -"13:1.2.3:/zoau/v1.2.3:IMSTESTU.ZOAU.V123.ZFS "\ -"14:1.2.4:/zoau/v1.2.4:IMSTESTU.ZOAU.V124.ZFS "\ -"15:1.2.5:/zoau/v1.2.5:IMSTESTU.ZOAU.V125.ZFS "\ -"16:1.3.0:/zoau/v1.3.0:IMSTESTU.ZOAU.V130.ZFS "\ -"17:latest:/zoau/latest:IMSTESTU.ZOAU.LATEST.ZFS " +zoau_mount_list_str="1:1.0.0-ga:/zoau/v1.0.0-ga:IMSTESTU.ZOAU.V100.GA.ZFS "\ +"2:1.0.1-ga:/zoau/v1.0.1-ga:IMSTESTU.ZOAU.V101.GA.ZFS "\ +"3:1.0.3-ptf2:/zoau/v1.0.3-ptf2:IMSTESTU.ZOAU.V103.PTF2.ZFS "\ +"4:1.1.0-ga:/zoau/v1.1.0-ga:IMSTESTU.ZOAU.V110.GA.ZFS "\ +"5:1.1.1-ptf1:/zoau/v1.1.1-ptf1:IMSTESTU.ZOAU.V111.PTF1.ZFS "\ +"6:1.2.0:/zoau/v1.2.0:IMSTESTU.ZOAU.V120.ZFS "\ +"7:1.2.1:/zoau/v1.2.1:IMSTESTU.ZOAU.V121.ZFS "\ +"8:1.2.2:/zoau/v1.2.2:IMSTESTU.ZOAU.V122.ZFS "\ +"9:1.2.3:/zoau/v1.2.3:IMSTESTU.ZOAU.V123.ZFS "\ +"10:1.2.4:/zoau/v1.2.4:IMSTESTU.ZOAU.V124.ZFS "\ +"11:1.2.5:/zoau/v1.2.5:IMSTESTU.ZOAU.V102.GA.ZFS "\ +"12:1.3.0:/zoau/v1.3.0:IMSTESTU.ZOAU.V103.GA5.ZFS "\ +"13:1.3.1:/zoau/v1.3.1:IMSTESTU.ZOAU.V130.ZFS "\ +"14:latest:/zoau/latest:IMSTESTU.ZOAU.LATEST.ZFS " # ------------------------------------------------------------------------------ # PYTHON MOUNT TABLE @@ -60,7 +59,8 @@ python_mount_list_str="1:3.8.2:/allpython/3.8.2/usr/lpp/IBM/cyp/v3r8/pyz:/allpyt "4:3.10:/allpython/3.10/usr/lpp/IBM/cyp/v3r10/pyz:/allpython/3.10:IMSTESTU.PYZ.V3A09.ZFS "\ "5:3.11:/allpython/3.11/usr/lpp/IBM/cyp/v3r11/pyz:/allpython/3.11:IMSTESTU.PYZ.V3B02.ZFS "\ "6:3.11-ga:/allpython/3.11-ga/usr/lpp/IBM/cyp/v3r11/pyz:/allpython/3.11-ga:IMSTESTU.PYZ.V311GA.ZFS "\ -"7:3.11-3:/allpython/3.11-3/usr/lpp/IBM/cyp/v3r11/pyz:/allpython/3.11-3:IMSTESTU.PYZ.V3B03.ZFS " +"7:3.11-3:/allpython/3.11-3/usr/lpp/IBM/cyp/v3r11/pyz:/allpython/3.11-3:IMSTESTU.PYZ.V3B03.ZFS "\ +"8:3.12:/allpython/3.12/usr/lpp/IBM/cyp/v3r11/pyz:/allpython/3.12:IMSTESTU.PYZ.V3C0.ZFS " # ------------------------------------------------------------------------------ # PYTHON PATH POINTS @@ -78,4 +78,5 @@ python_path_list_str="1:3.8.2:/allpython/3.8.2/usr/lpp/IBM/cyp/v3r8/pyz "\ "4:3.10:/allpython/3.10/usr/lpp/IBM/cyp/v3r10/pyz "\ "5:3.11:/allpython/3.11/usr/lpp/IBM/cyp/v3r11/pyz "\ "6:3.11:/allpython/3.11-ga/usr/lpp/IBM/cyp/v3r11/pyz "\ -"7:3.11-3:/allpython/3.11-3/usr/lpp/IBM/cyp/v3r11/pyz " \ No newline at end of file +"7:3.11-3:/allpython/3.11-3/usr/lpp/IBM/cyp/v3r11/pyz "\ +"8:3.12:/allpython/3.12/usr/lpp/IBM/cyp/v3r12/pyz " \ No newline at end of file diff --git a/scripts/mounts.sh b/scripts/mounts.sh old mode 100644 new mode 100755 index 7ce7252ca..a244bc6d6 --- a/scripts/mounts.sh +++ b/scripts/mounts.sh @@ -24,7 +24,16 @@ # ------------------------------------------------------------------------------ # Globals # ------------------------------------------------------------------------------ -cd $(dirname $0) +#cd $(dirname $0) + +script_directory=$(cd -- "$(dirname -- "$0")" 2>/dev/null && pwd) + +# Depending on from where the file is sourced it can result in null so default it to . +if [ ! -n "$script_directory" ]; then + script_directory="." +fi + +cd ${script_directory} # Current shell, bash returns 'bash' CURR_SHELL=`echo $$ $SHELL | cut -d " " -f 2 | sed 's|.*/||'` @@ -245,16 +254,31 @@ mount(){ # If zoau_mounted_data_set is empty or does not match expected, it means we should perform the mount if [ "$zoau_mounted_data_set" != "$zoau_data_set" ]; then - echo "Mouting ZOAU ${zoau_version} on data set ${zoau_data_set} to path ${zoau_mount}." - - # If zoau_mounted_data_set not empty, compare the mount points and if they match, then unmount. - # Note, the mount point could be root (/) waitng for children so lets compare before unmounting. + # If zoau_mounted_data_set not empty, compare the mount points and if they match, then continue below if [ ! -z "${zoau_mounted_data_set}" ]; then temp_mount=`df ${zoau_mount} 2>/dev/null | tr -s [:blank:] | tail -n +2 |cut -d' ' -f 1` + + # If zoau_mount is mounted to a different data set it means there has been a mount table update + # and it should be remounted with the new data set. + if [ "${zoau_mounted_data_set}" != "${zoau_data_set}" ]; then + # If the data set is mounted elsewhere, it needs to be unmounted so the mount command can succeed, + # thus zoau_to_unmount will eval to where the zoau_data_set might be mounted. + zoau_to_unmount=`df |grep ${zoau_data_set} | cut -d' ' -f 1` + if [ ! -z "${zoau_to_unmount}" ]; then + echo "Unmouting path ${zoau_to_unmount} from data set ${zoau_data_set} so that the data set can be mounted to ${zoau_mount}." + /usr/sbin/unmount ${zoau_to_unmount} 2>/dev/null + fi + fi + + # If the mount points match then unmount so that a mount can be performed because it could mean the + # data set has been refreshed. if [ "${zoau_mount}" = "${temp_mount}" ]; then - /usr/sbin/unmount ${zoau_mount} + # If you try to unmount / because that is where zoau_mount evals to currently, consume the error + echo "Unmouting path ${zoau_mount} from data set ${zoau_data_set} so that the mount point can be refreshed with the data set." + /usr/sbin/unmount ${zoau_mount} 2>/dev/null fi fi + echo "Mouting ZOAU ${zoau_version} on data set ${zoau_data_set} to path ${zoau_mount}." mkdir -p ${zoau_mount} /usr/sbin/mount ${1} ${zoau_data_set} ${zoau_mount} else diff --git a/scripts/requirements-2.12.env b/scripts/requirements-2.12.env index 4f6add957..630b617ad 100644 --- a/scripts/requirements-2.12.env +++ b/scripts/requirements-2.12.env @@ -25,8 +25,9 @@ requirements=( "ansible-core:2.12.10" "pylint" "rstcheck" +"ansible-lint:6.22.2" ) python=( "<=:python:3.10" -) \ No newline at end of file +) diff --git a/scripts/requirements-2.13.env b/scripts/requirements-2.13.env index cfce646d0..a649e0cf7 100644 --- a/scripts/requirements-2.13.env +++ b/scripts/requirements-2.13.env @@ -22,11 +22,12 @@ # ============================================================================== requirements=( -"ansible-core:2.13.7" +"ansible-core:2.13.13" "pylint" "rstcheck" +"ansible-lint:6.22.2" ) python=( "<=:python:3.10" -) \ No newline at end of file +) diff --git a/scripts/requirements-2.14.env b/scripts/requirements-2.14.env index f1c423f8b..9b4c12673 100644 --- a/scripts/requirements-2.14.env +++ b/scripts/requirements-2.14.env @@ -22,11 +22,12 @@ # ============================================================================== requirements=( -"ansible-core:2.14.1" +"ansible-core:2.14.16" "pylint" "rstcheck" +"ansible-lint:6.22.2" ) python=( "<=:python:3.11" -) \ No newline at end of file +) diff --git a/scripts/requirements-2.15.env b/scripts/requirements-2.15.env index 3d94e55af..7f0f42b1b 100644 --- a/scripts/requirements-2.15.env +++ b/scripts/requirements-2.15.env @@ -22,9 +22,10 @@ # ============================================================================== requirements=( -"ansible-core:2.15.0" +"ansible-core:2.15.11" "pylint" "rstcheck" +"ansible-lint:6.22.2" ) python=( diff --git a/scripts/requirements-2.16.env b/scripts/requirements-2.16.env index 2d0ed42a1..1ac4c4fa4 100644 --- a/scripts/requirements-2.16.env +++ b/scripts/requirements-2.16.env @@ -25,6 +25,7 @@ requirements=( "ansible-core:2.16.3" "pylint" "rstcheck" +"ansible-lint:6.22.2" ) python=( diff --git a/scripts/requirements-2.17.env b/scripts/requirements-2.17.env new file mode 100644 index 000000000..c61c03626 --- /dev/null +++ b/scripts/requirements-2.17.env @@ -0,0 +1,33 @@ +#!/bin/sh +# ============================================================================== +# Copyright (c) IBM Corporation 2024 +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== + +# ============================================================================== +# File name must adhere to reqs-<ansible-major-minor>.sh, supporting concurrent +# patches could come and to do that you would want to not use unique files for +# each ansible version but instead have this file provide the meta-data such +# the sourcing scrips know to create additional ansible venvs and maybe even +# clean up if they are removed from this file. +# eg venvs = [requirements, requirements2, requirments3] +# ============================================================================== + +requirements=( +"ansible-core:2.17.0b1" +"pylint" +"rstcheck" +"ansible-lint:6.22.2" +) + +python=( +"<=:python:3.12" +) diff --git a/scripts/requirements-latest.env b/scripts/requirements-latest.env index 505ef1261..27e9ffdc4 100644 --- a/scripts/requirements-latest.env +++ b/scripts/requirements-latest.env @@ -21,11 +21,14 @@ # eg venvs = [requirements, requirements2, requirments3] # ============================================================================== +# You can install the devel branch of ansible-core directly from GitHub with pip, +# venv.sh needs to be updated to support this. +# pip install https://github.com/ansible/ansible/archive/devel.tar.gz requirements=( "ansible-core:latest" ) python=( -"python:3.9" +"<=:python:3.12" ) \ No newline at end of file diff --git a/scripts/venv.sh b/scripts/venv.sh index 56756d16e..597aeee23 100755 --- a/scripts/venv.sh +++ b/scripts/venv.sh @@ -103,8 +103,9 @@ echo_requirements(){ fi done - #for file in `ls requirements-*.sh`; do - for file in `ls requirements-[0-9].[0-9]*.env`; do + # for file in `ls requirements-*.sh`; do + # for file in `ls requirements-[0-9].[0-9]*.env`; do + for file in `ls *requirements-[0-9].[0-9]*.env* *requirements-latest* 2>/dev/null`; do # Unset the vars from any prior sourced files unset REQ unset requirements @@ -116,9 +117,16 @@ echo_requirements(){ echo "Unable to source file: $file." fi - ansible_version=`echo $file | cut -d"-" -f2|cut -d"." -f1,2` - venv_name="venv"-$ansible_version - echo $venv_name + if [[ "$file" =~ "latest" ]]; then + # eg extract 'latest' from requirements-latest file name + ansible_version=`echo $file | cut -d"-" -f2|cut -d"." -f1` + venv_name="venv"-$ansible_version + else + # eg extract 2.14 from requirements-2.14.sh file name + ansible_version=`echo $file | cut -d"-" -f2|cut -d"." -f1,2` + venv_name="venv"-$ansible_version + #echo $venv_name + fi for pkg in "${requirements[@]}" ; do key=${pkg%%:*} @@ -127,6 +135,8 @@ echo_requirements(){ REQ=${REQ}"$key;\\n" elif [ -z "$value" ]; then REQ=${REQ}"$key;\\n" + elif [ "$key" = "ansible-core" ] && [ "$value" = "latest" ]; then + REQ=${REQ}"https://github.com/ansible/ansible/archive/devel.tar.gz\\n" else REQ=${REQ}"$key==$value;\\n" fi @@ -159,11 +169,18 @@ make_venv_dirs(){ # We should think about the idea of allowing: # --force, --synch, --update thus not sure we need this method and better to # manage this logic inline to write_req - for file in `ls requirements-[0-9].[0-9]*.env`; do - # eg extract 2.14 from requirements-2.14.sh file name - ansible_version=`echo $file | cut -d"-" -f2|cut -d"." -f1,2` - venv_name="venv"-$ansible_version - #echo $venv_name + # for file in `ls requirements-[0-9].[0-9]*.env`; do + for file in `ls *requirements-[0-9].[0-9]*.env* *requirements-latest* 2>/dev/null`; do + if [[ "$file" =~ "latest" ]]; then + # eg extract 'latest' from requirements-latest file name + ansible_version=`echo $file | cut -d"-" -f2|cut -d"." -f1` + venv_name="venv"-$ansible_version + else + # eg extract 2.14 from requirements-2.14.sh file name + ansible_version=`echo $file | cut -d"-" -f2|cut -d"." -f1,2` + venv_name="venv"-$ansible_version + #echo $venv_name + fi mkdir -p "${VENV_HOME_MANAGED}"/"${venv_name}" done } @@ -197,8 +214,9 @@ write_requirements(){ fi done - #for file in `ls requirements-*.sh`; do - for file in `ls requirements-[0-9].[0-9]*.env`; do + # for file in `ls requirements-*.sh`; do + # for file in `ls requirements-[0-9].[0-9]*.env`; do + for file in `ls *requirements-[0-9].[0-9]*.env* *requirements-latest* 2>/dev/null`; do # Unset the vars from any prior sourced files unset REQ unset requirements @@ -210,9 +228,17 @@ write_requirements(){ echo "Unable to source file: $file." fi - ansible_version=`echo $file | cut -d"-" -f2|cut -d"." -f1,2` - venv_name="venv"-$ansible_version - echo $venv_name + if [[ "$file" =~ "latest" ]]; then + # eg extract 'latest' from requirements-latest file name + ansible_version=`echo $file | cut -d"-" -f2|cut -d"." -f1` + venv_name="venv"-$ansible_version + echo $venv_name + else + # eg extract 2.14 from requirements-2.14.sh file name + ansible_version=`echo $file | cut -d"-" -f2|cut -d"." -f1,2` + venv_name="venv"-$ansible_version + echo $venv_name + fi for pkg in "${requirements[@]}" ; do key=${pkg%%:*} @@ -222,6 +248,8 @@ write_requirements(){ REQ=${REQ}"$key;\\n" elif [ -z "$value" ]; then REQ=${REQ}"$key;\\n" + elif [ "$key" = "ansible-core" ] && [ "$value" = "latest" ]; then + REQ=${REQ}"https://github.com/ansible/ansible/archive/devel.tar.gz\\n" else REQ=${REQ}"$key==$value;\\n" fi @@ -290,11 +318,21 @@ write_requirements(){ create_venv_and_pip_install_req(){ - for file in `ls requirements-[0-9].[0-9]*.env`; do + # for file in `ls requirements-[0-9].[0-9]*.env`; do + for file in `ls *requirements-[0-9].[0-9]*.env* *requirements-latest* 2>/dev/null`; do unset venv - ansible_version=`echo $file | cut -d"-" -f2|cut -d"." -f1,2` - venv_name="venv"-$ansible_version - echo $venv_name + + if [[ "$file" =~ "latest" ]]; then + # eg extract 'latest' from requirements-latest file name + ansible_version=`echo $file | cut -d"-" -f2|cut -d"." -f1` + venv_name="venv"-$ansible_version + else + # eg extract 2.14 from requirements-2.14.sh file name + ansible_version=`echo $file | cut -d"-" -f2|cut -d"." -f1,2` + venv_name="venv"-$ansible_version + #echo $venv_name + fi + if [ -f $VENV_HOME_MANAGED/$venv_name/requirements.txt ]; then echo ${DIVIDER} @@ -339,8 +377,13 @@ discover_python(){ VERSION_PYTHON=$required_python fi - # Don't use which, it only will find first in path within the script - # for python_found in `which python3 | cut -d" " -f3`; do + # Note: + # Don't use which, it only will find first in path within the script + # for python_found in `which python3 | cut -d" " -f3`; do + # + # The 'pys' array will search for pythons in reverse order, once it finds one that matches + # the requirements-x.xx.env it does not continue searching. Reverse order is important to + # maintain. pys=("python3.14" "python3.13" "python3.12" "python3.11" "python3.10" "python3.9" "python3.8") rc=1 for py in "${pys[@]}"; do @@ -517,7 +560,12 @@ ssh_host_credentials(){ # field in the host_list not equal to none, it will also be copied for jenkins ################################################################################ ssh_copy_key(){ - sshpass -p "${pass}" ssh-copy-id -o StrictHostKeyChecking=no -i ~/.ssh/id_rsa.pub "${user}"@"${host}" &> /dev/null + # sshpass -p "${pass}" ssh-copy-id -o StrictHostKeyChecking=no -i ~/.ssh/id_rsa.pub "${user}"@"${host}" &> /dev/null + # Copying all public keys because some of the sytems don't agree on RSA as a mutual signature algorithm + for pub in `ls ~/.ssh/*.pub`; do + echo "Copying public key ${pub} to host ${host}" + sshpass -p "${pass}" ssh-copy-id -o StrictHostKeyChecking=no -i "${pub}" "${user}"@"${host}" &> /dev/null; + done if [ ! -z "$SSH_KEY_PIPELINE" ]; then echo "${SSH_KEY_PIPELINE}" | ssh "${user}"@"${host}" "mkdir -p ~/.ssh && cat >> ~/.ssh/authorized_keys" From a53266ba0c7ebe99fb28e4efda54c1472175efe7 Mon Sep 17 00:00:00 2001 From: Ivan Moreno <iamorenosoto@gmail.com> Date: Thu, 2 May 2024 13:49:29 -0700 Subject: [PATCH 367/495] [Enabler] [module_utils/data_set.py] Add default values to MVSDataSet class (#1495) * Add default values to MVSDataSet class * Add changelog fragment --- .../1495-default-values-data-set-class.yml | 4 ++ plugins/module_utils/data_set.py | 38 +++++++++---------- 2 files changed, 23 insertions(+), 19 deletions(-) create mode 100644 changelogs/fragments/1495-default-values-data-set-class.yml diff --git a/changelogs/fragments/1495-default-values-data-set-class.yml b/changelogs/fragments/1495-default-values-data-set-class.yml new file mode 100644 index 000000000..eb8118ad2 --- /dev/null +++ b/changelogs/fragments/1495-default-values-data-set-class.yml @@ -0,0 +1,4 @@ +trivial: + - module_utils/data_set.py - add default values to the init method of + MVSDataSet. + (https://github.com/ansible-collections/ibm_zos_core/pull/1495). \ No newline at end of file diff --git a/plugins/module_utils/data_set.py b/plugins/module_utils/data_set.py index a4910c6fa..b153fcf4a 100644 --- a/plugins/module_utils/data_set.py +++ b/plugins/module_utils/data_set.py @@ -1807,25 +1807,25 @@ class MVSDataSet(): def __init__( self, name, - data_set_type, - state, - organization, - record_format, - volumes, - block_size, - record_length, - space_primary, - space_secondary, - space_type, - directory_blocks, - key_length, - key_offset, - sms_storage_class, - sms_data_class, - sms_management_class, - total_space, - used_space, - last_referenced, + data_set_type=None, + state=None, + organization=None, + record_format=None, + volumes=None, + block_size=None, + record_length=None, + space_primary=None, + space_secondary=None, + space_type=None, + directory_blocks=None, + key_length=None, + key_offset=None, + sms_storage_class=None, + sms_data_class=None, + sms_management_class=None, + total_space=None, + used_space=None, + last_referenced=None, ): self.name = name self.organization = organization From b16ceb62643f8da42535100f6f68b9271758b93f Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Fri, 3 May 2024 09:42:47 -0600 Subject: [PATCH 368/495] [Enabler] Fix resolve GDS function to return a string with the gds name instead of zoau dataset object (#1496) * fixed gds function * Updated changelogs --- changelogs/fragments/1496-fix-gds-resolve.yml | 4 ++++ plugins/module_utils/data_set.py | 6 +++--- 2 files changed, 7 insertions(+), 3 deletions(-) create mode 100644 changelogs/fragments/1496-fix-gds-resolve.yml diff --git a/changelogs/fragments/1496-fix-gds-resolve.yml b/changelogs/fragments/1496-fix-gds-resolve.yml new file mode 100644 index 000000000..17683da96 --- /dev/null +++ b/changelogs/fragments/1496-fix-gds-resolve.yml @@ -0,0 +1,4 @@ +trivial: + - module_utils/data_set.py - resolve_gds_absolute_name was returning a ZOAU dataset type instead + of string. + (https://github.com/ansible-collections/ibm_zos_core/pull/1496). \ No newline at end of file diff --git a/plugins/module_utils/data_set.py b/plugins/module_utils/data_set.py index b153fcf4a..c6b6b4e52 100644 --- a/plugins/module_utils/data_set.py +++ b/plugins/module_utils/data_set.py @@ -1374,11 +1374,11 @@ def resolve_gds_absolute_name(relative_name): raise Exception gdg = gdgs.GenerationDataGroupView(name=gdg_base) generations = gdg.generations() - absolute_name = generations[rel_generation - 1] - except Exception as e: + gds = generations[rel_generation - 1] + except Exception: raise GDSNameResolveError(relative_name) - return absolute_name + return gds.name @staticmethod def escape_data_set_name(name): From c8f1166b6c76faeba2b545f31b14c5252504370e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Mon, 6 May 2024 10:34:49 -0600 Subject: [PATCH 369/495] Enabler/1319/redesign mvs raw (#1470) * First iteration to clean up * Add classes aside * Fix output * Add changes * Final design * Fix ansible-lint * Fix mvs_raw * Remove dataset_exist and put utility * Add documentation on numpy * Add description --- .../fragments/1470-redesign_mvs_raw.yml | 3 + plugins/module_utils/zos_mvs_raw.py | 375 ++++- plugins/modules/zos_mvs_raw.py | 1217 ++++++++--------- 3 files changed, 900 insertions(+), 695 deletions(-) create mode 100644 changelogs/fragments/1470-redesign_mvs_raw.yml diff --git a/changelogs/fragments/1470-redesign_mvs_raw.yml b/changelogs/fragments/1470-redesign_mvs_raw.yml new file mode 100644 index 000000000..5fc3ae6dd --- /dev/null +++ b/changelogs/fragments/1470-redesign_mvs_raw.yml @@ -0,0 +1,3 @@ +minor_changes: + - zos_mvs_raw - Redesign the wrappers of dd clases to use properly the arguments. + (https://github.com/ansible-collections/ibm_zos_core/pull/1470). \ No newline at end of file diff --git a/plugins/module_utils/zos_mvs_raw.py b/plugins/module_utils/zos_mvs_raw.py index 466775939..ba74ca38a 100644 --- a/plugins/module_utils/zos_mvs_raw.py +++ b/plugins/module_utils/zos_mvs_raw.py @@ -18,6 +18,17 @@ AnsibleModuleHelper, ) +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.dd_statement import ( + FileDefinition, + DatasetDefinition, + InputDefinition, + OutputDefinition, +) +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.data_set import DataSet +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import ( + backup as zos_backup, +) + class MVSCmd(object): """Provides an interface to execute authorized and unauthorized MVS commands. @@ -27,13 +38,19 @@ class MVSCmd(object): def execute(pgm, dds, parm="", debug=False, verbose=False, tmp_hlq=None): """Execute an unauthorized MVS command. - Args: - pgm (str): The name of the program to execute. - dds (list[DDStatement]): A list of DDStatement objects. - parm (str, optional): Argument string if required by the program. Defaults to "". + Parameters + ---------- + pgm : str + The name of the program to execute. + dds : list[DDStatement] + A list of DDStatement objects. + parm : str, optional) + Argument string if required by the program. Defaults to "". - Returns: - MVSCmdResponse: The response of the command. + Returns + ------- + MVSCmdResponse : object + The response of the command. """ module = AnsibleModuleHelper(argument_spec={}) command = "mvscmd {0} {1} {2} {3}".format( @@ -49,14 +66,21 @@ def execute(pgm, dds, parm="", debug=False, verbose=False, tmp_hlq=None): def execute_authorized(pgm, dds, parm="", debug=False, verbose=False, tmp_hlq=None): """Execute an authorized MVS command. - Args: - pgm (str): The name of the program to execute. - dds (list[DDStatement]): A list of DDStatement objects. - parm (str, optional): Argument string if required by the program. Defaults to "". - tmp_hlq (str): The name of the temporary high level qualifier to use for temp data sets. + Parameters + ---------- + pgm : str + The name of the program to execute. + dds : list[DDStatement] + A list of DDStatement objects. + parm : str, optional + Argument string if required by the program. Defaults to "". + tmp_hlq : str + The name of the temporary high level qualifier to use for temp data sets. - Returns: - MVSCmdResponse: The response of the command. + Returns + ------- + MVSCmdResponse : object + The response of the command. """ module = AnsibleModuleHelper(argument_spec={}) command = "mvscmdauth {0} {1} {2} {3} ".format( @@ -72,13 +96,19 @@ def execute_authorized(pgm, dds, parm="", debug=False, verbose=False, tmp_hlq=No def _build_command(pgm, dds, parm): """Build the command string to be used by ZOAU mvscmd/mvscmdauth. - Args: - pgm (str): [description] - dds (list[DDStatement]): A list of DDStatement objects. - parm (str, optional): Argument string if required by the program. Defaults to "". + Parameters + ---------- + pgm : str + [description] + dds : list[DDStatement] + A list of DDStatement objects. + parm : str, optional + Argument string if required by the program. Defaults to "". - Returns: - str: Command string formatted as expected by mvscmd/mvscmdauth. + Returns + ------- + command : str + Command string formatted as expected by mvscmd/mvscmdauth. """ args_string = "" if parm: @@ -99,3 +129,310 @@ def __init__(self, rc, stdout, stderr): self.rc = rc self.stdout = stdout self.stderr = stderr + + +class RawDatasetDefinition(DatasetDefinition): + """Wrapper around DatasetDefinition to contain information about + desired return contents. + """ + + def __init__( + self, + data_set_name, + disposition="", + disposition_normal=None, + disposition_abnormal=None, + space_type=None, + space_primary=None, + space_secondary=None, + volumes=None, + sms_management_class=None, + sms_storage_class=None, + sms_data_class=None, + block_size=None, + directory_blocks=None, + key_label=None, + type=None, + encryption_key_1=None, + encryption_key_2=None, + key_length=None, + key_offset=None, + record_length=None, + record_format=None, + reuse=None, + replace=None, + backup=None, + return_content=None, + tmphlq=None, + **kwargs + ): + """ + DatasetDefinition (DatasetDefinition): Dataset DD data type to be used in a DDStatement. + Parameters + ---------- + Args: + data_set_name : str + The name of the data set. + disposition : str, optional + The disposition of the data set. Defaults to "". + type : str, optional + The type of the data set. Defaults to None. + space_primary : int, optional + The primary amount of space of the data set. Defaults to None. + space_secondary : int, optional + The secondary amount of space of the data set. Defaults to None. + space_type : str, optional + The unit of space to use for primary and secondary space. Defaults to None. + disposition_normal : str, optional + What to do with the data set after normal termination of the program. Defaults to None. + disposition_abnormal : str, optional + What to do with the data set after abnormal termination of the program. Defaults to None. + block_size : int, optional + The block size of the data set. Defaults to None. + directory_blocks : int, optional + The number of directory blocks to allocate for the data set. Defaults to None. + record_format : str, optional + The record format of the data set. Defaults to None. + record_length : int, optional + The length, in bytes, of each record in the data set. Defaults to None. + sms_storage_class : str, optional + The storage class for an SMS-managed dataset. Defaults to None. + sms_data_class : str, optional + The data class for an SMS-managed dataset. Defaults to None. + sms_management_class : str, optional + The management class for an SMS-managed dataset. Defaults to None. + key_length : int, optional + The key length of a record. Defaults to None. + key_offset : int, optional + The key offset is the position of the first byte of the key + in each logical record of a the specified VSAM data set. Defaults to None. + volumes : list, optional + A list of volume serials.. Defaults to None. + key_label : str, optional + The label for the encryption key used by the system to encrypt the data set. Defaults to None. + encryption_key_1 : dict, optional + The label for the key encrypting key used by the Encryption Key Manager and how the label + for the key encrypting key specified. + encryption_key_2 : dict, optional + The label for the key encrypting key used by the Encryption Key Manager and how the label + for the key encrypting key specified + reuse : bool, optional + Determines if data set should be reused. Defaults to None. + replace : bool, optional + Determines if data set should be replaced. Defaults to None. + backup : bool, optional + Determines if a backup should be made of existing data set when disposition=NEW, replace=true, + and a data set with the desired name is found.. Defaults to None. + return_content : dict, optional + Determines how content should be returned to the user. Defaults to None. + tmphlq : str, optional + HLQ to be used for temporary datasets. Defaults to None. + ---------- + """ + self.backup = None + self.return_content = ReturnContent(**(return_content or {})) + self.tmphlq = tmphlq + primary_unit = space_type + secondary_unit = space_type + key_label1 = None + key_encoding1 = None + key_label2 = None + key_encoding2 = None + if encryption_key_1: + if encryption_key_1.get("label"): + key_label1 = encryption_key_1.get("label") + if encryption_key_1.get("encoding"): + key_encoding1 = encryption_key_1.get("encoding") + if encryption_key_2: + if encryption_key_2.get("label"): + key_label2 = encryption_key_2.get("label") + if encryption_key_2.get("encoding"): + key_encoding2 = encryption_key_2.get("encoding") + + should_reuse = False + if (reuse or replace) and DataSet.data_set_exists(data_set_name, volumes): + if reuse: + should_reuse = True + elif replace: + if backup: + self.backup = zos_backup.mvs_file_backup(data_set_name, None, tmphlq) + DataSet.delete(data_set_name) + + if not should_reuse: + super().__init__( + dataset_name=data_set_name, + disposition=disposition, + type=type, + primary=space_primary, + primary_unit=primary_unit, + secondary=space_secondary, + secondary_unit=secondary_unit, + normal_disposition=disposition_normal, + conditional_disposition=disposition_abnormal, + block_size=block_size, + directory_blocks=directory_blocks, + record_format=record_format, + record_length=record_length, + storage_class=sms_storage_class, + data_class=sms_data_class, + management_class=sms_management_class, + key_length=key_length, + key_offset=key_offset, + volumes=volumes, + dataset_key_label=key_label, + key_label1=key_label1, + key_encoding1=key_encoding1, + key_label2=key_label2, + key_encoding2=key_encoding2, + ) + else: + # TODO: determine if encoding labels are useful for existing data sets + super().__init__( + dataset_name=data_set_name, + disposition="shr", + type=type, + normal_disposition=disposition_normal, + conditional_disposition=disposition_abnormal, + volumes=volumes, + dataset_key_label=key_label, + key_label1=key_label1, + key_encoding1=key_encoding1, + key_label2=key_label2, + key_encoding2=key_encoding2, + ) + + +class RawFileDefinition(FileDefinition): + """Wrapper around FileDefinition to contain information about + desired return contents. + """ + + def __init__( + self, + path, + disposition_normal=None, + disposition_abnormal=None, + mode=None, + status_group=None, + access_group=None, + file_data_type=None, + block_size=None, + record_length=None, + record_format=None, + return_content=None, + **kwargs + ): + """ + FileDefinition (FileDefinition): File DD data type to be used in a DDStatement. + Parameters + ---------- + path : str + An absolute UNIX file path. + disposition_normal : str, optional + What to do with path after normal program termination. Defaults to None. + disposition_abnormal : str, optional + What to do with path after abnormal program termination. Defaults to None. + mode : int, optional + The file access attributes for the UNIX file being allocated. Defaults to None. + access_group : str, optional + The access mode for UNIX file. Defaults to None. + status_group : list[str], optional + The status for UNIX file being allocated. Defaults to None. + file_data_type : str, optional + The type of data that is (or will be) stored in the UNIX file. Defaults to None. + record_length : int, optional + The specified logical record length for the UNIX file. Defaults to None. + block_size : int, optional + The specified block size for the UNIX file being allocated. Defaults to None. + record_format : str, optional + The specified record format for the UNIX file. Defaults to None. + return_content : dict, optional + Determines how content should be returned to the user. Defaults to None. + """ + self.return_content = ReturnContent(**(return_content or {})) + super().__init__( + path_name=path, + normal_disposition=disposition_normal, + conditional_disposition=disposition_abnormal, + path_mode=mode, + access_group=access_group, + status_group=status_group, + file_data=file_data_type, + record_length=record_length, + block_size=block_size, + record_format=record_format, + ) + + +class RawInputDefinition(InputDefinition): + """Wrapper around InputDefinition to contain information about + desired return contents. + """ + + def __init__( + self, + content="", + return_content=None, + tmphlq="", + **kwargs + ): + """ + InputDefinition (InputDefinition): Input DD data type to be used in a DDStatement. + Parameters + ---------- + content : str + The content to write to temporary data set / stdin. + return_content : dict, optional + Determines how content should be returned to the user. Defaults to {}. + """ + self.return_content = ReturnContent(**(return_content or {})) + super().__init__( + content=content, + tmphlq=tmphlq) + + +class RawOutputDefinition(OutputDefinition): + """Wrapper around OutputDefinition to contain information about + desired return contents. + """ + + def __init__( + self, + return_content=None, + tmphlq="", + **kwargs + ): + """ + OutputDefinition (OutputDefinition): Output DD data type to be used in a DDStatement. + Parameters + ---------- + content : str + The content to write to temporary data set / stdin. + return_content : dict, optional + Determines how content should be returned to the user. Defaults to {}. + """ + self.return_content = ReturnContent(**(return_content or {})) + super().__init__( + tmphlq=tmphlq + ) + + +class ReturnContent(object): + """Holds information about what type of content + should be returned for a particular DD, if any. + """ + + def __init__(self, type=None, src_encoding=None, response_encoding=None): + """ + Parameters + ---------- + type : str, optional + The type of content to return. + src_encoding : str, optional + The encoding of the data set or file on the z/OS system. + response_encoding : str, optional + The encoding to use when returning the contents of the data set or file. + """ + self.type = type + self.src_encoding = src_encoding + self.response_encoding = response_encoding diff --git a/plugins/modules/zos_mvs_raw.py b/plugins/modules/zos_mvs_raw.py index bcac50a63..af24cd8e3 100644 --- a/plugins/modules/zos_mvs_raw.py +++ b/plugins/modules/zos_mvs_raw.py @@ -1278,6 +1278,14 @@ backup_name: description: The name of the data set containing the backup of content from data set in original_name. type: str +stdout: + description: The stdout from a USS command or MVS command, if applicable. + returned: always + type: str +stderr: + description: The stderr of a USS command or MVS command, if applicable. + returned: failure + type: str """ EXAMPLES = r""" @@ -1584,18 +1592,16 @@ from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.dd_statement import ( DDStatement, - FileDefinition, - DatasetDefinition, - InputDefinition, - OutputDefinition, DummyDefinition, VIODefinition, ) -from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.data_set import DataSet -from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.zos_mvs_raw import MVSCmd -from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import ( - backup as zos_backup, +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.zos_mvs_raw import ( + MVSCmd, + RawDatasetDefinition, + RawFileDefinition, + RawInputDefinition, + RawOutputDefinition, ) from ansible.module_utils.basic import AnsibleModule @@ -1614,9 +1620,16 @@ ENCODING_ENVIRONMENT_VARS = {"_BPXK_AUTOCVT": "OFF"} -# hold backup names in easy to access location in -# in case exception is raised -# this global list is only used in case of exception +ACCESS_GROUP_NAME_MAP = { + "read_only": "ordonly", + "write_only": "owronly", + "read_write": "ordwr", + "r": "ordonly", + "w": "owronly", + "rw": "ordwr", +} + + backups = [] @@ -1785,9 +1798,7 @@ def run_module(): ) ) - dd_data_set = dict( - type="dict", options=combine_dicts(dd_name_base, dd_data_set_base) - ) + dd_data_set = dict(type="dict", options=combine_dicts(dd_name_base, dd_data_set_base)) dd_unix = dict(type="dict", options=combine_dicts(dd_name_base, dd_unix_base)) dd_input = dict(type="dict", options=combine_dicts(dd_name_base, dd_input_base)) dd_output = dict(type="dict", options=combine_dicts(dd_name_base, dd_output_base)) @@ -1816,6 +1827,12 @@ def run_module(): ), ) + # ---------------------------------------------------------------------------- # + # Validate arguments # + # ---------------------------------------------------------------------------- # + + module = AnsibleModule(argument_spec=module_args, supports_check_mode=True) + # ---------------------------------------------------------------------------- # # Main Logic # # ---------------------------------------------------------------------------- # @@ -1823,7 +1840,7 @@ def run_module(): result = dict(changed=False, dd_names=[], ret_code=dict(code=8)) response = {} dd_statements = [] - module = AnsibleModule(argument_spec=module_args, supports_check_mode=True) + if not module.check_mode: try: parms = parse_and_validate_args(module.params) @@ -1839,7 +1856,7 @@ def run_module(): dd_statements=dd_statements, authorized=authorized, verbose=verbose, - tmp_hlq=tmphlq, + tmphlq=tmphlq, ) if program_response.rc != 0 and program_response.stderr: raise ZOSRawError( @@ -1847,7 +1864,7 @@ def run_module(): "{0} {1}".format(program_response.stdout, program_response.stderr), ) - response = build_response(program_response.rc, dd_statements) + response = build_response(program_response.rc, dd_statements, program_response.stdout) result["changed"] = True except Exception as e: result["backups"] = backups @@ -1856,18 +1873,21 @@ def run_module(): result = dict(changed=True, dd_names=[], ret_code=dict(code=0)) to_return = combine_dicts(result, response) module.exit_json(**to_return) - # ---------------------------------------------------------------------------- # def parse_and_validate_args(params): """Perform additional argument validation to validate and update input content, - Args: - params (dict): The raw module parameters as provided by AnsibleModule. + Parameters + ---------- + params : dict + The raw module parameters as provided by AnsibleModule. - Returns: - dict: The module parameters after validation and content updates. + Returns + ------- + parsed_args : dict + The module parameters after validation and content updates. """ dd_name_base = dict(dd_name=dict(type="dd", required=True)) @@ -1933,8 +1953,8 @@ def parse_and_validate_args(params): type="dict", options=dict( type=dict(type="str", choices=["text", "base64"], required=True), - src_encoding=dict(type=encoding, default="ibm-1047"), - response_encoding=dict(type=encoding, default="iso8859-1"), + src_encoding=dict(type="str", default="ibm-1047"), + response_encoding=dict(type="str", default="iso8859-1"), ), ), reuse=dict(type=reuse, default=False, dependencies=["disposition"]), @@ -1950,8 +1970,8 @@ def parse_and_validate_args(params): type="dict", options=dict( type=dict(type="str", choices=["text", "base64"], required=True), - src_encoding=dict(type=encoding, default="ibm-1047"), - response_encoding=dict(type=encoding, default="iso8859-1"), + src_encoding=dict(type="str", default="ibm-1047"), + response_encoding=dict(type="str", default="iso8859-1"), ), ), ) @@ -1962,8 +1982,8 @@ def parse_and_validate_args(params): required=True, options=dict( type=dict(type="str", choices=["text", "base64"], required=True), - src_encoding=dict(type=encoding, default="ibm-1047"), - response_encoding=dict(type=encoding, default="iso8859-1"), + src_encoding=dict(type="str", default="ibm-1047"), + response_encoding=dict(type="str", default="iso8859-1"), ), ), ) @@ -1989,8 +2009,8 @@ def parse_and_validate_args(params): type="dict", options=dict( type=dict(type="str", choices=["text", "base64"], required=True), - src_encoding=dict(type=encoding, default="ibm-1047"), - response_encoding=dict(type=encoding, default="iso8859-1"), + src_encoding=dict(type="str", default="ibm-1047"), + response_encoding=dict(type="str", default="iso8859-1"), ), ), ) @@ -2011,9 +2031,7 @@ def parse_and_validate_args(params): ) ) - dd_data_set = dict( - type="dict", options=combine_dicts(dd_name_base, dd_data_set_base) - ) + dd_data_set = dict(type="dict", options=combine_dicts(dd_name_base, dd_data_set_base)) dd_unix = dict(type="dict", options=combine_dicts(dd_name_base, dd_unix_base)) dd_input = dict(type="dict", options=combine_dicts(dd_name_base, dd_input_base)) dd_output = dict(type="dict", options=combine_dicts(dd_name_base, dd_output_base)) @@ -2053,12 +2071,17 @@ def combine_dicts(dict1, dict2): """Combine two dictionaries. Provides clean way to combine two dictionaries in python >= 2 - Args: - dict1 (dict): The first dict to add to combine - dict2 (dict): The second dict to add to combine - - Returns: - dict: The combination of dict1 and dict2. + Parameters + ---------- + dict1 : dict + The first dict to add to combine + dict2 : dict + The second dict to add to combine + + Returns + ------- + merged_dict : dict + The combination of dict1 and dict2. """ merged_dict = dict1.copy() merged_dict.update(dict2) @@ -2067,12 +2090,23 @@ def combine_dicts(dict1, dict2): def key_length(contents, dependencies): """Validates key length - Args: - contents (int): argument contents - dependencies (dict): Any dependent arguments - Returns: - int: provided key length + Parameters + ---------- + contents : int + Argument contents + dependencies : dict + Any dependent arguments + + Raises + ------- + ValueError : str + When invalid argument provided. + + Returns + ------- + contents : int + Provided key length """ if contents is None: return contents @@ -2088,12 +2122,23 @@ def key_length(contents, dependencies): def key_offset(contents, dependencies): """Validates key offset - Args: - contents (int): argument contents - dependencies (dict): Any dependent arguments - Returns: - int: provided key offset + Parameters + ---------- + contents : int + Argument contents + dependencies : dict + Any dependent arguments + + Raises + ------- + ValueError : str + When invalid argument provided. + + Returns + ------- + contents : int + Provided key offset """ if contents is None: return contents @@ -2110,12 +2155,18 @@ def key_offset(contents, dependencies): def key_length_default(contents, dependencies): """Determines default key length - Args: - contents (int): argument contents - dependencies (dict): Any dependent arguments - Returns: - int: default key length + Parameters + ---------- + contents : int + Argument contents + dependencies : dict + Any dependent arguments + + Returns + ------- + length : int + Default key length """ KEY_LENGTH = 5 length = None @@ -2128,12 +2179,18 @@ def key_length_default(contents, dependencies): def key_offset_default(contents, dependencies): """Determines default key offset - Args: - contents (int): argument contents - dependencies (dict): Any dependent arguments - Returns: - int: default key offset + Parameters + ---------- + contents : int + Argument contents + dependencies : dict + Any dependent arguments + + Returns + ------- + offset : int + Default key offset """ KEY_OFFSET = 0 offset = None @@ -2144,74 +2201,20 @@ def key_offset_default(contents, dependencies): return offset -def encoding(contents, dependencies): - """Validates encoding arguments - - Args: - contents (str): argument contents - dependencies (dict): Any dependent arguments - - Raises: - ValueError: Provided encoding not found in list of valid encodings. - ValueError: Provided encoding had invalid characters for encoding name. - - Returns: - str: valid encoding - """ - encoding = None - if contents is None: - encoding = None - valid_encodings = [] - if contents: - valid_encodings = get_valid_encodings() - if valid_encodings: - if contents.lower() not in valid_encodings: - raise ValueError( - 'Provided encoding "{0}" is not valid. Valid encodings are: {1}.'.format( - contents, ", ".join(valid_encodings) - ) - ) - else: - # if can't get list of encodings perform basic check for bad characters - if not re.fullmatch(r"^[A-Z0-9-]{2,}$", str(contents), re.IGNORECASE): - raise ValueError( - 'Provided encoding "{0}" is not valid. Valid encodings are: {1}.'.format( - contents, ", ".join(valid_encodings) - ) - ) - encoding = contents - return encoding - - -def get_valid_encodings(): - """Retrieve all valid encodings from the system - - Returns: - list[str]: list of all valid encodings on the system - """ - module = AnsibleModuleHelper(argument_spec={}) - valid_encodings = [] - rc, stdout, stderr = module.run_command("iconv -l") - if rc or stderr: - return valid_encodings - if stdout: - # ignores first line of output which will be "Character sets:"" - stdout_lines = [line.lower() for line in stdout.split("\n")[1:]] - for line in stdout_lines: - encodings_from_line = line.split() - valid_encodings += encodings_from_line - return valid_encodings - - def dd_content(contents, dependencies): """Reformats dd content arguments - Args: - contents (Union[str, list[str]]): argument contents - dependencies (dict): Any dependent arguments - - Returns: - str: content string to save to data set + Parameters + ---------- + contents : Union[str, list[str]] + Argument contents + dependencies : dict + Any dependent arguments + + Returns + ------- + contents : str + Content string to save to data set """ if contents is None: return None @@ -2225,15 +2228,76 @@ def dd_content(contents, dependencies): return contents +def modify_contents(contents): + """Return the content of dd_input to a valid form for a JCL program. + + Parameters + ---------- + contents : str or list + The string or list with the program. + + Returns + ------- + contents : str + The content in a proper multi line str. + """ + if not isinstance(contents, list): + contents = list(contents.split("\n")) + contents = prepend_spaces(contents) + contents = "\n".join(contents) + return contents + + +def prepend_spaces(lines): + """Return the array with two spaces at the beggining. + + Parameters + ---------- + lines : list + The list with a line of a program. + + Raises + ------- + ValueError : str + When invalid argument provided. + + Returns + ------- + new_lines : list[str] + The list in a proper two spaces and the code. + """ + module = AnsibleModuleHelper(argument_spec={}) + for index, line in enumerate(lines): + if len(line) > 0: + if len(line) > 80: + msg = """Length of line {0} is over 80 characters. The maximum length allowed is 80 characters, including 2 spaces at the beginning. + If the two spaces are not present, the module will add them to ensure columns 1 and 2 are blank. """ + module.fail_json(msg=msg.format(line)) + else: + if len(line) > 1 and line[0] != " " and line[1] != " ": + if len(line) > 78: + msg = """Length of line {0} is over 80 characters. The maximum length allowed is 80 characters, including 2 spaces at the beginning. + If the two spaces are not present, the module will add them to ensure columns 1 and 2 are blank. """ + module.fail_json(msg=msg.format(line)) + else: + lines[index] = " {0}".format(line) + return lines + + def sms_class(contents, dependencies): """Validates provided sms class is of valid length. - Args: - contents (str): argument contents - dependencies (dict): Any dependent arguments - - Returns: - str: the sms class + Parameters + ---------- + contents : str + Argument contents + dependencies : dict + Any dependent arguments + + Returns + ------- + contents : str + the sms class """ if not contents: return None @@ -2250,15 +2314,22 @@ def sms_class(contents, dependencies): def volumes(contents, dependencies): """Validate volume arguments. - Args: - contents (Union[str, list[str]]): The contents provided for the volume argument. - dependencies (dict): Any arguments this argument is dependent on. - - Raises: - ValueError: When invalid argument provided. - - Returns: - list[str]: The contents returned as a list of volumes + Parameters + ---------- + contents : Union[str, list[str]] + The contents provided for the volume argument. + dependencies : dict + Any arguments this argument is dependent on. + + Raises + ------- + ValueError : str + When invalid argument provided. + + Returns + ------- + contents : list[str] + The contents returned as a list of volumes """ if not contents: return None @@ -2278,15 +2349,22 @@ def volumes(contents, dependencies): def reuse(contents, dependencies): """Validate reuse argument. - Args: - contents (bool): The contents provided for the reuse argument. - dependencies (dict): Any arguments this argument is dependent on. - - Raises: - ValueError: When invalid argument provided. - - Returns: - bool: The value of reuse. + Parameters + ---------- + contents : bool + The contents provided for the reuse argument. + dependencies : dict + Any arguments this argument is dependent on. + + Raises + ------- + ValueError : str + When invalid argument provided. + + Returns + ------- + contents : bool + The value of reuse. """ if contents is True and dependencies.get("disposition") != "new": raise ValueError('Argument "reuse" is only valid when "disposition" is "new".') @@ -2296,15 +2374,22 @@ def reuse(contents, dependencies): def replace(contents, dependencies): """Validate replace argument. - Args: - contents (bool): The contents provided for the replace argument. - dependencies (dict): Any arguments this argument is dependent on. - - Raises: - ValueError: When invalid argument provided. - - Returns: - bool: The value of replace. + Parameters + ---------- + contents : bool + The contents provided for the replace argument. + dependencies : dict + Any arguments this argument is dependent on. + + Raises + ------- + ValueError : str + When invalid argument provided. + + Returns + ------- + contents : bool + The value of replace. """ if contents is True and dependencies.get("reuse") is True: raise ValueError('Arguments "replace" and "reuse" are mutually exclusive.') @@ -2318,15 +2403,22 @@ def replace(contents, dependencies): def backup(contents, dependencies): """Validate backup argument. - Args: - contents (bool): The contents provided for the backup argument. - dependencies (dict): Any arguments this argument is dependent on. - - Raises: - ValueError: When invalid argument provided. - - Returns: - bool: The value of backup. + Parameters + ---------- + contents : bool + The contents provided for the backup argument. + dependencies : dict + Any arguments this argument is dependent on. + + Raises + ------- + ValueError : str + When invalid argument provided. + + Returns + ------- + contents : bool + The value of backup. """ if contents is True and dependencies.get("replace") is False: raise ValueError('Argument "backup" is only valid when "replace" is True.') @@ -2336,12 +2428,17 @@ def backup(contents, dependencies): def status_group(contents, dependencies): """Validate status group argument. - Args: - contents (list[str]): The contents provided for the status_group argument. - dependencies (dict): Any arguments this argument is dependent on. - - Returns: - list[str]: The access group as expected by mvscmd. + Parameters + ---------- + contents : list[str] + The contents provided for the status_group argument. + dependencies : dict + Any arguments this argument is dependent on. + + Returns + ------- + contents : list[str] + The access group as expected by mvscmd. """ if not contents: return None @@ -2361,21 +2458,18 @@ def status_group(contents, dependencies): def access_group(contents, dependencies): """Validate access group argument. - Args: - contents (str): The contents provided for the access_group argument. - dependencies (dict): Any arguments this argument is dependent on. - - Returns: - str: The access group as expected by mvscmd. + Parameters + ---------- + contents : str + The contents provided for the access_group argument. + dependencies : dict + Any arguments this argument is dependent on. + + Returns + ------- + contents : str + The access group as expected by mvscmd. """ - ACCESS_GROUP_NAME_MAP = { - "read_only": "ordonly", - "write_only": "owronly", - "read_write": "ordwr", - "r": "ordonly", - "w": "owronly", - "rw": "ordwr", - } if contents and ACCESS_GROUP_NAME_MAP.get(contents): contents = ACCESS_GROUP_NAME_MAP.get(contents) return contents @@ -2384,19 +2478,26 @@ def access_group(contents, dependencies): def build_dd_statements(parms): """Build a list of DDStatement objects from provided module parms. - Args: - parms (dict): Module parms after formatting and validation. + Parameters + ---------- + parms : dict + Module parms after formatting and validation. - Raises: - ValueError: If no data definition can be found matching provided DD type. + Raises + ------- + ValueError : None + If no data definition can be found matching provided DD type. - Returns: - list[DDStatement]: List of DDStatement objects representing DD statements specified in module parms. + Returns + ------- + dd_statements : list[DDStatement] + List of DDStatement objects representing DD statements specified in module parms. """ dd_statements = [] + tmphlq = parms.get("tmphlq") for dd in parms.get("dds"): - dd_name = get_dd_name(dd) - dd = set_extra_attributes_in_dd(dd, parms) + dd_name, key = get_dd_name_and_key(dd) + dd = set_extra_attributes_in_dd(dd, tmphlq, key) data_definition = build_data_definition(dd) if data_definition is None: raise ValueError("No valid data definition found.") @@ -2405,80 +2506,122 @@ def build_dd_statements(parms): return dd_statements -def get_dd_name(dd): - """Get the DD name from a DD parm as specified in module parms. +def get_key(dd): + """ + Get the key of the dd. + Parameters + ---------- + dd : dict + A single DD parm as specified in module parms. + + Returns + ------- + key : str + Type of dd. + """ + dd_key = "" + keys_list = list(dd.keys()) + for key in keys_list: + if "dd" in key: + dd_key = key + return dd_key - Args: - dd (dict): A single DD parm as specified in module parms. - Returns: - str: The DD name. +def get_dd_name_and_key(dd): + """ + Get the key and dd_name of the dd. + Parameters + ---------- + dd : dict + A single DD parm as specified in module parms. + + Returns + ------- + dd_name : str + Identifier of the dd. + key : str + Type of dd. """ dd_name = "" + key = "" if dd.get("dd_data_set"): dd_name = dd.get("dd_data_set").get("dd_name") + key = "dd_data_set" elif dd.get("dd_unix"): dd_name = dd.get("dd_unix").get("dd_name") + key = "dd_unix" elif dd.get("dd_input"): dd_name = dd.get("dd_input").get("dd_name") + key = "dd_input" elif dd.get("dd_output"): dd_name = dd.get("dd_output").get("dd_name") + key = "dd_output" elif dd.get("dd_vio"): dd_name = dd.get("dd_vio").get("dd_name") + key = "dd_vio" elif dd.get("dd_dummy"): dd_name = dd.get("dd_dummy").get("dd_name") + key = "dd_dummy" elif dd.get("dd_concat"): dd_name = dd.get("dd_concat").get("dd_name") - return dd_name + key = "dd_concat" + return dd_name, key -def set_extra_attributes_in_dd(dd, parms): +def set_extra_attributes_in_dd(dd, tmphlq, key): """ Set any extra attributes in dds like in global tmp_hlq. - Args: - dd (dict): A single DD parm as specified in module parms. - - Returns: - dd (dict): A single DD parm as specified in module parms. + Parameters + ---------- + dd : dict + A single DD parm as specified in module parms. + + Returns + ------- + dd : dict + A single DD parm as specified in module parms. """ - tmphlq = parms.get("tmp_hlq") - if dd.get("dd_data_set"): - dd.get("dd_data_set")["tmphlq"] = tmphlq - elif dd.get("dd_input"): - dd.get("dd_input")["tmphlq"] = tmphlq - elif dd.get("dd_output"): - dd.get("dd_output")["tmphlq"] = tmphlq - elif dd.get("dd_vio"): - dd.get("dd_vio")["tmphlq"] = tmphlq - elif dd.get("dd_concat"): + if key == "dd_concat": for single_dd in dd.get("dd_concat").get("dds", []): - set_extra_attributes_in_dd(single_dd, parms) + key_concat = get_key(single_dd) + set_extra_attributes_in_dd(single_dd, tmphlq, key_concat) + elif dd.get(key): + dd.get(key)["tmphlq"] = tmphlq return dd def build_data_definition(dd): """Build a DataDefinition object for a particular DD parameter. - Args: - dd (dict): A single DD parm as specified in module parms. + Parameters + ---------- + dd : dict + A single DD parm as specified in module parms. - Returns: - Union[list[RawDatasetDefinition, RawFileDefinition, - RawInputDefinition], - RawDatasetDefinition, RawFileDefinition, - RawInputDefinition, DummyDefinition]: The DataDefinition object or a list of DataDefinition objects. + Returns + ------- + data_definition : Union[list[RawDatasetDefinition, RawFileDefinition,vRawInputDefinition], + RawDatasetDefinition, RawFileDefinition, RawInputDefinition, DummyDefinition] + The DataDefinition object or a list of DataDefinition objects. """ data_definition = None if dd.get("dd_data_set"): - data_definition = RawDatasetDefinition(**(dd.get("dd_data_set"))) + data_definition = RawDatasetDefinition( + **(dd.get("dd_data_set"))) + if data_definition.backup: + backups.append(get_backups(data_definition.backup, dd.get("dd_data_set").get("data_set_name"))) elif dd.get("dd_unix"): - data_definition = RawFileDefinition(**(dd.get("dd_unix"))) + data_definition = RawFileDefinition( + **(dd.get("dd_unix"))) elif dd.get("dd_input"): - data_definition = RawInputDefinition(**(dd.get("dd_input"))) + data_definition = RawInputDefinition( + **(dd.get("dd_input"))) elif dd.get("dd_output"): - data_definition = RawOutputDefinition(**(dd.get("dd_output"))) + data_definition = RawOutputDefinition( + **(dd.get("dd_output"))) elif dd.get("dd_vio"): - data_definition = VIODefinition(dd.get("dd_vio").get("tmphlq")) + data_definition = VIODefinition( + dd.get("dd_vio").get("tmphlq")) elif dd.get("dd_dummy"): data_definition = DummyDefinition() elif dd.get("dd_concat"): @@ -2488,365 +2631,67 @@ def build_data_definition(dd): return data_definition -# TODO: clean up data definition wrapper classes -class RawDatasetDefinition(DatasetDefinition): - """Wrapper around DatasetDefinition to contain information about - desired return contents. - - Args: - DatasetDefinition (DatasetDefinition): Dataset DD data type to be used in a DDStatement. - """ - - def __init__( - self, - data_set_name, - disposition="", - type=None, - space_primary=None, - space_secondary=None, - space_type=None, - disposition_normal=None, - disposition_abnormal=None, - block_size=None, - directory_blocks=None, - record_format=None, - record_length=None, - sms_storage_class=None, - sms_data_class=None, - sms_management_class=None, - key_length=None, - key_offset=None, - volumes=None, - key_label=None, - encryption_key_1=None, - encryption_key_2=None, - reuse=None, - replace=None, - backup=None, - return_content=None, - tmphlq=None, - **kwargs - ): - """Initialize RawDatasetDefinition - - Args: - data_set_name (str): The name of the data set. - disposition (str, optional): The disposition of the data set. Defaults to "". - type (str, optional): The type of the data set. Defaults to None. - space_primary (int, optional): The primary amount of space of the data set. Defaults to None. - space_secondary (int, optional): The secondary amount of space of the data set. Defaults to None. - space_type (str, optional): The unit of space to use for primary and secondary space. Defaults to None. - disposition_normal (str, optional): What to do with the data set after normal termination of the program. Defaults to None. - disposition_abnormal (str, optional): What to do with the data set after abnormal termination of the program. Defaults to None. - block_size (int, optional): The block size of the data set. Defaults to None. - directory_blocks (int, optional): The number of directory blocks to allocate for the data set. Defaults to None. - record_format (str, optional): The record format of the data set. Defaults to None. - record_length (int, optional): The length, in bytes, of each record in the data set. Defaults to None. - sms_storage_class (str, optional): The storage class for an SMS-managed dataset. Defaults to None. - sms_data_class (str, optional): The data class for an SMS-managed dataset. Defaults to None. - sms_management_class (str, optional): The management class for an SMS-managed dataset. Defaults to None. - key_length (int, optional): The key length of a record. Defaults to None. - key_offset (int, optional): The key offset is the position of the first byte of the key - in each logical record of a the specified VSAM data set. Defaults to None. - volumes (list, optional): A list of volume serials.. Defaults to None. - key_label (str, optional): The label for the encryption key used by the system to encrypt the data set. Defaults to None. - encryption_key_1 (dict, optional): [description]. Defaults to None. - encryption_key_2 (dict, optional): [description]. Defaults to None. - reuse (bool, optional): Determines if data set should be reused. Defaults to None. - replace (bool, optional): Determines if data set should be replaced. Defaults to None. - backup (bool, optional): Determines if a backup should be made of existing data set when disposition=NEW, replace=true, - and a data set with the desired name is found.. Defaults to None. - return_content (dict, optional): Determines how content should be returned to the user. Defaults to None. - tmphlq (str, optional): HLQ to be used for temporary datasets. Defaults to None. - """ - self.backup = None - self.return_content = ReturnContent(**(return_content or {})) - self.tmphlq = tmphlq - primary_unit = space_type - secondary_unit = space_type - key_label1 = None - key_encoding1 = None - key_label2 = None - key_encoding2 = None - if encryption_key_1: - if encryption_key_1.get("label"): - key_label1 = encryption_key_1.get("label") - if encryption_key_1.get("encoding"): - key_encoding1 = encryption_key_1.get("encoding") - if encryption_key_2: - if encryption_key_2.get("label"): - key_label2 = encryption_key_2.get("label") - if encryption_key_2.get("encoding"): - key_encoding2 = encryption_key_2.get("encoding") - - should_reuse = False - if (reuse or replace) and data_set_exists(data_set_name, volumes): - if reuse: - should_reuse = True - elif replace: - if backup: - self.backup = zos_backup.mvs_file_backup(data_set_name, None, tmphlq) - backups.append( - {"original_name": data_set_name, "backup_name": self.backup} - ) - DataSet.delete(data_set_name) - - if not should_reuse: - super().__init__( - dataset_name=data_set_name, - disposition=disposition, - type=type, - primary=space_primary, - primary_unit=primary_unit, - secondary=space_secondary, - secondary_unit=secondary_unit, - normal_disposition=disposition_normal, - conditional_disposition=disposition_abnormal, - block_size=block_size, - directory_blocks=directory_blocks, - record_format=record_format, - record_length=record_length, - storage_class=sms_storage_class, - data_class=sms_data_class, - management_class=sms_management_class, - key_length=key_length, - key_offset=key_offset, - volumes=volumes, - dataset_key_label=key_label, - key_label1=key_label1, - key_encoding1=key_encoding1, - key_label2=key_label2, - key_encoding2=key_encoding2, - ) - else: - # TODO: determine if encoding labels are useful for existing data sets - super().__init__( - dataset_name=data_set_name, - disposition="shr", - type=type, - normal_disposition=disposition_normal, - conditional_disposition=disposition_abnormal, - volumes=volumes, - dataset_key_label=key_label, - key_label1=key_label1, - key_encoding1=key_encoding1, - key_label2=key_label2, - key_encoding2=key_encoding2, - ) - - -class RawFileDefinition(FileDefinition): - """Wrapper around FileDefinition to contain information about - desired return contents. - - Args: - FileDefinition (FileDefinition): File DD data type to be used in a DDStatement. - """ - - def __init__( - self, - path, - disposition_normal=None, - disposition_abnormal=None, - mode=None, - access_group=None, - status_group=None, - file_data_type=None, - record_length=None, - block_size=None, - record_format=None, - return_content=None, - **kwargs - ): - """Initialize RawFileDefinition - - Args: - path (str): An absolute UNIX file path. - disposition_normal (str, optional): What to do with path after normal program termination. Defaults to None. - disposition_abnormal (str, optional): What to do with path after abnormal program termination. Defaults to None. - mode (int, optional): The file access attributes for the UNIX file being allocated. Defaults to None. - access_group (str, optional): the access mode for UNIX file. Defaults to None. - status_group (list[str], optional): The status for UNIX file being allocated. Defaults to None. - file_data_type (str, optional): The type of data that is (or will be) stored in the UNIX file. Defaults to None. - record_length (int, optional): The specified logical record length for the UNIX file. Defaults to None. - block_size (int, optional): the specified block size for the UNIX file being allocated. Defaults to None. - record_format (str, optional): The specified record format for the UNIX file. Defaults to None. - return_content (dict, optional): Determines how content should be returned to the user. Defaults to None. - """ - self.return_content = ReturnContent(**(return_content or {})) - super().__init__( - path_name=path, - normal_disposition=disposition_normal, - conditional_disposition=disposition_abnormal, - path_mode=mode, - access_group=access_group, - status_group=status_group, - file_data=file_data_type, - record_length=record_length, - block_size=block_size, - record_format=record_format, - ) - - -class RawInputDefinition(InputDefinition): - """Wrapper around InputDefinition to contain information about - desired return contents. - - Args: - InputDefinition (InputDefinition): Input DD data type to be used in a DDStatement. - """ - - def __init__(self, content="", return_content=None, tmphlq="", **kwargs): - """Initialize RawInputDefinition - - Args: - content (str): The content to write to temporary data set / stdin. - return_content (dict, optional): Determines how content should be returned to the user. Defaults to {}. - """ - self.return_content = ReturnContent(**(return_content or {})) - super().__init__(content=content, tmphlq=tmphlq) - - -class RawOutputDefinition(OutputDefinition): - """Wrapper around OutputDefinition to contain information about - desired return contents. - - Args: - OutputDefinition (OutputDefinition): Output DD data type to be used in a DDStatement. - """ - - def __init__(self, return_content=None, tmphlq="", **kwargs): - """Initialize RawOutputDefinition - - Args: - content (str): The content to write to temporary data set / stdin. - return_content (dict, optional): Determines how content should be returned to the user. Defaults to {}. - """ - self.return_content = ReturnContent(**(return_content or {})) - super().__init__(tmphlq=tmphlq) - - -class ReturnContent(object): - """Holds information about what type of content - should be returned for a particular DD, if any. - - Args: - object (object): The most base type. - """ - - def __init__(self, type=None, src_encoding=None, response_encoding=None): - """Initialize ReturnContent - - Args: - type (str, optional): The type of content to return. - Defaults to None. - src_encoding (str, optional): The encoding of the data set or file on the z/OS system. - Defaults to None. - response_encoding (str, optional): The encoding to use when returning the contents of the data set or file. - Defaults to None. - """ - self.type = type - self.src_encoding = src_encoding - self.response_encoding = response_encoding - - -def rename_parms(parms, name_map): - """Rename parms based on a provided dictionary. - - Args: - parms (dict): The parms before name remapping. - name_map (dict): The dictionary to use for name mapping. - - Returns: - dict: The parms after name mapping. - """ - renamed_parms = {} - for key, value in parms.items(): - if name_map.get(key): - renamed_parms[name_map.get(key)] = value - else: - renamed_parms[key] = value - return renamed_parms - - -def remove_unused_args(parms): - """Remove unused arguments from a dictionary. - Does not function recursively. - - Args: - parms (dict): The dictionary to remove unused arguments from. - - Returns: - dict: The dictionary without any unused arguments. - """ - return {key: value for key, value in parms.items() if value is not None} - - -def data_set_exists(name, volumes=None): - """Is used to determine if a data set exists. - In addition, if a data set does exist and is uncataloged, - the data set will be cataloged. - - Args: - name (str): The name of the data set. - volumes (list[str], optional): A list of volume serials. Defaults to None. - - Returns: - bool: Whether the data set exists or not. - """ - exists = False - try: - present, changed = DataSet.attempt_catalog_if_necessary(name, volumes) - exists = present - except Exception: - # Failure locating or cataloging the data set. Go ahead assumming it does not exist. - # exists = False to avoid using pass clause which results in bandit warning. - exists = False - return exists +def get_backups(backup, data_set_name): + backups = {"original_name": data_set_name, "backup_name": backup} + return backups def run_zos_program( - program, parm="", dd_statements=None, authorized=False, verbose=False, tmp_hlq=None + program, parm="", dd_statements=None, authorized=False, verbose=False, tmphlq=None ): """Run a program on z/OS. - Args: - program (str): The name of the program to run. - parm (str, optional): Additional argument string if required. Defaults to "". - dd_statements (list[DDStatement], optional): DD statements to allocate for the program. Defaults to []. - authorized (bool, optional): Determines if program will execute as an authorized user. Defaults to False. - tmp_hlq (str, optional): Arguments overwrite variable tmp_hlq - - Returns: - MVSCmdResponse: Holds the response information for program execution. + Parameters + ---------- + program : str + The name of the program to run. + parm : str, optional + Additional argument string if required. Defaults to "". + dd_statements : list[DDStatement], optional + DD statements to allocate for the program. Defaults to []. + authorized : bool, optional + Determines if program will execute as an authorized user. Defaults to False. + tmphlq : str, optional + Arguments overwrite variable tmp_hlq + + Returns + ------- + response : MVSCmdResponse + Holds the response information for program execution. """ if not dd_statements: dd_statements = [] response = None if authorized: response = MVSCmd.execute_authorized( - pgm=program, parm=parm, dds=dd_statements, verbose=verbose, tmp_hlq=tmp_hlq + pgm=program, parm=parm, dds=dd_statements, verbose=verbose, tmp_hlq=tmphlq ) else: response = MVSCmd.execute( - pgm=program, parm=parm, dds=dd_statements, verbose=verbose, tmp_hlq=tmp_hlq + pgm=program, parm=parm, dds=dd_statements, verbose=verbose, tmp_hlq=tmphlq ) return response -def build_response(rc, dd_statements): +def build_response(rc, dd_statements, stdout): """Build response dictionary to return at module completion. - Args: - rc (int): The return code of the program. - dd_statements (list[DDStatement]): The DD statements for the program. - - Returns: - dict: Response dictionary in format expected for response on module completion. + Parameters + ---------- + rc : int + The return code of the program. + dd_statements : list[DDStatement] + The DD statements for the program. + + Returns + ------- + response : dict + Response dictionary in format expected for response on module completion. """ response = {"ret_code": {"code": rc}} response["backups"] = gather_backups(dd_statements) response["dd_names"] = gather_output(dd_statements) + response["stdout"] = stdout return response @@ -2854,11 +2699,15 @@ def gather_backups(dd_statements): """Gather backup information for all data sets which had a backup made during module execution. - Args: - dd_statements (list[DDStatement]): The DD statements for the program. + Parameters + ---------- + dd_statements : list[DDStatement] + The DD statements for the program. - Returns: - list[dict]: List of backups in format expected for response on module completion. + Returns + ------- + backups : list[dict] + List of backups in format expected for response on module completion. """ backups = [] for dd_statement in dd_statements: @@ -2870,11 +2719,15 @@ def get_dd_backup(dd_statement): """Gather backup information for a single data set if the DD is a data set DD and a backup was made. - Args: - dd (DataDefinition): A single DD statement. + Parameters + ---------- + dd : DataDefinition + A single DD statement. - Returns: - list[dict]: List of backups in format expected for response on module completion. + Returns + ------- + dd_backup : list[dict] + List of backups in format expected for response on module completion. """ dd_backup = [] if ( @@ -2890,11 +2743,15 @@ def get_dd_backup(dd_statement): def get_data_set_backup(dd_statement): """Get backup of a single data set DD statement. - Args: - dd_statement (DDStatement): A single DD statement. + Parameters + ---------- + dd_statement : DDStatement + A single DD statement. - Returns: - dict: Backup information in format expected for response on module completion. + Returns + ------- + backup : dict + Backup information in format expected for response on module completion. """ backup = {} backup["backup_name"] = dd_statement.definition.backup @@ -2905,12 +2762,16 @@ def get_data_set_backup(dd_statement): def get_concatenation_backup(dd_statement): """Get the backup information for a single concatenation DD statement. - Args: - dd_statement (DDStatement): A single DD statement. + Parameters + ---------- + dd_statement : DDStatement + A single DD statement. - Returns: - list[dict]: The backup information of a single DD, in format expected for response on module completion. - Response can contain multiple backups. + Returns + ------- + dd_backup : list[dict] + The backup information of a single DD, in format expected for response on module completion. + Response can contain multiple backups. """ # create new DDStatement objects for each concat member # makes it easier to handle concat and non-concat DDs consistently @@ -2926,11 +2787,15 @@ def gather_output(dd_statements): """Gather DD contents for all DD statements for which content was requested. - Args: - dd_statements (list[DDStatement]): The DD statements for the program. + Parameters + ---------- + dd_statements : list[DDStatement] + The DD statements for the program. - Returns: - list[dict]: The list of DD outputs, in format expected for response on module completion. + Returns + ------- + output : list[dict] + The list of DD outputs, in format expected for response on module completion. """ output = [] for dd_statement in dd_statements: @@ -2941,11 +2806,15 @@ def gather_output(dd_statements): def get_dd_output(dd_statement): """Get the output for a single DD statement. - Args: - dd_statement (DDStatement): A single DD statement. + Parameters + ---------- + dd_statement : DDStatement + A single DD statement. - Returns: - list[dict]: The output of a single DD, in format expected for response on module completion. + Returns + ------- + dd_output : list[dict] + The output of a single DD, in format expected for response on module completion. """ dd_output = [] if ( @@ -2976,11 +2845,15 @@ def get_dd_output(dd_statement): def get_data_set_output(dd_statement): """Get the output of a single data set DD statement. - Args: - dd_statement (DDStatement): A single DD statement. + Parameters + ---------- + dd_statement : DDStatement + A single DD statement. - Returns: - dict: The output of a single DD, in format expected for response on module completion. + Returns + ------- + dd_response : dict + The output of a single DD, in format expected for response on module completion. """ contents = "" if dd_statement.definition.return_content.type == "text": @@ -2998,11 +2871,15 @@ def get_data_set_output(dd_statement): def get_unix_file_output(dd_statement): """Get the output of a single UNIX file DD statement. - Args: - dd_statement (DDStatement): A single DD statement. + Parameters + ---------- + dd_statement : DDStatement + A single DD statement. - Returns: - dict: The output of a single DD, in format expected for response on module completion. + Returns + ------- + dd_response : dict + The output of a single DD, in format expected for response on module completion. """ contents = "" if dd_statement.definition.return_content.type == "text": @@ -3020,12 +2897,16 @@ def get_unix_file_output(dd_statement): def get_concatenation_output(dd_statement): """Get the output of a single concatenation DD statement. - Args: - dd_statement (DDStatement): A single DD statement. + Parameters + ---------- + dd_statement : DDStatement + A single DD statement. - Returns: - list[dict]: The output of a single DD, in the format expected for response on module completion. - Response can contain multiple outputs. + Returns + ------- + dd_response : list[dict] + The output of a single DD, in the format expected for response on module completion. + Response can contain multiple outputs. """ # create new DDStatement objects for each concat member # makes it easier to handle concat and non-concat DDs consistently @@ -3041,13 +2922,19 @@ def build_dd_response(dd_name, name, contents): """Gather additional response metrics and format as expected for response on module completion. - Args: - dd_name (str): The DD name associated with this response. - name (str): The data set or UNIX file name associated with the response. - contents (str): The raw contents taken from the data set or UNIX file. - - Returns: - dict: Response content info of a single DD, in the format expected for response on module completion. + Parameters + ---------- + dd_name : str + The DD name associated with this response. + name : str + The data set or UNIX file name associated with the response. + contents : str + The raw contents taken from the data set or UNIX file. + + Returns + ------- + dd_response : dict + Response content info of a single DD, in the format expected for response on module completion. """ dd_response = {} dd_response["dd_name"] = dd_name @@ -3061,14 +2948,21 @@ def build_dd_response(dd_name, name, contents): def get_data_set_content(name, binary=False, from_encoding=None, to_encoding=None): """Retrieve the raw contents of a data set. - Args: - name (str): The name of the data set. - binary (bool, optional): Determines if contents are retrieved without encoding conversion. Defaults to False. - from_encoding (str, optional): The encoding of the data set on the z/OS system. Defaults to None. - to_encoding (str, optional): The encoding to receive the data back in. Defaults to None. - - Returns: - str: The raw content of the data set. + Parameters + ---------- + name : str + The name of the data set. + binary : bool, optional + Determines if contents are retrieved without encoding conversion. Defaults to False. + from_encoding : str, optional + The encoding of the data set on the z/OS system. Defaults to None. + to_encoding : str, optional + The encoding to receive the data back in. Defaults to None. + + Returns + ------- + quoted_name : str + The raw content of the data set. """ quoted_name = quote(name) if "'" not in quoted_name: @@ -3081,14 +2975,21 @@ def get_data_set_content(name, binary=False, from_encoding=None, to_encoding=Non def get_unix_content(name, binary=False, from_encoding=None, to_encoding=None): """Retrieve the raw contents of a UNIX file. - Args: - name (str): The name of the UNIX file. - binary (bool, optional): Determines if contents are retrieved without encoding conversion. Defaults to False. - from_encoding (str, optional): The encoding of the UNIX file on the z/OS system. Defaults to None. - to_encoding (str, optional): The encoding to receive the data back in. Defaults to None. - - Returns: - str: The raw content of the UNIX file. + Parameters + ---------- + name : str + The name of the UNIX file. + binary : bool, optional + Determines if contents are retrieved without encoding conversion. Defaults to False. + from_encoding : str, optional + The encoding of the UNIX file on the z/OS system. Defaults to None. + to_encoding : str, optional + The encoding to receive the data back in. Defaults to None. + + Returns + ------- + stdout : str + The raw content of the UNIX file. """ return get_content("{0}".format(quote(name)), binary, from_encoding, to_encoding) @@ -3096,14 +2997,21 @@ def get_unix_content(name, binary=False, from_encoding=None, to_encoding=None): def get_content(formatted_name, binary=False, from_encoding=None, to_encoding=None): """Retrieve raw contents of a data set or UNIXfile. - Args: - name (str): The name of the data set or UNIX file, formatted and quoted for proper usage in command. - binary (bool, optional): Determines if contents are retrieved without encoding conversion. Defaults to False. - from_encoding (str, optional): The encoding of the data set or UNIX file on the z/OS system. Defaults to None. - to_encoding (str, optional): The encoding to receive the data back in. Defaults to None. - - Returns: - str: The raw content of the data set or UNIX file. If unsuccessful in retrieving data, returns empty string. + Parameters + ---------- + name : str + The name of the data set or UNIX file, formatted and quoted for proper usage in command. + binary : bool, optional + Determines if contents are retrieved without encoding conversion. Defaults to False. + from_encoding : str, optional + The encoding of the data set or UNIX file on the z/OS system. Defaults to None. + to_encoding : str, optional + The encoding to receive the data back in. Defaults to None. + + Returns + ------- + stdout : str + The raw content of the data set or UNIX file. If unsuccessful in retrieving data, returns empty string. """ module = AnsibleModuleHelper(argument_spec={}) conversion_command = "" @@ -3124,49 +3032,6 @@ def get_content(formatted_name, binary=False, from_encoding=None, to_encoding=No return stdout -def modify_contents(contents): - """Return the content of dd_input to a valid form for a JCL program. - - Args: - contents (str or list): The string or list with the program. - - Returns: - contents: The content in a proper multi line str. - """ - if not isinstance(contents, list): - contents = list(contents.split("\n")) - contents = prepend_spaces(contents) - contents = "\n".join(contents) - return contents - - -def prepend_spaces(lines): - """Return the array with two spaces at the beggining. - - Args: - lines (list): The list with a line of a program. - - Returns: - new_lines: The list in a proper two spaces and the code. - """ - module = AnsibleModuleHelper(argument_spec={}) - for index, line in enumerate(lines): - if len(line) > 0: - if len(line) > 80: - msg = """Length of line {0} is over 80 characters. The maximum length allowed is 80 characters, including 2 spaces at the beginning. - If the two spaces are not present, the module will add them to ensure columns 1 and 2 are blank. """ - module.fail_json(msg=msg.format(line)) - else: - if len(line) > 1 and line[0] != " " and line[1] != " ": - if len(line) > 78: - msg = """Length of line {0} is over 80 characters. The maximum length allowed is 80 characters, including 2 spaces at the beginning. - If the two spaces are not present, the module will add them to ensure columns 1 and 2 are blank. """ - module.fail_json(msg=msg.format(line)) - else: - lines[index] = " {0}".format(line) - return lines - - class ZOSRawError(Exception): def __init__(self, program="", error=""): self.msg = "An error occurred during execution of z/OS program {0}. {1}".format( From e2ae039a7923af5b6295c0078155c9ae9dc36f6f Mon Sep 17 00:00:00 2001 From: IsaacVRey <isaac.vega.rey@gmail.com> Date: Wed, 8 May 2024 14:59:18 -0600 Subject: [PATCH 370/495] [Enabler][ac] Add command for 'make module-doc' to ac and as a github action (#1439) * Add command for 'make module-doc' to ac and first try for github action * Second try for github action * Test for github action * Third try for github action * Fourth try for github action * Fifth try for github action * Sixth try for github action * Seventh try for github action * Eighth try for github action * Test for github action * Remove commented lines for tests --------- Co-authored-by: Rich Parker <richp405@gmail.com> --- .github/workflows/ac-module-doc.yml | 42 +++++++++++++++++++++++++++++ ac | 18 +++++++++++++ 2 files changed, 60 insertions(+) create mode 100644 .github/workflows/ac-module-doc.yml diff --git a/.github/workflows/ac-module-doc.yml b/.github/workflows/ac-module-doc.yml new file mode 100644 index 000000000..534f1d8ae --- /dev/null +++ b/.github/workflows/ac-module-doc.yml @@ -0,0 +1,42 @@ +name: AC Module Doc + +on: + pull_request: + paths: + - 'plugins/modules/*' + branches: + - dev + - staging* + +jobs: + module-doc: + runs-on: ubuntu-latest + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Set up python + uses: actions/setup-python@v5 + with: + python-version: 3.11 + + - name: Set up venv + run: | + python -m pip install --upgrade pip + pip install virtualenv + mkdir venv + virtualenv venv/venv-2.16 + + - name: Install dependencies + run: | + source venv/venv-2.16/bin/activate + pip install ansible + pip install ansible-doc-extractor + + - name: Run ac-module-doc + run: | + source venv/venv-2.16/bin/activate + export ANSIBLE_LIBRARY=/home/runner/work/ibm_zos_core/ibm_zos_core/plugins/modules/ + ./ac --ac-build + ./ac --ac-module-doc diff --git a/ac b/ac index 652ba7099..016b760ea 100755 --- a/ac +++ b/ac @@ -331,6 +331,18 @@ ac_install(){ fi } +# Run a make module doc +# ------------------------------------------------------------------------------ +#->ac-module-doc: +## Runs make module-doc to generate the module documentation +## Usage: ac [--ac-module-doc] +## Example: +## $ ac --ac-module-doc +ac_module_doc(){ + message "Running make module-doc" + . $VENV_BIN/activate && cd docs/ && make module-doc +} + # ------------------------------------------------------------------------------ # Run ansible-lint on the locally checked out GH Branch # ------------------------------------------------------------------------------ @@ -753,6 +765,10 @@ while true; do ensure_managed_venv_exists $1 option_submitted="--ac-changelog" ;; + --ac-module-doc) # Command + ensure_managed_venv_exists $1 + option_submitted="--ac-module-doc" + ;; --ac-install) ensure_managed_venv_exists $1 # Command option_submitted="--ac-install" @@ -914,6 +930,8 @@ elif [ "$option_submitted" ] && [ "$option_submitted" = "--ac-galaxy-importer" ] ac_galaxy_importer elif [ "$option_submitted" ] && [ "$option_submitted" = "--ac-changelog" ] ; then ac_changelog $command +elif [ "$option_submitted" ] && [ "$option_submitted" = "--ac-module-doc" ] ; then + ac_module_doc elif [ "$option_submitted" ] && [ "$option_submitted" = "--ac-install" ] ; then ac_install $version elif [ "$option_submitted" ] && [ "$option_submitted" = "--ac-lint" ] ; then From c6844bb656c86069944b7486dbc67a38028853a9 Mon Sep 17 00:00:00 2001 From: Ivan Moreno <iamorenosoto@gmail.com> Date: Wed, 8 May 2024 14:14:56 -0700 Subject: [PATCH 371/495] [Enabler] [zos_copy] Use ZOAU's force option during copies (#1488) * Use force option during copies * Add change log fragment * Move change log fragment --- changelogs/fragments/1488-zos_copy-refactor-force.yml | 4 ++++ plugins/modules/zos_copy.py | 10 ++-------- 2 files changed, 6 insertions(+), 8 deletions(-) create mode 100644 changelogs/fragments/1488-zos_copy-refactor-force.yml diff --git a/changelogs/fragments/1488-zos_copy-refactor-force.yml b/changelogs/fragments/1488-zos_copy-refactor-force.yml new file mode 100644 index 000000000..ec91f11aa --- /dev/null +++ b/changelogs/fragments/1488-zos_copy-refactor-force.yml @@ -0,0 +1,4 @@ +trivial: + - zos_copy - use keyword argument force when copying data sets, instead + of a dictionary. + (https://github.com/ansible-collections/ibm_zos_core/pull/1488). \ No newline at end of file diff --git a/plugins/modules/zos_copy.py b/plugins/modules/zos_copy.py index da29f688a..34d78d8a2 100644 --- a/plugins/modules/zos_copy.py +++ b/plugins/modules/zos_copy.py @@ -948,11 +948,8 @@ def copy_to_seq( if self.is_binary or self.asa_text: copy_args["options"] = "-B" - if self.force_lock: - copy_args["options"] += " -f" - try: - datasets.copy(new_src, dest, **copy_args) + datasets.copy(new_src, dest, force=self.force_lock, **copy_args) except zoau_exceptions.ZOAUException as copy_exception: raise CopyOperationError( msg="Unable to copy source {0} to {1}".format(new_src, dest), @@ -1815,11 +1812,8 @@ def copy_to_member( if self.is_binary or self.asa_text: opts["options"] = "-B" - if self.force_lock: - opts["options"] += " -f" - try: - rc = datasets.copy(src, dest, alias=self.aliases, executable=self.executable, **opts) + rc = datasets.copy(src, dest, alias=self.aliases, executable=self.executable, force=self.force_lock, **opts) out = "" err = "" except zoau_exceptions.ZOAUException as copy_exception: From 2a7a1280438114e4d8f1a2c15d5b7a83f1a72cb8 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Tue, 14 May 2024 09:40:29 -0600 Subject: [PATCH 372/495] [Documentation] Inform users the need to escape commands with special characters (#1507) * Added zos operator docs * Added notes * Added changelog --- changelogs/fragments/1507-zos_operator-docs.yml | 3 +++ plugins/modules/zos_operator.py | 8 +++++++- 2 files changed, 10 insertions(+), 1 deletion(-) create mode 100644 changelogs/fragments/1507-zos_operator-docs.yml diff --git a/changelogs/fragments/1507-zos_operator-docs.yml b/changelogs/fragments/1507-zos_operator-docs.yml new file mode 100644 index 000000000..e12e66eb6 --- /dev/null +++ b/changelogs/fragments/1507-zos_operator-docs.yml @@ -0,0 +1,3 @@ +trivial: + - zos_operator - Added a doc entry to inform users they need to escape certain special chars. + (https://github.com/ansible-collections/ibm_zos_core/pull/1507). \ No newline at end of file diff --git a/plugins/modules/zos_operator.py b/plugins/modules/zos_operator.py index 012a46c0c..366285d22 100644 --- a/plugins/modules/zos_operator.py +++ b/plugins/modules/zos_operator.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019 - 2024 +# Copyright (c) IBM Corporation 2019, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -35,6 +35,9 @@ - The command to execute. - If the command contains single-quotations, another set of single quotes must be added. - For example, change the command "...,P='DSN3EPX,-DBC1,S'" to "...,P=''DSN3EPX,-DBC1,S'' ". + - If the command contains any special characters ($, &, etc), they must be escaped using + double backslashes like \\\$. + - For example, to display job by job name the command would be C(cmd:"\\$dj''HELLO''") type: str required: true verbose: @@ -55,6 +58,9 @@ type: int required: false default: 1 +notes: + - Commands may need to use specific prefixes like $, they can be discovered by + issuing the following command C(D OPDATA,PREFIX). """ EXAMPLES = r""" From 659a92f4c446e88ef84f7c757d6f2afdaef603a7 Mon Sep 17 00:00:00 2001 From: IsaacVRey <isaac.vega.rey@gmail.com> Date: Tue, 14 May 2024 09:41:08 -0600 Subject: [PATCH 373/495] [documentation][zos_archive] Add and standarize docstrings on modules/zos_archive.py (#1415) * Advance on docstrings on modules/zos_archive.py * Advance on docstrings on modules/zos_archive.py * Advance on docstrings on modules/zos_archive.py * Advance on docstrings on modules/zos_archive.py * Add and standarize docstrings on modules/zos_archive.py * Add changelog fragment --------- Co-authored-by: Rich Parker <richp405@gmail.com> Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> --- .../1415-Update_docstring-zos_archive.yml | 3 + plugins/modules/zos_archive.py | 554 ++++++++++++++++-- 2 files changed, 505 insertions(+), 52 deletions(-) create mode 100644 changelogs/fragments/1415-Update_docstring-zos_archive.yml diff --git a/changelogs/fragments/1415-Update_docstring-zos_archive.yml b/changelogs/fragments/1415-Update_docstring-zos_archive.yml new file mode 100644 index 000000000..77f607a62 --- /dev/null +++ b/changelogs/fragments/1415-Update_docstring-zos_archive.yml @@ -0,0 +1,3 @@ +trivial: + - zos_archive - Updated docstrings to numpy style for visual aid to developers. + (https://github.com/ansible-collections/ibm_zos_core/pull/1415). \ No newline at end of file diff --git a/plugins/modules/zos_archive.py b/plugins/modules/zos_archive.py index cbe96b65d..e046a3f9e 100644 --- a/plugins/modules/zos_archive.py +++ b/plugins/modules/zos_archive.py @@ -455,10 +455,16 @@ def get_archive_handler(module): """ Return the proper archive handler based on archive format. - Arguments: - format: {str} - Returns: - Archive: {Archive} + + Parameters + ---------- + module : AnsibleModule + Ansible Module. + + Returns + ------- + Archive : Archive + The archive format for the module. """ format = module.params.get("format").get("name") @@ -472,10 +478,36 @@ def get_archive_handler(module): def strip_prefix(prefix, string): + """Strip prefix. + + Parameters + --------- + prefix : str + Prefix to take out of the string. + string : str + String with the prefix. + + Returns + ------- + str + Given string without the prefix. + """ return string[len(prefix):] if string.startswith(prefix) else string def expand_paths(paths): + """Expand paths. + + Parameters + ---------- + paths : list[str] + List with the paths. + + Returns + ------- + Union[str] + Expanded path. + """ expanded_path = [] for path in paths: if '*' in path or '?' in path: @@ -487,11 +519,74 @@ def expand_paths(paths): def is_archive(path): + """If a path refers to an archive. + + Parameters + ---------- + path : str + The path to the archive. + + Returns + ------- + bool + If is archive. + """ return re.search(r'\.(tar|tar\.(gz|bz2|xz)|tgz|tbz2|zip|gz|bz2|xz|pax)$', os.path.basename(path), re.IGNORECASE) class Archive(): def __init__(self, module): + """Handles archive operations. + + Parameters + ---------- + module : AnsibleModule + Ansible module to get parameters from. + + Attributes + ---------- + module : AnsibleModule + AnsibleModule to use. + dest : str + Destination path. + format : dict + The compression type and corresponding options to use when archiving + data. + remove : bool + Whether to remove any added source files, trees or data sets after module + adds them to the archive. + changed : bool + If there are targeted paths. + errors : str + Errors ocurred. + found : list[str] + List of found datasets. + targets : list[str] + List of paths that are in sources given. + archived : list[str] + Any files or data sets that were compressed or added to the + archive. + not_found : list[str] + List of paths that are missing from the sources. + force : bool + If set to true and the remote file or data set dest will be + deleted. + sources : list[str] + List of sources to get files from. + arcroot : str + If src is a list of USS files, this returns the top most parent + folder of the list of files, otherwise is empty. + expanded_sources : list[str] + The list of matching paths from the src option. + expanded_exclude_sources : list[str] + The list of matching exclude paths from the exclude option. + dest_state : str + The state of the dest file or data set. + state : str + The state of the input C(src). + xmit_log_data_set : str + The name of the data set to store xmit log output. + """ self.module = module self.dest = module.params['dest'] self.format = module.params.get("format").get("name") @@ -512,6 +607,13 @@ def __init__(self, module): self.xmit_log_data_set = "" def targets_exist(self): + """Returns if there are targets or not. + + Returns + ------- + bool + If the targets list is not empty. + """ return bool(self.targets) @abc.abstractmethod @@ -552,6 +654,13 @@ def compute_dest_size(self): @property def result(self): + """Returns a dict with the results. + + Returns + ------- + dict + Arguments showing the result. + """ return { 'archived': self.archived, 'dest': self.dest, @@ -568,6 +677,27 @@ def result(self): class USSArchive(Archive): def __init__(self, module): + """Archive for USS files. + + Parameters + ---------- + module : AnsibleModule + Ansible module to get parameters from. + + Attributes + ---------- + original_checksums : str + The SHA256 hash of the contents of input file. + arcroot : str + If src is a list of USS files, this returns the top most parent + folder of the list of files, otherwise is empty. + expanded_sources : list[str] + The list of matching paths from the src option. + expanded_exclude_sources : list[str] + The list of matching exclude paths from the exclude option. + sources : list[str] + List of sources to get files from. + """ super(USSArchive, self).__init__(module) self.original_checksums = self.dest_checksums() if len(self.sources) == 1: @@ -581,16 +711,33 @@ def __init__(self, module): self.sources = sorted(set(self.expanded_sources) - set(self.expanded_exclude_sources)) def dest_exists(self): + """Returns if destination path exits. + + Returns + ------- + bool + If destination path exists. + """ return os.path.exists(self.dest) def dest_type(self): + """Returns the destination type. + + Returns + str + "USS". + """ return "USS" def update_permissions(self): + """Updates permissions. + """ file_args = self.module.load_file_common_arguments(self.module.params, path=self.dest) self.changed = self.module.set_fs_attributes_if_different(file_args, self.changed) def find_targets(self): + """Classifies paths in source to either targets or not_found based on whether they exist or not. + """ for path in self.sources: if os.path.exists(path): self.targets.append(path) @@ -598,13 +745,17 @@ def find_targets(self): self.not_found.append(path) def _get_checksums(self, src): - """Calculate SHA256 hash for a given file + """Calculate SHA256 hash for a given file. - Arguments: - src {str} -- The absolute path of the file + Parameters + ---------- + src : str + The absolute path of the file. - Returns: - str -- The SHA256 hash of the contents of input file + Returns + ------- + str + The SHA256 hash of the contents of input file. """ b_src = to_bytes(src) if not os.path.exists(b_src) or os.path.isdir(b_src): @@ -622,16 +773,32 @@ def _get_checksums(self, src): return hash_digest.hexdigest() def dest_checksums(self): + """Returns destination file checksums if it exists. + + Returns + ------- + str + The SHA256 hash of the contents of destination file. + """ if self.dest_exists(): return self._get_checksums(self.dest) return None def is_different_from_original(self): + """Checks if the destination is different from the original based on checksums. + + Returns + ------- + bool + If the SHA256 hash of the contents of destination file is different from the original's. + """ if self.original_checksums is not None: return self.original_checksums != self.dest_checksums() return True def remove_targets(self): + """Removes the archived targets and changes the state accordingly. + """ self.state = STATE_ABSENT for target in self.archived: if os.path.isdir(target): @@ -646,6 +813,8 @@ def remove_targets(self): self.state = STATE_INCOMPLETE def archive_targets(self): + """Archives targets + """ self.file = self.open(self.dest) try: @@ -682,10 +851,21 @@ def archive_targets(self): self.file.close() def add(self, source, arcname): + """Add source into the destination archive. + + Parameters + ---------- + source : str + Source of the file. + arcname : str + Destination archive name for where to add the source into. + """ self._add(source, arcname) self.archived.append(source) def get_state(self): + """Sets dest_state attribute based on if the destination exists, is an archive or any path was not found. + """ if not self.dest_exists(): self.dest_state = STATE_ABSENT else: @@ -697,9 +877,28 @@ def get_state(self): class TarArchive(USSArchive): def __init__(self, module): + """Archive for Tar. + + Parameters + ---------- + module : AnsibleModule + AnsibleModule to use. + """ super(TarArchive, self).__init__(module) def open(self, path): + """Open the archive with the given path. + + Parameters + ---------- + path : str + Path of the archive. + + Returns + ------- + TarFile + The opened TarFile. + """ if self.format == 'tar': file = tarfile.open(path, 'w') elif self.format == 'pax': @@ -709,14 +908,47 @@ def open(self, path): return file def _add(self, source, arcname): + """Add source into the destination archive. + + Parameters + ---------- + source : str + Source of the file. + arcname : str + Destination archive name for where to add the source into. + """ self.file.add(source, arcname) class ZipArchive(USSArchive): def __init__(self, module): + """Archive for Zip. + + Parameters + ---------- + module : AnsibleModule + AnsibleModule to use. + """ super(ZipArchive, self).__init__(module) def open(self, path): + """Open the archive with the given path. + + Parameters + ---------- + path : str + Path of the archive. + + Returns + ------- + ZipFile + The opened ZipFile. + + Raises + ------ + BadZipFile + Improperly compressed zip file, unable to to open file. + """ try: file = zipfile.ZipFile(path, 'w', zipfile.ZIP_DEFLATED, True) except zipfile.BadZipFile: @@ -726,11 +958,47 @@ def open(self, path): return file def _add(self, source, arcname): + """Add source into the destination archive. + + Parameters + ---------- + source : str + Source of the file. + arcname : str + Destination archive name for where to add the source into. + """ self.file.write(source, arcname) class MVSArchive(Archive): def __init__(self, module): + """Archive for MVS files. + + Parameters + ---------- + module : AnsibleModule + AnsibleModule to use. + + Attributes + ---------- + original_checksums : str + The SHA256 hash of the contents of input file. + use_adrdssu : bool + Whether to use Data Facility Storage Management Subsystem data set services + program ADRDSSU to uncompress data sets or not. + expanded_sources : list[str] + The list of matching paths from the src option. + expanded_exclude_sources : list[str] + The list of matching exclude paths from the exclude option. + sources : list[str] + List of sources to get files from. + dest_dat_set : dict + Destination data set. + source_size : int + Source size. + tmphlq : str + High level qualifier for temporary datasets. + """ super(MVSArchive, self).__init__(module) self.original_checksums = self.dest_checksums() self.use_adrdssu = module.params.get("format").get("format_options").get("use_adrdssu") @@ -749,8 +1017,7 @@ def close(self): pass def find_targets(self): - """ - Finds target datasets in host. + """Finds target datasets in host. """ for path in self.sources: if data_set.DataSet.data_set_exists(path): @@ -779,11 +1046,50 @@ def _create_dest_data_set( ): """Create a temporary data set. - Arguments: - tmp_hlq(str): A HLQ specified by the user for temporary data sets. - - Returns: - str: Name of the temporary data set created. + Parameters + ---------- + name : str + Name for the temporary data set. + replace : bool + Used to determine behavior when data set already exists. + type : str + Type of the dataset. + space_primary : int + Size of the source. + space_secondary : int + The amount of secondary space to allocate for the dataset. + space_type : str + The unit of measurement to use when defining primary and secondary space. + record_format : str + The record format to use for the dataset. + record_length : int + The length, in bytes, of each record in the data set. + block_size : int + The block size to use for the data set. + directory_blocks : int + The number of directory blocks to allocate to the data set. + key_length : int + The key length of a record. + key_offset : int + The key offset is the position of the first byte of the key + in each logical record of a the specified VSAM data set. + sms_storage_class : str + The storage class for an SMS-managed dataset. + sms_data_class : str + The data class for an SMS-managed dataset. + sms_management_class : str + The management class for an SMS-managed dataset. + volumes : list[str,list[str]] + A list of volume serials. + tmp_hlq : str + A HLQ specified by the user for temporary data sets. + force : bool + Used to determine behavior when performing member operations on a pdse. + + Returns + ------- + tuple(str,bool) + Name of the temporary data set created and if something changed. """ arguments = locals() if name is None: @@ -811,12 +1117,17 @@ def _create_dest_data_set( return arguments["name"], changed def create_dest_ds(self, name): - """ - Create destination data set to use as an archive. - Arguments: - name: {str} - Returns: - name {str} - name of the newly created data set. + """Create destination data set to use as an archive. + + Parameters + ---------- + name : str + Name for the dataset. + + Returns + ------- + str + Name of the newly created data set. """ record_length = XMIT_RECORD_LENGTH if self.format == "xmit" else AMATERSE_RECORD_LENGTH data_set.DataSet.ensure_present(name=name, replace=True, type='seq', record_format='fb', record_length=record_length) @@ -835,8 +1146,22 @@ def create_dest_ds(self, name): return name def dump_into_temp_ds(self, temp_ds): - """ - Dump src datasets identified as self.targets into a temporary dataset using ADRDSSU. + """Dump src datasets identified as self.targets into a temporary dataset using ADRDSSU. + + Parameters + ---------- + temp_ds : str + Temporal dataset name. + + Returns + ------- + int + Return code. + + Raises + ------ + fail_json + Failed executing ADRDSSU to archive. """ dump_cmd = """ DUMP OUTDDNAME(TARGET) - OPTIMIZE(4) DS(INCL( - """ @@ -863,6 +1188,18 @@ def dump_into_temp_ds(self, temp_ds): return rc def _get_checksums(self, src): + """Calculate SHA256 hash for a given file. + + Parameters + ---------- + src : str + The absolute path of the file. + + Returns + ------- + str + The SHA256 hash of the contents of input file. + """ sha256_cmd = "sha256 \"//'{0}'\"".format(src) rc, out, err = self.module.run_command(sha256_cmd) checksums = out.split("= ") @@ -871,22 +1208,51 @@ def _get_checksums(self, src): return None def dest_checksums(self): + """Returns destination file checksums if it exists. + + Returns + ------- + str + The SHA256 hash of the contents of destination file. + """ if self.dest_exists(): return self._get_checksums(self.dest) return None def is_different_from_original(self): + """Checks if the destination is different from the original based on checksums. + + Returns + ------- + bool + If the SHA256 hash of the contents of destination file is different from the original's. + """ if self.original_checksums is not None: return self.original_checksums != self.dest_checksums() return True def dest_type(self): + """Returns the destination type. + + Returns + str + "MVS". + """ return "MVS" def dest_exists(self): + """Returns if destination path exits. + + Returns + ------- + bool + If destination path exists. + """ return data_set.DataSet.data_set_exists(self.dest) def remove_targets(self): + """Removes the archived targets and changes the state accordingly. + """ self.state = STATE_ABSENT for target in self.archived: try: @@ -898,6 +1264,18 @@ def remove_targets(self): return def expand_mvs_paths(self, paths): + """Expand mvs paths. + + Parameters + ---------- + paths : list[str] + List of paths to expand. + + Returns + ------- + Union[str] + Extended paths. + """ expanded_path = [] for path in paths: if '*' in path: @@ -909,6 +1287,8 @@ def expand_mvs_paths(self, paths): return expanded_path def get_state(self): + """Sets dest_state attribute based on if the destination exists, is an archive or any path was not found. + """ if not self.dest_exists(): self.dest_state = STATE_ABSENT else: @@ -919,10 +1299,15 @@ def get_state(self): def clean_environment(self, data_sets=None, uss_files=None, remove_targets=False): """Removes any allocated data sets that won't be needed after module termination. - Arguments: - data_sets - {list(str)} : list of data sets to remove - uss_files - {list(str)} : list of uss files to remove - remove_targets - bool : Indicates if already unpacked data sets need to be removed too. + + Parameters + ---------- + data_sets : list(str) + list of data sets to remove + uss_files : list(str) + list of uss files to remove + remove_targets : bool + Indicates if already unpacked data sets need to be removed too. """ if data_set is not None: for ds in data_sets: @@ -937,12 +1322,7 @@ def clean_environment(self, data_sets=None, uss_files=None, remove_targets=False self.remove_targets() def compute_dest_size(self): - """ - Calculate the destination data set based on targets found. - Arguments: - - Returns: - {int} - Destination computed space in kilobytes. + """Calculate the destination data set based on targets found. And sets it do space_primary attribute. """ if self.dest_data_set.get("space_primary") is None: dest_space = 1 @@ -957,6 +1337,18 @@ def compute_dest_size(self): class AMATerseArchive(MVSArchive): def __init__(self, module): + """Archive for XMIT. + + Parameters + ---------- + module : AnsibleModule + AnsibleModule to use. + + Attributes + ---------- + pack_arg : str + Compression option for use with the terse format. + """ super(AMATerseArchive, self).__init__(module) self.pack_arg = module.params.get("format").get("format_options").get("terse_pack") # We store pack_ard in uppercase because the AMATerse command requires @@ -967,11 +1359,24 @@ def __init__(self, module): self.pack_arg = self.pack_arg.upper() def add(self, src, archive): - """ - Archive src into archive using AMATERSE program. - Arguments: - src: {str} - archive: {str} + """Archive src into archive using AMATERSE program. + + Parameters + ---------- + src : str + Source of the archive. + archive : str + Destination archive. + + Returns + ------- + int + Return code. + + Raises + ------ + fail_json + Failed executing AMATERSE to archive source. """ dds = {'args': self.pack_arg, 'sysut1': src, 'sysut2': archive} rc, out, err = mvs_cmd.amaterse(cmd="", dds=dds) @@ -986,8 +1391,12 @@ def add(self, src, archive): return rc def archive_targets(self): - """ - Add MVS Datasets to the AMATERSE Archive by creating a temporary dataset and dumping the source datasets into it. + """Add MVS Datasets to the AMATERSE Archive by creating a temporary dataset and dumping the source datasets into it. + + Raises + ------ + fail_json + To archive multiple source data sets, you must use option 'use_adrdssu=True'. """ if self.use_adrdssu: source, changed = self._create_dest_data_set( @@ -1022,15 +1431,35 @@ def archive_targets(self): class XMITArchive(MVSArchive): def __init__(self, module): + """Archive for XMIT. + + Parameters + ---------- + module : AnsibleModule + AnsibleModule to use. + + Attributes + ---------- + xmit_log_data_set : str + The name of the data set to store xmit log output. + """ super(XMITArchive, self).__init__(module) self.xmit_log_data_set = module.params.get("format").get("format_options").get("xmit_log_data_set") def add(self, src, archive): - """ - Archive src into archive using TSO XMIT. - Arguments: - src: {str} - archive: {str} + """Archive src into archive using TSO XMIT. + + Parameters + ---------- + src : str + Source of the archive. + archive : str + Destination archive. + + Raises + ------ + fail_json + An error occurred while executing 'TSO XMIT' to archive source. """ log_option = "LOGDSNAME({0})".format(self.xmit_log_data_set) if self.xmit_log_data_set else "NOLOG" xmit_cmd = """ @@ -1055,8 +1484,12 @@ def add(self, src, archive): return rc def archive_targets(self): - """ - Adds MVS Datasets to the TSO XMIT Archive by creating a temporary dataset and dumping the source datasets into it. + """Adds MVS Datasets to the TSO XMIT Archive by creating a temporary dataset and dumping the source datasets into it. + + Raises + ------ + fail_json + To archive multiple source data sets, you must use option 'use_adrdssu=True'. """ if self.use_adrdssu: source, changed = self._create_dest_data_set( @@ -1089,13 +1522,21 @@ def archive_targets(self): self.clean_environment(data_sets=self.tmp_data_sets) def get_error_hint(self, output): - """ - Takes a raw TSO XMIT output and parses the abend code and return code to provide an + """Takes a raw TSO XMIT output and parses the abend code and return code to provide an appropriate error hint for the failure. If parsing is not possible then return an empty string. - Arguments: - output (str): Raw TSO XMIT output returned from ikjeft01 when the command fails. + Parameters + ---------- + output : str + Raw TSO XMIT output returned from ikjeft01 when the command fails. + + Returns + ------- + str + Operation failed. + str + '', if IndexError. """ error_messages = dict(D37={"00000004": "There appears to be a space issue. Ensure that there is adequate space and log data sets are not full."}) @@ -1122,6 +1563,15 @@ def get_error_hint(self, output): def run_module(): + """Initialize module. + + Raises + ------ + fail_json + Parameter verification failed. + fail_json + The file already exists. Use force flag to replace destination. + """ module = AnsibleModule( argument_spec=dict( src=dict(type='list', elements='str', required=True), From 6e3144648477a476d95e12cee15dc2868e8dae47 Mon Sep 17 00:00:00 2001 From: IsaacVRey <isaac.vega.rey@gmail.com> Date: Tue, 14 May 2024 13:12:55 -0600 Subject: [PATCH 374/495] [Documentation][zos_copy] Add and standarize docstrings on modules/zos_copy.py (#1344) * Standarize doc-strings on modules/zos_copy.py * Half add docstrings on modules/zos_blockinline.py exceptions * Add docstrings to modules/zos_copy.py exceptions * Create changelog fragment * Modify google style to numpy * Standarize numpy style * Update zos_copy.py Added blank link at 2895 to fix pep8 --------- Co-authored-by: Rich Parker <richp405@gmail.com> Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> --- .../1344-update-docstring-zos_copy.yml | 3 + plugins/modules/zos_copy.py | 1044 ++++++++++++----- 2 files changed, 737 insertions(+), 310 deletions(-) create mode 100644 changelogs/fragments/1344-update-docstring-zos_copy.yml diff --git a/changelogs/fragments/1344-update-docstring-zos_copy.yml b/changelogs/fragments/1344-update-docstring-zos_copy.yml new file mode 100644 index 000000000..90ecb9e24 --- /dev/null +++ b/changelogs/fragments/1344-update-docstring-zos_copy.yml @@ -0,0 +1,3 @@ +trivial: + - zos_copy - Updated docstrings to numpy style for visual aid to developers. + (https://github.com/ansible-collections/ibm_zos_core/pull/1344). \ No newline at end of file diff --git a/plugins/modules/zos_copy.py b/plugins/modules/zos_copy.py index 34d78d8a2..4333a75b6 100644 --- a/plugins/modules/zos_copy.py +++ b/plugins/modules/zos_copy.py @@ -881,22 +881,56 @@ def __init__( backup_name=None, force_lock=False, ): - """Utility class to handle copying data between two targets - - Arguments: - module {AnsibleModule} -- The AnsibleModule object from currently - running module - - Keyword Arguments: - is_binary {bool} -- Whether the file or data set to be copied - contains binary data - executable {bool} -- Whether the file or data set to be copied - is executable - backup_name {str} -- The USS path or data set name of destination - backup - force_lock {str} -- Whether the dest data set should be copied into - using disp=shr when is opened by another - process. + """Utility class to handle copying data between two targets. + + Parameters + ---------- + module : AnsibleModule + The AnsibleModule object from currently + running module. + + Keyword Parameters + ------------------ + is_binary : bool + Whether the file or data set to be copied + contains binary data. + executable : bool + Whether the file or data set to be copied + is executable. + asa_text : bool + Printer print statement. + aliases : bool + Unused. + backup_name : str + The USS path or data set name of destination + backup. + force_lock : str + Whether the dest data set should be copied into + using disp=shr when is opened by another + process. + + Attributes + ---------- + module : AnsibleModule + The AnsibleModule object from currently + running module. + is_binary : bool + Whether the file or data set to be copied + contains binary data. + executable : bool + Whether the file or data set to be copied + is executable. + asa_text : bool + Printer print statement. + aliases : bool + Unused. + backup_name : str + The USS path or data set name of destination + backup. + force_lock : str + Whether the dest data set should be copied into + using disp=shr when is opened by another + process. """ self.module = module self.is_binary = is_binary @@ -907,7 +941,20 @@ def __init__( self.force_lock = force_lock def run_command(self, cmd, **kwargs): - """ Wrapper for AnsibleModule.run_command """ + """Wrapper for AnsibleModule.run_command. + + Parameters + ---------- + cmd : str + cmd command. + **kwargs : dict + Dictionary with the arguments. + + Returns + ------- + tuple(int, str, str) + A tuple of return code, stdout and stderr. + """ return self.module.run_command(cmd, **kwargs) def copy_to_seq( @@ -919,14 +966,24 @@ def copy_to_seq( ): """Copy source to a sequential data set. - Raises: - CopyOperationError -- When copying into the data set fails. + Parameters + ---------- + src : str + Path to USS file or data set name. + temp_path : str + Path to the location where the control node + transferred data to. + conv_path : str + Path to the converted source file. + dest : str + Name of destination data set. + src_type : str + Type of the source. - Arguments: - src {str} -- Path to USS file or data set name - conv_path {str} -- Path to the converted source file - dest {str} -- Name of destination data set - src_type {str} -- Type of the source + Raises + ------ + CopyOperationError + When copying into the data set fails. """ new_src = conv_path or src copy_args = dict() @@ -961,12 +1018,17 @@ def copy_to_seq( def copy_to_vsam(self, src, dest): """Copy source VSAM to destination VSAM. - Raises: - CopyOperationError -- When REPRO fails to copy the data set. + Parameters + ---------- + src : str + The name of the source VSAM. + dest : str + The name of the destination VSAM. - Arguments: - src {str} -- The name of the source VSAM - dest {str} -- The name of the destination VSAM + Raises + ------ + CopyOperationError + When REPRO fails to copy the data set. """ out_dsp = "shr" if self.force_lock else "old" dds = {"OUT": "{0},{1}".format(dest.upper(), out_dsp)} @@ -993,17 +1055,26 @@ def _copy_tree(self, entries, src, dest, dirs_exist_ok=False): because the use of shutil.copy2. This issue is only present in Python 3.11 and 3.12. - Arguments: - entries {list} -- List of files under src directory. - src_dir {str} -- USS source directory. - dest_dir {str} -- USS dest directory. - dirs_exist_ok {bool} -- Whether to copy files to an already existing directory. - - Raises: - Exception -- When copying into the directory fails. + Parameters + ---------- + entries : list + List of files under src directory. + src_dir : str + USS source directory. + dest_dir : str + USS dest directory. + dirs_exist_ok : bool + Whether to copy files to an already existing directory. + + Returns + ------- + str + Destination directory that was copied. - Returns: - {str } -- Destination directory that was copied. + Raises + ------ + Exception + When copying into the directory fails. """ os.makedirs(dest, exist_ok=dirs_exist_ok) for src_entry in entries: @@ -1034,13 +1105,19 @@ def copy_tree(self, src_dir, dest_dir, dirs_exist_ok=False): """ Copies a USS directory into another USS directory. - Arguments: - src_dir {str} -- USS source directory. - dest_dir {str} -- USS dest directory. - dirs_exist_ok {bool} -- Whether to copy files to an already existing directory. - - Returns: - {str} -- Destination directory that was copied. + Parameters + ---------- + src_dir : str + USS source directory. + dest_dir : str + USS dest directory. + dirs_exist_ok : bool + Whether to copy files to an already existing directory. + + Returns + ------- + str + Destination directory that was copied. """ with os.scandir(src_dir) as itr: entries = list(itr) @@ -1049,19 +1126,27 @@ def copy_tree(self, src_dir, dest_dir, dirs_exist_ok=False): def convert_encoding(self, src, encoding, remote_src): """Convert encoding for given src - Arguments: - src {str} -- Path to the USS source file or directory - encoding {dict} -- Charsets that the source is to be converted - from and to - remote_src {bool} -- Whether the file was already on the remote - node or not. - - Raises: - CopyOperationError -- When the encoding of a USS file is not - able to be converted + Parameters + ---------- + src : str + Path to the USS source file or directory. + encoding : dict + Charsets that the source is to be converted + from and to. + remote_src : bool + Whether the file was already on the remote + node or not. + + Returns + ------- + str + The USS path where the converted data is stored. - Returns: - {str} -- The USS path where the converted data is stored + Raises + ------ + CopyOperationError + When the encoding of a USS file is not + able to be converted. """ from_code_set = encoding.get("from") to_code_set = encoding.get("to") @@ -1118,16 +1203,22 @@ def convert_encoding(self, src, encoding, remote_src): return new_src def _convert_encoding_dir(self, dir_path, from_code_set, to_code_set): - """Convert encoding for all files inside a given directory + """Convert encoding for all files inside a given directory. - Arguments: - dir_path {str} -- Absolute path to the input directory - from_code_set {str} -- The character set to convert the files from - to_code_set {str} -- The character set to convert the files to + Parameters + ---------- + dir_path : str + Absolute path to the input directory. + from_code_set : str + The character set to convert the files from. + to_code_set : str + The character set to convert the files to. Raises - EncodingConversionError -- When the encoding of a USS file is not - able to be converted + ------ + EncodingConversionError + When the encoding of a USS file is not + able to be converted. """ enc_utils = encode.EncodeUtils() for path, dirs, files in os.walk(dir_path): @@ -1146,17 +1237,23 @@ def _tag_file_encoding(self, file_path, tag, is_dir=False): If `file_path` is a directory, all of the files and subdirectories will be tagged recursively. - Raises: - CopyOperationError -- When chtag fails. - - Arguments: - file_path {str} -- Absolute file path - tag {str} -- Specifies which code set to tag the file + Parameters + ---------- + file_path : str + Absolute file path. + tag : str + Specifies which code set to tag the file. - Keyword Arguments: - is_dir {bool} -- Whether 'file_path' specifies a directory. - (Default {False}) + Keyword Parameters + ------------------ + is_dir : bool + Whether 'file_path' specifies a directory. + (Default {False}) + Raises + ------ + CopyOperationError + When chtag fails. """ tag_cmd = "chtag -{0}c {1} {2}".format( "R" if is_dir else "t", tag, file_path) @@ -1172,7 +1269,18 @@ def _tag_file_encoding(self, file_path, tag, is_dir=False): ) def _merge_hash(self, *args): - """Combine multiple dictionaries""" + """Combine multiple dictionaries. + + Parameters + ---------- + *args : dict + Arguments to merge. + + Returns + ------- + dict + Results of the merge. + """ result = dict() for arg in args: result.update(arg) @@ -1182,12 +1290,16 @@ def file_has_crlf_endings(self, src): """Reads src as a binary file and checks whether it uses CRLF or LF line endings. - Arguments: - src {str} -- Path to a USS file + Parameters + ---------- + src : str + Path to a USS file. - Returns: - {bool} -- True if the file uses CRLF endings, False if it uses LF - ones. + Returns + ------- + bool + True if the file uses CRLF endings, False if it uses LF + ones. """ # Python has to read the file in binary mode to not mask CRLF # endings or enable universal newlines. If we used encoding="cp037", @@ -1208,14 +1320,20 @@ def create_temp_with_lf_endings(self, src): """Creates a temporary file with the same content as src but without carriage returns. - Arguments: - src {str} -- Path to a USS source file. + Parameters + ---------- + src : str + Path to a USS source file. - Raises: - CopyOperationError: If the conversion fails. + Returns + ------- + str + Path to the temporary file created. - Returns: - {str} -- Path to the temporary file created. + Raises + ------ + CopyOperationError + If the conversion fails. """ try: fd, converted_src = tempfile.mkstemp() @@ -1248,18 +1366,29 @@ def __init__( common_file_args=None, backup_name=None, ): - """Utility class to handle copying files or data sets to USS target - - Arguments: - module {AnsibleModule} -- The AnsibleModule object from currently - running module - - Keyword Arguments: - common_file_args {dict} -- mode, group and owner information to be + """Utility class to handle copying files or data sets to USS target. + + Parameters + ---------- + module : AnsibleModule + The AnsibleModule object from currently + running module. + + Keyword Parameters + ------------------ + common_file_args : dict + Mode, group and owner information to be + applied to destination file. + is_binary : bool + Whether the file to be copied contains binary data. + backup_name : str + The USS path or data set name of destination backup. + + Attributes + ---------- + common_file_args : dict + Code, group and owner information to be applied to destination file. - - is_binary {bool} -- Whether the file to be copied contains binary data - backup_name {str} -- The USS path or data set name of destination backup """ super().__init__( module, @@ -1282,20 +1411,34 @@ def copy_to_uss( force, content_copy, ): - """Copy a file or data set to a USS location - - Arguments: - src {str} -- The USS source - dest {str} -- Destination file or directory on USS - conv_path {str} -- Path to the converted source file or directory - src_ds_type {str} -- Type of source - src_member {bool} -- Whether src is a data set member - member_name {str} -- The name of the source data set member - force {bool} -- Whether to copy files to an already existing directory - content_copy {bool} -- Whether copy is using content option or not. - - Returns: - {str} -- Destination where the file was copied to + """Copy a file or data set to a USS location. + + Parameters + ---------- + src : str + The USS source. + dest : str + Destination file or directory on USS. + temp_path : str + Path to the location where the control node + transferred data to. + conv_path : str + Path to the converted source file or directory. + src_ds_type : str + Type of source. + src_member : bool + Whether src is a data set member. + member_name : str + The name of the source data set member. + force : bool + Whether to copy files to an already existing directory. + content_copy : bool + Whether copy is using content option or not. + + Returns + ------- + str + Destination where the file was copied to. """ changed_files = None @@ -1355,17 +1498,26 @@ def copy_to_uss( def _copy_to_file(self, src, dest, content_copy, conv_path): """Helper function to copy a USS src to USS dest. - Arguments: - src {str} -- USS source file path - dest {str} -- USS dest file path - content_copy {bool} -- Whether copy is using content option or not. - conv_path {str} -- Path to the converted source file or directory - - Raises: - CopyOperationError -- When copying into the file fails. + Parameters + ---------- + src : str + USS source file path. + dest : str + USS dest file path. + content_copy : bool + Whether copy is using content option or not. + conv_path : str + Path to the converted source file or directory. + + Returns + ------- + str + Destination where the file was copied to. - Returns: - {str} -- Destination where the file was copied to + Raises + ------ + CopyOperationError + When copying into the file fails. """ src_path = os.path.basename(src) if not content_copy else "inline_copy" if os.path.isdir(dest): @@ -1411,19 +1563,28 @@ def _copy_to_dir( If the path for dest_dir does not end with a trailing slash ("/"), the src_dir itself will be copied into the destination. - Arguments: - src_dir {str} -- USS source directory - dest_dir {str} -- USS dest directory - conv_path {str} -- Path to the converted source directory - force {bool} -- Whether to copy files to an already existing directory - - Raises: - CopyOperationError -- When copying into the directory fails. + Parameters + ---------- + src_dir : str + USS source directory. + dest_dir : str + USS dest directory. + conv_path : str + Path to the converted source directory. + force :bool + Whether to copy files to an already existing directory. + + Returns + ------- + tuple(str,list[str]) + Destination where the directory was copied to, and + a list of paths for all subdirectories and files + that got copied. - Returns: - {tuple} -- Destination where the directory was copied to, and - a list of paths for all subdirectories and files - that got copied. + Raises + ------ + CopyOperationError + When copying into the directory fails. """ copy_directory = True if not src_dir.endswith("/") else False new_src_dir = conv_path or src_dir @@ -1452,19 +1613,24 @@ def _get_changed_files(self, src, dest, copy_directory): """Traverses a source directory and gets all the paths to files and subdirectories that got copied into a destination. - Arguments: - src (str) -- Path to the directory where files are copied from. - dest (str) -- Path to the directory where files are copied into. - copy_directory (bool) -- Whether the src directory itself will be copied - into dest. The src basename will get appended - to filepaths when comparing. - - - Returns: - tuple -- A list of paths for all new subdirectories and files that - got copied into dest, and a list of the permissions - for the files and directories already present on the - destination. + Parameters + ---------- + src : str + Path to the directory where files are copied from. + dest : str + Path to the directory where files are copied into. + copy_directory : bool + Whether the src directory itself will be copied + into dest. The src basename will get appended + to filepaths when comparing. + + Returns + ------- + tuple(list[str],list[tuple(str,int)]) + A list of paths for all new subdirectories and files that + got copied into dest, and a list of the permissions + for the files and directories already present on the + destination. """ files_to_copy = self._walk_uss_tree(src) @@ -1498,10 +1664,16 @@ def _get_changed_files(self, src, dest, copy_directory): def _walk_uss_tree(self, dir): """Walks the tree directory for dir and returns all relative paths found. - Arguments: - dir (str) -- Path to the directory to traverse. - Returns: - list -- List of relative paths to all content inside dir. + + Parameters + ---------- + dir : str + Path to the directory to traverse. + + Returns + ------- + Union[str] + List of relative paths to all content inside dir. """ original_working_dir = os.getcwd() # The function gets relative paths, so it changes the current working @@ -1535,17 +1707,26 @@ def _mvs_copy_to_uss( ): """Helper function to copy an MVS data set src to USS dest. - Arguments: - src {str} -- Name of source data set or data set member - dest {str} -- USS dest file path - src_ds_type -- Type of source - src_member {bool} -- Whether src is a data set member - - Raises: - CopyOperationError -- When copying the data set into USS fails. + Parameters + ---------- + src : str + Name of source data set or data set member. + dest : str + USS dest file path. + src_ds_type + Type of source. + src_member : bool + Whether src is a data set member. + + Keyword Parameters + ------------------ + member_name : str + The name of the source data set member. - Keyword Arguments: - member_name {str} -- The name of the source data set member + Raises + ------ + CopyOperationError + When copying the data set into USS fails. """ if os.path.isdir(dest): @@ -1632,14 +1813,19 @@ def __init__( """ Utility class to handle copying to partitioned data sets or partitioned data set members. - Arguments: - module {AnsibleModule} -- The AnsibleModule object from currently - running module - - Keyword Arguments: - is_binary {bool} -- Whether the data set to be copied contains - binary data - backup_name {str} -- The USS path or data set name of destination backup + Parameters + ---------- + module : AnsibleModule + The AnsibleModule object from currently + running module. + + Keyword Parameters + ------------------ + is_binary : bool + Whether the data set to be copied contains + binary data. + backup_name : str + The USS path or data set name of destination backup. """ super().__init__( module, @@ -1663,17 +1849,27 @@ def copy_to_pdse( ): """Copy source to a PDS/PDSE or PDS/PDSE member. - Raises: - CopyOperationError -- When copying into a member fails. - - Arguments: - src {str} -- Path to USS file/directory or data set name. - conv_path {str} -- Path to the converted source file/directory. - dest {str} -- Name of destination data set. - src_ds_type {str} -- The type of source. - src_member {bool, optional} -- Member of the source data set to copy. - dest_member {str, optional} -- Name of destination member in data set. - encoding {dict, optional} -- Dictionary with encoding options. + Parameters + ---------- + src : str + Path to USS file/directory or data set name. + conv_path : str + Path to the converted source file/directory. + dest : str + Name of destination data set. + src_ds_type : str + The type of source. + src_member : bool, optional + Member of the source data set to copy. + dest_member : str, optional + Name of destination member in data set. + encoding : dict, optional + Dictionary with encoding options. + + Raises + ------ + CopyOperationError + When copying into a member fails. """ new_src = conv_path or src src_members = [] @@ -1789,14 +1985,20 @@ def copy_to_member( - Sequential data sets - PDS/PDSE members - Arguments: - src {str} -- Path to USS file or data set name. - dest {str} -- Name of destination data set - src_type {str} -- Type of the source. - - Returns: - dict -- Dictionary containing the return code, stdout, and stderr from - the copy command. + Parameters + ---------- + src : str + Path to USS file or data set name. + dest : str + Name of destination data set. + src_type : str + Type of the source. + + Returns + ------- + dict + Dictionary containing the return code, stdout, and stderr from + the copy command. """ src = src.replace("$", "\\$") dest = dest.replace("$", "\\$").upper() @@ -1831,11 +2033,15 @@ def copy_to_member( def get_file_record_length(file): """Gets the longest line length from a file. - Arguments: - file (str) -- Path of the file. + Parameters + ---------- + file : str + Path of the file. - Returns: - int -- Length of the longest line in the file. + Returns + ------- + int + Length of the longest line in the file. """ max_line_length = 0 @@ -1859,15 +2065,22 @@ def get_file_record_length(file): def dump_data_set_member_to_file(data_set_member, is_binary): """Dumps a data set member into a file in USS. - Arguments: - data_set_member (str) -- Name of the data set member to dump. - is_binary (bool) -- Whether the data set member contains binary data. - - Returns: - str -- Path of the file in USS that contains the dump of the member. - - Raise: - DataSetMemberAttributeError: When the call to dcp fails. + Parameters + ---------- + data_set_member : str + Name of the data set member to dump. + is_binary : bool + Whether the data set member contains binary data. + + Returns + ------- + str + Path of the file in USS that contains the dump of the member. + + Raise + ----- + DataSetMemberAttributeError + When the call to dcp fails. """ fd, temp_path = tempfile.mkstemp() os.close(fd) @@ -1907,18 +2120,29 @@ def get_data_set_attributes( Block sizes are computed following the recomendations on this documentation page: https://www.ibm.com/docs/en/zos/2.4.0?topic=options-block-size-blksize - Arguments: - name (str) -- Name of the new sequential data set. - size (int) -- Number of bytes needed for the new data set. - is_binary (bool) -- Whether or not the data set will have binary data. - asa_text (bool) -- Whether the data set will have ASA control characters. - record_format (str, optional) -- Type of record format. - record_length (int, optional) -- Record length for the data set. - type (str, optional) -- Type of the new data set. - volume (str, optional) -- Volume where the data set should be allocated. - - Returns: - dict -- Parameters that can be passed into data_set.DataSet.ensure_present + Parameters + ---------- + name : str + Name of the new sequential data set. + size : int + Number of bytes needed for the new data set. + is_binary : bool + Whether or not the data set will have binary data. + asa_text : bool + Whether the data set will have ASA control characters. + record_format : str, optional + Type of record format. + record_length : int, optional + Record length for the data set. + type : str, optional + Type of the new data set. + volume : str, optional + Volume where the data set should be allocated. + + Returns + ------- + dict + Parameters that can be passed into data_set.DataSet.ensure_present. """ # Calculating the size needed to allocate. space_primary = int(math.ceil((size / 1024))) @@ -1981,13 +2205,20 @@ def create_seq_dataset_from_file( """Creates a new sequential dataset with attributes suitable to copy the contents of a file into it. - Arguments: - file (str) -- Path of the source file. - dest (str) -- Name of the data set. - force (bool) -- Whether to replace an existing data set. - is_binary (bool) -- Whether the file has binary data. - asa_text (bool) -- Whether the file has ASA control characters. - volume (str, optional) -- Volume where the data set should be. + Parameters + ---------- + file : str + Path of the source file. + dest : str + Name of the data set. + force : bool + Whether to replace an existing data set. + is_binary : bool + Whether the file has binary data. + asa_text bool + Whether the file has ASA control characters. + volume : str, optional + Volume where the data set should be. """ src_size = os.stat(file).st_size # record_format = record_length = None @@ -2029,13 +2260,21 @@ def backup_data(ds_name, ds_type, backup_name, tmphlq=None): """Back up the given data set or file to the location specified by 'backup_name'. If 'backup_name' is not specified, then calculate a temporary location and copy the file or data set there. - Arguments: - ds_name {str} -- Name of the file or data set to be backed up - ds_type {str} -- Type of the file or data set - backup_name {str} -- Path to USS location or name of data set - where data will be backed up - Returns: - {str} -- The USS path or data set name where data was backed up + + Parameters + ---------- + ds_name : str + Name of the file or data set to be backed up. + ds_type : str + Type of the file or data set. + backup_name : str + Path to USS location or name of data set + where data will be backed up. + + Returns + ------- + str + The USS path or data set name where data was backed up. """ module = AnsibleModuleHelper(argument_spec={}) try: @@ -2066,20 +2305,33 @@ def is_compatible( """Determine whether the src and dest are compatible and src can be copied to dest. - Arguments: - src_type {str} -- Type of the source (e.g. PDSE, USS). - dest_type {str} -- Type of destination. - copy_member {bool} -- Whether dest is a data set member. - src_member {bool} -- Whether src is a data set member. - is_src_dir {bool} -- Whether the src is a USS directory. - is_src_inline {bool} -- Whether the src comes from inline content. - executable {bool} -- Whether the src is a executable to be copied. - asa_text {bool} -- Whether the copy operation will handle ASA control characters. - src_has_asa_chars {bool} -- Whether the src contains ASA control characters. - dest_has_asa_chars {bool} -- Whether the dest contains ASA control characters. - - Returns: - {bool} -- Whether src can be copied to dest. + Parameters + ---------- + src_type : str + Type of the source (e.g. PDSE, USS). + dest_type : str + Type of destination. + copy_member : bool + Whether dest is a data set member. + src_member : bool + Whether src is a data set member. + is_src_dir : bool + Whether the src is a USS directory. + is_src_inline : bool + Whether the src comes from inline content. + executable : bool + Whether the src is a executable to be copied. + asa_text : bool + Whether the copy operation will handle ASA control characters. + src_has_asa_chars : bool + Whether the src contains ASA control characters. + dest_has_asa_chars : bool + Whether the dest contains ASA control characters. + + Returns + ------- + bool + Whether src can be copied to dest. """ # ******************************************************************** @@ -2182,20 +2434,32 @@ def does_destination_allow_copy( """Checks whether or not the module can copy into the destination specified. - Arguments: - src {str} -- Name of the source. - src_type {bool} -- Type of the source (SEQ/PARTITIONED/VSAM/USS). - dest {str} -- Name of the destination. - dest_exists {bool} -- Whether or not the destination exists. - member_exists {bool} -- Whether or not a member in a partitioned destination exists. - dest_type {str} -- Type of the destination (SEQ/PARTITIONED/VSAM/USS). - is_uss {bool} -- Whether or not the destination is inside USS. - force {bool} -- Whether or not the module can replace existing destinations. - volume {str, optional} -- Volume where the destination should be. - - Returns: - bool -- If the module has the permissions needed to create, use or replace - the destination. + Parameters + ---------- + src : str + Name of the source. + src_type : bool + Type of the source (SEQ/PARTITIONED/VSAM/USS). + dest : str + Name of the destination. + dest_exists : bool + Whether or not the destination exists. + member_exists : bool + Whether or not a member in a partitioned destination exists. + dest_type : str + Type of the destination (SEQ/PARTITIONED/VSAM/USS). + is_uss : bool + Whether or not the destination is inside USS. + force : bool + Whether or not the module can replace existing destinations. + volume : str, optional + Volume where the destination should be. + + Returns + ------- + bool + If the module has the permissions needed to create, use or replace + the destination. """ # If the destination is inside USS and the module doesn't have permission to replace it, # it fails. @@ -2221,13 +2485,17 @@ def does_destination_allow_copy( def get_file_checksum(src): - """Calculate SHA256 hash for a given file + """Calculate SHA256 hash for a given file. - Arguments: - src {str} -- The absolute path of the file + Parameters + ---------- + src : str + The absolute path of the file. - Returns: - str -- The SHA256 hash of the contents of input file + Returns + ------- + str + The SHA256 hash of the contents of input file. """ b_src = to_bytes(src) if not os.path.exists(b_src) or os.path.isdir(b_src): @@ -2249,8 +2517,10 @@ def cleanup(src_list): """Remove all files or directories listed in src_list. Also perform additional cleanup of the /tmp directory. - Arguments: - src_list {list} -- A list of file paths + Parameters + ---------- + src_list : list + A list of file paths. """ module = AnsibleModuleHelper(argument_spec={}) tmp_prefix = tempfile.gettempprefix() @@ -2278,12 +2548,17 @@ def is_member_wildcard(src): """Determine whether src specifies a data set member wildcard in the form 'SOME.DATA.SET(*)' or 'SOME.DATA.SET(ABC*)' - Arguments: - src {str} -- The data set name - - Returns: - re.Match -- If the data set specifies a member wildcard - None -- If the data set does not specify a member wildcard + Parameters + ---------- + src : str + The data set name. + + Returns + ------- + re.Match + If the data set specifies a member wildcard. + None + If the data set does not specify a member wildcard. """ return fullmatch( r"^(?:(?:[A-Z$#@]{1}[A-Z0-9$#@-]{0,7})(?:[.]{1})){1,21}[A-Z$#@]{1}[A-Z0-9$#@-]{0,7}\([A-Z$#@\*]{1}[A-Z0-9$#@\*]{0,7}\)$", @@ -2301,22 +2576,31 @@ def get_attributes_of_any_dataset_created( asa_text, volume=None ): - """ - Get the attributes of dataset created by the function allocate_destination_data_set + """Get the attributes of dataset created by the function allocate_destination_data_set except for VSAM. - Arguments: - dest (str) -- Name of the destination data set. - src_ds_type (str) -- Source of the destination data set. - src (str) -- Name of the source data set, used as a model when appropiate. - src_name (str) -- Extraction of the source name without the member pattern. - is_binary (bool) -- Whether the data set will contain binary data. - asa_text (bool) -- Whether the data set will contain ASA control characters. - volume (str, optional) -- Volume where the data set should be allocated into. - - Returns: - params (dict) -- Parameters used for the dataset created as name, type, - space_primary, space_secondary, record_format, record_length, block_size and space_type + Parameters + ---------- + dest : str + Name of the destination data set. + src_ds_type : str + Source of the destination data set. + src : str + Name of the source data set, used as a model when appropiate. + src_name : str + Extraction of the source name without the member pattern. + is_binary : bool + Whether the data set will contain binary data. + asa_text : bool + Whether the data set will contain ASA control characters. + volume : str, optional + Volume where the data set should be allocated into. + + Returns + ------- + dict + Parameters used for the dataset created as name, type, + space_primary, space_secondary, record_format, record_length, block_size and space_type. """ params = {} if src_ds_type == "USS": @@ -2368,23 +2652,37 @@ def allocate_destination_data_set( Allocates a new destination data set to copy into, erasing a preexistent one if needed. - Arguments: - src (str) -- Name of the source data set, used as a model when appropiate. - dest (str) -- Name of the destination data set. - src_ds_type (str) -- Source of the destination data set. - dest_ds_type (str) -- Type of the destination data set. - dest_exists (bool) -- Whether the destination data set already exists. - force (bool) -- Whether to replace an existent data set. - is_binary (bool) -- Whether the data set will contain binary data. - executable (bool) -- Whether the data to copy is an executable dataset or file. - asa_text (bool) -- Whether the data to copy has ASA control characters. - dest_data_set (dict, optional) -- Parameters containing a full definition - of the new data set; they will take precedence over any other allocation logic. - volume (str, optional) -- Volume where the data set should be allocated into. - - Returns: - bool -- True if the data set was created, False otherwise. - dest_params (dict) -- Parameters used for the dataset created as name, + Parameters + ---------- + src : str + Name of the source data set, used as a model when appropiate. + dest : str + Name of the destination data set. + src_ds_type : str + Source of the destination data set. + dest_ds_type : str + Type of the destination data set. + dest_exists : bool + Whether the destination data set already exists. + force : bool + Whether to replace an existent data set. + is_binary : bool + Whether the data set will contain binary data. + executable : bool + Whether the data to copy is an executable dataset or file. + asa_text : bool + Whether the data to copy has ASA control characters. + dest_data_set : dict, optional + Parameters containing a full definition + of the new data set; they will take precedence over any other allocation logic. + volume : str, optional + Volume where the data set should be allocated into. + + Returns + ------- + Union(bool, dict) + True if the data set was created, False otherwise. + Parameters used for the dataset created as name, block_size, record_format, record_length, space_primary, space_secondary, space_type, type. """ @@ -2539,16 +2837,20 @@ def allocate_destination_data_set( def normalize_line_endings(src, encoding=None): - """ - Normalizes src's encoding to IBM-037 (a dataset's default) and then normalizes + """Normalizes src's encoding to IBM-037 (a dataset's default) and then normalizes its line endings to LF. - Arguments: - src (str) -- Path of a USS file. - encoding (dict, optional) -- Encoding options for the module. - - Returns: - str -- Path to the normalized file. + Parameters + ---------- + src : str + Path of a USS file. + encoding : dict, optional + Encoding options for the module. + + Returns + ------- + str + Path to the normalized file. """ # Before copying into a destination dataset, we'll make sure that # the source file doesn't contain any carriage returns that would @@ -2590,11 +2892,15 @@ def data_set_locked(dataset_name): Checks if a data set is in use and therefore locked (DISP=SHR), which is often caused by a long running task. Returns a boolean value to indicate the data set status. - Arguments: - dataset_name (str) - the data set name used to check if there is a lock. + Parameters + ---------- + dataset_name : str + The data set name used to check if there is a lock. - Returns: - bool -- True if the data set is locked, or False if the data set is not locked. + Returns + ------- + bool + True if the data set is locked, or False if the data set is not locked. """ # Using operator command "D GRS,RES=(*,{dataset_name})" to detect if a data set # is in use, when a data set is in use it will have "EXC/SHR and SHARE" @@ -2617,6 +2923,45 @@ def data_set_locked(dataset_name): def run_module(module, arg_def): + """Initialize module + + Parameters + ---------- + module : AnsibleModule + The AnsibleModule object from currently + running module. + arg_def : dict + Arguments. + + Raises + ------ + fail_json + Parameter verification failed. + fail_json + Source does not exist. + fail_json + Source is not readable. + fail_json + Encoding conversion is only valid for USS source. + fail_json + Destination is not writable. + fail_json + Any exception. + fail_json + Incompatible target type for source. + fail_json + Neither the source or the destination are ASA text files. + fail_json + Unable to write to dest because a task is accessing the data set. + fail_json + Alias support for text-based data sets is not available. + fail_json + Cannot write a partitioned data set (PDS) to a USS file. + fail_json + Destination already exists on the system, unable to overwrite unless force=True is specified. + fail_json + Unable to allocate destination data set. + """ # ******************************************************************** # Verify the validity of module args. BetterArgParser raises ValueError # when a parameter fails its validation check @@ -3126,6 +3471,13 @@ def run_module(module, arg_def): def main(): + """Run the zos_copy module core functions. + + Raises + ------ + fail_json + CopyOperationError. + """ module = AnsibleModule( argument_spec=dict( src=dict(type='str'), @@ -3322,6 +3674,22 @@ def main(): class EncodingConversionError(Exception): def __init__(self, src, f_code, t_code): + """Error converting encoding. + + Parameters + ---------- + src : str + Source where the file is in. + f_code : str + Encoding format the source is in. + t_code : str + Encoding format it tried to convert it to. + + Attributes + ---------- + msg : str + Human readable string describing the exception. + """ self.msg = "Unable to convert encoding for {0} from {1} to {2}".format( src, f_code, t_code ) @@ -3330,12 +3698,36 @@ def __init__(self, src, f_code, t_code): class NonExistentSourceError(Exception): def __init__(self, src): + """Error trying to find a dataset that doesn't exist. + + Parameters + ---------- + src : str + Source where the dataset was expected to be. + + Attributes + ---------- + msg : str + Human readable string describing the exception. + """ self.msg = "Source data set {0} does not exist".format(src) super().__init__(self.msg) class DataSetMemberAttributeError(Exception): def __init__(self, src): + """Error measuring a dataset member. + + Parameters + ---------- + src : str + Path the member is in. + + Attributes + ---------- + msg : str + Human readable string describing the exception. + """ self.msg = "Unable to get size and record length of member {0}".format(src) super().__init__(self.msg) @@ -3354,6 +3746,38 @@ def __init__( overwritten_members=None, new_members=None ): + """Error in a copy operation. + + Parameters + ---------- + msg : str + Human readable string describing the exception. + rc : int + Result code. + stdout : str + Standart output. + stderr : str + Standart error. + stdout_lines : str + Standart output divided in lines. + stderr_lines : str + Standart error divided in lines. + cmd : str + Command. + dest_exists : bool + If the destination exists. + overwritten_members : list + Members replaced before the error ocurred. + new_members : list + New members that could be copied before the error ocurred. + + Attributes + ---------- + overwritten_members : list + Members replaced before the error ocurred. + new_members : list + New members that could be copied before the error ocurred. + """ self.json_args = dict( msg=msg, rc=rc, From 4a00551ce8e06be169a7107ccbe37336abc39053 Mon Sep 17 00:00:00 2001 From: IsaacVRey <isaac.vega.rey@gmail.com> Date: Tue, 14 May 2024 15:59:12 -0600 Subject: [PATCH 375/495] [Documentation][dd_statement] Add and estandarize docstring to dd_statement.py (#1323) * Add and estandarize docstring to dd_statement.py * Add docstrings to module_utils/ickdsf.py * Create changelog fragment * Remove changes in ickdsf.py * Modified the google style to numpy * Modify the changelog fragment * Modify docstrings * Standarize numpy style * Add and standarize docstrings on modules/zos_apf.py * Delete mistake * fixed docstrings --------- Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> --- .../1323-Update_docstring-dd_statement.yml | 3 + plugins/module_utils/dd_statement.py | 613 ++++++++++++------ 2 files changed, 433 insertions(+), 183 deletions(-) create mode 100644 changelogs/fragments/1323-Update_docstring-dd_statement.yml diff --git a/changelogs/fragments/1323-Update_docstring-dd_statement.yml b/changelogs/fragments/1323-Update_docstring-dd_statement.yml new file mode 100644 index 000000000..6d94b2a94 --- /dev/null +++ b/changelogs/fragments/1323-Update_docstring-dd_statement.yml @@ -0,0 +1,3 @@ +trivial: + - dd_statement - Updated docstrings to numpy style for visual aid to developers. + (https://github.com/ansible-collections/ibm_zos_core/pull/1323). \ No newline at end of file diff --git a/plugins/module_utils/dd_statement.py b/plugins/module_utils/dd_statement.py index 57b7bcdad..5bef5d81a 100644 --- a/plugins/module_utils/dd_statement.py +++ b/plugins/module_utils/dd_statement.py @@ -1,4 +1,4 @@ -# Copyright (c) IBM Corporation 2020 - 2024 +# Copyright (c) IBM Corporation 2020, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -31,12 +31,24 @@ class DDStatement(object): def __init__(self, name, definition): """A Python representation of a z/OS DD statement. - Args: - name (str): The DD name to use for this DD statement. - definition (Union[DataDefinition, list[DataDefinition]]): One or more DataDefinition objects for the DD. - - Raises: - ValueError: When a value other than a DataDefinition is provided for definition parameter. + Parameters + ---------- + name : str + The DD name to use for this DD statement. + definition : Union[DataDefinition, list[DataDefinition]] + One or more DataDefinition objects for the DD. + + Attributes + ---------- + name : str + The DD name to use for this DD statement. + definition : Union[DataDefinition, list[DataDefinition]] + One or more DataDefinition objects for the DD. + + Raises + ------ + ValueError + When a value other than a DataDefinition is provided for definition parameter. """ self.name = name self.definition = definition @@ -46,8 +58,10 @@ def get_mvscmd_string(self): """Build the string representing this DD statement to be used as part of mvscmd/mvscmdauth call. - Returns: - str: The string representation of this DD statement, as expected by mvscmd. + Returns + ------- + str + The string representation of this DD statement, as expected by mvscmd. """ mvscmd_string = "--{0}=".format(self.name) if isinstance(self.definition, list): @@ -76,8 +90,10 @@ def _assert_valid_data_definition(self): """Assert that the provided single data set definition is not an invalid type. - Raises: - ValueError: When an invalid type is specified in DD concatenation. + Raises + ------ + ValueError + When an invalid type is specified in DD concatenation. """ if not isinstance(self.definition, DataDefinition): raise ValueError("DDStatement expects an object of type DataDefinition.") @@ -86,8 +102,10 @@ def _assert_valid_concatenation(self): """Assert that the provided data set concatenation does not contain any invalid types. - Raises: - ValueError: When an invalid type is specified in DD concatenation. + Raises + ------ + ValueError + When an invalid type is specified in DD concatenation. """ for dd in self.definition: if not isinstance( @@ -102,8 +120,15 @@ class DataDefinition(object): def __init__(self, name): """Generic DD data type to be used in a DDStatement. - Args: - name (str): The name used to refer to the resource pointed to by the DD. + Parameters + ---------- + name : str + The name used to refer to the resource pointed to by the DD. + + Attributes + ---------- + name : str + The name used to refer to the resource pointed to by the DD. """ self.name = name @@ -111,8 +136,10 @@ def _build_arg_string(self): """Build a string representing the arguments of this particular data type to be used by mvscmd/mvscmdauth. - Raises: - NotImplementedError: When the abstract version of the method is called. + Raises + ------ + NotImplementedError + When the abstract version of the method is called. """ raise NotImplementedError @@ -120,13 +147,19 @@ def _append_mvscmd_string(self, string, variable_name, variable): """Appends additional arguments to a formatted mvscmd DD name string. If no values are provided, returns string as provided. - Args: - string (str): The string to append an argument to. - variable_name (str): The name of the argument to use as expected by mvscmd. - variable (Union[str, int, list[str, int]]): The argument value to append. - - Returns: - str: The provided string with additional arguments appended. + Parameters + ---------- + string : str + The string to append an argument to. + variable_name : str + The name of the argument to use as expected by mvscmd. + variable : Union[str, int, list[str, int]] + The argument value to append. + + Returns + ------- + str + The provided string with additional arguments appended. """ if ( variable is None @@ -159,37 +192,89 @@ def __init__( """File DD data type to be used in a DDStatement. Defaults and validation are handled my mvscmd. - Args: - path_name (str): An absolute UNIX file path. - normal_disposition (str, optional): What to do with path after normal program termination. - May be one of keep, delete. - Defaults to None. - conditional_disposition (str, optional): What to do with path after abnormal program termination. - May be one of keep, delete. - Defaults to None. - path_mode (Union[str, int], optional): The file access attributes for the UNIX file. - Provide in chmod-like number format. Defaults to None. - access_group (str, optional): the access mode for UNIX file. - Options are: ORDWR, ORDONLY, OWRONLY. - Defaults to None. - status_group (list[str], optional): the status for UNIX file. - Specify up to 6 of: OCREAT, OEXCL, OAPPEND, ONOCTTY, ONONBLOCK, OSYNC, OTRUNC. - Defaults to None. - file_data (str, optional): the type of data that is (or will be) stored in the UNIX file. - Defaults to None. - record_length (int, optional): the specified logical record length for the - UNIX file being allocated. This is required in situations where the data will be processed as - records and therefore, the record length, block size and record format need to be supplied since - a UNIX file would normally be treated as a stream of bytes. - Defaults to None. - block_size (int, optional): the specified block size for the UNIX file - being allocated since a UNIX file would normally - be treated as a stream of bytes. - Defaults to None. - record_format (str, optional): the specified record format for the UNIX file - being allocated since an UNIX file would normally - be treated as a stream of bytes. - Defaults to None. + Parameters + ---------- + path_name : str + An absolute UNIX file path. + normal_disposition : str, optional + What to do with path after normal program termination. + May be one of keep, delete. + Defaults to None. + conditional_disposition : str, optional + What to do with path after abnormal program termination. + May be one of keep, delete. + Defaults to None. + path_mode : Union[str, int], optional + The file access attributes for the UNIX file. + Provide in chmod-like number format. Defaults to None. + access_group : str, optional + The access mode for UNIX file. + Options are: ORDWR, ORDONLY, OWRONLY. + Defaults to None. + status_group : list[str], optional + The status for UNIX file. + Specify up to 6 of: OCREAT, OEXCL, OAPPEND, ONOCTTY, ONONBLOCK, OSYNC, OTRUNC. + Defaults to None. + file_data : str, optional + The type of data that is (or will be) stored in the UNIX file. + Defaults to None. + record_length : int, optional + The specified logical record length for the + UNIX file being allocated. This is required in situations where the data will be processed as + records and therefore, the record length, block size and record format need to be supplied since + a UNIX file would normally be treated as a stream of bytes. + Defaults to None. + block_size : int, optional + The specified block size for the UNIX file + being allocated since a UNIX file would normally + be treated as a stream of bytes. + Defaults to None. + record_format : str, optional + The specified record format for the UNIX file + being allocated since an UNIX file would normally + be treated as a stream of bytes. + Defaults to None. + + Attributes + ---------- + normal_disposition : str, optional + What to do with path after normal program termination. + May be one of keep, delete. + Defaults to None. + conditional_disposition : str, optional + What to do with path after abnormal program termination. + May be one of keep, delete. + Defaults to None. + path_mode : Union[str, int], optional + The file access attributes for the UNIX file. + Provide in chmod-like number format. Defaults to None. + access_group : str, optional + The access mode for UNIX file. + Options are: ORDWR, ORDONLY, OWRONLY. + Defaults to None. + status_group : list[str], optional + The status for UNIX file. + Specify up to 6 of: OCREAT, OEXCL, OAPPEND, ONOCTTY, ONONBLOCK, OSYNC, OTRUNC. + Defaults to None. + file_data : str, optional + The type of data that is (or will be) stored in the UNIX file. + Defaults to None. + record_length : int, optional + The specified logical record length for the + UNIX file being allocated. This is required in situations where the data will be processed as + records and therefore, the record length, block size and record format need to be supplied since + a UNIX file would normally be treated as a stream of bytes. + Defaults to None. + block_size : int, optional + The specified block size for the UNIX file + being allocated since a UNIX file would normally + be treated as a stream of bytes. + Defaults to None. + record_format : str, optional + The specified record format for the UNIX file + being allocated since an UNIX file would normally + be treated as a stream of bytes. + Defaults to None. """ super().__init__(path_name) self.normal_disposition = normal_disposition @@ -205,6 +290,11 @@ def __init__( def _build_arg_string(self): """Build a string representing the arguments of this particular data type to be used by mvscmd/mvscmdauth. + + Returns + ------- + str + String to be used by mvscmd/mvscmdauth. """ mvscmd_string = "" mvscmd_string = self._append_mvscmd_string( @@ -270,87 +360,181 @@ def __init__( """Dataset DD data type to be used in a DDStatement. Defaults and validation are handled my mvscmd. - Args: - dataset_name (str): The name of the dataset to associate with the DD statement. - disposition (str, optional): The expected disposition of the dataset. - Valid options are: EXCL, OLD, SHR, MOD, NEW. - Defaults to "". - type (str, optional): The type of dataset. - Valid options are: SEQ, BASIC, LARGE, PDS, PDSE, LIBRARY, LDS, RRDS, ESDS, KSDS. - Defaults to None. - primary (int, optional): The amount of primary space to allocate for the dataset. - Defaults to None. - primary_unit (str, optional): The unit of size to use when specifying primary space. - May be one of: K or KB (kilobytes), M or MB (megabytes), - G or GB (gigabytes), C or CYL (cylinders), T or TRK (tracks). - Defaults to "TRK". - secondary (int, optional): The amount of secondary space to allocate for the dataset. - Defaults to None. - secondary_unit (str, optional): The unit of size to use when specifying secondary space. - May be one of: K or KB (kilobytes), M or MB (megabytes), - G or GB (gigabytes), C or CYL (cylinders), T or TRK (tracks). - Defaults to "TRK". - normal_disposition (str, optional): tells the system what to do with the data set after normal termination of the program. - Valid options are: delete, keep, catalog/catlg, uncatalog/uncatlg. - Defaults to None. - conditional_disposition ([type], optional): tells the system what to do with the data set after abnormal termination of the program. - Valid options are: delete, keep, catalog/catlg, uncatalog/uncatlg. - Defaults to None. - block_size (int, optional): The block size of the data set. - Defaults to None. - directory_blocks (int, optional): The number of directory blocks to allocate for the data set. - Defaults to None. - record_format (str, optional): The record format of the dataset. - Valid options are: FB, VB, FBA, VBA, U. - Defaults to None. - record_length (int, optional): The length, in bytes, of each record in the data set. - Defaults to None. - storage_class (str, optional): the storage class for an SMS-managed dataset. - Not valid for datasets that are not SMS-managed. - Note that all non-linear VSAM datasets are SMS-managed. - Defaults to None. - data_class (str, optional): the data class for an SMS-managed dataset. - Optional for SMS-managed datasets that do not match an SMS-rule. - Not valid for datasets that are not SMS-managed. - Note that all non-linear VSAM datasets are SMS-managed. - Defaults to None. - management_class (str, optional): is the management class for an SMS-managed dataset. - Optional for SMS-managed datasets that do not match an SMS-rule. - Not valid for datasets that are not SMS-managed. - Note that all non-linear VSAM datasets are SMS-managed. - Defaults to None. - key_length (int, optional): The key length of a record. - Required for Key Sequenced Datasets (KSDS). - Defaults to None. - key_offset (int, optional): The key offset is the position of the first byte of the key - in each logical record of a the specified VSAM data set. - If the key is at the beginning of the logical record, the offset is zero. - Required for Key Sequenced Datasets (KSDS). - Defaults to None. - volumes (Union[str, list[str]], optional): a list of volume serials. - When providing multiple volumes, processing will begin with - the first volume in the provided list. Offline volumes are not considered. - Volumes can always be provided when not using SMS. - When using SMS, volumes can be provided when the storage class being used - has GUARANTEED_SPACE=YES specified. Otherwise, the allocation will fail. - Defaults to None. - dataset_key_label (str, optional): The label for the encryption key used by the system to encrypt the data set. - Only applicable when using encrypted datasets. - Defaults to None. - key_label1 (str, optional): The label for the key encrypting key used by the Encryption Key Manager. - Only applicable when using encrypted datasets. - Defaults to None. - key_encoding1 (str, optional): How the label for the key encrypting key specified by keylab1 is encoded by the Encryption Key Manager. - Valid values are: L, H - Only applicable when using encrypted datasets. - Defaults to None. - key_label2 (str, optional): The label for the key encrypting key used by the Encryption Key Manager. - Only applicable when using encrypted datasets. - Defaults to None. - key_encoding2 (str, optional): How the label for the key encrypting key specified by keylab2 is encoded by the Encryption Key Manager. - Valid values are: L, H - Only applicable when using encrypted datasets. - Defaults to None. + Parameters + ---------- + dataset_name : str + The name of the dataset to associate with the DD statement. + disposition : str, optional + The expected disposition of the dataset. + Valid options are: EXCL, OLD, SHR, MOD, NEW. + Defaults to "". + type : str, optional + The type of dataset. + Valid options are: SEQ, BASIC, LARGE, PDS, PDSE, LIBRARY, LDS, RRDS, ESDS, KSDS. + Defaults to None. + primary : int, optional + The amount of primary space to allocate for the dataset. + Defaults to None. + primary_unit : str, optional + The unit of size to use when specifying primary space. + May be one of: K or KB (kilobytes), M or MB (megabytes), + G or GB (gigabytes), C or CYL (cylinders), T or TRK (tracks). + Defaults to "TRK". + secondary : int, optional + The amount of secondary space to allocate for the dataset. + Defaults to None. + secondary_unit : str, optional + The unit of size to use when specifying secondary space. + May be one of: K or KB (kilobytes), M or MB (megabytes), + G or GB (gigabytes), C or CYL (cylinders), T or TRK (tracks). + Defaults to "TRK". + normal_disposition : str, optional + Tells the system what to do with the data set after normal termination of the program. + Valid options are: delete, keep, catalog/catlg, uncatalog/uncatlg. + Defaults to None. + conditional_disposition : str, optional + Tells the system what to do with the data set after abnormal termination of the program. + Valid options are: delete, keep, catalog/catlg, uncatalog/uncatlg. + Defaults to None. + block_size : int, optional + The block size of the data set. + Defaults to None. + directory_blocks : int, optional + The number of directory blocks to allocate for the data set. + Defaults to None. + record_format : str, optional + The record format of the dataset. + Valid options are: FB, VB, FBA, VBA, U. + Defaults to None. + record_length : int, optional + The length, in bytes, of each record in the data set. + Defaults to None. + storage_class : str, optional + The storage class for an SMS-managed dataset. + Not valid for datasets that are not SMS-managed. + Note that all non-linear VSAM datasets are SMS-managed. + Defaults to None. + data_class : str, optional + The data class for an SMS-managed dataset. + Optional for SMS-managed datasets that do not match an SMS-rule. + Not valid for datasets that are not SMS-managed. + Note that all non-linear VSAM datasets are SMS-managed. + Defaults to None. + management_class : str, optional + Is the management class for an SMS-managed dataset. + Optional for SMS-managed datasets that do not match an SMS-rule. + Not valid for datasets that are not SMS-managed. + Note that all non-linear VSAM datasets are SMS-managed. + Defaults to None. + key_length : int, optional + The key length of a record. + Required for Key Sequenced Datasets (KSDS). + Defaults to None. + key_offset : int, optional + The key offset is the position of the first byte of the key + in each logical record of a the specified VSAM data set. + If the key is at the beginning of the logical record, the offset is zero. + Required for Key Sequenced Datasets (KSDS). + Defaults to None. + volumes : Union[str, list[str]], optional + A list of volume serials. + When providing multiple volumes, processing will begin with + the first volume in the provided list. Offline volumes are not considered. + Volumes can always be provided when not using SMS. + When using SMS, volumes can be provided when the storage class being used + has GUARANTEED_SPACE=YES specified. Otherwise, the allocation will fail. + Defaults to None. + dataset_key_label : str, optional + The label for the encryption key used by the system to encrypt the data set. + Only applicable when using encrypted datasets. + Defaults to None. + key_label1 : str, optional + The label for the key encrypting key used by the Encryption Key Manager. + Only applicable when using encrypted datasets. + Defaults to None. + key_encoding1 : str, optional + How the label for the key encrypting key specified by keylab1 is encoded by the Encryption Key Manager. + Valid values are: L, H. + Only applicable when using encrypted datasets. + Defaults to None. + key_label2 : str, optional + The label for the key encrypting key used by the Encryption Key Manager. + Only applicable when using encrypted datasets. + Defaults to None. + key_encoding2 : str, optional + How the label for the key encrypting key specified by keylab2 is encoded by the Encryption Key Manager. + Valid values are: L, H + Only applicable when using encrypted datasets. + Defaults to None. + + Attributes + ---------- + disposition : str + The expected disposition of the dataset. + type : str + The type of dataset. + primary : int + The amount of primary space to allocate for the dataset. + secondary : int + The amount of secondary space to allocate for the dataset. + normal_disposition : str + tells the system what to do with the data set after normal termination of the program. + conditional_disposition : str + Tells the system what to do with the data set after abnormal termination of the program. + block_size : int + The block size of the data set. + directory_blocks : int + The number of directory blocks to allocate for the data set. + record_format : str + The record format of the dataset. + record_length : int + The length, in bytes, of each record in the data set. + storage_class : str + The storage class for an SMS-managed dataset. + Not valid for datasets that are not SMS-managed. + Note that all non-linear VSAM datasets are SMS-managed. + data_class : str + The data class for an SMS-managed dataset. + Optional for SMS-managed datasets that do not match an SMS-rule. + Not valid for datasets that are not SMS-managed. + Note that all non-linear VSAM datasets are SMS-managed. + management_class : str + Is the management class for an SMS-managed dataset. + Optional for SMS-managed datasets that do not match an SMS-rule. + Not valid for datasets that are not SMS-managed. + Note that all non-linear VSAM datasets are SMS-managed. + key_length : int + The key length of a record. + Required for Key Sequenced Datasets (KSDS). + key_offset : int + The key offset is the position of the first byte of the key + in each logical record of a the specified VSAM data set. + If the key is at the beginning of the logical record, the offset is zero. + Required for Key Sequenced Datasets (KSDS). + volumes : Union[str, list[str]] + A list of volume serials. + When providing multiple volumes, processing will begin with + the first volume in the provided list. Offline volumes are not considered. + Volumes can always be provided when not using SMS. + When using SMS, volumes can be provided when the storage class being used + has GUARANTEED_SPACE=YES specified. Otherwise, the allocation will fail. + dataset_key_label : str + The label for the encryption key used by the system to encrypt the data set. + Only applicable when using encrypted datasets. + key_label1 : str + The label for the key encrypting key used by the Encryption Key Manager. + Only applicable when using encrypted datasets. + key_encoding1 : str + How the label for the key encrypting key specified by keylab1 is encoded by the Encryption Key Manager. + Valid values are: L, H + Only applicable when using encrypted datasets. + key_label2 : str + The label for the key encrypting key used by the Encryption Key Manager. + Only applicable when using encrypted datasets. + key_encoding2 : str + How the label for the key encrypting key specified by keylab2 is encoded by the Encryption Key Manager. + Valid values are: L, H + Only applicable when using encrypted datasets. """ super().__init__(dataset_name) self.disposition = disposition @@ -401,6 +585,11 @@ def __init__( def _build_arg_string(self): """Build a string representing the arguments of this particular data type to be used by mvscmd/mvscmdauth. + + Returns + ------- + str + String to be used by mvscmd/mvscmdauth. """ if not self.disposition: return "" @@ -470,14 +659,21 @@ class VolumeDefinition(DataDefinition): def __init__(self, volume_name): """Volume DD data type to be used in a DDStatement. - Args: - volume_name (str): The volume name to associate with the DD statement. + Parameters + ---------- + volume_name : str + The volume name to associate with the DD statement. """ super().__init__(volume_name) def _build_arg_string(self): """Build a string representing the arguments of this particular data type to be used by mvscmd/mvscmdauth. + + Returns + ------- + str + ',vol' """ return ",vol" @@ -491,6 +687,11 @@ def __init__(self): def _build_arg_string(self): """Build a string representing the arguments of this particular data type to be used by mvscmd/mvscmdauth. + + Returns + ------- + str + '' """ return "" @@ -504,6 +705,11 @@ def __init__(self): def _build_arg_string(self): """Build a string representing the arguments of this particular data type to be used by mvscmd/mvscmdauth. + + Returns + ------- + str + '' """ return "" @@ -522,21 +728,28 @@ def __init__( """Stdin DD Data type to be used in a DDStatement. This should be used in cases where "DD *" would be used in a jcl. - Args: - content (Union[str, list[str]]): The content to write to temporary data set / stdin. - Content can be provided as a string or a list of strings where each list item - corresponds to a single line. - record_format (str, optional): The record format to use for the dataset. - Valid options are: FB, VB, FBA, VBA, U. - Defaults to "FB". - space_primary (int, optional): The amount of primary space to allocate for the dataset. - Defaults to 5. - space_secondary (int, optional): The amount of secondary space to allocate for the dataset. - Defaults to 5. - space_type (str, optional): The unit of measurement to use when defining primary and secondary space. - Defaults to "M". - record_length (int, optional): The length, in bytes, of each record in the data set. - Defaults to 80. + Parameters + ---------- + content : Union[str, list[str]] + The content to write to temporary data set / stdin. + Content can be provided as a string or a list of strings where each list item + corresponds to a single line. + record_format : str, optional + The record format to use for the dataset. + Valid options are: FB, VB, FBA, VBA, U. + Defaults to "FB". + space_primary : int, optional + The amount of primary space to allocate for the dataset. + Defaults to 5. + space_secondary : int, optional + The amount of secondary space to allocate for the dataset. + Defaults to 5. + space_type : str, optional + The unit of measurement to use when defining primary and secondary space. + Defaults to "M". + record_length : int, optional + The length, in bytes, of each record in the data set. + Defaults to 80. """ self.name = None name = DataSet.create_temp( @@ -553,12 +766,19 @@ def __init__( DataSet.write(name, content) def __del__(self): + """Delete dataset with the name of this object + """ if self.name: DataSet.delete(self.name) def _build_arg_string(self): """Build a string representing the arguments of this particular data type to be used by mvscmd/mvscmdauth. + + Returns + ------- + str + '' """ return "" @@ -567,21 +787,29 @@ class InputDefinition(StdinDefinition): """Input DD Data type to be used in a DDStatement. This should be used in cases where "DD *" would be used in a jcl. Added for consistent naming with OutputDefinition, is exact same as StdinDefinition. - Args: - content (Union[str, list[str]]): The content to write to temporary data set / stdin. - Content can be provided as a string or a list of strings where each list item - corresponds to a single line. - record_format (str, optional): The record format to use for the dataset. - Valid options are: FB, VB, FBA, VBA, U. - Defaults to "FB". - space_primary (int, optional): The amount of primary space to allocate for the dataset. - Defaults to 5. - space_secondary (int, optional): The amount of secondary space to allocate for the dataset. - Defaults to 5. - space_type (str, optional): The unit of measurement to use when defining primary and secondary space. - Defaults to "M". - record_length (int, optional): The length, in bytes, of each record in the data set. - Defaults to 80. + + Parameters + ---------- + content : Union[str, list[str]] + The content to write to temporary data set / stdin. + Content can be provided as a string or a list of strings where each list item + corresponds to a single line. + record_format : str, optional + The record format to use for the dataset. + Valid options are: FB, VB, FBA, VBA, U. + Defaults to "FB". + space_primary : int, optional + The amount of primary space to allocate for the dataset. + Defaults to 5. + space_secondary : int, optional + The amount of secondary space to allocate for the dataset. + Defaults to 5. + space_type : str, optional + The unit of measurement to use when defining primary and secondary space. + Defaults to "M". + record_length _ int, optional + The length, in bytes, of each record in the data set. + Defaults to 80. """ pass @@ -602,18 +830,24 @@ def __init__( output from a program but does not want to store in a persistent data set or file. - Args: - record_format (str, optional): The record format to use for the dataset. - Valid options are: FB, VB, FBA, VBA, U. - Defaults to "VB". - space_primary (int, optional): The amount of primary space to allocate for the dataset. - Defaults to 5. - space_secondary (int, optional): The amount of secondary space to allocate for the dataset. - Defaults to 5. - space_type (str, optional): The unit of measurement to use when defining primary and secondary space. - Defaults to "M". - record_length (int, optional): The length, in bytes, of each record in the data set. - Defaults to 80. + Parameters + ---------- + record_format : str, optional + The record format to use for the dataset. + Valid options are: FB, VB, FBA, VBA, U. + Defaults to "VB". + space_primary : int, optional + The amount of primary space to allocate for the dataset. + Defaults to 5. + space_secondary : int, optional + The amount of secondary space to allocate for the dataset. + Defaults to 5. + space_type : str, optional + The unit of measurement to use when defining primary and secondary space. + Defaults to "M". + record_length : int, optional + The length, in bytes, of each record in the data set. + Defaults to 80. """ self.name = None name = DataSet.create_temp( @@ -627,12 +861,19 @@ def __init__( super().__init__(name) def __del__(self): + """Delete dataset with the name of this object + """ if self.name: DataSet.delete(self.name) def _build_arg_string(self): """Build a string representing the arguments of this particular data type to be used by mvscmd/mvscmdauth. + + Returns + ------- + str + '' """ return "" @@ -644,9 +885,10 @@ def __init__(self, tmphlq=None): A temporary data set will be created for use in cases where VIO is unavailable. Defaults for VIODefinition should be sufficient. - Args: - tmphlq (str, optional): HLQ to be used for temporary datasets. Defaults to None. - + Parameters + ---------- + tmphlq : str, optional + HLQ to be used for temporary datasets. Defaults to None. """ if tmphlq: hlq = tmphlq @@ -667,5 +909,10 @@ def __del__(self): def _build_arg_string(self): """Build a string representing the arguments of this particular data type to be used by mvscmd/mvscmdauth. + + Returns + ------- + str + ',vio' """ return ",vio" From 7e1cd7d435ef8d7561b8f6e60dd579ab3699524c Mon Sep 17 00:00:00 2001 From: IsaacVRey <isaac.vega.rey@gmail.com> Date: Tue, 14 May 2024 16:00:28 -0600 Subject: [PATCH 376/495] [Documentation][mvs_cmd] Add docstrings to module_utils/mvs_cmd.py (#1334) * Add docstrings to module_utils/mvs_cmd.py * Create changelog fragment * Modify the google style to numpy * Standarize numpy style --------- Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> --- .../1334-update-docstring-mcs_cmd.yml | 3 + plugins/module_utils/mvs_cmd.py | 199 +++++++++++++----- 2 files changed, 149 insertions(+), 53 deletions(-) create mode 100644 changelogs/fragments/1334-update-docstring-mcs_cmd.yml diff --git a/changelogs/fragments/1334-update-docstring-mcs_cmd.yml b/changelogs/fragments/1334-update-docstring-mcs_cmd.yml new file mode 100644 index 000000000..ac2ad367f --- /dev/null +++ b/changelogs/fragments/1334-update-docstring-mcs_cmd.yml @@ -0,0 +1,3 @@ +trivial: + - mvs_cmd - Updated docstrings to numpy style for visual aid to developers. + (https://github.com/ansible-collections/ibm_zos_core/pull/1334). \ No newline at end of file diff --git a/plugins/module_utils/mvs_cmd.py b/plugins/module_utils/mvs_cmd.py index 6331a1772..7307ff300 100644 --- a/plugins/module_utils/mvs_cmd.py +++ b/plugins/module_utils/mvs_cmd.py @@ -26,11 +26,20 @@ def iebcopy(cmd, dds=None, authorized=False): a partitioned data set into a sequential data set (called an unload data set or PDSU), and to copy members from the backup into a partitioned data set. - Arguments: - cmd {str} -- The command to pass to IEBCOPY - dds {dict} -- Any DD statements to pass to MVS command - authorized {bool} -- Whether the command should be run in authorized - mode + Parameters + ---------- + cmd : str + The command to pass to IEBCOPY. + dds : dict + Any DD statements to pass to MVS command. + authorized : bool + Whether the command should be run in authorized + mode. + + Returns + ------- + tuple(int, str, str) + A tuple of return code, stdout and stderr. """ return _run_mvs_command("IEBCOPY", cmd, dds, authorized) @@ -40,11 +49,20 @@ def iebedit(cmd, dds=None, authorized=False): or job steps. These jobs or job steps can be entered into the job stream at a later time for processing. - Arguments: - cmd {str} -- The command to pass to IEBEDIT - dds {dict} -- Any DD statements to pass to MVS command - authorized {bool} -- Whether the command should be run in authorized - mode + Parameters + ---------- + cmd : str + The command to pass to IEBEDIT. + dds : dict + Any DD statements to pass to MVS command. + authorized : bool + Whether the command should be run in authorized + mode. + + Returns + ------- + tuple(int, str, str) + A tuple of return code, stdout and stderr. """ return _run_mvs_command("IEBEDIT", cmd, dds, authorized) @@ -56,11 +74,19 @@ def iebcompr(cmd, dds=None, authorized=False): undefined records from blocked or unblocked data sets or members can also be compared. However, you should not use IEBCOMPR to compare load modules. - Arguments: - cmd {str} -- The command to pass to IEBCOMPR - dds {dict} -- Any DD statements to pass to MVS command - authorized {bool} -- Whether the command should be run in authorized - mode + Parameters + ---------- + cmd : str + The command to pass to IEBCOMPR. + dds : dict + Any DD statements to pass to MVS command. + authorized : bool + Whether the command should be run in authorized + mode. + + Returns: + tuple(int, str, str) + A tuple of return code, stdout and stderr. """ return _run_mvs_command("IEBCOMPR", cmd, dds, authorized) @@ -70,11 +96,20 @@ def iebdg(cmd, dds=None, authorized=False): data to be used as a programming debugging aid. This pattern of data can then be analyzed quickly for predictable results. - Arguments: - cmd {str} -- The command to pass to IEBDG - dds {dict} -- Any DD statements to pass to MVS command - authorized {bool} -- Whether the command should be run in authorized - mode + Parameters + ---------- + cmd : str + The command to pass to IEBDG. + dds : dict + Any DD statements to pass to MVS command. + authorized : bool + Whether the command should be run in authorized + mode. + + Returns + ------- + tuple(int, str, str) + A tuple of return code, stdout and stderr. """ return _run_mvs_command("IEBDG", cmd, dds, authorized) @@ -98,11 +133,20 @@ def iebgener(cmd, dds=None, authorized=False): manipulate input data, create keys, and handle permanent input/output errors. - Arguments: - cmd {str} -- The command to pass to IEBGENER - dds {dict} -- Any DD statements to pass to MVS command - authorized {bool} -- Whether the command should be run in authorized - mode + Parameters + ---------- + cmd : str + The command to pass to IEBGENER. + dds : dict + Any DD statements to pass to MVS command. + authorized : bool + Whether the command should be run in authorized + mode. + + Returns + ------- + tuple(int, str, str) + A tuple of return code, stdout and stderr. """ return _run_mvs_command("IEBGENER", cmd, dds, authorized) @@ -112,11 +156,20 @@ def idcams(cmd, dds=None, authorized=False): primarily to define and manage VSAM data sets and integrated catalog facility catalogs. - Arguments: - cmd {str} -- The command to pass to IDCAMS - dds {dict} -- Any DD statements to pass to MVS command - authorized {bool} -- Whether the command should be run in authorized - mode + Parameters + ---------- + cmd : str + The command to pass to IDCAMS. + dds : dict + Any DD statements to pass to MVS command. + authorized : bool + Whether the command should be run in authorized + mode. + + Returns + ------- + tuple(int, str, str) + A tuple of return code, stdout and stderr. """ return _run_mvs_command("IDCAMS", cmd, dds, authorized) @@ -129,11 +182,20 @@ def ikjeft01(cmd, dds=None, authorized=False): and/or TSO/E REXX. Optionally, you can also invoke other environments, such as ISPF , allowing you to run ISPF Dialogs in a batch environment. - Arguments: - cmd {str} -- The command to pass to IKJEFT01 - dds {dict} -- Any DD statements to pass to MVS command - authorized {bool} -- Whether the command should be run in authorized - mode + Parameters + ---------- + cmd : str + The command to pass to IKJEFT01. + dds : dict + Any DD statements to pass to MVS command. + authorized : bool + Whether the command should be run in authorized + mode. + + Returns + ------- + tuple(int, str, str) + A tuple of return code, stdout and stderr. """ return _run_mvs_command("IKJEFT01", cmd, dds, authorized) @@ -144,11 +206,20 @@ def iehlist(cmd, dds=None, authorized=False): non-indexed volume table of contents. Any number of listings can be requested in a single execution of the program. - Arguments: - cmd {str} -- The command to pass to IEHLIST - dds {dict} -- Any DD statements to pass to MVS command - authorized {bool} -- Whether the command should be run in authorized - mode + Parameters + ---------- + cmd : str + The command to pass to IEHLIST. + dds : dict + Any DD statements to pass to MVS command. + authorized : bool + Whether the command should be run in authorized + mode. + + Returns + ------- + tuple(int, str, str) + A tuple of return code, stdout and stderr. """ return _run_mvs_command("IEHLIST", cmd, dds, authorized) @@ -159,10 +230,19 @@ def amaterse(cmd="", dds=None, authorized=False): to another site, typically employing FTP as the transmission mechanism. A complementary unpack service is provided to create a similar data set at the receiving site. - Arguments: - dds {dict} -- Any DD statements to pass to MVS command - authorized {bool} -- Whether the command should be run in authorized - mode + + Parameters + ---------- + dds : dict + Any DD statements to pass to MVS command. + authorized : bool + Whether the command should be run in authorized + mode. + + Returns + ------- + tuple(int, str, str) + A tuple of return code, stdout and stderr. """ return _run_mvs_command("AMATERSE", "", dds, authorized) @@ -173,6 +253,11 @@ def adrdssu(cmd, dds=None, authorized=False): wildcard-named files. Is a DFSMSdss utility that provides backup and recovery functions at both the data set and volume levels. + + Returns + ------- + tuple(int, str, str) + A tuple of return code, stdout and stderr. """ return _run_mvs_command("ADRDSSU", cmd, dds, authorized) @@ -180,17 +265,25 @@ def adrdssu(cmd, dds=None, authorized=False): def _run_mvs_command(pgm, cmd, dd=None, authorized=False): """Run a particular MVS command. - Arguments: - pgm {str} -- The MVS program to run - cmd {str} -- The input command to pass to the program - - Keyword Arguments: - dd {dict} -- The DD definitions required by the program. (Default {None}) - authorized {bool} -- Indicates whether the MVS program should run + Parameters + ---------- + pgm : str + The MVS program to run. + cmd : str + The input command to pass to the program. + + Keyword Parameters + ------------------ + dd : dict + The DD definitions required by the program. (Default {None}) + authorized : bool + Indicates whether the MVS program should run as authorized. (Default {False}) - Returns: - tuple[int, str, str] -- A tuple of return code, stdout and stderr + Returns + ------- + tuple(int, str, str) + A tuple of return code, stdout and stderr. """ module = AnsibleModuleHelper(argument_spec={}) sysprint = "sysprint" From 7159850b0878aa9a960e91ddbcc81ca531e693f6 Mon Sep 17 00:00:00 2001 From: IsaacVRey <isaac.vega.rey@gmail.com> Date: Tue, 14 May 2024 16:00:50 -0600 Subject: [PATCH 377/495] [Documentation][template] Standarize docstrings in module_utils/template.py (#1335) * Standarize docstrings in module_utils/template.py * Create changelog fragment * Modify google style to numpy * Standarize numpy style * fixed docs --------- Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> --- .../1335-update-docstring-template.yml | 3 + plugins/module_utils/template.py | 253 +++++++++++------- 2 files changed, 165 insertions(+), 91 deletions(-) create mode 100644 changelogs/fragments/1335-update-docstring-template.yml diff --git a/changelogs/fragments/1335-update-docstring-template.yml b/changelogs/fragments/1335-update-docstring-template.yml new file mode 100644 index 000000000..9020c18ae --- /dev/null +++ b/changelogs/fragments/1335-update-docstring-template.yml @@ -0,0 +1,3 @@ +trivial: + - template - Updated docstrings to numpy style for visual aid to developers. + (https://github.com/ansible-collections/ibm_zos_core/pull/1335). \ No newline at end of file diff --git a/plugins/module_utils/template.py b/plugins/module_utils/template.py index 419b997b2..a2a24c34f 100644 --- a/plugins/module_utils/template.py +++ b/plugins/module_utils/template.py @@ -36,6 +36,20 @@ def _process_boolean(arg, default=False): + """Process an argument to a boolean. + + Parameters + ---------- + arg : bool + Argument to convert. + default : bool + Output in case the operation fails. + + Returns + ------- + bool + Boolean value. + """ try: return boolean(arg) except TypeError: @@ -46,15 +60,21 @@ def create_template_environment(template_parameters, src, template_encoding=None """Parses boolean parameters for Jinja2 and returns a TemplateRenderer instance. - Arguments: - template_parametrs (dict): Parameters for creating the template environment. - src (str): Local path where the templates are located. - template_encoding (dict, optional): encoding used by the templates. If not - given, the default locale set in the system will be used. - - Returns: - TemplateRenderer -- Object with a new template environment ready to - render the templates found in src. + Parameters + ---------- + template_parameters : dict + Parameters for creating the template environment. + src : str + Local path where the templates are located. + template_encoding : dict, optional + Encoding used by the templates. If not + given, the default locale set in the system will be used. + + Returns + ------- + TemplateRenderer + Object with a new template environment ready to + render the templates found in src. """ if template_parameters.get("lstrip_blocks"): template_parameters["lstrip_blocks"] = _process_boolean(template_parameters.get("lstrip_blocks"), default=False) @@ -72,13 +92,6 @@ def create_template_environment(template_parameters, src, template_encoding=None class TemplateRenderer: - """This class implements functionality to load and render Jinja2 - templates. To add support for Jinja2 in a module, you need to include - the template.py doc fragment, add the options for configuring the Jinja2 - environment to the module's options, and instantiate this class to - render templates inside an action plugin. - """ - _ALLOWED_NEWLINE_DELIMITERS = ["\n", "\r", "\r\n"] _FIXABLE_NEWLINE_DELIMITERS = ["\\n", "\\r", "\\r\\n"] _NEWLINE_DELIMITER_SWAP = { @@ -105,45 +118,79 @@ def __init__( newline_sequence="\n", auto_reload=False, ): - """Initializes a new TemplateRenderer object with a Jinja2 + """This class implements functionality to load and render Jinja2 + templates. To add support for Jinja2 in a module, you need to include + the template.py doc fragment, add the options for configuring the Jinja2 + environment to the module's options, and instantiate this class to + render templates inside an action plugin. + + Initializes a new TemplateRenderer object with a Jinja2 environment that can use templates from a given directory. More information about Jinja2 templates and environments can be found at https://jinja.palletsprojects.com/en/3.0.x/api/. - Arguments: - template_path (str): Path to a Jinja2 template file or directory. - encoding (str): Encoding for rendered templates. - variable_start_string (str, optional): Marker for the beginning of - a statement to print a variable in Jinja2. - variable_end_string (str, optional): Marker for the end of - a statement to print a variable in Jinja2. - block_start_string (str, optional): Marker for the beginning of - a block in Jinja2. - block_end_string (str, optional): Marker for the end of a block - in Jinja2. - comment_start_string (str, optional): Marker for the beginning of - a comment in Jinja2. - comment_end_string (str, optional): Marker for the end of a comment - in Jinja2. - line_statement_prefix (str, optional): Prefix used by Jinja2 to identify - line-based statements. - line_comment_prefix (str, optional): Prefix used by Jinja2 to identify - comment lines. - lstrip_blocks (bool, optional): Whether Jinja2 should strip leading spaces - from the start of a line to a block. - trim_blocks (bool, optional): Whether Jinja2 should remove the first - newline after a block is removed. - keep_trailing_newline (bool, optional): Whether Jinja2 should keep the - first trailing newline at the end of a template after rendering. - newline_sequence (str, optional): Sequence that starts a newline in a - template. Valid values are '\n', '\r', '\r\n'. - auto_reload (bool, optional): Whether to reload a template file when it - has changed after creating the Jinja2 environment. - - Raises: - FileNotFoundError: When template_path points to a non-existent - file or directory. - ValueError: When the newline sequence is not valid. + Parameters + ---------- + template_path : str + Path to a Jinja2 template file or directory. + encoding : str + Encoding for rendered templates. + variable_start_string : str, optional + Marker for the beginning of + a statement to print a variable in Jinja2. + variable_end_string : str, optional + Marker for the end of + a statement to print a variable in Jinja2. + block_start_string : str, optional + Marker for the beginning of + a block in Jinja2. + block_end_string : str, optional + Marker for the end of a block + in Jinja2. + comment_start_string : str, optional + Marker for the beginning of + a comment in Jinja2. + comment_end_string : str, optional + Marker for the end of a comment + in Jinja2. + line_statement_prefix : str, optional + Prefix used by Jinja2 to identify + line-based statements. + line_comment_prefix : str, optional + Prefix used by Jinja2 to identify + comment lines. + lstrip_blocks : bool, optional + Whether Jinja2 should strip leading spaces + from the start of a line to a block. + trim_blocks : bool, optional + Whether Jinja2 should remove the first + newline after a block is removed. + keep_trailing_newline : bool, optional + Whether Jinja2 should keep the + first trailing newline at the end of a template after rendering. + newline_sequence : str, optional + Sequence that starts a newline in a + template. Valid values are '\n', '\r', '\r\n'. + auto_reload : bool, optional + Whether to reload a template file when it + has changed after creating the Jinja2 environment. + + Attributes + ---------- + encoding : str + Encoding for rendered templates. + template_dir : str + Dir with the template path. + templating_env : jinja2.environment + Environment created with the arguments as input. + + Raises + ------ + FileNotFoundError + When template_path points to a non-existent + file or directory. + ValueError + When the newline sequence is not valid. """ if not path.exists(template_path): raise FileNotFoundError("The template path {0} does not exist".format( @@ -191,26 +238,38 @@ def render_file_template(self, file_path, variables): """Loads a template from the templates directory and renders it using the Jinja2 environment configured in the object. - Arguments: - file_path (str): Relative path (from the template directory) - to a template. - variables (dict): Dictionary containing the variables and - their values that will be substituted in the template. - - Returns: - tuple -- Filepath to a temporary directory that contains the - rendered template, and the complete filepath to the - rendered template. - - Raises: - TemplateNotFound: When the template file doesn't exist in the - template directory. - TemplateError: When rendering of the template fails. - FileExistsError: When there is an error while trying to create the - temp directory for rendered templates. - PermissionError: When there is an error accessing the temp directory. - IOError: When there is an error writing the rendered template. - ValueError: When there is an error writing the rendered template. + Parameters + ---------- + file_path : str + Relative path (from the template directory) + to a template. + variables : dict + Dictionary containing the variables and + their values that will be substituted in the template. + + Returns + ------- + tuple(str,str) + Filepath to a temporary directory that contains the + rendered template, and the complete filepath to the + rendered template. + + Raises + ------ + TemplateNotFound + When the template file doesn't exist in the + template directory. + TemplateError + When rendering of the template fails. + FileExistsError + When there is an error while trying to create the + temp directory for rendered templates. + PermissionError + When there is an error accessing the temp directory. + IOError + When there is an error writing the rendered template. + ValueError + When there is an error writing the rendered template. """ try: template = self.templating_env.get_template(file_path) @@ -259,26 +318,38 @@ def render_dir_template(self, variables): """Loads all templates from a directory and renders them using the Jinja2 environment configured in the object. - Arguments: - variables (dict): Dictionary containing the variables and - their values that will be substituted in the template. - - Returns: - tuple -- Filepath to a temporary directory that contains the - rendered templates, and the complete filepath to the - rendered templates' directory. - - Raises: - TemplateNotFound: When the template file doesn't exist in the - template directory. - TemplateError: When rendering of the template fails. - FileExistsError: When there is an error while trying to create the - temp directory for rendered templates. - PermissionError: When there is an error accessing the temp directory. - OSError: When there is an error while trying to create the - temp directory for rendered templates. - IOError: When there is an error writing the rendered template. - ValueError: When there is an error writing the rendered template. + Parameters + ---------- + variables : dict + Dictionary containing the variables and + their values that will be substituted in the template. + + Returns + ------- + tuple(str,str) + Filepath to a temporary directory that contains the + rendered templates, and the complete filepath to the + rendered templates' directory. + + Raises + ------ + TemplateNotFound + When the template file doesn't exist in the + template directory. + TemplateError + When rendering of the template fails. + FileExistsError + When there is an error while trying to create the + temp directory for rendered templates. + PermissionError + When there is an error accessing the temp directory. + OSError + When there is an error while trying to create the + temp directory for rendered templates. + IOError + When there is an error writing the rendered template. + ValueError + When there is an error writing the rendered template. """ try: temp_parent_dir = tempfile.mkdtemp() From dd94cb279be58c44aef05b1c77e294c88c61ab44 Mon Sep 17 00:00:00 2001 From: IsaacVRey <isaac.vega.rey@gmail.com> Date: Tue, 14 May 2024 16:01:08 -0600 Subject: [PATCH 378/495] [Documentation][vtoc] Add docstrings to module_utils/vtoc.py (#1337) * Add docstrings to module_utils/vtoc.py * Create changelog fragment * Modify google style to numpy * Standarize numpy style --- .../fragments/1337-update-docstring-vtoc.yml | 3 + plugins/module_utils/vtoc.py | 243 ++++++++++++------ 2 files changed, 167 insertions(+), 79 deletions(-) create mode 100644 changelogs/fragments/1337-update-docstring-vtoc.yml diff --git a/changelogs/fragments/1337-update-docstring-vtoc.yml b/changelogs/fragments/1337-update-docstring-vtoc.yml new file mode 100644 index 000000000..71974c682 --- /dev/null +++ b/changelogs/fragments/1337-update-docstring-vtoc.yml @@ -0,0 +1,3 @@ +trivial: + - vtoc - Updated docstrings to numpy style for visual aid to developers. + (https://github.com/ansible-collections/ibm_zos_core/pull/1337). \ No newline at end of file diff --git a/plugins/module_utils/vtoc.py b/plugins/module_utils/vtoc.py index 83b58b54e..12cd25656 100644 --- a/plugins/module_utils/vtoc.py +++ b/plugins/module_utils/vtoc.py @@ -24,14 +24,20 @@ def get_volume_entry(volume): """Retrieve VTOC information for all data sets with entries on the volume. - Arguments: - volume {str} -- The name of the volume. - - Raises: - VolumeTableOfContentsError: When any exception is raised during VTOC operations. - - Returns: - list[dict] -- List of dictionaries holding data set information from VTOC. + Parameters + ---------- + volume : str + The name of the volume. + + Returns + ------- + Union[dict] + List of dictionaries holding data set information from VTOC. + + Raises + ------ + VolumeTableOfContentsError + When any exception is raised during VTOC operations. """ try: stdin = " LISTVTOC FORMAT,VOL=3390={0}".format(volume.upper()) @@ -50,12 +56,17 @@ def get_data_set_entry(data_set_name, volume): """Retrieve VTOC information for a single data set on a volume. - Arguments: - data_set_name {str} -- The name of the data set to retrieve information for. - volume {str} -- The name of the volume. - - Returns: - dict -- The information for the data set found in VTOC. + Parameters + ---------- + data_set_name : str + The name of the data set to retrieve information for. + volume : str + The name of the volume. + + Returns + ------- + dict + The information for the data set found in VTOC. """ data_set = None data_sets = get_volume_entry(volume) @@ -72,12 +83,17 @@ def find_data_set_in_volume_output(data_set_name, data_sets): set if present. This method is useful when wanting to avoid multiple IEHLIST calls. - Arguments: - data_set_name {str} -- The name of the data set to retrieve information for. - data_sets {list[dict]} -- List of dictionaries holding data set information from VTOC. - - Returns: - dict -- The information for the data set found in VTOC. + Parameters + ---------- + data_set_name : str + The name of the data set to retrieve information for. + data_sets : list[dict] + List of dictionaries holding data set information from VTOC. + + Returns + ------- + dict + The information for the data set found in VTOC. """ if isinstance(data_sets, list): for data_set in data_sets: @@ -89,12 +105,17 @@ def find_data_set_in_volume_output(data_set_name, data_sets): def _iehlist(dd, stdin): """Calls IEHLIST program. - Arguments: - dd {str} -- Volume information to pass as DD statement. - stdin {str} -- Input to stdin. - - Returns: - str -- The sysprint response of IEHLIST. + Parameters + ---------- + dd : str + Volume information to pass as DD statement. + stdin : str + Input to stdin. + + Returns + ------- + str + The sysprint response of IEHLIST. """ module = AnsibleModuleHelper(argument_spec={}) response = None @@ -110,11 +131,15 @@ def _iehlist(dd, stdin): def _process_output(stdout): """Process output of LISTVTOC. - Arguments: - stdout {str} -- The output of LISTVTOC. + Parameters + ---------- + stdout : str + The output of LISTVTOC. - Returns: - list[dict] -- List of dictionaries holding data set information from VTOC. + Returns + ------- + Union[dict] + List of dictionaries holding data set information from VTOC. """ data_sets = [] data_set_strings = _separate_data_set_sections(stdout) @@ -126,11 +151,15 @@ def _process_output(stdout): def _separate_data_set_sections(contents): """Split LISTVTOC output into data set sections. - Arguments: - contents {str} -- The output of LISTVTOC. + Parameters + ---------- + contents : str + The output of LISTVTOC. - Returns: - list[str] -- LISTVTOC output separated into sections by data set. + Returns + ------- + Union[str] + LISTVTOC output separated into sections by data set. """ delimeter = "0---------------DATA SET NAME----------------" data_sets = re.split(delimeter, contents) @@ -142,11 +171,15 @@ def _parse_data_set_info(data_set_string): """Build dictionaries representing data set information from LISTVTOC output. - Arguments: - data_set_string {str} -- Single data set section of the LISTVTOC output. + Parameters + ---------- + data_set_string : str + Single data set section of the LISTVTOC output. - Returns: - dict -- Holds data set information from VTOC. + Returns + ------- + dict + Holds data set information from VTOC. """ lines = data_set_string.split("\n") data_set_info = {} @@ -172,13 +205,19 @@ def _parse_table_row(regex, header_row, data_row): """Parse out a single row of VTOC table information from VTOCLIST output. - Arguments: - regex {str} -- The regular expression used to parse table row. - header_row {str} -- The row of the table containing headers. - data_row {str} -- The row of the table containing data. - - Returns: - dict -- Structured data for the row of the table. + Parameters + ---------- + regex : str + The regular expression used to parse table row. + header_row : str + The row of the table containing headers. + data_row : str + The row of the table containing data. + + Returns + ------- + dict + Structured data for the row of the table. """ table_data = {} fields = re.findall(regex, header_row) @@ -200,11 +239,15 @@ def _format_table_data(table_data): This includes separating and renaming fields from their original naming and style in VTOCLIST. - Arguments: - table_data {dict} -- Structured data parsed from VTOCLIST output. + Parameters + ---------- + table_data : dict + Structured data parsed from VTOCLIST output. - Returns: - dict -- Updated data. + Returns + ------- + dict + Updated data. """ handlers = { "DATA SET NAME": "data_set_name", @@ -250,13 +293,18 @@ def _format_table_data(table_data): def _format_extend(contents, formatted_table_data): """Format the extend field from VTOCLIST. - Arguments: - contents {str} -- Contents of the extend field from VTOCLIST. - formatted_table_data {dict} -- The dictionary containing other already formatted + Parameters + ---------- + contents : str + Contents of the extend field from VTOCLIST. + formatted_table_data : dict + The dictionary containing other already formatted table data. - Returns: - dict -- The updated formatted_table_data dictionary. + Returns + ------- + dict + The updated formatted_table_data dictionary. """ matches = re.search(r"([0-9]+)(AV|BY|KB|MB)", contents) original_space_secondary = "" @@ -280,11 +328,15 @@ def _format_extend(contents, formatted_table_data): def _format_last_blk(contents): """Format the last blk field from VTOCLIST. - Arguments: - contents {str} -- Contents of the last blk field from VTOCLIST. + Parameters + ---------- + contents : str + Contents of the last blk field from VTOCLIST. - Returns: - dict -- Structured data parsed from last blk field contents. + Returns + ------- + dict + Structured data parsed from last blk field contents. """ result = None matches = re.search(r"[ ]*([0-9]+)[ ]+([0-9]+)[ ]+([0-9]+)?", contents) @@ -300,11 +352,15 @@ def _format_last_blk(contents): def _format_f2_or_f3(contents): """Format the F2 or F3 field from VTOCLIST. - Arguments: - contents {str} -- Contents of the F2 or F3 field from VTOCLIST. + Parameters + ---------- + contents : str + Contents of the F2 or F3 field from VTOCLIST. - Returns: - dict -- Structured data parsed from the F2 or F3 field contents. + Returns + ------- + dict + Structured data parsed from the F2 or F3 field contents. """ result = None matches = re.search(r"[ ]*([0-9]+)[ ]+([0-9]+)[ ]+([0-9]+)", contents) @@ -319,11 +375,15 @@ def _format_f2_or_f3(contents): def _format_dscb(contents): """Format the dscb field from VTOCLIST. - Arguments: - contents {str} -- Contents of the dscb field from VTOCLIST. + Parameters + ---------- + contents : str + Contents of the dscb field from VTOCLIST. - Returns: - dict -- Structured data parsed from the dscb field contents. + Returns + ------- + dict + Structured data parsed from the dscb field contents. """ result = None matches = re.search(r"[ ]*([0-9]+)[ ]+([0-9]+)[ ]+([0-9]+)", contents) @@ -338,13 +398,17 @@ def _format_dscb(contents): def _parse_extents(lines): """Parse and structure extent data from VTOCLIST. - Arguments: - contents {list[str]} -- Partial contents of single data set section + Parameters + ---------- + contents : list[str] + Partial contents of single data set section from VTOCLIST that will contain extent information if data set has extents. - Returns: - list[dict] -- Structured data parsed from the extent field contents. + Returns + ------- + dict + Structured data parsed from the extent field contents. """ extents = [] if re.search(r"THE\sABOVE\sDATASET\sHAS\sNO\sEXTENTS", "".join(lines)): @@ -366,13 +430,18 @@ def _parse_extents(lines): def _extent_regex_builder(indent_length, header_groups): """Build regular expressions for parsing extent information. - Arguments: - indent_length {int} -- The number of spaces before extent information starts. - header_groups {list[tuple]} -- Captured output of header groups identified + Parameters + ---------- + indent_length : int + The number of spaces before extent information starts. + header_groups : list[tuple] + Captured output of header groups identified during VTOCLIST parsing. - Returns: - str -- The regular expression for parsing extent information. + Returns + ------- + str + The regular expression for parsing extent information. """ extent_regex = "^[ ]{{{0}}}".format(str(indent_length)) for index, header_group in enumerate(header_groups): @@ -389,11 +458,15 @@ def _extent_regex_builder(indent_length, header_groups): def _format_extent_data(extent_data): """Format the dscb field from VTOCLIST. - Arguments: - extent_data {list[tuple]} -- Captured output of extent data. + Parameters + ---------- + extent_data : list[tuple] + Captured output of extent data. - Returns: - dict -- Structured data parsed from captured output of extent data. + Returns + ------- + Union[dict] + Structured data parsed from captured output of extent data. """ extents = [] flattened_extent_data = [] @@ -418,5 +491,17 @@ def _format_extent_data(extent_data): class VolumeTableOfContentsError(Exception): def __init__(self, msg=""): + """Error during VTOC parsing or retrieval. + + Parameters + ---------- + msg : str + Human readable string describing the exception. + + Attributes + ---------- + msg : str + Human readable string describing the exception. + """ self.msg = "An error occurred during VTOC parsing or retrieval. {0}".format(msg) super(VolumeTableOfContentsError, self).__init__(self.msg) From cf7c90c9aad9b076dcc3b2b2356f6937f84d4625 Mon Sep 17 00:00:00 2001 From: IsaacVRey <isaac.vega.rey@gmail.com> Date: Tue, 14 May 2024 16:01:34 -0600 Subject: [PATCH 379/495] [Documentation][zoau_version_checker] Standarize docstrings on module_utils/zoau_version_checker.py (#1338) * Standarize docstrings on module_utils/zoau_version_checker.py * Create changelog fragment * Modify google style to numpy * Standarize numpy style --- ...-update-docstring-zoau_version_checker.yml | 3 ++ plugins/module_utils/zoau_version_checker.py | 36 +++++++++++++------ 2 files changed, 28 insertions(+), 11 deletions(-) create mode 100644 changelogs/fragments/1338-update-docstring-zoau_version_checker.yml diff --git a/changelogs/fragments/1338-update-docstring-zoau_version_checker.yml b/changelogs/fragments/1338-update-docstring-zoau_version_checker.yml new file mode 100644 index 000000000..66d62760c --- /dev/null +++ b/changelogs/fragments/1338-update-docstring-zoau_version_checker.yml @@ -0,0 +1,3 @@ +trivial: + - zoau_version_checker - Updated docstrings to numpy style for visual aid to developers. + (https://github.com/ansible-collections/ibm_zos_core/pull/1338). \ No newline at end of file diff --git a/plugins/module_utils/zoau_version_checker.py b/plugins/module_utils/zoau_version_checker.py index 12470ef19..442bf831a 100644 --- a/plugins/module_utils/zoau_version_checker.py +++ b/plugins/module_utils/zoau_version_checker.py @@ -25,10 +25,16 @@ def is_zoau_version_higher_than(min_version_str): """Reports back if ZOAU version is high enough. - Arguments: - min_version_str {str} -- The minimal desired ZOAU version '#.#.#'. - Returns: - bool -- Whether ZOAU version found was high enough. + + Parameters + ---------- + min_version_str : str + The minimal desired ZOAU version '#.#.#'. + + Returns + ------- + bool + Whether ZOAU version found was high enough. """ if is_valid_version_string(min_version_str): # check zoau version on system (already a list) @@ -78,10 +84,16 @@ def is_valid_version_string(version_str): series of numbers (minor) followed by another dot(.) followed by a series of numbers (patch) i.e. "#.#.#" where '#' can be any integer. There is a provision for a 4th level to this eg "v1.2.0.1". - Arguments: - min_version_str {str} -- String to be verified is in correct format. - Returns: - bool -- Whether provided str is in correct format. + + Parameters + ---------- + min_version_str : str + String to be verified is in correct format. + + Returns + ------- + bool + Whether provided str is in correct format. """ # split string into [major, minor, patch] @@ -97,10 +109,12 @@ def is_valid_version_string(version_str): def get_zoau_version_str(): """Attempts to call zoaversion on target and parses out version string. - Returns: - { [int, int, int] } -- ZOAU version found in format [#,#,#]. There is a - provision for a 4th level eg "v1.2.0.1". + Returns + ------- + Union[int, int, int] + ZOAU version found in format [#,#,#]. There is a + provision for a 4th level eg "v1.2.0.1". """ version_list = ( ZOAU_API_VERSION.split('.') From 7dd44a6c4d5084a73119e0b8c6ba991ed8f3a76d Mon Sep 17 00:00:00 2001 From: IsaacVRey <isaac.vega.rey@gmail.com> Date: Tue, 14 May 2024 16:01:57 -0600 Subject: [PATCH 380/495] [Documentation][zos_backup_restore] Standarize doc-strings on modules/zos_backup_restore.py (#1342) * Standarize doc-strings on modules/zos_backup_restore.py * Create changelog fragment * Modify google style to numpy * Standarize numpy style --- ...42-update-docstring-zos_backup_restore.yml | 3 + plugins/modules/zos_backup_restore.py | 338 ++++++++++++------ 2 files changed, 225 insertions(+), 116 deletions(-) create mode 100644 changelogs/fragments/1342-update-docstring-zos_backup_restore.yml diff --git a/changelogs/fragments/1342-update-docstring-zos_backup_restore.yml b/changelogs/fragments/1342-update-docstring-zos_backup_restore.yml new file mode 100644 index 000000000..07e529c72 --- /dev/null +++ b/changelogs/fragments/1342-update-docstring-zos_backup_restore.yml @@ -0,0 +1,3 @@ +trivial: + - zos_backup_restore - Updated docstrings to numpy style for visual aid to developers. + (https://github.com/ansible-collections/ibm_zos_core/pull/1342). \ No newline at end of file diff --git a/plugins/modules/zos_backup_restore.py b/plugins/modules/zos_backup_restore.py index a112da247..1bb0d8977 100644 --- a/plugins/modules/zos_backup_restore.py +++ b/plugins/modules/zos_backup_restore.py @@ -333,7 +333,13 @@ def main(): - """Run the zos_backup_restore module core functions.""" + """Run the zos_backup_restore module core functions. + + Raises + ------ + fail_json + Any error ocurred during execution. + """ result = dict(changed=False, message="", backup_name="") module_args = dict( operation=dict(type="str", required=True, choices=["backup", "restore"]), @@ -420,11 +426,15 @@ def main(): def parse_and_validate_args(params): """Parse and validate arguments to be used by remainder of module. - Args: - params (dict): The params as returned from AnsibleModule instantiation. + Parameters + ---------- + params : dict + The params as returned from AnsibleModule instantiation. - Returns: - dict: The updated params after additional parsing and validation. + Returns + ------- + dict + The updated params after additional parsing and validation. """ arg_defs = dict( operation=dict(type="str", required=True, choices=["backup", "restore"]), @@ -484,20 +494,34 @@ def backup( ): """Backup data sets or a volume to a new data set or unix file. - Args: - backup_name (str): The data set or UNIX path to place the backup. - include_data_sets (list): A list of data set patterns to include in the backup. - exclude_data_sets (list): A list of data set patterns to exclude from the backup. - volume (str): The volume that contains the data sets to backup. - full_volume (bool): Specifies if a backup will be made of the entire volume. - temp_volume (bool): Specifies the volume that should be used to store temporary files. - overwrite (bool): Specifies if existing data set or UNIX file matching I(backup_name) should be deleted. - recover (bool): Specifies if potentially recoverable errors should be ignored. - space (int): Specifies the amount of space to allocate for the backup. - space_type (str): The unit of measurement to use when defining data set space. - sms_storage_class (str): Specifies the storage class to use. - sms_management_class (str): Specifies the management class to use. - tmp_hlq (str): Specifies the tmp hlq to temporary datasets + Parameters + ---------- + backup_name : str + The data set or UNIX path to place the backup. + include_data_sets : list + A list of data set patterns to include in the backup. + exclude_data_sets : list + A list of data set patterns to exclude from the backup. + volume : str + The volume that contains the data sets to backup. + full_volume : bool + Specifies if a backup will be made of the entire volume. + temp_volume : bool + Specifies the volume that should be used to store temporary files. + overwrite : bool + Specifies if existing data set or UNIX file matching I(backup_name) should be deleted. + recover : bool + Specifies if potentially recoverable errors should be ignored. + space : int + Specifies the amount of space to allocate for the backup. + space_type : str + The unit of measurement to use when defining data set space. + sms_storage_class : str + Specifies the storage class to use. + sms_management_class : str + Specifies the management class to use. + tmp_hlq : str + Specifies the tmp hlq to temporary datasets. """ args = locals() zoau_args = to_dzip_args(**args) @@ -520,27 +544,47 @@ def restore( sms_management_class, tmp_hlq, ): - """[summary] - - Args: - backup_name (str): The data set or UNIX path containing the backup. - include_data_sets (list): A list of data set patterns to include in the restore - that are present in the backup. - exclude_data_sets (list): A list of data set patterns to exclude from the restore - that are present in the backup. - volume (str): The volume that contains the data sets to backup. - full_volume (bool): Specifies if a backup will be made of the entire volume. - temp_volume (bool): Specifies the volume that should be used to store temporary files. - overwrite (bool): Specifies if module should overwrite existing data sets with - matching name on the target device. - recover (bool): Specifies if potentially recoverable errors should be ignored. - hlq (str): Specifies the new HLQ to use for the data sets being restored. - space (int): Specifies the amount of space to allocate for data sets temporarily - created during the restore process. - space_type (str): The unit of measurement to use when defining data set space. - sms_storage_class (str): Specifies the storage class to use. - sms_management_class (str): Specifies the management class to use. - tmp_hlq (str): : Specifies the tmp hlq to temporary datasets + """Restore data sets or a volume from the backup. + + Parameters + ---------- + backup_name : str + The data set or UNIX path containing the backup. + include_data_sets : list + A list of data set patterns to include in the restore + that are present in the backup. + exclude_data_sets : list + A list of data set patterns to exclude from the restore + that are present in the backup. + volume : str + The volume that contains the data sets to backup. + full_volume : bool + Specifies if a backup will be made of the entire volume. + temp_volume : bool + Specifies the volume that should be used to store temporary files. + overwrite : bool + Specifies if module should overwrite existing data sets with + matching name on the target device. + recover : bool + Specifies if potentially recoverable errors should be ignored. + hlq : str + Specifies the new HLQ to use for the data sets being restored. + space : int + Specifies the amount of space to allocate for data sets temporarily + created during the restore process. + space_type : str + The unit of measurement to use when defining data set space. + sms_storage_class : str + Specifies the storage class to use. + sms_management_class : str + Specifies the management class to use. + tmp_hlq : str + Specifies the tmp hlq to temporary datasets. + + Raises + ------ + ZOAUException + When any exception is raised during the data set allocation. """ args = locals() zoau_args = to_dunzip_args(**args) @@ -566,11 +610,15 @@ def restore( def get_real_rc(output): """Parse out the final RC from MVS program output. - Args: - output (str): The MVS program output. + Parameters + ---------- + output : str + The MVS program output. - Returns: - int: The true program RC. + Returns + ------- + int + The true program RC. """ true_rc = None match = search( @@ -585,16 +633,24 @@ def get_real_rc(output): def data_set_pattern_type(contents, dependencies): """Validates provided data set patterns. - Args: - contents (Union[str, list[str]]): One or more data set patterns - dependencies (dict): Any dependent arguments - - Raises: - ValueError: When provided argument is not a string or a list - ValueError: When provided argument is an invalid data set pattern - - Returns: - list[str]: A list of uppercase data set patterns + Parameters + ---------- + contents : Union[str, list[str] + One or more data set patterns. + dependencies : dict + Any dependent arguments. + + Returns + ------- + Union[str] + A list of uppercase data set patterns. + + Raises + ------ + ValueError + When provided argument is not a string or a list. + ValueError + When provided argument is an invalid data set pattern. """ if contents is None: return None @@ -619,16 +675,24 @@ def data_set_pattern_type(contents, dependencies): def hlq_type(contents, dependencies): """Validates provided HLQ is valid and is not specified for a backup operation. - Args: - contents (str): The HLQ to use - dependencies (dict): Any dependent arguments - - Raises: - ValueError: When operation is restore and HLQ is provided - ValueError: When an invalid HLQ is provided - - Returns: - str: The HLQ to use + Parameters + ---------- + contents : str + The HLQ to use. + dependencies : dict + Any dependent arguments. + + Returns + ------- + str + The HLQ to use. + + Raises + ------ + ValueError + When operation is restore and HLQ is provided. + ValueError + When an invalid HLQ is provided. """ if contents is None: return None @@ -642,12 +706,17 @@ def hlq_type(contents, dependencies): def hlq_default(contents, dependencies): """Sets the default HLQ to use if none is provided. - Args: - contents (str): The HLQ to use - dependencies (dict): Any dependent arguments - - Returns: - str: The HLQ to use + Parameters + ---------- + contents : str + The HLQ to use. + dependencies : dict + Any dependent arguments. + + Returns + ------- + str + The HLQ to use. """ hlq = None if dependencies.get("operation") == "restore": @@ -658,15 +727,22 @@ def hlq_default(contents, dependencies): def sms_type(contents, dependencies): """Validates the SMS class provided matches a valid format. - Args: - contents (str): The SMS class name - dependencies (dict): Any dependent arguments - - Raises: - ValueError: When invalid argument provided for SMS class. - - Returns: - str: The uppercase SMS class name + Parameters + ---------- + contents : str + The SMS class name. + dependencies : dict + Any dependent arguments. + + Returns + ------- + str + The uppercase SMS class name. + + Raises + ------ + ValueError + When invalid argument provided for SMS class. """ if contents is None: return None @@ -678,12 +754,17 @@ def sms_type(contents, dependencies): def space_type(contents, dependencies): """Validates amount of space provided. - Args: - contents (str): The amount of space - dependencies (dict): Any dependent arguments - - Returns: - int: The amount of space + Parameters + ---------- + contents : str + The amount of space. + dependencies : dict + Any dependent arguments. + + Returns + ------- + int + The amount of space. """ if contents is None: if dependencies.get("full_volume"): @@ -697,15 +778,22 @@ def space_type_type(contents, dependencies): """Validates provided data set unit of space. Returns the unit of space. - Args: - contents (str): The space type to use - dependencies (dict): Any dependent arguments - - Raises: - ValueError: When an invalid space unit is provided - - Returns: - str: The unit of space + Parameters + ---------- + contents : str + The space type to use. + dependencies : dict + Any dependent arguments. + + Returns + ------- + str + The unit of space. + + Raises + ------ + ValueError + When an invalid space unit is provided. """ if contents is None: if dependencies.get("full_volume"): @@ -724,15 +812,22 @@ def space_type_type(contents, dependencies): def backup_name_type(contents, dependencies): """Validates provided backup name. - Args: - contents (str): The backup name to use - dependencies (dict): Any dependent arguments - - Raises: - ValueError: When an invalid backup name is provided - - Returns: - str: The backup name to use + Parameters + ---------- + contents : str + The backup name to use + dependencies : dict + Any dependent arguments + + Returns + ------- + str + The backup name to use + + Raises + ------ + ValueError + When an invalid backup name is provided """ if contents is None: return None @@ -751,15 +846,22 @@ def backup_name_type(contents, dependencies): def full_volume_type(contents, dependencies): """Validates dependent arguments are also specified for full_volume argument. - Args: - contents (bool): Whether we are making a full volume backup or not - dependencies (dict): Any dependent arguments - - Raises: - ValueError: When volume argument is not provided - - Returns: - bool: Whether we are making a full volume backup or not + Parameters + ---------- + contents : bool + Whether we are making a full volume backup or not. + dependencies : dict + Any dependent arguments. + + Returns + ------- + bool + Whether we are making a full volume backup or not. + + Raises + ------ + ValueError + When volume argument is not provided. """ if contents is True and dependencies.get("volume") is None: raise ValueError("full_volume=True is only valid when volume is specified.") @@ -769,8 +871,10 @@ def full_volume_type(contents, dependencies): def to_dzip_args(**kwargs): """API adapter for ZOAU dzip method. - Returns: - dict: The arguments for ZOAU dzip method translated from module arguments + Returns + ------- + dict + The arguments for ZOAU dzip method translated from module arguments. """ zoau_args = {} if kwargs.get("backup_name"): @@ -821,8 +925,10 @@ def to_dzip_args(**kwargs): def to_dunzip_args(**kwargs): """API adapter for ZOAU dunzip method. - Returns: - dict: The arguments for ZOAU dunzip method translated from module arguments + Returns + ------- + dict + The arguments for ZOAU dunzip method translated from module arguments. """ zoau_args = {} if kwargs.get("backup_name"): From 3fa469727f12b20f06031080f49c2d38370ad7b3 Mon Sep 17 00:00:00 2001 From: IsaacVRey <isaac.vega.rey@gmail.com> Date: Tue, 14 May 2024 16:02:09 -0600 Subject: [PATCH 381/495] [Documentation][file] Standarize docstrings on module-utils/file.py (#1362) * Standarize docstrings on modules/file.py to numpy style * Add changelog fragment * Standarize numpy style * Fixed year --------- Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> --- .../fragments/1362-update-docstring-file.yml | 3 +++ plugins/module_utils/file.py | 26 ++++++++++++------- 2 files changed, 20 insertions(+), 9 deletions(-) create mode 100644 changelogs/fragments/1362-update-docstring-file.yml diff --git a/changelogs/fragments/1362-update-docstring-file.yml b/changelogs/fragments/1362-update-docstring-file.yml new file mode 100644 index 000000000..3a86d6032 --- /dev/null +++ b/changelogs/fragments/1362-update-docstring-file.yml @@ -0,0 +1,3 @@ +trivial: + - file - Updated docstrings to numpy style for visual aid to developers. + (https://github.com/ansible-collections/ibm_zos_core/pull/1362). \ No newline at end of file diff --git a/plugins/module_utils/file.py b/plugins/module_utils/file.py index b6fa63ca5..0a0f9f3ce 100644 --- a/plugins/module_utils/file.py +++ b/plugins/module_utils/file.py @@ -1,4 +1,4 @@ -# Copyright (c) IBM Corporation 2020 +# Copyright (c) IBM Corporation 2020, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -22,11 +22,15 @@ def _get_dir_mode(path): """Get the mode of an existing directory. Defaults to 0600 if directory not found. - Arguments: - path {str} -- The absolute path to retrieve directory mode from. + Parameters + ---------- + path : str + The absolute path to retrieve directory mode from. - Returns: - int -- The mode of the directory. + Returns + ------- + int + The mode of the directory. """ mask = S_IREAD | S_IWRITE if os.path.isdir(path): @@ -41,11 +45,15 @@ def make_dirs(path, mode_from=None): If path does not end in "/", assumes end of path is a file. - Arguments: - path {str} -- The path to ensure subdirectories are created for. + Parameters + ---------- + path : str + The path to ensure subdirectories are created for. - Keyword Arguments: - mode_from {str} -- Path to existing dir to retrieve the mode from. + Keyword Parameters + ------------------ + mode_from : str + Path to existing dir to retrieve the mode from. Mode will be used for new directories. (default: {None}) """ mode = _get_dir_mode(mode_from) if mode_from is not None else S_IREAD | S_IWRITE From 06e86fd8481c32a84c56d844e996cc33199c7431 Mon Sep 17 00:00:00 2001 From: IsaacVRey <isaac.vega.rey@gmail.com> Date: Tue, 14 May 2024 16:02:23 -0600 Subject: [PATCH 382/495] [Documentation][backup] Add and standarize docstrings on module-utils/backup.py (#1384) * Modify google style docstrings to numpy * Add changelog fragment * Update backup.py Added ending linefeed to address pep8 issue * Update backup.py fix pep8 (extra whitespace at end of file) * modified docstring --------- Co-authored-by: Rich Parker <richp405@gmail.com> Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> --- .../1384-update-docstring-backup.yml | 3 + plugins/module_utils/backup.py | 172 +++++++++++++----- 2 files changed, 134 insertions(+), 41 deletions(-) create mode 100644 changelogs/fragments/1384-update-docstring-backup.yml diff --git a/changelogs/fragments/1384-update-docstring-backup.yml b/changelogs/fragments/1384-update-docstring-backup.yml new file mode 100644 index 000000000..7c5689c61 --- /dev/null +++ b/changelogs/fragments/1384-update-docstring-backup.yml @@ -0,0 +1,3 @@ +trivial: + - backup - Updated docstrings to numpy style for visual aid to developers. + (https://github.com/ansible-collections/ibm_zos_core/pull/1384). \ No newline at end of file diff --git a/plugins/module_utils/backup.py b/plugins/module_utils/backup.py index 46f8669c5..5b3d09614 100644 --- a/plugins/module_utils/backup.py +++ b/plugins/module_utils/backup.py @@ -51,6 +51,18 @@ def _validate_data_set_name(ds): + """Validate data set name. + + Parameters + ---------- + ds : str + The source dataset. + + Returns + ------- + str + Parsed dataset. + """ arg_defs = dict(ds=dict(arg_type="data_set")) parser = BetterArgParser(arg_defs) parsed_args = parser.parse_args({"ds": ds}) @@ -58,16 +70,27 @@ def _validate_data_set_name(ds): def mvs_file_backup(dsn, bk_dsn=None, tmphlq=None): - """Create a backup data set for an MVS data set - - Arguments: - dsn {str} -- The name of the data set to backup. - It could be an MVS PS/PDS/PDSE/VSAM(KSDS), etc. - bk_dsn {str} -- The name of the backup data set. - - Raises: - BackupError: When backup data set exists. - BackupError: When creation of backup data set fails. + """Create a backup data set for an MVS data set. + + Parameters + ---------- + dsn : str + The name of the data set to backup. + It could be an MVS PS/PDS/PDSE/VSAM(KSDS), etc. + bk_dsn : str + The name of the backup data set. + + Returns + ------- + str + The backup dataset + + Raises + ------ + BackupError + When backup data set exists. + BackupError + When creation of backup data set fails. """ dsn = _validate_data_set_name(dsn).upper() if is_member(dsn): @@ -98,7 +121,7 @@ def mvs_file_backup(dsn, bk_dsn=None, tmphlq=None): # In case the backup ds is a member we trust that the PDS attributes are ok to fit the src content. # This should not delete a PDS just to create a backup member. - # Otherwise, we allocate the appropiate space for the backup ds based on src. + # Otherwise, we allocate the appropriate space for the backup ds based on src. if is_member(bk_dsn): try: cp_rc = datasets.copy(dsn, bk_dsn) @@ -122,20 +145,29 @@ def mvs_file_backup(dsn, bk_dsn=None, tmphlq=None): def uss_file_backup(path, backup_name=None, compress=False): - """Create a backup file for a USS file or path - - Arguments: - path {str} -- The name of the USS file or path to backup. - backup_name {str} -- The name of the backup file. - - Keyword Arguments: - compress {bool} -- Determines if the backup be compressed. (default: {False}) - - Raises: - BackupError: When creating compressed backup fails. - - Returns: - str -- Name of the backup file. + """Create a backup file for a USS file or path. + + Parameters + ---------- + path : str + The name of the USS file or path to backup. + backup_name : str + The name of the backup file. + + Keyword Parameters + ------------------ + compress : bool + Determines if the backup be compressed. (default: {False}) + + Returns + ------- + str + Name of the backup file. + + Raises + ------ + BackupError + When creating compressed backup fails. """ abs_path = os.path.abspath(path) @@ -188,14 +220,24 @@ def uss_file_backup(path, backup_name=None, compress=False): def _copy_ds(ds, bk_ds): - """Copy the contents of a data set to another - - Arguments: - ds {str} -- The source data set to be copied from. Should be SEQ or VSAM - bk_dsn {str} -- The destination data set to copy to. - - Raises: - BackupError: When copying data fails + """Copy the contents of a data set to another. + + Parameters + ---------- + ds : str + The source data set to be copied from. Should be SEQ or VSAM. + bk_dsn : str + The destination data set to copy to. + + Returns + ------- + int + Return code. + + Raises + ------ + BackupError + When copying data fails. """ module = AnsibleModuleHelper(argument_spec={}) _allocate_model(bk_ds, ds) @@ -220,14 +262,24 @@ def _copy_ds(ds, bk_ds): def _allocate_model(ds, model): - """Allocate a data set using allocation information of a model data set - - Arguments: - ds {str} -- The name of the data set to be allocated. - model {str} -- The name of the data set whose allocation parameters should be used. - - Raises: - BackupError: When allocation fails + """Allocate a data set using allocation information of a model data set. + + Parameters + ---------- + ds : str + The name of the data set to be allocated. + model : str + The name of the data set whose allocation parameters should be used. + + Returns + ------- + int + Return code. + + Raises + ------ + BackupError + When allocation fails. """ module = AnsibleModuleHelper(argument_spec={}) alloc_cmd = """ ALLOC - @@ -247,6 +299,20 @@ def _allocate_model(ds, model): def _copy_pds(ds, bk_dsn): + """Copy a dataset. + + Parameters + ---------- + ds : str + The name of the data set to be allocated. + bk_dsn : str + The destination data set to copy to. + + Returns + ------- + str + Copied dataset. + """ dds = dict(OUTPUT=bk_dsn, INPUT=ds) copy_cmd = " COPY OUTDD=OUTPUT,INDD=((INPUT,R))" return iebcopy(copy_cmd, dds=dds) @@ -254,6 +320,30 @@ def _copy_pds(ds, bk_dsn): class BackupError(Exception): def __init__(self, message, rc=None, stdout=None, stderr=None): + """Error during backup. + + Parameters + ---------- + message : str + Human readable string describing the exception. + rc : int + Return code. + stdout : str + Standard output. + stderr : str + Standard error. + + Attributes + ---------- + msg : str + Human readable string describing the exception. + rc : int + Return code. + stdout : str + Standard output. + stderr : str + Standard error. + """ self.msg = 'An error occurred during backup: "{0}"'.format(message) self.rc = rc self.stdout = stdout From 9b7369d5aee4a8a81d22e072bfd0f31777df29d7 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Wed, 15 May 2024 15:52:31 -0600 Subject: [PATCH 383/495] [Enabler][zos_data_set] Add support to GDG and special characters. (#1504) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * modified DatasetCreatedError message * Added gdg functions * Created unit test for validating gds relative name * Updated to fail when future gen * Update arg parser * Adding gdg support for zos_data_set * Add escaping function for data set names * Add unit tests for name escaping * Remove calls to escape_data_set_name * renamed tests * Added MVSDataset class * Updated escaped symbols * Updated tests * Added utils * Add changelog * Uncommented test * Updated exception * Updated mvsdataset class * Updated class * Added type * Added gds tests * Testing for special symbols * Added support for MVSDataset class * Added attributes to docs * Updated batch options * Added data set creation to create temp * Added methods to mvsdataset class * Fixed lint issues * fixed temp * fixed raw_name * Added record format to none for vsam * Added fixes * Added repr to module * Added member class * Added latest changes to zos_data_set * Updated with latest errors * Fixing documentation. * Added fragment * Modified docstrings * Added test for data set member * Updated archive * Updated doc * Updated doc * removed print * Set is_gds_active * Updated test * Fixed review comments * Fixed review comments --------- Co-authored-by: Ivan Moreno <iamorenosoto@gmail.com> Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> --- .../1504-zos_data_set-gdg-support.yml | 5 + plugins/module_utils/data_set.py | 434 ++++++++++++++++-- plugins/modules/zos_data_set.py | 306 ++++++++++-- .../modules/test_zos_data_set_func.py | 109 ++++- tests/helpers/dataset.py | 40 +- 5 files changed, 808 insertions(+), 86 deletions(-) create mode 100644 changelogs/fragments/1504-zos_data_set-gdg-support.yml diff --git a/changelogs/fragments/1504-zos_data_set-gdg-support.yml b/changelogs/fragments/1504-zos_data_set-gdg-support.yml new file mode 100644 index 000000000..42becb638 --- /dev/null +++ b/changelogs/fragments/1504-zos_data_set-gdg-support.yml @@ -0,0 +1,5 @@ +minor_changes: + - zos_data_set - Added support for GDG and GDS relative name notation to create, delete, + catalog and uncatalog a data set. Added support for data set names with special characters + like $, /#, /- and @. + (https://github.com/ansible-collections/ibm_zos_core/pull/1504). \ No newline at end of file diff --git a/plugins/module_utils/data_set.py b/plugins/module_utils/data_set.py index c6b6b4e52..f741b5c70 100644 --- a/plugins/module_utils/data_set.py +++ b/plugins/module_utils/data_set.py @@ -17,22 +17,17 @@ import tempfile import traceback from os import path, walk -from string import ascii_uppercase, digits from random import sample +from string import ascii_uppercase, digits + # from ansible.module_utils._text import to_bytes from ansible.module_utils.common.text.converters import to_bytes -from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.ansible_module import ( - AnsibleModuleHelper, -) -from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import ( - MissingImport, - ZOAUImportError, -) - from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import ( - better_arg_parser, - mvs_cmd, -) + better_arg_parser, mvs_cmd) +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.ansible_module import \ + AnsibleModuleHelper +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import ( + MissingImport, ZOAUImportError) try: from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import vtoc @@ -97,6 +92,7 @@ def ensure_present( name, replace, type, + raw_name=None, space_primary=None, space_secondary=None, space_type=None, @@ -116,8 +112,8 @@ def ensure_present( """Creates data set if it does not already exist. Args: - name (str): The name of the dataset - replace (bool) -- Used to determine behavior when data set already exists. + name (str): The name of the dataset. + raw_name (str): Original name without escaping or gds name resolve operations performed. type (str, optional): The type of dataset. Valid options are: SEQ, BASIC, LARGE, PDS, PDSE, LIBRARY, LDS, RRDS, ESDS, KSDS. Defaults to None. @@ -166,6 +162,7 @@ def ensure_present( When using SMS, volumes can be provided when the storage class being used has GUARANTEED_SPACE=YES specified. Otherwise, the allocation will fail. Defaults to None. + replace (bool) -- Used to determine behavior when data set already exists. tmp_hlq (str, optional): High level qualifier for temporary datasets. force (bool, optional): Used to determine behavior when performing member operations on a pdse. Defaults to None. @@ -356,8 +353,9 @@ def data_set_cataloged(name, volumes=None): Returns: bool -- If data is is cataloged. """ - - name = name.upper() + # We need to unescape because this calls to system can handle + # special characters just fine. + name = name.upper().replace("\\", '') module = AnsibleModuleHelper(argument_spec={}) stdin = " LISTCAT ENTRIES('{0}')".format(name) @@ -370,7 +368,7 @@ def data_set_cataloged(name, volumes=None): if bool(set(volumes) & set(cataloged_volume_list)): return True else: - if re.search(r"-\s" + name + r"\s*\n\s+IN-CAT", stdout): + if re.search(r"-\s" + re.escape(name) + r"\s*\n\s+IN-CAT", stdout): return True return False @@ -829,6 +827,7 @@ def _is_in_vtoc(name, volume): def replace( name, type, + raw_name=None, space_primary=None, space_secondary=None, space_type=None, @@ -848,7 +847,8 @@ def replace( """Attempts to replace an existing data set. Args: - name (str): The name of the dataset + name (str): The name of the dataset. + raw_name (str): Original name without escaping or gds name resolve operations performed. type (str, optional): The type of dataset. Valid options are: SEQ, BASIC, LARGE, PDS, PDSE, LIBRARY, LDS, RRDS, ESDS, KSDS. Defaults to None. @@ -920,14 +920,14 @@ def _build_zoau_args(**kwargs): if space_type: secondary += space_type - type = kwargs.get("type") - if type and type.upper() == "ZFS": - type = "LDS" + ds_type = kwargs.get("type") + if ds_type and ds_type.upper() == "ZFS": + ds_type = "LDS" volumes = ",".join(volumes) if volumes else None kwargs["space_primary"] = primary kwargs["space_secondary"] = secondary - kwargs["dataset_type"] = type + kwargs["dataset_type"] = ds_type kwargs["volumes"] = volumes kwargs.pop("space_type", None) renamed_args = {} @@ -944,6 +944,7 @@ def _build_zoau_args(**kwargs): def create( name, type, + raw_name=None, space_primary=None, space_secondary=None, space_type=None, @@ -965,7 +966,8 @@ def create( Reasonable default arguments will be set by ZOAU when necessary. Args: - name (str): The name of the dataset + name (str): The name of the dataset. + raw_name (str): Original name without escaping or gds name resolve operations performed. type (str, optional): The type of dataset. Valid options are: SEQ, BASIC, LARGE, PDS, PDSE, LIBRARY, LDS, RRDS, ESDS, KSDS. Defaults to None. @@ -1026,7 +1028,7 @@ def create( datasets.create(**formatted_args) except exceptions._ZOAUExtendableException as create_exception: raise DatasetCreateError( - name, + raw_name if raw_name else name, create_exception.response.rc, create_exception.response.stdout_response + "\n" + create_exception.response.stderr_response ) @@ -1036,7 +1038,7 @@ def create( if DataSet.data_set_cataloged(name, volumes): return 0 raise DatasetCreateError( - name, + raw_name if raw_name else name, msg="Unable to verify the data set was created. Received DatasetVerificationError from ZOAU.", ) # With ZOAU 1.3 we switched from getting a ZOAUResponse obj to a Dataset obj, previously we returned @@ -1801,7 +1803,7 @@ class MVSDataSet(): """ This class represents a z/OS data set that can be yet to be created or already created in the system. It encapsulates the data set attributes - to easy access. + to easy access and provides operations to perform in the same data set. """ def __init__( @@ -1826,7 +1828,10 @@ def __init__( total_space=None, used_space=None, last_referenced=None, + is_cataloged=None, ): + # Different class variables + self.data_set_possible_states = {"unknown", "present", "absent"} self.name = name self.organization = organization self.record_format = record_format @@ -1850,14 +1855,387 @@ def __init__( self.sms_management_class = sms_management_class self.volumes = volumes self.is_gds_active = False + self.is_cataloged = False + # If name has escaped chars or is GDS relative name we clean it. - self.name = DataSet.escape_data_set_name(self.name) + # self.name = DataSet.escape_data_set_name(self.name) if DataSet.is_gds_relative_name(self.name): try: self.name = DataSet.resolve_gds_absolute_name(self.name) - except Exception as e: + self.is_gds_active = True + except Exception: # This means the generation is a positive version so is only used for creation. self.is_gds_active = False + if self.data_set_type.upper() in DataSet.MVS_VSAM or self.data_set_type == "zfs": + # When trying to create a new VSAM with a specified record format will fail + # with ZOAU + self.record_format = None + + def create(self): + """Creates the data set in question. + + Returns + ------- + int + Indicates if changes were made. + """ + arguments = { + "name" : self.name, + "raw_name" : self.raw_name, + "replace" : self.replace, + "type" : self.data_set_type, + "space_primary" : self.space_primary, + "space_secondary" : self.space_secondary, + "space_type" : self.space_type, + "record_format" : self.record_format, + "record_length" : self.record_length, + "block_size" : self.block_size, + "directory_blocks" : self.directory_blocks, + "key_length" : self.key_length, + "key_offset" : self.key_offset, + "sms_storage_class" : self.sms_storage_class, + "sms_data_class" : self.sms_data_class, + "sms_management_class" : self.sms_management_class, + "volumes" : self.volumes, + "tmp_hlq" : self.tmp_hlq, + "force" : self.force, + } + DataSet.create(**arguments) + self.set_state("present") + + def ensure_present(self, tmp_hlq=None, replace=False, force=False): + """ Make sure that the data set is created or fail creating it. + + Parameters + ---------- + tmp_hlq : str + High level qualifier for temporary datasets. + replace : bool + Used to determine behavior when data set already exists. + force : bool + Used to determine behavior when performing member operations on a pdse. + + Returns + ------- + int + Indicates if changes were made. + """ + arguments = { + "name" : self.name, + "raw_name" : self.raw_name, + "type" : self.data_set_type, + "space_primary" : self.space_primary, + "space_secondary" : self.space_secondary, + "space_type" : self.space_type, + "record_format" : self.record_format, + "record_length" : self.record_length, + "block_size" : self.block_size, + "directory_blocks" : self.directory_blocks, + "key_length" : self.key_length, + "key_offset" : self.key_offset, + "sms_storage_class" : self.sms_storage_class, + "sms_data_class" : self.sms_data_class, + "sms_management_class" : self.sms_management_class, + "volumes" : self.volumes, + "replace" : replace, + "tmp_hlq" : tmp_hlq, + "force" : force, + } + rc = DataSet.ensure_present(**arguments) + self.set_state("present") + return rc + + def ensure_absent(self): + """Removes the data set. + + Returns + ------- + int + Indicates if changes were made. + """ + rc = DataSet.ensure_absent(self.name, self.volumes) + if rc == 0: + self.set_state("absent") + return rc + + def delete(self): + """Deletes the data set in question. + + Returns + ------- + int + Indicates if changes were made. + """ + DataSet.ensure_absent(self.name, self.volumes) + self.set_state("absent") + + def ensure_cataloged(self): + """ + Ensures the data set is cataloged, if not catalogs it. + + Returns + ------- + int + Indicates if changes were made. + """ + rc = DataSet.ensure_cataloged(name=self.name, volumes=self.volumes) + self.is_cataloged = True + return rc + + def catalog(self): + """Catalog the data set in question. + + Returns + ------- + int + Indicates if changes were made. + """ + rc = DataSet.catalog(self.name, self.volumes) + self.is_cataloged = True + return rc + + def ensure_uncataloged(self): + """ + Ensures the data set is uncataloged, if not catalogs it. + + Returns + ------- + int + Indicates if changes were made. + """ + rc = DataSet.ensure_uncataloged(self.name) + self.is_cataloged = False + return rc + + def uncatalog(self): + """Uncatalog the data set in question. + + Returns + ------- + int + Indicates if changes were made. + """ + rc = DataSet.uncatalog(self.name) + self.is_cataloged = False + return rc + + def set_state(self, new_state): + """Used to set the data set state. + + Parameters + ---------- + new_state : str {unknown, present, absent} + New state of the data set. + + Returns + ------- + bool + If state was set properly. + """ + if new_state not in self.data_set_possible_states: + raise ValueError(f"State {self.state} not supported for MVSDataset class.") + return True + + +class Member(): + """Represents a member on z/OS. + + Attributes + ---------- + name : str + Data set member name. + parent_data_set_type : str {pds, pdse} + Parent data set type. + data_set_type : str + Member data set type, should always be "member". + """ + def __init__( + self, + name, + parent_data_set_type="pds", + ): + self.name = name + self.parent_data_set_type = parent_data_set_type + self.data_set_type = "member" + + def ensure_absent(self, force): + """ Make sure that the member is absent or fail deleting it. + + Parameters + ---------- + force : bool + Used to determine behavior when performing member operations on a pdse. + + Returns + ------- + int + Indicates if changes were made. + """ + rc = DataSet.ensure_member_absent(self.name, force) + return rc + + def ensure_present(self, replace=None): + """ Make sure that the member is created or fail creating it. + + Parameters + ---------- + replace : bool + Used to determine behavior when member already exists. + + Returns + ------- + int + Indicates if changes were made. + """ + rc = DataSet.ensure_member_present(self.name, replace) + return rc + + +class GenerationDataGroup(): + """Represents a Generation Data Group base in z/OS. + + Attributes + ---------- + name : str + The name of the GDG base. + limit : int + Maximum number of generations associated with this GDG base. + empty : bool + Empty attribute for the GDG base. + purge : bool + purge attribute for the GDG base. + scratch : bool + scratch attribute for the GDG base. + extended : bool + extended attribute for the GDG base. If extended a GDG base can + have up to 999 generations, if not just up to 255. + fifo : bool + fifo attribute for the GDG base. + data_set_type : str + data_set_type will always be "gdg" + raw_name : str + The raw name of the data set. + gdg : GenerationDataGroupView + ZOAU GenerationDataGroupView object to interact with the GDG base. + """ + def __init__( + self, + name, + limit, + empty=False, + purge=False, + scratch=False, + extended=False, + fifo=False, + ): + self.name = name + self.limit = limit + self.empty = empty + self.purge = purge + self.scratch = scratch + self.extended = extended + self.fifo = fifo + self.data_set_type = "gdg" + self.raw_name = name + self.gdg = None + # Removed escaping since is not needed by the GDG python api. + # self.name = DataSet.escape_data_set_name(self.name) + + def create(self): + """Creates the GDG. + + Returns + ------- + int + Indicates if changes were made. + """ + gdg = gdgs.create( + name=self.name, + limit=self.limit, + empty=self.empty, + purge=self.purge, + scratch=self.scratch, + extended=self.extended, + fifo=self.fifo, + ) + self.gdg = gdg + return True + + def ensure_present(self, replace): + """Make sure that the data set is created or fail creating it. + Parameters + ---------- + replace : bool + Used to determine behavior when member already exists. + + Returns + ------- + int + Indicates if changes were made. + """ + arguments = vars(self) + changed = False + present = False + gdg = None + if gdgs.exists(arguments.get("name")): + present = True + + if not present: + gdg = gdgs.create(**arguments) + else: + if not replace: + return changed + changed = self.ensure_absent() + gdg = gdgs.create(**arguments) + if isinstance(gdg, gdgs.GenerationDataGroupView): + changed = True + return changed + + def ensure_absent(self, force): + """Ensure gdg base is deleted. If force is True and there is an + existing GDG with active generations it will remove them and delete + the GDG. + Parameters + ---------- + force : bool + If the GDG base has active generations, remove them and delete the GDG base. + + Returns + ------- + int + Indicates if changes were made. + """ + # Try to delete + rc = datasets.delete(self.name) + if rc > 0: + if force: + if isinstance(self.gdg, gdgs.GenerationDataGroupView): + self.gdg.delete() + else: + gdg_view = gdgs.GenerationDataGroupView(name=self.name) + gdg_view.delete() + else: + raise DatasetDeleteError(self.raw_name, rc) + return True + + def clear(self): + """Deletes the active generations without removing the GDG base. + Parameters + ---------- + force : bool + If the GDG base has active generations, remove them and delete the GDG base. + + Returns + ------- + int + Indicates if changes were made. + """ + if isinstance(self.gdg, gdgs.GenerationDataGroupView): + self.gdg.clear() + else: + gdg_view = gdgs.GenerationDataGroupView(name=self.name) + gdg_view.clear() + return True def is_member(data_set): diff --git a/plugins/modules/zos_data_set.py b/plugins/modules/zos_data_set.py index b500eb84a..b85e97aea 100644 --- a/plugins/modules/zos_data_set.py +++ b/plugins/modules/zos_data_set.py @@ -66,6 +66,11 @@ will be uncataloged temporarily while the requested data set be deleted is cataloged. The module will catalog the original data set on completion, if the attempts to catalog fail, no action is taken. Module completes successfully with I(changed=False). + - > + If I(state=absent) and I(type=gdg) and the GDG base has active generations the module + will complete successfully with I(changed=False). To remove it option I(force) needs + to be used. If the GDG base does not have active generations the module will complete + successfully with I(changed=True). - > If I(state=present) and the data set does not exist on the managed node, create and catalog the data set, module completes successfully with I(changed=True). @@ -128,6 +133,7 @@ - member - hfs - zfs + - gdg default: pds space_primary: description: @@ -232,6 +238,54 @@ - I(key_length) should only be provided when I(type=ksds) type: int required: false + empty: + description: + - Sets the I(empty) attribute for Generation Data Groups. + - If false, removes only the oldest GDS entry when a new GDS is created that causes GDG limit to be exceeded. + - If true, removes all GDS entries from a GDG base when a new GDS is created that causes the + GDG limit to be exceeded. + - Default is false. + type: bool + required: false + extended: + description: + - Sets the I(extended) attribute for Generation Data Groups. + - If false, allow up to 255 generation data sets (GDSs) to be associated with the GDG. + - If true, allow up to 999 generation data sets (GDS) to be associated with the GDG. + - Default is false. + type: bool + required: false + fifo: + description: + - Sets the I(fifo) attribute for Generation Data Groups. + - If false, the order is the newest GDS defined to the oldest GDS. This is the default value. + - If true, the order is the oldest GDS defined to the newest GDS. + - Default is false. + type: bool + required: false + limit: + description: + - Sets the I(limit) attribute for Generation Data Groups. + - Specifies the maximum number, from 1 to 255(up to 999 if extended), of GDS that can be + associated with the GDG being defined. + - I(limit) is required when I(type=gdg). + type: int + required: false + purge: + description: + - Sets the I(purge) attribute for Generation Data Groups. + - Specifies whether to override expiration dates when a generation data set (GDS) + is rolled off and the C(scratch) option is set. + type: bool + required: false + scratch: + description: + - Sets the I(scratch) attribute for Generation Data Groups. + - Specifies what action is to be taken for a generation data set located on disk + volumes when the data set is uncataloged from the GDG base as a result of + EMPTY/NOEMPTY processing. + type: bool + required: false volumes: description: - > @@ -281,7 +335,9 @@ - The I(force=True) option enables sharing of data sets through the disposition I(DISP=SHR). - The I(force=True) only applies to data set members when I(state=absent) - and I(type=member). + and I(type=member) and when removing a GDG base with active generations. + - If I(force=True), I(type=gdg) and I(state=absent) it will force remove + a GDG base with active generations. type: bool required: false default: false @@ -393,6 +449,7 @@ - member - hfs - zfs + - gdg default: pds space_primary: description: @@ -497,6 +554,54 @@ - I(key_length) should only be provided when I(type=ksds) type: int required: false + empty: + description: + - Sets the I(empty) attribute for Generation Data Groups. + - If false, removes only the oldest GDS entry when a new GDS is created that causes GDG limit to be exceeded. + - If true, removes all GDS entries from a GDG base when a new GDS is created that causes the + GDG limit to be exceeded. + - Default is false. + type: bool + required: false + extended: + description: + - Sets the I(extended) attribute for Generation Data Groups. + - If false, allow up to 255 generation data sets (GDSs) to be associated with the GDG. + - If true, allow up to 999 generation data sets (GDS) to be associated with the GDG. + - Default is false. + type: bool + required: false + fifo: + description: + - Sets the I(fifo) attribute for Generation Data Groups. + - If false, the order is the newest GDS defined to the oldest GDS. This is the default value. + - If true, the order is the oldest GDS defined to the newest GDS. + - Default is false. + type: bool + required: false + limit: + description: + - Sets the I(limit) attribute for Generation Data Groups. + - Specifies the maximum number, from 1 to 255(up to 999 if extended), of GDS that can be + associated with the GDG being defined. + - I(limit) is required when I(type=gdg). + type: int + required: false + purge: + description: + - Sets the I(purge) attribute for Generation Data Groups. + - Specifies whether to override expiration dates when a generation data set (GDS) + is rolled off and the C(scratch) option is set. + type: bool + required: false + scratch: + description: + - Sets the I(scratch) attribute for Generation Data Groups. + - Specifies what action is to be taken for a generation data set located on disk + volumes when the data set is uncataloged from the GDG base as a result of + EMPTY/NOEMPTY processing. + type: bool + required: false volumes: description: - > @@ -682,7 +787,9 @@ from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.better_arg_parser import ( BetterArgParser, ) -from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.data_set import DataSet +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.data_set import ( + DataSet, GenerationDataGroup, MVSDataSet, Member +) from ansible.module_utils.basic import AnsibleModule import re @@ -702,6 +809,7 @@ "member", "hfs", "zfs", + "gdg", ] DATA_SET_FORMATS = [ @@ -821,9 +929,17 @@ def data_set_name(contents, dependencies): dsname, re.IGNORECASE, ): - if not ( + if ( + re.fullmatch( + r"^(?:(?:[A-Z$#@]{1}[A-Z0-9$#@-]{0,7})(?:[.]{1})){1,21}[A-Z$#@]{1}[A-Z0-9$#@-]{0,7}(?:\([+-]{0,1}\d{1,4}\)){0,1}$", + dsname, + re.IGNORECASE, + ) + ): + return dsname.upper() + elif not ( re.fullmatch( - r"^(?:(?:[A-Z$#@]{1}[A-Z0-9$#@-]{0,7})(?:[.]{1})){1,21}[A-Z$#@]{1}[A-Z0-9$#@-]{0,7}(?:\([A-Z$#@]{1}[A-Z0-9$#@]{0,7}\)){0,1}$", + r"^(?:(?:[A-Z$#@]{1}[A-Z0-9$#@-]{0,7})(?:[.]{1})){1,21}[A-Z$#@]{1}[A-Z0-9$#@-]{0,7}(?:\(([A-Z$#@]{1}[A-Z0-9$#@]{0,7})\)){0,1}$", dsname, re.IGNORECASE, ) @@ -1036,16 +1152,51 @@ def data_set_type(contents, dependencies): # return None if contents is None: return "pds" + + if contents == "gdg" and dependencies.get("state") == "present" and dependencies.get("limit") is None: + raise ValueError( + "Limit must be provided when data set type is gdg and state=present." + ) types = "|".join(DATA_SET_TYPES) if not re.fullmatch(types, contents, re.IGNORECASE): raise ValueError( - "Value {0} is invalid for type argument. type must be of of the following: {1}.".format( + "Value {0} is invalid for type argument. type must be one of the following: {1}.".format( contents, ", ".join(DATA_SET_TYPES) ) ) return contents +def limit_type(contents, dependencies): + """Validates limit is valid. Limit option is dependent on state. + Returns limit. + + Parameters + ---------- + contents : int + Limit for GDG type. + dependencies : dict + Any dependencies needed for contents argument to be validated. + + Returns + ------- + int + The limit for GDG type. + + Raises + ------ + ValueError + Value is invalid. + """ + if not isinstance(contents, int): + raise ValueError( + "Value {0} is invalid for limit option. Limit must be an integer from 1 to 255, if extended up to 999.".format( + contents + ) + ) + return contents + + # * dependent on state def volumes(contents, dependencies): """Validates volume is valid. @@ -1184,18 +1335,68 @@ def key_offset(contents, dependencies): return contents -def perform_data_set_operations(name, state, **extra_args): +def get_data_set_handler(**params): + """Get object initialized based on parameters. + Parameters + ---------- + **params + Data set parameters. + + Returns + ------- + MVSDataSet or GenerationDataGroup or Member object. + """ + if params.get("type") == "gdg": + return GenerationDataGroup( + name=params.get("name"), + limit=params.get("limit", None), + empty=params.get("empty", None), + purge=params.get("purge", None), + scratch=params.get("scratch", None), + extended=params.get("extended", None), + fifo=params.get("fifo", None), + ) + elif params.get("type") == "member": + return Member( + name=params.get("name") + ) + else: + return MVSDataSet( + name=params.get("name"), + record_format=params.get("record_format", None), + volumes=params.get("volumes", None), + data_set_type=params.get("type", None), + block_size=params.get("block_size", None), + record_length=params.get("record_length", None), + space_primary=params.get("space_primary", None), + space_secondary=params.get("space_secondary", None), + space_type=params.get("space_type", None), + directory_blocks=params.get("directory_blocks", None), + key_length=params.get("key_length", None), + key_offset=params.get("key_offset", None), + sms_storage_class=params.get("sms_storage_class", None), + sms_data_class=params.get("sms_data_class", None), + sms_management_class=params.get("sms_management_class", None), + ) + + +def perform_data_set_operations(data_set, state, replace, tmp_hlq, force): """Calls functions to perform desired operations on one or more data sets. Returns boolean indicating if changes were made. Parameters ---------- - name : str - Name of the dataset. + data_set : {object | MVSDataSet | Member | GenerationDataGroup } + Data set object to perform operations on. state : str State of the data sets. - **extra_args : dict - Properties of the data sets. + replace : str + Whether or not replace an existing data set if it has the same name. + tmp_hlq : str + Temporary high level qualifier to use for temporary data sets. + force : str + Whether or not the data set can be shared with others during the + operation. Returns ------- @@ -1203,21 +1404,20 @@ def perform_data_set_operations(name, state, **extra_args): If changes were made. """ changed = False - # passing in **extra_args forced me to modify the acceptable parameters - # for multiple functions in data_set.py including ensure_present, replace - # and create where the force parameter has no bearing. - if state == "present" and extra_args.get("type") != "member": - changed = DataSet.ensure_present(name, **extra_args) - elif state == "present" and extra_args.get("type") == "member": - changed = DataSet.ensure_member_present(name, extra_args.get("replace")) - elif state == "absent" and extra_args.get("type") != "member": - changed = DataSet.ensure_absent(name, extra_args.get("volumes")) - elif state == "absent" and extra_args.get("type") == "member": - changed = DataSet.ensure_member_absent(name, extra_args.get("force")) + if state == "present" and data_set.data_set_type in ["member", "gdg"]: + changed = data_set.ensure_present(replace=replace) + elif state == "present": + changed = data_set.ensure_present(replace=replace, tmp_hlq=tmp_hlq, force=force) + elif state == "absent" and data_set.data_set_type == "member": + changed = data_set.ensure_absent(force=force) + elif state == "absent" and data_set.data_set_type == "gdg": + changed = data_set.ensure_absent(force=force) + elif state == "absent": + changed = data_set.ensure_absent() elif state == "cataloged": - changed = DataSet.ensure_cataloged(name, extra_args.get("volumes")) + changed = data_set.ensure_cataloged() elif state == "uncataloged": - changed = DataSet.ensure_uncataloged(name) + changed = data_set.ensure_uncataloged() return changed @@ -1345,7 +1545,7 @@ def parse_and_validate_args(params): default="present", choices=["present", "absent", "cataloged", "uncataloged"], ), - type=dict(type=data_set_type, required=False, dependencies=["state"]), + type=dict(type=data_set_type, required=False, dependencies=["state", "limit"]), space_type=dict( type=space_type, required=False, @@ -1368,7 +1568,7 @@ def parse_and_validate_args(params): ), # I know this alias is odd, ZOAU used to document they supported # SMS data class when they were actually passing as storage class - # support for backwards compatability with previous module versions + # support for backwards compatibility with previous module versions sms_storage_class=dict( type=sms_class, required=False, @@ -1397,6 +1597,14 @@ def parse_and_validate_args(params): type="bool", default=False, ), + # GDG options + limit=dict(type=limit_type, required=False), + empty=dict(type="bool", required=False), + purge=dict(type="bool", required=False), + scratch=dict(type="bool", required=False), + extended=dict(type="bool", required=False), + fifo=dict(type="bool", required=False), + # End of GDG options volumes=dict( type=volumes, required=False, @@ -1512,6 +1720,13 @@ def run_module(): type="bool", default=False, ), + # GDG options + limit=dict(type="int", required=False, no_log=False), + empty=dict(type="bool", required=False), + purge=dict(type="bool", required=False), + scratch=dict(type="bool", required=False), + extended=dict(type="bool", required=False), + fifo=dict(type="bool", required=False), volumes=dict(type="raw", required=False, aliases=["volume"]), force=dict( type="bool", @@ -1575,6 +1790,14 @@ def run_module(): type="bool", default=False, ), + # GDG options + limit=dict(type="int", required=False, no_log=False), + empty=dict(type="bool", required=False), + purge=dict(type="bool", required=False), + scratch=dict(type="bool", required=False), + extended=dict(type="bool", required=False), + fifo=dict(type="bool", required=False), + # End of GDG options volumes=dict( type="raw", required=False, @@ -1635,33 +1858,14 @@ def run_module(): result["names"] = [d.get("name", "") for d in data_set_param_list] for data_set_params in data_set_param_list: - # This *appears* redundant, bit the parse_and_validate reinforces the default value for record_type - if data_set_params.get("batch") is not None: - for entry in data_set_params.get("batch"): - if entry.get('type') is not None and entry.get("type") in DATA_SET_TYPES_VSAM: - entry["record_format"] = None - if data_set_params.get("type") is not None: - data_set_params["type"] = None - if data_set_params.get("state") is not None: - data_set_params["state"] = None - if data_set_params.get("space_type") is not None: - data_set_params["space_type"] = None - if data_set_params.get("space_primary") is not None: - data_set_params["space_primary"] = None - if data_set_params.get("space_secondary") is not None: - data_set_params["space_secondary"] = None - if data_set_params.get("replace") is not None: - data_set_params["replace"] = None - if data_set_params.get("record_format") is not None: - data_set_params["record_format"] = None - else: - if data_set_params.get("type") in DATA_SET_TYPES_VSAM: - if data_set_params.get("record_format") is not None: - data_set_params["record_format"] = None - - # remove unnecessary empty batch argument + # this returns MVSDataSet, Member or GenerationDataGroup + data_set = get_data_set_handler(**data_set_params) result["changed"] = perform_data_set_operations( - **data_set_params + data_set=data_set, + state=data_set_params.get("state"), + replace=data_set_params.get("replace"), + tmp_hlq=data_set_params.get("tmp_hlq"), + force=data_set_params.get("force"), ) or result.get("changed", False) except Exception as e: module.fail_json(msg=repr(e), **result) diff --git a/tests/functional/modules/test_zos_data_set_func.py b/tests/functional/modules/test_zos_data_set_func.py index 7dc1d9073..d01705597 100644 --- a/tests/functional/modules/test_zos_data_set_func.py +++ b/tests/functional/modules/test_zos_data_set_func.py @@ -868,14 +868,14 @@ def test_filesystem_create_and_mount(ansible_zos_module, filesystem): results = hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, type=filesystem) temp_dir_name = make_tempfile(hosts, directory=True) results2 = hosts.all.command( - cmd="mount -t {0} -f {1} {2}".format( + cmd="usr/sbin/mount -t {0} -f {1} {2}".format( filesystem, DEFAULT_DATA_SET_NAME, temp_dir_name ) ) results3 = hosts.all.shell(cmd="cd {0} ; df .".format(temp_dir_name)) # clean up - results4 = hosts.all.command(cmd="unmount {0}".format(temp_dir_name)) + results4 = hosts.all.command(cmd="usr/sbin/unmount {0}".format(temp_dir_name)) results5 = hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") results6 = hosts.all.file(path=temp_dir_name, state="absent") @@ -963,3 +963,108 @@ def test_data_set_f_formats(ansible_zos_module, formats, volumes_on_systems): assert result.get("module_stderr") is None finally: hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") + +""" +GDG base tests: +- Create a new GDG. + +- Create a new GDS (src_type: seq, pds, pdse). +- Delete an empty GDG. + +""" +@pytest.mark.parametrize("dstype", ["seq", "pds", "pdse"]) +def test_gdg_create_and_delete(ansible_zos_module, dstype): + try: + hosts = ansible_zos_module + data_set_name = get_tmp_ds_name() + results = hosts.all.zos_data_set(name=data_set_name, state="present", type="gdg", limit=3) + for result in results.contacted.values(): + assert result.get("changed") is True + assert result.get("module_stderr") is None + results = hosts.all.zos_data_set(name=f"{data_set_name}(+1)", state="present", type=dstype) + for result in results.contacted.values(): + assert result.get("changed") is True + assert result.get("module_stderr") is None + results = hosts.all.zos_data_set(name=f"{data_set_name}(+1)", state="present", type=dstype) + for result in results.contacted.values(): + assert result.get("changed") is True + assert result.get("module_stderr") is None + results = hosts.all.zos_data_set(name=f"{data_set_name}(-1)", state="absent") + for result in results.contacted.values(): + assert result.get("changed") is True + assert result.get("module_stderr") is None + results = hosts.all.zos_data_set(name=f"{data_set_name}(0)", state="absent") + for result in results.contacted.values(): + assert result.get("changed") is True + assert result.get("module_stderr") is None + results = hosts.all.zos_data_set(name=data_set_name, state="absent") + for result in results.contacted.values(): + assert result.get("changed") is True + assert result.get("module_stderr") is None + finally: + hosts.all.zos_data_set(name=data_set_name, state="absent", force=True) + + +def test_gdg_create_and_delete_force(ansible_zos_module): + try: + hosts = ansible_zos_module + data_set_name = get_tmp_ds_name() + results = hosts.all.zos_data_set(name=data_set_name, state="present", type="gdg", limit=3) + for result in results.contacted.values(): + assert result.get("changed") is True + assert result.get("module_stderr") is None + results = hosts.all.zos_data_set(name=f"{data_set_name}(+1)", state="present", type="seq") + for result in results.contacted.values(): + assert result.get("changed") is True + assert result.get("module_stderr") is None + results = hosts.all.zos_data_set(name=data_set_name, state="absent", type="gdg") + for result in results.contacted.values(): + assert result.get("changed") is False + assert result.get("failed", False) is True + assert "DatasetDeleteError" in result.get("msg") + results = hosts.all.zos_data_set(name=data_set_name, state="absent", force=True, type="gdg") + for result in results.contacted.values(): + assert result.get("changed") is True + assert result.get("module_stderr") is None + finally: + hosts.all.zos_data_set(name=data_set_name, state="absent", force=True, type="gdg") + + +def test_create_special_chars(ansible_zos_module): + try: + hosts = ansible_zos_module + data_set_name = get_tmp_ds_name(symbols=True) + results = hosts.all.zos_data_set(name=data_set_name, state="present", type="seq") + for result in results.contacted.values(): + assert result.get("changed") is True + assert result.get("module_stderr") is None + results = hosts.all.shell(cmd=f"dls ANSIBLE.*") + for result in results.contacted.values(): + assert data_set_name in result.get("stdout") + results = hosts.all.zos_data_set(name=data_set_name, state="absent",) + for result in results.contacted.values(): + assert result.get("changed") is True + assert result.get("module_stderr") is None + finally: + hosts.all.zos_data_set(name=data_set_name, state="absent") + + +def test_create_member_special_chars(ansible_zos_module): + try: + hosts = ansible_zos_module + data_set_name = get_tmp_ds_name(symbols=True) + results = hosts.all.zos_data_set(name=data_set_name, state="present", type="pds") + results = hosts.all.zos_data_set(name=data_set_name+ "(M@M#R)", state="present", type="member") + for result in results.contacted.values(): + assert result.get("changed") is True + assert result.get("module_stderr") is None + results = hosts.all.shell(cmd=f"dls ANSIBLE.*") + for result in results.contacted.values(): + assert data_set_name in result.get("stdout") + results = hosts.all.zos_data_set(name=data_set_name, state="absent",) + for result in results.contacted.values(): + assert result.get("changed") is True + assert result.get("module_stderr") is None + finally: + hosts.all.zos_data_set(name=data_set_name, state="absent") + diff --git a/tests/helpers/dataset.py b/tests/helpers/dataset.py index c8050516a..8e700415c 100644 --- a/tests/helpers/dataset.py +++ b/tests/helpers/dataset.py @@ -20,15 +20,24 @@ import time import re -def get_tmp_ds_name(mlq_size=7, llq_size=7): + +def get_tmp_ds_name(mlq_size=7, llq_size=7, symbols=False): """ Function or test to ensure random names of datasets the values of middle and last qualifier can change size by parameter, - but by default includes one letter.""" + but by default includes one letter. + Also includes indication if symbols should be in the string, default=false.""" ds = "ANSIBLE" + "." - ds += "P" + get_random_q(mlq_size).upper() + "." + if symbols: + ds += "P" + get_random_qs(mlq_size).upper() + "." + else: + ds += "P" + get_random_q(mlq_size).upper() + "." ds += "T" + str(int(time.time()*1000))[-7:] + "." - ds += "C" + get_random_q(llq_size).upper() - return ds + if symbols: + ds += "C" + get_random_qs(llq_size).upper() + else: + ds += "C" + get_random_q(llq_size).upper() + + return(ds) def get_random_q(size=7): @@ -45,4 +54,25 @@ def get_random_q(size=7): ): random_q = ''.join(random.choice(letters)for iteration in range(size)) count += 1 + return random_q + +def get_random_qs(size=7): + """ Function or test to ensure random hlq of datasets, including symbol characters""" + # Generate the first random hlq of size pass as parameter + letters = string.ascii_uppercase + string.digits + special_chars = "$@#-" + random_q = ''.join(random.choice(letters)for iteration in range(size)) + random_char = random_q[random.choice(range(0, size))] + random_q = random_q.replace(random_char, random.choice(special_chars)) + count = 0 + # Generate a random HLQ and verify if is valid, if not, repeat the process + while count < 5 and not re.fullmatch( + r"^(?:[A-Z$#@]{1}[A-Z0-9$#@-]{0,7})", + random_q, + re.IGNORECASE, + ): + random_q = ''.join(random.choice(letters)for iteration in range(size)) + random_char = random_q[random.choice(range(0, size))] + random_q = random_q.replace(random_char, random.choice(special_chars)) + count += 1 return random_q \ No newline at end of file From 879277297511ef38791268af73bc1c32fa87fb8c Mon Sep 17 00:00:00 2001 From: IsaacVRey <isaac.vega.rey@gmail.com> Date: Thu, 16 May 2024 09:14:27 -0600 Subject: [PATCH 384/495] [Documentation][better_arg_parser] Add and standarize docstrings on module-utils/better_arg_parser.py (#1385) * Add and standarize docstrings on module-utils/better_arg_parser.py * Add changelog fragment --------- Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> --- ...385-update-docstring-better_arg_parser.yml | 3 + plugins/module_utils/better_arg_parser.py | 770 ++++++++++++------ 2 files changed, 510 insertions(+), 263 deletions(-) create mode 100644 changelogs/fragments/1385-update-docstring-better_arg_parser.yml diff --git a/changelogs/fragments/1385-update-docstring-better_arg_parser.yml b/changelogs/fragments/1385-update-docstring-better_arg_parser.yml new file mode 100644 index 000000000..1b4a0b0f3 --- /dev/null +++ b/changelogs/fragments/1385-update-docstring-better_arg_parser.yml @@ -0,0 +1,3 @@ +trivial: + - better_arg_parser - Updated docstrings to numpy style for visual aid to developers. + (https://github.com/ansible-collections/ibm_zos_core/pull/1385). \ No newline at end of file diff --git a/plugins/module_utils/better_arg_parser.py b/plugins/module_utils/better_arg_parser.py index ef361e7f8..449b25314 100644 --- a/plugins/module_utils/better_arg_parser.py +++ b/plugins/module_utils/better_arg_parser.py @@ -53,34 +53,49 @@ def __init__( """Holds all of the attributes that define a particular argument. A BetterArg object can contain nested BetterArg objects. - Arguments: - object {object} -- The most base class type. - arg_parser {BetterArgParser} -- The instance of BetterArgParser + Parameters + ---------- + object : object + The most base class type. + arg_parser : BetterArgParser + The instance of BetterArgParser used to create the BetterArg. Used to call BetterArgParser.handle_args() when nested BetterArg objects need to be defined. - name {str} -- The name of the argument to define. + name : str + The name of the argument to define. - Keyword Arguments: - elements {Union[str, function]} -- Used to specify the expected + Keyword Parameters + ------------------ + elements : Union[str, function] + Used to specify the expected type for each list element when the arg_type is 'list'. (default: {None}) - options {dict} -- When arg_type or elements = 'dict', a dictionary + options : dict + When arg_type or elements = 'dict', a dictionary containing details for a nested group of arguments should be provided. (default: {None}) - aliases {list[str]} -- A list of alternative names that can be + aliases : list[str] + A list of alternative names that can be used to refer to the argument. (default: {[]}) - dependencies {list} -- A list of arguments that should be resolved + dependencies : list + A list of arguments that should be resolved before parsing this argument. (default: {[]}) - required {Union[bool, function]} -- Determines if later parsing + required : Union[bool, function] + Determines if later parsing should fail when no value is provided for the argument. Not necessary if default is provided. (default: {False}) - default {Union[str, int, bool, function]} -- The default value that the + default : Union[str, int, bool, function] + The default value that the argument should be set to when none is provided. (default: {None}) - choices {list[Union[str, int, bool]]} -- The list of valid contents for the argument. - mutually_exclusive {list[list[str]]} -- A list containing lists of mutually exclusive argument names. + choices : list[Union[str, int, bool]] + The list of valid contents for the argument. + mutually_exclusive : list[list[str]] + A list containing lists of mutually exclusive argument names. (default: {None}) - arg_type {Union[str, function]} -- The type the argument contents should be. (default: {'str'}) - type {Union[str, function]} -- The type the argument contents should be. Alternative to arg_type. + arg_type : Union[str, function] + The type the argument contents should be. (default: {'str'}) + type : Union[str, function] + The type the argument contents should be. Alternative to arg_type. (default: {'str'}) """ if aliases is None: @@ -114,15 +129,19 @@ def __init__(self, arg_name, contents, resolved_args, arg_defs): """Sets, formats and validates an argument and its contents based on its matching BetterArg object. - Arguments: - object {object} -- The most base class type. - arg_name {str} -- The name of the argument. - contents {dict} -- The argument contents to be handled by the + Parameters + ---------- + arg_name : str + The name of the argument. + contents : dict + The argument contents to be handled by the argument's BetterArg object - resolved_args {dict} -- Contains all of the dependencies and their + resolved_args : dict + Contains all of the dependencies and their contents, which have already been handled by a BetterArgHandler, for use during current arguments handling operations. - arg_defs {dict[str, BetterArg]} -- All of the BetterArg argument + arg_defs : dict[str, BetterArg] + All of the BetterArg argument definitions for current argument depth. """ self.arg_name = arg_name @@ -154,8 +173,10 @@ def __init__(self, arg_name, contents, resolved_args, arg_defs): def handle_arg(self): """Performs all setting, formatting and validation operations for a single argument. - Returns: - dict -- The arguments contents after any necessary operations. + Returns + ------- + dict + The arguments contents after any necessary operations. """ self._resolve_required() @@ -169,13 +190,19 @@ def handle_arg(self): def _list_type(self, contents, resolved_dependencies): """Resolver for list type arguments. - Arguments: - contents {list[Union[int, str, bool, dict]]} -- The contents of the argument. - resolved_dependencies {dict} -- Contains all of the dependencies and their contents, + Parameters + ---------- + contents : list[Union[int, str, bool, dict]] + The contents of the argument. + resolved_dependencies : dict + Contains all of the dependencies and their contents, which have already been handled, for use during current arguments handling operations. - Returns: - list[Union[int, str, bool, dict]] -- The arguments contents after any necessary operations. + + Returns + ------- + Union[list[int, str, bool, dict]] + The arguments contents after any necessary operations. """ # TODO: determine how to handle resolved dependencies for list items, probably good as-is updated_contents = [] @@ -198,14 +225,19 @@ def _list_type(self, contents, resolved_dependencies): def _dict_type(self, contents, resolved_dependencies): """Resolver for dict type arguments. - Arguments: - contents {dict} -- The contents of the argument. - resolved_dependencies {dict} -- Contains all of the dependencies and their contents, + Parameters + ---------- + contents : dict + The contents of the argument. + resolved_dependencies : dict + Contains all of the dependencies and their contents, which have already been handled, for use during current arguments handling operations. - Returns: - dict -- The arguments contents after any necessary operations. + Returns + ------- + dict + The arguments contents after any necessary operations. """ updated_contents = {} # for key, value in contents.items(): @@ -222,17 +254,24 @@ def _dict_type(self, contents, resolved_dependencies): def _str_type(self, contents, resolve_dependencies): """Resolver for str type arguments. - Arguments: - contents {str} -- The contents of the argument. - resolved_dependencies {dict} -- Contains all of the dependencies and their contents, + Parameters + ---------- + contents : str + The contents of the argument. + resolved_dependencies : dict + Contains all of the dependencies and their contents, which have already been handled, for use during current arguments handling operations. - Raises: - ValueError: When contents is invalid argument type + Returns + ------- + str + The arguments contents after any necessary operations. - Returns: - str -- The arguments contents after any necessary operations. + Raises + ------ + ValueError + When contents is invalid argument type. """ if not isinstance(contents, str): raise ValueError('Invalid argument "{0}" for type "str".'.format(contents)) @@ -241,17 +280,24 @@ def _str_type(self, contents, resolve_dependencies): def _int_type(self, contents, resolve_dependencies): """Resolver for int type arguments. - Arguments: - contents {Union[int, str]} -- The contents of the argument. - resolved_dependencies {dict} -- Contains all of the dependencies and their contents, + Parameters + ---------- + contents : Union[int, str] + The contents of the argument. + resolved_dependencies : dict + Contains all of the dependencies and their contents, which have already been handled, for use during current arguments handling operations. - Raises: - ValueError: When contents is invalid argument type + Returns + ------- + int + The arguments contents after any necessary operations. - Returns: - int -- The arguments contents after any necessary operations. + Raises + ------ + ValueError + When contents is invalid argument type. """ if not fullmatch(r"[0-9]+", str(contents)): raise ValueError('Invalid argument "{0}" for type "int".'.format(contents)) @@ -260,34 +306,49 @@ def _int_type(self, contents, resolve_dependencies): def _bool_type(self, contents, resolve_dependencies): """Resolver for bool type arguments. - Arguments: - contents {bool} -- The contents of the argument. - resolved_dependencies {dict} -- Contains all of the dependencies and their contents, + Parameters + ---------- + contents : bool + The contents of the argument. + resolved_dependencies : dict + Contains all of the dependencies and their contents, which have already been handled, for use during current arguments handling operations. - Raises: - ValueError: When contents is invalid argument type - Returns: - bool -- The arguments contents after any necessary operations. + Returns + ------- + bool + The arguments contents after any necessary operations. + + Raises + ------ + ValueError: When contents is invalid argument type. """ if not isinstance(contents, bool): raise ValueError('Invalid argument "{0}" for type "bool".'.format(contents)) return contents def _path_type(self, contents, resolve_dependencies): - """Resolver for path type arguments - - Arguments: - contents {bool} -- The contents of the argument. - resolved_dependencies {dict} -- Contains all of the dependencies and their contents, + """Resolver for path type arguments. + + Parameters + ---------- + contents : bool + The contents of the argument. + resolved_dependencies : dict + Contains all of the dependencies and their contents, which have already been handled, for use during current arguments handling operations. - Raises: - ValueError: When contents is invalid argument type - Returns: - str -- The arguments contents after any necessary operations. + Returns + ------- + str + The arguments contents after any necessary operations. + + Raises + ------ + ValueError + When contents is invalid argument type """ contents = BetterArgHandler.fix_local_path(contents) if not path.isabs(str(contents)): @@ -317,16 +378,24 @@ def _path_type(self, contents, resolve_dependencies): def _data_set_type(self, contents, resolve_dependencies): """Resolver for data_set type arguments. - Arguments: - contents {bool} -- The contents of the argument. - resolved_dependencies {dict} -- Contains all of the dependencies and their contents, + Parameters + ---------- + contents : bool + The contents of the argument. + resolved_dependencies : dict + Contains all of the dependencies and their contents, which have already been handled, for use during current arguments handling operations. - Raises: - ValueError: When contents is invalid argument type - Returns: - str -- The arguments contents after any necessary operations. + Returns + ------- + str + The arguments contents after any necessary operations. + + Raises + ------ + ValueError + When contents is invalid argument type. """ if not fullmatch( r"^(?:(?:[A-Z$#@]{1}[A-Z0-9$#@-]{0,7})(?:[.]{1})){1,21}[A-Z$#@]{1}[A-Z0-9$#@-]{0,7}(?:\([A-Z$#@]{1}[A-Z0-9$#@]{0,7}\)|\(([-+]?[0-9]+)\)){0,1}$", @@ -339,18 +408,26 @@ def _data_set_type(self, contents, resolve_dependencies): return str(contents) def _data_set_base_type(self, contents, resolve_dependencies): - """Resolver for data_set_base type arguments - - Arguments: - contents {bool} -- The contents of the argument. - resolved_dependencies {dict} -- Contains all of the dependencies and their contents, + """Resolver for data_set_base type arguments. + + Parameters + ---------- + contents : bool + The contents of the argument. + resolved_dependencies : dict + Contains all of the dependencies and their contents, which have already been handled, for use during current arguments handling operations. - Raises: - ValueError: When contents is invalid argument type - Returns: - str -- The arguments contents after any necessary operations. + Returns + ------- + str + The arguments contents after any necessary operations. + + Raises + ------ + ValueError + When contents is invalid argument type. """ if not fullmatch( r"^(?:(?:[A-Z$#@]{1}[A-Z0-9$#@-]{0,7})(?:[.]{1})){1,21}[A-Z$#@]{1}[A-Z0-9$#@-]{0,7}$", @@ -363,18 +440,26 @@ def _data_set_base_type(self, contents, resolve_dependencies): return str(contents) def _data_set_member_type(self, contents, resolve_dependencies): - """Resolver for data_set_member type arguments - - Arguments: - contents {bool} -- The contents of the argument. - resolved_dependencies {dict} -- Contains all of the dependencies and their contents, + """Resolver for data_set_member type arguments. + + Parameters + ---------- + contents : bool + The contents of the argument. + resolved_dependencies : dict + Contains all of the dependencies and their contents, which have already been handled, for use during current arguments handling operations. - Raises: - ValueError: When contents is invalid argument type - Returns: - str -- The arguments contents after any necessary operations. + Returns + ------- + str + The arguments contents after any necessary operations. + + Raises + ------ + ValueError + When contents is invalid argument type. """ if not fullmatch( r"^(?:(?:[A-Z$#@]{1}[A-Z0-9$#@-]{0,7})(?:[.]{1})){1,21}[A-Z$#@]{1}[A-Z0-9$#@-]{0,7}\([A-Z$#@]{1}[A-Z0-9$#@]{0,7}\)$", @@ -387,18 +472,24 @@ def _data_set_member_type(self, contents, resolve_dependencies): return str(contents) def _qualifier_type(self, contents, resolve_dependencies): - """Resolver for qualifier type arguments - - Arguments: - contents {bool} -- The contents of the argument. - resolved_dependencies {dict} -- Contains all of the dependencies and their contents, + """Resolver for qualifier type arguments. + + Parameters + ---------- + contents : bool + The contents of the argument. + resolved_dependencies : dict + Contains all of the dependencies and their contents, which have already been handled, for use during current arguments handling operations. + Returns + ------- + str + The arguments contents after any necessary operations. + Raises: - ValueError: When contents is invalid argument type - Returns: - str -- The arguments contents after any necessary operations. + ValueError: When contents is invalid argument type. """ if not fullmatch( r"^[A-Z]{1}[A-Z0-9]{0,7}$", @@ -411,18 +502,26 @@ def _qualifier_type(self, contents, resolve_dependencies): return str(contents) def _qualifier_or_empty_type(self, contents, resolve_dependencies): - """Resolver for qualifier type arguments - - Arguments: - contents {bool} -- The contents of the argument. - resolved_dependencies {dict} -- Contains all of the dependencies and their contents, + """Resolver for qualifier type arguments. + + Parameters + ---------- + contents : bool + The contents of the argument. + resolved_dependencies : dict + Contains all of the dependencies and their contents, which have already been handled, for use during current arguments handling operations. - Raises: - ValueError: When contents is invalid argument type - Returns: - str -- The arguments contents after any necessary operations. + Returns + ------- + str + The arguments contents after any necessary operations. + + Raises + ------ + ValueError + When contents is invalid argument type. """ if not fullmatch( r"^[A-Z]{1}[A-Z0-9]{0,7}$", @@ -435,18 +534,26 @@ def _qualifier_or_empty_type(self, contents, resolve_dependencies): return str(contents) def _qualifier_pattern_type(self, contents, resolve_dependencies): - """Resolver for qualifier_pattern type arguments - - Arguments: - contents {bool} -- The contents of the argument. - resolved_dependencies {dict} -- Contains all of the dependencies and their contents, + """Resolver for qualifier_pattern type arguments. + + Parameters + ---------- + contents : bool + The contents of the argument. + resolved_dependencies : dict + Contains all of the dependencies and their contents, which have already been handled, for use during current arguments handling operations. - Raises: - ValueError: When contents is invalid argument type - Returns: - str -- The arguments contents after any necessary operations. + Returns + ------- + str + The arguments contents after any necessary operations. + + Raises + ------ + ValueError + When contents is invalid argument type. """ if not fullmatch( r"^(?:[A-Z]{1}[A-Z0-9]{0,7})|(?:\*{1})|(?:[A-Z]{1}[A-Z0-9]{0,6}\*{1})$", @@ -459,18 +566,26 @@ def _qualifier_pattern_type(self, contents, resolve_dependencies): return str(contents) def _volume_type(self, contents, resolve_dependencies): - """Resolver for volume type arguments - - Arguments: - contents {bool} -- The contents of the argument. - resolved_dependencies {dict} -- Contains all of the dependencies and their contents, + """Resolver for volume type arguments. + + Parameters + ---------- + contents : bool + The contents of the argument. + resolved_dependencies : dict + Contains all of the dependencies and their contents, which have already been handled, for use during current arguments handling operations. - Raises: - ValueError: When contents is invalid argument type - Returns: - str -- The arguments contents after any necessary operations. + Returns + ------- + str + The arguments contents after any necessary operations. + + Raises + ------ + ValueError + When contents is invalid argument type. """ if not fullmatch( r"^[A-Z0-9@#$]{1,6}$", @@ -483,18 +598,26 @@ def _volume_type(self, contents, resolve_dependencies): return str(contents) def _dd_type(self, contents, resolve_dependencies): - """Resolver for dd type arguments - - Arguments: - contents {bool} -- The contents of the argument. - resolved_dependencies {dict} -- Contains all of the dependencies and their contents, + """Resolver for dd type arguments. + + Parameters + ---------- + contents : bool + The contents of the argument. + resolved_dependencies : dict + Contains all of the dependencies and their contents, which have already been handled, for use during current arguments handling operations. - Raises: - ValueError: When contents is invalid argument type - Returns: - str -- The arguments contents after any necessary operations. + Returns + ------- + str + The arguments contents after any necessary operations. + + Raises + ------ + ValueError + When contents is invalid argument type. """ if not fullmatch( r"^[A-Z$#@][A-Z0-9@#$]{0,7}$", @@ -506,13 +629,17 @@ def _dd_type(self, contents, resolve_dependencies): @staticmethod def fix_local_path(given_path): - """Adapter for local/USS path abbreviations + """Adapter for local/USS path abbreviations. - Arguments: - path given as input, which may need adjustment + Parameters + ---------- + given_path : str + Path given as input, which may need adjustment. - Returns: - str -- The path, after leading ~, .. or . has been adjusted + Returns + ------- + str + The path, after leading ~, .. or . has been adjusted. """ final_path = given_path if given_path.startswith("~"): @@ -527,18 +654,26 @@ def fix_local_path(given_path): return str(final_path) def _data_set_or_path_type(self, contents, resolve_dependencies): - """Resolver for data_set_or_path type arguments - - Arguments: - contents {bool} -- The contents of the argument. - resolved_dependencies {dict} -- Contains all of the dependencies and their contents, + """Resolver for data_set_or_path type arguments. + + Parameters + ---------- + contents : bool + The contents of the argument. + resolved_dependencies : dict + Contains all of the dependencies and their contents, which have already been handled, for use during current arguments handling operations. - Raises: - ValueError: When contents is invalid argument type - Returns: - str -- The arguments contents after any necessary operations. + Returns + ------- + str + The arguments contents after any necessary operations. + + Raises + ------ + ValueError + When contents is invalid argument type. """ if not fullmatch( # HLQ and all middle level qualifiers. Last qualifier bef members. Normal members. GDS members. @@ -558,18 +693,26 @@ def _data_set_or_path_type(self, contents, resolve_dependencies): return str(contents) def _encoding_type(self, contents, resolve_dependencies): - """Resolver for encoding type arguments - - Arguments: - contents {bool} -- The contents of the argument. - resolved_dependencies {dict} -- Contains all of the dependencies and their contents, + """Resolver for encoding type arguments. + + Parameters + ---------- + contents : bool + The contents of the argument. + resolved_dependencies : dict + Contains all of the dependencies and their contents, which have already been handled, for use during current arguments handling operations. - Raises: - ValueError: When contents is invalid argument type - Returns: - str -- The arguments contents after any necessary operations. + Returns + ------- + str + The arguments contents after any necessary operations. + + Raises + ------ + ValueError + When contents is invalid argument type. """ if not fullmatch(r"^[A-Z0-9-]{2,}$", str(contents), IGNORECASE): raise ValueError( @@ -581,19 +724,25 @@ def _encoding_type(self, contents, resolve_dependencies): def is_function(some_var): """Determines if variable is a function. - Arguments: - some_var {Union[str, int, bool, function]} -- The variable to test for type. + Parameters + ---------- + some_var : Union[str, int, bool, function] + The variable to test for type. - Returns: - bool -- True if variable some_var is function, False otherwise. + Returns + ------- + bool + True if variable some_var is function, False otherwise. """ return isinstance(some_var, types.FunctionType) def _resolve_required(self): """Perform operations to determine if an argument is required. - Raises: - ValueError: When no value or defaults are provided for a required argument. + Raises + ------ + ValueError + When no value or defaults are provided for a required argument. """ required = self.arg_def.required if BetterArgHandler.is_function(self.arg_def.required): @@ -606,8 +755,10 @@ def _resolve_required(self): def _resolve_default(self): """Resolve the default value of an argument when no value is provided. - Returns: - Union[str, int, bool, list, dict] -- The updated contents of the argument. + Returns + ------- + Union[str, int, bool, list, dict] + The updated contents of the argument. """ if self.contents is not None: return self.contents @@ -622,8 +773,10 @@ def _resolve_default(self): def _resolve_choices(self): """Verify the argument contents are a valid choice when list of choices is provided. - Raises: - ValueError: The provided value is not a valid choice. + Raises + ------ + ValueError + The provided value is not a valid choice. """ if self.arg_def.choices and len(self.arg_def.choices) > 0: if self.contents not in self.arg_def.choices: @@ -638,11 +791,15 @@ def _resolve_arg_type(self): This may include manipulating argument contents and/or any necessary validation. - Raises: - ValueError: When the provided arg_type is invalid. + Returns + ------- + Union[str, int, bool, list, dict] + The argument's contents after any necessary processing by type handler. - Returns: - Union[str, int, bool, list, dict] -- The argument's contents after any necessary processing by type handler. + Raises + ------ + ValueError + When the provided arg_type is invalid. """ if BetterArgHandler.is_function(self.arg_def.arg_type): return self._call_arg_function(self.arg_def.arg_type, self.contents) @@ -663,11 +820,15 @@ def build_resolved_dependency_dict(self, resolved_args): a dependency. These dependency arguments should already be resolved by the time this method is called. - Arguments: - resolved_args {dict} -- Arguments that have already finished parser processing. + Parameters + ---------- + resolved_args : dict + Arguments that have already finished parser processing. - Returns: - dict -- Subset of resolved_args containing any dependencies required by current argument. + Returns + ------- + dict + Subset of resolved_args containing any dependencies required by current argument. """ resolved_dependencies = {} for dependency in self.arg_def.dependencies: @@ -679,13 +840,17 @@ def _assert_mutually_exclusive(self, contents): """Assert none of the provided arguments break mutual exclusivity. - Arguments: - contents {dict} -- Argument dict for level of arguments. + Parameters + ---------- + contents : dict + Argument dict for level of arguments. Need to determine if any 2 or more of the arguments in the dict are breaking mutual exclusivity rules. - Raises: - ValueError: When two or more mutually exclusive arguments are found. + Raises + ------ + ValueError + When two or more mutually exclusive arguments are found. """ for name, name_list in self.arg_def.mutually_exclusive.items(): arg = contents.get(name) @@ -703,11 +868,15 @@ def _assert_mutually_exclusive(self, contents): def _num_of_params(self, arg_function): """Get the number of parameters accepted by a function. - Args: - arg_function (function): The function to inspect. + Parameters + ---------- + arg_function : function + The function to inspect. - Returns: - int: The number of parameters the function accepts. + Returns + ------- + int + The number of parameters the function accepts. """ spec = getfullargspec(arg_function) length = len(spec[0]) @@ -721,15 +890,21 @@ def _call_arg_function(self, arg_function, contents): """Call a function with the correct number of arguments. - Arguments: - arg_function {function} -- The function to call. - contents {Union[str,list,dict,int,bool]} -- The argument contents to pass to the function. - - Raises: - ValueError: When the provided function's number of parameters do not match BetterArgParser spec. - - Returns: - ?? -- Returns the result of the function call. + Parameters + ---------- + arg_function : function + The function to call. + contents : Union[str,list,dict,int,bool] + The argument contents to pass to the function. + + Returns + ------- + Any -- Returns the result of the function call. + + Raises + ------ + ValueError + When the provided function's number of parameters do not match BetterArgParser spec. """ number_of_params = self._num_of_params(arg_function) if number_of_params == 2: @@ -756,13 +931,20 @@ def _job_identifier(self, contents, resolve_dependencies): Other characters can be letters, numbers, or national (#, $, @) characters. If the text string contains #, $, or @, enclose the text string in single or double quotation marks. - Arguments: - contents {str} -- The contents of the argument. + Parameters + ---------- + contents : str + The contents of the argument. - Raises: - ValueError: When contents is invalid argument type - Returns: - str -- The arguments contents after any necessary operations. + Returns + ------- + str + The arguments contents after any necessary operations. + + Raises + ------ + ValueError + When contents is invalid argument type. """ if not fullmatch( r"(^[a-zA-Z$#@%}]{1}[0-9a-zA-Z$#@%*]{1,7})|(^['\*']{1})", @@ -781,9 +963,12 @@ def __init__(self, arg_dict): match defined criteria, and perform any necessary operations on the provided arguments. - Arguments: - object {object} -- The most base type - arg_dict {dict[str, dict]} -- a list of key:value pairs where key = argument name + Parameters + ---------- + object : object + The most base type + arg_dict : dict[str, dict] + A list of key:value pairs where key = argument name and value = BetterArg object """ self.aliases = {} @@ -804,11 +989,15 @@ def handle_args(self, arg_dict): definitions, swaps out alias names, and sorts and verifies no invalid or cyclic dependencies exist. - Arguments: - arg_dict {dict} -- The argument definitions used to generate BetterArg objects. + Parameters + ---------- + arg_dict : dict + The argument definitions used to generate BetterArg objects. - Returns: - OrderedDict[str, BetterArg] -- The defined arguments, sorted based on their dependencies. + Returns + -------- + OrderedDict[str, BetterArg] + The defined arguments, sorted based on their dependencies. """ args = {} for key, value in arg_dict.items(): @@ -824,12 +1013,16 @@ def parse_args(self, arg_dict): """Parse provided argument values using corresponding BetterArg argument definition. - Arguments: - arg_dict {dict} -- The arguments to parse where key=argument name/alias + Parameters + ---------- + arg_dict : dict + The arguments to parse where key=argument name/alias and value=argument contents. - Returns: - dict -- The arguments with alias names swapped for real names + Returns + ------- + dict + The arguments with alias names swapped for real names and contents after any necessary operations and checks have been performed. """ parsed_args = {} @@ -850,18 +1043,27 @@ def _add_alias(self, arg_name, arg_aliases=None, aliases=None): """Add alias to an alias dictionary that can be used to simplify alias->name determinations. - Arguments: - arg_name {str} -- the name of the argument - - Keyword Arguments: - arg_aliases {list[str]} -- The list of aliases for the argument name (default: {None}) - aliases {dict} -- The dictionary containing all of the currently defined aliases. (default: {None}) - - Raises: - ValueError: When conflicting aliases are found. - - Returns: - dict -- The updated dict of aliases + Parameters + ---------- + arg_name : str + The name of the argument. + + Keyword Parameters + ------------------ + arg_aliases : list[str] + The list of aliases for the argument name (default: {None}). + aliases : dict + The dictionary containing all of the currently defined aliases. (default: {None}). + + Returns + ------- + dict + The updated dict of aliases. + + Raises + ------ + ValueError + When conflicting aliases are found. """ if arg_aliases is None: arg_aliases = [] @@ -883,12 +1085,17 @@ def _swap_alias_for_real_names(self, args, aliases): with their 'real' name used to refer to the argument throughout parser. - Arguments: - args {dict} -- Arguments for BetterArgParser to parse - aliases {dict} -- The dictionary containing all of the currently defined aliases. - - Returns: - dict -- The contents from provided argument where they keys + Parameters + ---------- + args : dict + Arguments for BetterArgParser to parse. + aliases : dict + The dictionary containing all of the currently defined aliases. + + Returns + ------- + dict + The contents from provided argument where they keys have been swapped for 'real' argument names where necessary. """ renamed_args = {} @@ -901,11 +1108,15 @@ def handle_mutually_exclusive_args(self, mutually_exclusive): """Format mutually exclusive argument definitions. Into dictionary for simplified exclusivity checking. - Arguments: - mutually_exclusive {list[list[str]]} -- List of lists containing mutually exclusive arguments. + Parameters + ---------- + mutually_exclusive : list[list[str]] + List of lists containing mutually exclusive arguments. - Returns: - dict -- Dict where key is the mutually exclusive + Returns + ------- + dict + Dict where key is the mutually exclusive argument name and value is a list of all arguments it is mutually exclusive with. """ @@ -921,11 +1132,15 @@ def handle_mutually_exclusive_args(self, mutually_exclusive): def _assert_mutually_exclusive_args_structure(self, mutually_exclusive): """Used to enforce structure of mutually_exclusive argument. - Arguments: - mutually_exclusive {list[list[str]]} -- The mutually exclusive argument to validate. + Parameters + ---------- + mutually_exclusive : list[list[str]] + The mutually exclusive argument to validate. - Raises: - ValueError: When not in proper format. + Raises + ------ + ValueError + When not in proper format. """ try: if isinstance(mutually_exclusive, list): @@ -948,14 +1163,20 @@ def _assert_no_invalid_dependencies(self, args): """Verify that no dependencies are requested that do not have an argument with matching name. - Arguments: - args {dict[str, BetterArg]} -- All of the BetterArg argument definitions for current argument depth. + Parameters + ---------- + args : dict[str, BetterArg] + All of the BetterArg argument definitions for current argument depth. - Raises: - ValueError: When invalid dependency found. + Returns + ------- + bool + Always returns True when no invalid dependencies found. - Returns: - bool -- Always returns True when no invalid dependencies found. + Raises + ------ + ValueError + When invalid dependency found. """ valid_names = args.keys() dependencies = [] @@ -974,11 +1195,15 @@ def _sort_args_by_dependencies(self, args): """Sort arguments based on their dependencies to other arguments. Used with _dependency_sort_helper() to implement topographical sorting. - Arguments: - args {dict[str, BetterArg]} -- All of the BetterArg argument definitions for current argument depth. + Parameters + ---------- + args : dict[str, BetterArg] + All of the BetterArg argument definitions for current argument depth. - Returns: - OrderedDict[str, BetterArg] -- All of the BetterArg argument definitions for current argument depth, + Returns + ------- + OrderedDict[str, BetterArg] + All of the BetterArg argument definitions for current argument depth, sorted based on dependencies. """ visited = {name: False for name in args} @@ -998,20 +1223,28 @@ def _dependency_sort_helper( """Recursive helper function for _sort_args_by_dependencies(). Used with _sort_args_by_dependencies() to implement topographical sorting. - Arguments: - args {dict[str, BetterArg]} -- All of the BetterArg argument definitions for current argument depth. - name {str} -- the name of the argument - visited {dict[str, bool]} -- holds the name of each argument in a key with + Parameters + ---------- + args : dict[str, BetterArg] + All of the BetterArg argument definitions for current argument depth. + name : str + The name of the argument + visited : dict[str, bool] + Holds the name of each argument in a key with a boolean value to identify whether operations have already been performed on the argument - dependencies {dict[str, dict[str, bool]]} -- Each outer key represents one argument where the + dependencies : dict[str, dict[str, bool]] + Each outer key represents one argument where the value is a dictionary with key=name of argument outer key argument is dependent on. Boolean value is always true and is a placeholder. - ordered_arg_defs {dict[str, BetterArg]} -- argument definitions + ordered_arg_defs : dict[str, BetterArg] + Argument definitions from arg_defs sorted based on their dependencies, output is in the reverse of the order desired. Reverse sorting is handled in _sort_args_by_dependencies(). - Raises: - RuntimeError: When cyclic dependencies are found + Raises + ------ + RuntimeError + When cyclic dependencies are found """ visited[name] = True dependencies[name] = { @@ -1031,11 +1264,15 @@ def _dependency_sort_helper( def _has_cycle(self, args): """Determines if cyclic dependencies exist between arguments. - Arguments: - args {dict[str, BetterArg]} -- All of the BetterArg argument definitions for current argument depth. + Parameters + ---------- + args : dict[str, BetterArg] + All of the BetterArg argument definitions for current argument depth. - Returns: - bool -- True if cycle exists False otherwise + Returns + ------- + bool + True if cycle exists False otherwise. """ graph = defaultdict(list) arg_name_to_num = {} @@ -1057,14 +1294,21 @@ def _has_cycle(self, args): def _is_cyclic_helper(self, i, visited, stack, graph): """Works with _has_cycle() to determine if cyclic dependencies exist between arguments. - Arguments: - i {integer} -- The index for the current argument - visited {list[bool]} -- Maintains a record of which arguments have been visited - stack {list[bool]} -- Used with visited to identify cycles. - graph {defaultdict[list]} -- The graph representing our argument dependencies as numbers - - Returns: - bool -- True if cycle exists, False otherwise + Parameters + ---------- + i : int + The index for the current argument. + visited : list[bool] + Maintains a record of which arguments have been visited. + stack : list[bool] + Used with visited to identify cycles. + graph : defaultdict[list] + The graph representing our argument dependencies as numbers. + + Returns + ------- + bool + True if cycle exists, False otherwise. """ visited[i] = True stack[i] = True From f60dbb88320b43b1fb2ac7cb1c0be50fa50eee16 Mon Sep 17 00:00:00 2001 From: IsaacVRey <isaac.vega.rey@gmail.com> Date: Thu, 16 May 2024 09:14:44 -0600 Subject: [PATCH 385/495] [Documentation][system] Standarize docstrings in module_utils/system.py (#1363) * Modify google style to numpy * Add changelog fragment * Standarize numpy style * Standarize numpy style * Added year * Update 1363-update-docstring-system.yml --------- Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> --- .../1363-update-docstring-system.yml | 3 ++ plugins/module_utils/system.py | 34 ++++++++++++------- 2 files changed, 24 insertions(+), 13 deletions(-) create mode 100644 changelogs/fragments/1363-update-docstring-system.yml diff --git a/changelogs/fragments/1363-update-docstring-system.yml b/changelogs/fragments/1363-update-docstring-system.yml new file mode 100644 index 000000000..461a4c9b9 --- /dev/null +++ b/changelogs/fragments/1363-update-docstring-system.yml @@ -0,0 +1,3 @@ +trivial: + - system - Updated docstrings to numpy style for visual aid to developers. + (https://github.com/ansible-collections/ibm_zos_core/pull/1363). diff --git a/plugins/module_utils/system.py b/plugins/module_utils/system.py index 54ec90dca..016db26b9 100644 --- a/plugins/module_utils/system.py +++ b/plugins/module_utils/system.py @@ -1,4 +1,4 @@ -# Copyright (c) IBM Corporation 2020, 2023 +# Copyright (c) IBM Corporation 2020, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -34,19 +34,23 @@ def is_posix(): - """ Determine if the system is POSIX certified or compliant + """Determine if the system is POSIX certified or compliant. - Returns: - bool -- Whether the system is POSIX + Returns + ------- + bool + Whether the system is POSIX. """ return OS_NAME == "posix" def is_nix(): - """ Determine if the system is a variant of Unix, supported by Python. + """Determine if the system is a variant of Unix, supported by Python. - Returns: - bool -- Whether the system is Unix-based + Returns + ------- + bool + Whether the system is Unix-based. """ if not is_posix(): return False @@ -58,19 +62,23 @@ def is_nix(): def is_win(): - """ Determine if the system is a Windows platform + """Determine if the system is a Windows platform. - Returns: - bool -- Whether the system is Windows + Returns + ------- + bool + Whether the system is Windows. """ return "win32" in platform().lower() or OS_NAME == "nt" def is_zos(): - """ Determine if the system is a z/OS distribution + """Determine if the system is a z/OS distribution. - Returns: - bool -- Whether the system is z/OS + Returns + ------- + bool + Whether the system is z/OS. """ is_zos_unix = is_posix() and not is_nix() return is_zos_unix and SYS_PLATFORM == "zos" From afa865cce14fc57b10a2ee3d2434577861d571cd Mon Sep 17 00:00:00 2001 From: IsaacVRey <isaac.vega.rey@gmail.com> Date: Thu, 16 May 2024 09:15:04 -0600 Subject: [PATCH 386/495] [Documentation][zos_operator] Add and standarize docstrings on modules/zos_operator.py (#1361) * Add and standarize docstrings on modules/zos_operator.py * Create changelog fragment * Correct changelog fragment * Modified the google style to numpy * Modify google style to numpy --------- Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> --- .../1361-update-docstring-zos_operator.yml | 3 + plugins/modules/zos_operator.py | 74 ++++++++++++++++++- 2 files changed, 76 insertions(+), 1 deletion(-) create mode 100644 changelogs/fragments/1361-update-docstring-zos_operator.yml diff --git a/changelogs/fragments/1361-update-docstring-zos_operator.yml b/changelogs/fragments/1361-update-docstring-zos_operator.yml new file mode 100644 index 000000000..a1b928f14 --- /dev/null +++ b/changelogs/fragments/1361-update-docstring-zos_operator.yml @@ -0,0 +1,3 @@ +trivial: + - zos_operator - Updated docstrings to numpy style for visual aid to developers. + (https://github.com/ansible-collections/ibm_zos_core/pull/1361). \ No newline at end of file diff --git a/plugins/modules/zos_operator.py b/plugins/modules/zos_operator.py index 366285d22..d34781fac 100644 --- a/plugins/modules/zos_operator.py +++ b/plugins/modules/zos_operator.py @@ -178,7 +178,25 @@ def execute_command(operator_cmd, timeout_s=1, *args, **kwargs): - + """ + Executes an operator command. + + Parameters + ---------- + operator_cmd : str + Command to execute. + timeout : int + Time until it stops whether it finished or not. + *args : dict + Some arguments to pass on. + **kwargs : dict + Some other arguments to pass on. + + Returns + ------- + tuple(int, str, str, int) + Return code, standard output, standard error and time elapsed from start to finish. + """ # as of ZOAU v1.3.0, timeout is measured in centiseconds, therefore: timeout_c = 100 * timeout_s @@ -193,6 +211,19 @@ def execute_command(operator_cmd, timeout_s=1, *args, **kwargs): def run_module(): + """Initialize the module. + + Raises + ------ + fail_json + An error ocurred while importing ZOAU. + fail_json + Expected response to be more than 2 lines. + fail_json + A non-zero return code was received. + fail_json + An unexpected error occurred. + """ module_args = dict( cmd=dict(type="str", required=True), verbose=dict(type="bool", required=False, default=False), @@ -267,6 +298,18 @@ def run_module(): def parse_params(params): + """Use BetterArgParser to parse the module parameters. + + Parameters + ---------- + params : dict + Parameters to parse. + + Returns + ------- + dict + New parameters. + """ arg_defs = dict( cmd=dict(arg_type="str", required=True), verbose=dict(arg_type="bool", required=False), @@ -278,6 +321,19 @@ def parse_params(params): def run_operator_command(params): + """Runs operator command based on a given parameters in a dictionary. + + Parameters + ---------- + params : dict + Operator command parameters to pass into the function. + + Returns + ------- + dict + Return code, standard output, standard error, the cmd call + and time elapsed from beginning to end. + """ AnsibleModuleHelper(argument_spec={}) kwargs = {} @@ -317,6 +373,22 @@ class Error(Exception): class OperatorCmdError(Error): + """Exception raised when an error occurred executing the operator command. + + Parameters + ---------- + cmd : str + Command that failed. + rc : int + Return code. + message : str + Human readable string describing the exception. + + Attributes + ---------- + msg : str + Human readable string describing the exception. + """ def __init__(self, cmd, rc, message): self.msg = 'An error occurred executing the operator command "{0}", with RC={1} and response "{2}"'.format( cmd, str(rc), message From dd8f23d6789e4960947d9a5ae0681f3cb79a8c5c Mon Sep 17 00:00:00 2001 From: IsaacVRey <isaac.vega.rey@gmail.com> Date: Thu, 16 May 2024 12:36:41 -0600 Subject: [PATCH 387/495] [Documentation][copy] Add and standarize docstrings on modules/copy.py (#1387) * Add and standarize docstrings on modules/copy.py * Add changelog fragment * Fixed docstrings --------- Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> --- .../fragments/1387-update-docstring-copy.yml | 3 + plugins/module_utils/copy.py | 378 ++++++++++++------ 2 files changed, 269 insertions(+), 112 deletions(-) create mode 100644 changelogs/fragments/1387-update-docstring-copy.yml diff --git a/changelogs/fragments/1387-update-docstring-copy.yml b/changelogs/fragments/1387-update-docstring-copy.yml new file mode 100644 index 000000000..6891259f0 --- /dev/null +++ b/changelogs/fragments/1387-update-docstring-copy.yml @@ -0,0 +1,3 @@ +trivial: + - copy - Updated docstrings to numpy style for visual aid to developers. + (https://github.com/ansible-collections/ibm_zos_core/pull/1387). \ No newline at end of file diff --git a/plugins/module_utils/copy.py b/plugins/module_utils/copy.py index 71b47c974..13559258e 100644 --- a/plugins/module_utils/copy.py +++ b/plugins/module_utils/copy.py @@ -1,4 +1,4 @@ -# Copyright (c) IBM Corporation 2019-2024 +# Copyright (c) IBM Corporation 2019, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -37,6 +37,18 @@ def _validate_data_set_name(ds): + """Validate data set name using BetterArgParser. + + Parameters + ---------- + ds : str + The source dataset. + + Returns + ------- + str + Parsed dataset. + """ arg_defs = dict(ds=dict(arg_type="data_set"),) parser = BetterArgParser(arg_defs) parsed_args = parser.parse_args({"ds": ds}) @@ -44,6 +56,18 @@ def _validate_data_set_name(ds): def _validate_path(path): + """Validate path using BetterArgParser. + + Parameters + ---------- + path : str + The path. + + Returns + ------- + str + Parsed path. + """ arg_defs = dict(path=dict(arg_type="path"),) parser = BetterArgParser(arg_defs) parsed_args = parser.parse_args({"path": path}) @@ -51,22 +75,35 @@ def _validate_path(path): def copy_uss2mvs(src, dest, ds_type, is_binary=False): - """Copy uss a file or path to an MVS data set - - Arguments: - src: {str} -- The uss file or path to be copied - dest: {str} -- The destination MVS data set, it must be a PS or PDS(E) - ds_type: {str} -- The dsorg of the dest. - - Keyword Arguments: - is_binary: {bool} -- Whether the file to be copied contains binary data - - Raises: - USSCmdExecError: When any exception is raised during the conversion. - Returns: - boolean -- The return code after the copy command executed successfully - str -- The stdout after the copy command executed successfully - str -- The stderr after the copy command executed successfully + """Copy uss a file or path to an MVS data set. + + Parameters + ---------- + src : str + The uss file or path to be copied. + dest : str + The destination MVS data set, it must be a PS or PDS(E). + ds_type : str + The dsorg of the dest. + + Keyword Parameters + ------------------ + is_binary : bool + Whether the file to be copied contains binary data. + + Returns + ------- + bool + The return code after the copy command executed successfully. + str + The stdout after the copy command executed successfully. + str + The stderr after the copy command executed successfully. + + Raises + ------ + USSCmdExecError + When any exception is raised during the conversion. """ module = AnsibleModuleHelper(argument_spec={}) src = _validate_path(src) @@ -84,22 +121,34 @@ def copy_uss2mvs(src, dest, ds_type, is_binary=False): def copy_ps2uss(src, dest, is_binary=False): - """Copy a PS data set to a uss file - - Arguments: - src: {str} -- The MVS data set to be copied, it must be a PS data set - or a PDS(E) member - dest: {str} -- The destination uss file - - Keyword Arguments: - is_binary: {bool} -- Whether the file to be copied contains binary data - - Raises: - USSCmdExecError: When any exception is raised during the conversion - Returns: - boolean -- The return code after the copy command executed successfully - str -- The stdout after the copy command executed successfully - str -- The stderr after the copy command executed successfully + """Copy a PS data set to a uss file. + + Parameters + ---------- + src : str + The MVS data set to be copied, it must be a PS data set + or a PDS(E) member. + dest : str + The destination uss file. + + Keyword Parameters + ------------------ + is_binary : bool + Whether the file to be copied contains binary data. + + Returns + ------- + bool + The return code after the copy command executed successfully. + str + The stdout after the copy command executed successfully. + str + The stderr after the copy command executed successfully. + + Raises + ------ + USSCmdExecError + When any exception is raised during the conversion. """ module = AnsibleModuleHelper(argument_spec={}) src = _validate_data_set_name(src) @@ -114,23 +163,36 @@ def copy_ps2uss(src, dest, is_binary=False): def copy_pds2uss(src, dest, is_binary=False, asa_text=False): - """Copy the whole PDS(E) to a uss path - - Arguments: - src: {str} -- The MVS data set to be copied, it must be a PDS(E) data set - dest: {str} -- The destination uss path - - Keyword Arguments: - is_binary: {bool} -- Whether the file to be copied contains binary data - asa_text: {bool} -- Whether the file to be copied contains ASA control - characters - - Raises: - USSCmdExecError: When any exception is raised during the conversion. - Returns: - boolean -- The return code after the USS command executed successfully - str -- The stdout after the USS command executed successfully - str -- The stderr after the USS command executed successfully + """Copy the whole PDS(E) to a uss path. + + Parameters + ---------- + src : str + The MVS data set to be copied, it must be a PDS(E) data set. + dest : str + The destination uss path. + + Keyword Parameters + ------------------ + is_binary : bool + Whether the file to be copied contains binary data. + asa_text : bool + Whether the file to be copied contains ASA control + characters. + + Returns + ------- + bool + The return code after the USS command executed successfully. + str + The stdout after the USS command executed successfully. + str + The stderr after the USS command executed successfully. + + Raises + ------ + USSCmdExecError + When any exception is raised during the conversion. """ module = AnsibleModuleHelper(argument_spec={}) src = _validate_data_set_name(src) @@ -155,17 +217,28 @@ def copy_pds2uss(src, dest, is_binary=False, asa_text=False): def copy_uss2uss_binary(src, dest): - """Copy a USS file to a USS location in binary mode - - Arguments: - src: {str} -- The source USS path - dest: {str} -- The destination USS path - Raises: - USSCmdExecError: When any exception is raised during the conversion. - Returns: - boolean -- The return code after the USS command executed successfully - str -- The stdout after the USS command executed successfully - str -- The stderr after the USS command executed successfully + """Copy a USS file to a USS location in binary mode. + + Parameters + ---------- + src : str + The source USS path. + dest : str + The destination USS path. + + Returns + ------- + bool + The return code after the USS command executed successfully. + str + The stdout after the USS command executed successfully. + str + The stderr after the USS command executed successfully. + + Raises + ------ + USSCmdExecError + When any exception is raised during the conversion. """ module = AnsibleModuleHelper(argument_spec={}) src = _validate_path(src) @@ -178,21 +251,33 @@ def copy_uss2uss_binary(src, dest): def copy_mvs2mvs(src, dest, is_binary=False): - """Copy an MVS source to MVS target - - Arguments: - src: {str} -- Name of source data set - dest: {str} -- Name of destination data set - - Keyword Arguments: - is_binary: {bool} -- Whether the data set to be copied contains binary data - - Raises: - USSCmdExecError: When any exception is raised during the conversion. - Returns: - boolean -- The return code after the USS command executed successfully - str -- The stdout after the USS command executed successfully - str -- The stderr after the USS command executed successfully + """Copy an MVS source to MVS target. + + Parameters + ---------- + src : str + Name of source data set. + dest : str + Name of destination data set. + + Keyword Parameters + ------------------ + is_binary : bool + Whether the data set to be copied contains binary data. + + Returns + ------- + bool + The return code after the USS command executed successfully. + str + The stdout after the USS command executed successfully. + str + The stderr after the USS command executed successfully. + + Raises + ------ + USSCmdExecError + When any exception is raised during the conversion. """ module = AnsibleModuleHelper(argument_spec={}) src = _validate_data_set_name(src) @@ -207,18 +292,28 @@ def copy_mvs2mvs(src, dest, is_binary=False): def copy_vsam_ps(src, dest): - """Copy a VSAM(KSDS) data set to a PS data set vise versa - - Arguments: - src: {str} -- The VSAM(KSDS) or PS data set to be copied - dest: {str} -- The PS or VSAM(KSDS) data set - - Raises: - USSCmdExecError: When any exception is raised during the conversion - Returns: - boolean -- The return code after the USS command executed successfully - str -- The stdout after the USS command executed successfully - str -- The stderr after the USS command executed successfully + """Copy a VSAM(KSDS) data set to a PS data set vise versa. + + Parameters + ---------- + src : str + The VSAM(KSDS) or PS data set to be copied. + dest : str + The PS or VSAM(KSDS) data set. + + Returns + ------- + bool + The return code after the USS command executed successfully. + str + The stdout after the USS command executed successfully. + str + The stderr after the USS command executed successfully. + + Raises + ------ + USSCmdExecError + When any exception is raised during the conversion. """ module = AnsibleModuleHelper(argument_spec={}) src = _validate_data_set_name(src) @@ -234,14 +329,21 @@ def copy_vsam_ps(src, dest): def copy_asa_uss2mvs(src, dest): """Copy a file from USS to an ASA sequential data set or PDS/E member. - Arguments: - src: {str} -- Path of the USS file - dest: {str} -- The MVS destination data set or member - - Returns: - boolean -- The return code after the copy command executed successfully - str -- The stdout after the copy command executed successfully - str -- The stderr after the copy command executed successfully + Parameters + ---------- + src : str + Path of the USS file. + dest : str + The MVS destination data set or member. + + Returns + ------- + bool + The return code after the copy command executed successfully. + str + The stdout after the copy command executed successfully. + str + The stderr after the copy command executed successfully. """ oget_cmd = "OGET '{0}' '{1}'".format(src, dest) rc, out, err = ikjeft01(oget_cmd, authorized=True) @@ -252,14 +354,21 @@ def copy_asa_uss2mvs(src, dest): def copy_asa_mvs2uss(src, dest): """Copy an ASA sequential data set or member to USS. - Arguments: - src: {str} -- The MVS data set to be copied - dest: {str} -- Destination path in USS - - Returns: - boolean -- The return code after the copy command executed successfully - str -- The stdout after the copy command executed successfully - str -- The stderr after the copy command executed successfully + Parameters + ---------- + src : str + The MVS data set to be copied. + dest : str + Destination path in USS. + + Returns + ------- + bool + The return code after the copy command executed successfully. + str + The stdout after the copy command executed successfully. + str + The stderr after the copy command executed successfully. """ src = _validate_data_set_name(src) dest = _validate_path(dest) @@ -273,14 +382,21 @@ def copy_asa_mvs2uss(src, dest): def copy_asa_pds2uss(src, dest): """Copy all members from an ASA PDS/E to USS. - Arguments: - src: {str} -- The MVS data set to be copied - dest: {str} -- Destination path in USS (must be a directory) - - Returns: - boolean -- The return code after the copy command executed successfully - str -- The stdout after the copy command executed successfully - str -- The stderr after the copy command executed successfully + Parameters + ---------- + src : str + The MVS data set to be copied. + dest : str + Destination path in USS (must be a directory). + + Returns + ------- + bool + The return code after the copy command executed successfully. + str + The stdout after the copy command executed successfully. + str + The stderr after the copy command executed successfully. """ from os import path import traceback @@ -311,6 +427,26 @@ def copy_asa_pds2uss(src, dest): class TSOCmdResponse(): def __init__(self, rc, stdout, stderr): + """Builds TSO cmd response. + + Parameters + ---------- + rc : int + Return code. + stdout : str + Standard output. + stderr : str + Standard error. + + Attributes + ---------- + rc : int + Return code. + stdout : str + Standard output. + stderr : str + Standard error. + """ self.rc = rc self.stdout_response = stdout self.stderr_response = stderr @@ -318,6 +454,24 @@ def __init__(self, rc, stdout, stderr): class USSCmdExecError(Exception): def __init__(self, uss_cmd, rc, out, err): + """Error during USS cmd execution. + + Parameters + ---------- + uss_cmd : str + USS command. + rc : int + Return code. + out : str + Standard output. + err : str + Standard error. + + Attributes + ---------- + msg : str + Human readable string describing the exception. + """ self.msg = ( "Failed during execution of usscmd: {0}, Return code: {1}; " "stdout: {2}; stderr: {3}".format(uss_cmd, rc, out, err) From 6499a816580e127a24b4ec0fb0d70a54b7d68307 Mon Sep 17 00:00:00 2001 From: IsaacVRey <isaac.vega.rey@gmail.com> Date: Thu, 16 May 2024 12:38:27 -0600 Subject: [PATCH 388/495] [Documentation][zos_blockinline] Standarize and add docstrings on modules/zos_blockinline.py (#1343) * Standarize doc-strings on modules/zos_blockinline.py * Add a docstring on modules/zos_blockinline.quotedString() * Create changelog fragment * Modify google style to numpy * Standarize numpy style * Update zos_blockinfile.py added blank line at 457 to address pep8 error --------- Co-authored-by: Rich Parker <richp405@gmail.com> Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> --- .../1343-update-docstring-zos_blockinline.yml | 3 + plugins/modules/zos_blockinfile.py | 140 +++++++++++++----- 2 files changed, 103 insertions(+), 40 deletions(-) create mode 100644 changelogs/fragments/1343-update-docstring-zos_blockinline.yml diff --git a/changelogs/fragments/1343-update-docstring-zos_blockinline.yml b/changelogs/fragments/1343-update-docstring-zos_blockinline.yml new file mode 100644 index 000000000..570caa06f --- /dev/null +++ b/changelogs/fragments/1343-update-docstring-zos_blockinline.yml @@ -0,0 +1,3 @@ +trivial: + - zos_blockinline - Updated docstrings to numpy style for visual aid to developers. + (https://github.com/ansible-collections/ibm_zos_core/pull/1343). \ No newline at end of file diff --git a/plugins/modules/zos_blockinfile.py b/plugins/modules/zos_blockinfile.py index 88f410cdb..775809230 100644 --- a/plugins/modules/zos_blockinfile.py +++ b/plugins/modules/zos_blockinfile.py @@ -344,13 +344,21 @@ def transformBlock(block, indentation_char, indentation_spaces): - """Prepends the specified number of spaces to the block in all lines - Arguments: - block: {str} -- The block text to be transformed. - indentation_char: {str} -- The indentation char to be used. - indentation_spaces: {int} -- Number of times the indentation char to prepend. - Returns: - block: {str} -- The text block after applying the necessary transformations. + """Prepends the specified number of spaces to the block in all lines. + + Parameters + ---------- + block : str + The block text to be transformed. + indentation_char : str + The indentation char to be used. + indentation_spaces : int + Number of times the indentation char to prepend. + + Returns + ------- + str + The text block after applying the necessary transformations. """ blocklines = block.splitlines() # Prepend spaces transformation @@ -364,50 +372,83 @@ def transformBlock(block, indentation_char, indentation_spaces): def present(src, block, marker, ins_aft, ins_bef, encoding, force): - """Replace a block with the matching regex pattern - Insert a block before/after the matching pattern - Insert a block at BOF/EOF - Arguments: - src: {str} -- The z/OS USS file or data set to modify. - block: {str} -- The block to insert/replace into the src. - marker: {str} -- The block will be inserted/updated with the markers. - ins_aft: {str} -- Insert the block after matching '*regex*' pattern or EOF. - choices: - - EOF - - '*regex*' - ins_bef: {str} -- Insert the block before matching '*regex*' pattern or BOF. - choices: - - BOF - - '*regex*' - encoding: {str} -- Encoding of the src. - force: {bool} -- If not empty passes True option to dmod cmd. - Returns: - str -- Information in JSON format. keys: - cmd: {str} -- dmod shell command - found: {int} -- Number of matching regex pattern - changed: {bool} -- Indicates if the destination was modified. + """Replace a block with the matching regex pattern. + Insert a block before/after the matching pattern. + Insert a block at BOF/EOF. + + Parameters + ---------- + src : str + The z/OS USS file or data set to modify. + block : str + The block to insert/replace into the src. + marker : str + The block will be inserted/updated with the markers. + ins_aft : str + Insert the block after matching '*regex*' pattern or EOF. + choices: + - EOF + - '*regex*' + ins_bef : str + Insert the block before matching '*regex*' pattern or BOF. + choices: + - BOF + - '*regex*' + encoding : str + Encoding of the src. + force : bool + If not empty passes True option to dmod cmd. + + Returns + ------- + str + Information in JSON format. keys: + cmd {str} -- dmod shell command. + found {int} -- Number of matching regex pattern. + changed {bool} -- Indicates if the destination was modified. """ return datasets.blockinfile(src, True, block=block, marker=marker, insert_after=ins_aft, insert_before=ins_bef, encoding=encoding, force=force, as_json=True) def absent(src, marker, encoding, force): - """Delete blocks with matching regex pattern - Arguments: - src: {str} -- The z/OS USS file or data set to modify. - marker: {str} -- Identifies the block to be removed. - encoding: {str} -- Encoding of the src. - force: {bool} -- If not empty passes the value True option to dmod cmd. - Returns: - str -- Information in JSON format. keys: - cmd: {str} -- dmod shell command - found: {int} -- Number of matching regex pattern - changed: {bool} -- Indicates if the destination was modified. + """Delete blocks with matching regex pattern. + + Parameter + --------- + src : str + The z/OS USS file or data set to modify. + marker : str + Identifies the block to be removed. + encoding : str + Encoding of the src. + force : bool + If not empty passes the value True option to dmod cmd. + + Returns + ------- + str + Information in JSON format. keys: + cmd {str} -- dmod shell command. + found {int} -- Number of matching regex pattern. + changed {bool} -- Indicates if the destination was modified. """ return datasets.blockinfile(src, False, marker=marker, encoding=encoding, force=force, as_json=True) def quotedString(string): + """Deletes the quote mark on strings. + + Parameters + ---------- + string : str + String to delete quote marks from. + + Returns + ------- + str + String without the quote marks. + """ # add escape if string was quoted if not isinstance(string, str): return string @@ -482,6 +523,25 @@ def clean_command(cmd): def main(): + """Run the zos_blockinfile module core functions. + + Raises + ------ + fail_json + Parameter verification failed. + fail_json + Block is required with state=present. + fail_json + Marker should have {mark}. + fail_json + src does NOT exist. + fail_json + Data set type is NOT supported. + fail_json + Unable to allocate backup. + fail_json + ZOAU dmod return content is NOT in json format. + """ module = AnsibleModule( argument_spec=dict( src=dict( From 826ca4b11570e4382048b6d46c1d6f4d2558019d Mon Sep 17 00:00:00 2001 From: Ivan Moreno <iamorenosoto@gmail.com> Date: Mon, 20 May 2024 06:52:46 -0700 Subject: [PATCH 389/495] [Enabler] [zos_job_submit] Add support for GDG/GDS (#1497) * modified DatasetCreatedError message * Added gdg functions * Created unit test for validating gds relative name * Updated to fail when future gen * Update arg parser * Adding gdg support for zos_data_set * Add escaping function for data set names * Add unit tests for name escaping * Remove calls to escape_data_set_name * renamed tests * Added MVSDataset class * Updated escaped symbols * Updated tests * Added utils * Add changelog * Uncommented test * Updated exception * Updated mvsdataset class * Updated class * Added type * Added gds tests * Testing for special symbols * Made data set name escaping optional * Use new class for GDS handling * Update special chars data set name * Escape dollar sign in test * Add positive test for GDG/GDS * Add negative GDG tests * Update docs * Fix data set existence check * Update docs again * Add changelog fragment * Fix merge with dev * Fix source validation * Fix validate-modules issue --------- Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> --- .../1497-gdg-support-zos-job-submit.yml | 4 + docs/source/modules/zos_job_submit.rst | 22 ++++-- plugins/module_utils/data_set.py | 6 +- plugins/modules/zos_job_submit.py | 69 +++++++++++------ .../modules/test_zos_job_submit_func.py | 77 ++++++++++++++++++- 5 files changed, 145 insertions(+), 33 deletions(-) create mode 100644 changelogs/fragments/1497-gdg-support-zos-job-submit.yml diff --git a/changelogs/fragments/1497-gdg-support-zos-job-submit.yml b/changelogs/fragments/1497-gdg-support-zos-job-submit.yml new file mode 100644 index 000000000..d03309289 --- /dev/null +++ b/changelogs/fragments/1497-gdg-support-zos-job-submit.yml @@ -0,0 +1,4 @@ +minor_changes: + - zos_job_submit - add support for generation data groups and generation + data sets as sources for jobs. + (https://github.com/ansible-collections/ibm_zos_core/pull/1497) \ No newline at end of file diff --git a/docs/source/modules/zos_job_submit.rst b/docs/source/modules/zos_job_submit.rst index 964ab8f4b..bec95cb54 100644 --- a/docs/source/modules/zos_job_submit.rst +++ b/docs/source/modules/zos_job_submit.rst @@ -31,11 +31,13 @@ Parameters src The source file or data set containing the JCL to submit. - It could be a physical sequential data set, a partitioned data set qualified by a member or a path. (e.g "USER.TEST","USER.JCL(TEST)") + It could be a physical sequential data set, a partitioned data set qualified by a member or a path (e.g. \ :literal:`USER.TEST`\ , \ :literal:`USER.JCL(TEST)`\ ), or a generation data set from a generation data group (for example, \ :literal:`USER.TEST.GDG(-2)`\ ). - Or a USS file. (e.g "/u/tester/demo/sample.jcl") + Or a USS file. (e.g \ :literal:`/u/tester/demo/sample.jcl`\ ) - Or a LOCAL file in ansible control node. (e.g "/User/tester/ansible-playbook/sample.jcl") + Or a LOCAL file in ansible control node. (e.g \ :literal:`/User/tester/ansible-playbook/sample.jcl`\ ) + + When using a generation data set, only already created generations are valid. If either the relative name is positive, or negative but not found, the module will fail. | **required**: True | **type**: str @@ -44,11 +46,11 @@ src location The JCL location. Supported choices are \ :literal:`data\_set`\ , \ :literal:`uss`\ or \ :literal:`local`\ . - \ :literal:`data\_set`\ can be a PDS, PDSE, or sequential data set. + \ :literal:`data\_set`\ can be a PDS, PDSE, sequential data set, or a generation data set. \ :literal:`uss`\ means the JCL location is located in UNIX System Services (USS). - \ :literal:`local`\ means locally to the ansible control node. + \ :literal:`local`\ means locally to the Ansible control node. | **required**: False | **type**: str @@ -311,6 +313,16 @@ Examples location: data_set max_rc: 16 + - name: Submit JCL from the latest generation data set in a generation data group. + zos_job_submit: + src: HLQ.DATA.GDG(0) + location: data_set + + - name: Submit JCL from a previous generation data set in a generation data group. + zos_job_submit: + src: HLQ.DATA.GDG(-2) + location: data_set + diff --git a/plugins/module_utils/data_set.py b/plugins/module_utils/data_set.py index f741b5c70..bcfd057a3 100644 --- a/plugins/module_utils/data_set.py +++ b/plugins/module_utils/data_set.py @@ -1809,6 +1809,7 @@ class MVSDataSet(): def __init__( self, name, + escape_name=False, data_set_type=None, state=None, organization=None, @@ -1858,7 +1859,8 @@ def __init__( self.is_cataloged = False # If name has escaped chars or is GDS relative name we clean it. - # self.name = DataSet.escape_data_set_name(self.name) + if escape_name: + self.name = DataSet.escape_data_set_name(self.name) if DataSet.is_gds_relative_name(self.name): try: self.name = DataSet.resolve_gds_absolute_name(self.name) @@ -1866,7 +1868,7 @@ def __init__( except Exception: # This means the generation is a positive version so is only used for creation. self.is_gds_active = False - if self.data_set_type.upper() in DataSet.MVS_VSAM or self.data_set_type == "zfs": + if self.data_set_type and (self.data_set_type.upper() in DataSet.MVS_VSAM or self.data_set_type == "zfs"): # When trying to create a new VSAM with a specified record format will fail # with ZOAU self.record_format = None diff --git a/plugins/modules/zos_job_submit.py b/plugins/modules/zos_job_submit.py index bb3aac1ab..ddbb069ff 100644 --- a/plugins/modules/zos_job_submit.py +++ b/plugins/modules/zos_job_submit.py @@ -36,10 +36,15 @@ description: - The source file or data set containing the JCL to submit. - It could be a physical sequential data set, a partitioned data set - qualified by a member or a path. (e.g "USER.TEST","USER.JCL(TEST)") - - Or a USS file. (e.g "/u/tester/demo/sample.jcl") + qualified by a member or a path (e.g. C(USER.TEST), V(USER.JCL(TEST\))), + or a generation data set from a generation data group + (for example, V(USER.TEST.GDG(-2\))). + - Or a USS file. (e.g C(/u/tester/demo/sample.jcl)) - Or a LOCAL file in ansible control node. - (e.g "/User/tester/ansible-playbook/sample.jcl") + (e.g C(/User/tester/ansible-playbook/sample.jcl)) + - When using a generation data set, only already created generations + are valid. If either the relative name is positive, or negative but + not found, the module will fail. location: required: false default: data_set @@ -50,9 +55,9 @@ - local description: - The JCL location. Supported choices are C(data_set), C(uss) or C(local). - - C(data_set) can be a PDS, PDSE, or sequential data set. + - C(data_set) can be a PDS, PDSE, sequential data set, or a generation data set. - C(uss) means the JCL location is located in UNIX System Services (USS). - - C(local) means locally to the ansible control node. + - C(local) means locally to the Ansible control node. wait_time_s: required: false default: 10 @@ -601,6 +606,16 @@ src: HLQ.DATA.LLQ location: data_set max_rc: 16 + +- name: Submit JCL from the latest generation data set in a generation data group. + zos_job_submit: + src: HLQ.DATA.GDG(0) + location: data_set + +- name: Submit JCL from a previous generation data set in a generation data group. + zos_job_submit: + src: HLQ.DATA.GDG(-2) + location: data_set """ from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.encode import ( @@ -647,7 +662,7 @@ MAX_WAIT_TIME_S = 86400 -def submit_src_jcl(module, src, src_name=None, timeout=0, is_unix=True, volume=None, start_time=timer()): +def submit_src_jcl(module, src, src_name=None, timeout=0, is_unix=True, start_time=timer()): """Submit src JCL whether JCL is local (Ansible Controller), USS or in a data set. Parameters @@ -666,9 +681,6 @@ def submit_src_jcl(module, src, src_name=None, timeout=0, is_unix=True, volume=N True if JCL is a file in USS, otherwise False; Note that all JCL local to a controller is transfered to USS thus would be True. - volume : str - volume the data set JCL is located on that will be cataloged before - being submitted. start_time : int time the JCL started its submission. @@ -704,20 +716,6 @@ def submit_src_jcl(module, src, src_name=None, timeout=0, is_unix=True, volume=N result = {} try: - if volume is not None: - volumes = [volume] - # Get the PDS name to catalog it - src_ds_name = data_set.extract_dsname(src) - present, changed = DataSet.attempt_catalog_if_necessary( - src_ds_name, volumes) - - if not present: - result["changed"] = False - result["failed"] = True - result["msg"] = ("Unable to submit job {0} because the data set could " - "not be cataloged on the volume {1}.".format(src, volume)) - module.fail_json(**result) - job_submitted = jobs.submit(src, is_unix=is_unix, **kwargs) # Introducing a sleep to ensure we have the result of job submit carrying the job id. @@ -952,9 +950,32 @@ def run_module(): job_submitted_id = None duration = 0 start_time = timer() + if location == "data_set": + # Resolving a relative GDS name and escaping special symbols if needed. + src_data = data_set.MVSDataSet(src) + + # Checking that the source is actually present on the system. + if volume is not None: + volumes = [volume] + # Get the data set name to catalog it. + src_ds_name = data_set.extract_dsname(src_data.name) + present, changed = DataSet.attempt_catalog_if_necessary(src_ds_name, volumes) + + if not present: + module.fail_json( + msg=(f"Unable to submit job {src_data.name} because the data set could " + f"not be cataloged on the volume {volume}.") + ) + elif data_set.is_member(src_data.name): + if not DataSet.data_set_member_exists(src_data.name): + module.fail_json(msg=f"Cannot submit job, the data set member {src_data.raw_name} was not found.") + else: + if not DataSet.data_set_exists(src_data.name): + module.fail_json(msg=f"Cannot submit job, the data set {src_data.raw_name} was not found.") + job_submitted_id, duration = submit_src_jcl( - module, src, src_name=src, timeout=wait_time_s, is_unix=False, volume=volume, start_time=start_time) + module, src_data.name, src_name=src_data.raw_name, timeout=wait_time_s, is_unix=False, start_time=start_time) elif location == "uss": job_submitted_id, duration = submit_src_jcl( module, src, src_name=src, timeout=wait_time_s, is_unix=True) diff --git a/tests/functional/modules/test_zos_job_submit_func.py b/tests/functional/modules/test_zos_job_submit_func.py index 1e231f60d..c306b1450 100644 --- a/tests/functional/modules/test_zos_job_submit_func.py +++ b/tests/functional/modules/test_zos_job_submit_func.py @@ -399,7 +399,7 @@ """ TEMP_PATH = "/tmp/jcl" -DATA_SET_NAME_SPECIAL_CHARS = "imstestl.im@1.xxx05" +DATA_SET_NAME_SPECIAL_CHARS = "imstestl.im@1.x#$xx05" @pytest.mark.parametrize( "location", [ @@ -460,7 +460,7 @@ def test_job_submit_PDS_special_characters(ansible_zos_module): ) hosts.all.shell( cmd="cp {0}/SAMPLE \"//'{1}(SAMPLE)'\"".format( - TEMP_PATH, DATA_SET_NAME_SPECIAL_CHARS + TEMP_PATH, DATA_SET_NAME_SPECIAL_CHARS.replace('$', '\$') ) ) results = hosts.all.zos_job_submit( @@ -922,6 +922,79 @@ def test_job_submit_local_jcl_typrun_jclhold(ansible_zos_module): assert result.get("jobs")[0].get("ret_code").get("msg_code") is None +@pytest.mark.parametrize("generation", ["0", "-1"]) +def test_job_from_gdg_source(ansible_zos_module, generation): + hosts = ansible_zos_module + + try: + # Creating a GDG for the test. + source = get_tmp_ds_name() + gds_name = f"{source}({generation})" + hosts.all.zos_data_set(name=source, state="present", type="gdg", limit=3) + hosts.all.zos_data_set(name=f"{source}(+1)", state="present", type="seq") + hosts.all.zos_data_set(name=f"{source}(+1)", state="present", type="seq") + + # Copying the JCL to the GDS. + hosts.all.file(path=TEMP_PATH, state="directory") + hosts.all.shell( + cmd="echo {0} > {1}/SAMPLE".format(quote(JCL_FILE_CONTENTS), TEMP_PATH) + ) + hosts.all.shell( + cmd="dcp '{0}/SAMPLE' '{1}'".format(TEMP_PATH, gds_name) + ) + + results = hosts.all.zos_job_submit(src=gds_name, location="data_set") + for result in results.contacted.values(): + assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" + assert result.get("jobs")[0].get("ret_code").get("code") == 0 + assert result.get("changed") is True + finally: + hosts.all.file(path=TEMP_PATH, state="absent") + hosts.all.zos_data_set(name=f"{source}(0)", state="absent") + hosts.all.zos_data_set(name=f"{source}(-1)", state="absent") + hosts.all.zos_data_set(name=source, state="absent") + + +def test_inexistent_negative_gds(ansible_zos_module): + hosts = ansible_zos_module + + try: + # Creating a GDG for the test. + source = get_tmp_ds_name() + gds_name = f"{source}(-1)" + hosts.all.zos_data_set(name=source, state="present", type="gdg", limit=3) + # Only creating generation 0. + hosts.all.zos_data_set(name=f"{source}(+1)", state="present", type="seq") + + results = hosts.all.zos_job_submit(src=gds_name, location="data_set") + for result in results.contacted.values(): + assert result.get("changed") is False + assert "was not found" in result.get("msg") + finally: + hosts.all.zos_data_set(name=f"{source}(0)", state="absent") + hosts.all.zos_data_set(name=source, state="absent") + + +def test_inexistent_positive_gds(ansible_zos_module): + hosts = ansible_zos_module + + try: + # Creating a GDG for the test. + source = get_tmp_ds_name() + gds_name = f"{source}(+1)" + hosts.all.zos_data_set(name=source, state="present", type="gdg", limit=3) + # Only creating generation 0. + hosts.all.zos_data_set(name=gds_name, state="present", type="seq") + + results = hosts.all.zos_job_submit(src=gds_name, location="data_set") + for result in results.contacted.values(): + assert result.get("changed") is False + assert "was not found" in result.get("msg") + finally: + hosts.all.zos_data_set(name=f"{source}(0)", state="absent") + hosts.all.zos_data_set(name=source, state="absent") + + # This test case is related to the following GitHub issues: # - https://github.com/ansible-collections/ibm_zos_core/issues/677 # - https://github.com/ansible-collections/ibm_zos_core/issues/972 From 30d7adc7bc2aa4e5118937e81d8de222b4902ff7 Mon Sep 17 00:00:00 2001 From: Demetri <dimatos@gmail.com> Date: Wed, 22 May 2024 09:36:17 -0700 Subject: [PATCH 390/495] Merge main 1.10.0 beta.1 into dev (#1510) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Staging v1.7.0 beta.1 (#915) * Added tagging of converted files * Updated tests for zos_encode * Added restoration of PDS/PDSE members when module fails zos_copy can now track which members got overwritten or newly added to a partitioned data set, and restore them accordingly. This commit includes a refactorization of the copy_to_pdse method to simplify the restoration work. * Fixed cleanup of dest when module fails * Removed exception used for debugging * Added pytest markers * solved pep8 issue * Added more information to error when restoration fails * Update make file with new features to manage the nodes (#563) * Update make file with new features to manage the nodes Signed-off-by: ddimatos <dimatos@gmail.com> * Correct typo Signed-off-by: ddimatos <dimatos@gmail.com> * updated makefile to allow for no password in the clear and added a new host Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Fix a bug that failed to copy the cert to the managed node (#577) Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Added apropiate error message in zos_lineinfile when src is not found * Better error message * Added fragment for changelog * Update git issue slection field (#593) * Update git issue slection field * Added some additional future versions * Added rule to ignore python 2.7 compile not supporting f strings * Corrected rule in ignore file * Update 584-zos_lineinfile-error-message.yml * Added missing fragments for issues 309 and 408 * update make file and related artifacts with recent zVM changes (#598) Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Expand what is ingored for venv to all venv's (#613) Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Ansible-core versions with updated ignore file pass (#615) * Validated that ansible-core versions with updated ignore file pass locally Signed-off-by: ddimatos <dimatos@gmail.com> * remove redundant entry on last line Signed-off-by: ddimatos <dimatos@gmail.com> * remove redundant entry on last line Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Update make file to read exported var VENV else default to 'venv' (#614) * Update make file to read exported var VENV else default to 'venv' Signed-off-by: ddimatos <dimatos@gmail.com> * Correct the default from defaultValue to 'venv' Signed-off-by: ddimatos <dimatos@gmail.com> * Bugfix/558/zos copy backup dev (#609) * Create emergency backup only if force is set to False and dest exists * Added Changelog fragment * Updated imports ansible.module_utils._text to ansible.module_utils.common.text.converters (#602) * Updated import in zos_fetch * Updated import in zos_job_submit * Updated import in module_utils/data_set * Updated import in module_utils/System * Updated import in zos_copy * Updated import in zos_fetch * Added changelog fragment * Update enhancement-518-text-converter-import.yml * Updates the makefile and related scripts for the latest mounts (#628) * Update makefile coments and use --ignore for pytest Signed-off-by: ddimatos <dimatos@gmail.com> * lexicographical order targets Signed-off-by: ddimatos <dimatos@gmail.com> * Update makefile and artifacts to support new mounts Signed-off-by: ddimatos <dimatos@gmail.com> * Removing encrypted versions of the scripts, no longer senstive content Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * update profile created for mount points Signed-off-by: ddimatos <dimatos@gmail.com> * Correct f-string usage to remain 2.7 compatible (#659) * Correct f-string usage to remain 2.7 compatible Signed-off-by: ddimatos <dimatos@gmail.com> * Update changelog fragments Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Updated shell scripts for development tooling Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Update collab template Signed-off-by: ddimatos <dimatos@gmail.com> * Update collab template Signed-off-by: ddimatos <dimatos@gmail.com> * Update collab template Signed-off-by: ddimatos <dimatos@gmail.com> * Template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Template updates Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * changed zos_job_query to allow multi/embedded wildcard job_values. job.py/_get_job_status now uses fnmatch to test for wildcarded job_names. added test looking for a wildcard job name. added basic changelog text as 'enhancement'. * expanded wildcard support to job_id as well as job_name corrected error-handling issue in query routine playbook tests complete * cleaned up pep8 issues * Bug fix to correct job log returining and other edge cases (#683) * Update zos_job_submit to handle some edge cases and return the job log nearly always Signed-off-by: ddimatos <dimatos@gmail.com> * Linting correction Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragement for pr 683 Signed-off-by: ddimatos <dimatos@gmail.com> * Clean up comments in code Signed-off-by: ddimatos <dimatos@gmail.com> * Update restructured text for modules and changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Add support to check for security exception to job submit module Signed-off-by: ddimatos <dimatos@gmail.com> * Add additonal logic for jobs that use typerun=scan Signed-off-by: ddimatos <dimatos@gmail.com> * Update chnagelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update zos_job_submit.py Grammar change and correction. --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Added uss_tag_encoding function * Fixing linter issues * removed extraneous comment on query, eliminated unused variable * responding to reviewer comments * Expanded query tests to run a hello jcl file to make sure we are searching for a job id that exists. * Expanded documentation in code to highlight asterisk availability. Added examples showing multi-asterisk search. * Corrected 2 documentation errors * Change to documentation text (indent on multi line string?) * Still trying to get documentation to pass * Looks like '---' was killing documentation block. * Update zos_blockinfile with complex examples (#727) * Update zos_blockinfile with complex examples Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragement Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Added additional D SYMBOLS example to zos_operator documentation (#730) * Update zos_operator with an additional example Signed-off-by: ddimatos <dimatos@gmail.com> * Added changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * zos_gather_facts - add sample output to RETURN docstring (#722) * zos_gather_facts - add sample output to RETURN docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add changelog fragment Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * zos_gather_facts RETURN docstring - add sample sysplex_name and lpar_name values Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * Update generated module doc Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: ddimatos <dimatos@gmail.com> * 574find (#668) * change to allow find loop to continue when one particular data set element is not found This has been validated with a zos-check-find.yaml playbook * added functional test that includes non-existant pds entry, which should be skipped over * added changelog fragment. * zos_blockinfile , can quotes in content can be supported (#680) * Verify coomand of ZOAU support the doble quotes and get better output of message * Verify coomand of ZOAU support the doble quotes and get better output of message * Restore to the one tyme function solving TypeError * Test about cases with quotes supported * Solve comments * Comments in the changelog * Adjust test for working accord the text * A needed space * All in structured * Comments solved * Better order --------- Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> Co-authored-by: Demetri <dimatos@gmail.com> * zos_copy mode is applied to the destination directory, a deviation from the communtiy module behavior. (#723) * Verify coomand of ZOAU support the doble quotes and get better output of message * Verify coomand of ZOAU support the doble quotes and get better output of message * Restore to the one tyme function solving TypeError * Test about cases with quotes supported * Solve comments * Comments in the changelog * Adjust test for working accord the text * Solve the dest functional mode set for the applied to destination directory * Identation and spaces * To work well * To work well * To work well * To work well * Changelogs added * Solved the fragment test and separte the cases --------- Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> * corrected job test case that wanted to extract job id. * changed call to zos_job_query in the functional test. * zos_data_set - add force parameter to enable member delete via disp shr (#718) * add force parameter to enable member delete via disp shr Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update link to PR instead of issue Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * fix minor mistake in func test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update DOCstring for missed items highlighted in PR review Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update copyright years in data set module util Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * Update changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update to doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> * Update to doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> * Update to doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> * update doc, rst Signed-off-by: ddimatos <dimatos@gmail.com> * Update doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> * Added handler for job not found edge cases (None not iterable errors) * corrected pep8 issue (bad indent) * removed tracking text from error/not found messages. * Update zos_job_query.py * Add and automate a load lib test case for module zos_copy (#640) * Modified if statement * Added changelog fragment * Corrected statements Corrected PR in changelog fix lint issue * Adding test case on the work * Added assertion to loadlib test Changed cobol src Added newlines * Correcting cobol src * Removed unnecessary comments * Removed deprint statements for debug * Update 601-copy-loadlib-member.yml * Reduce space primary * Make sure cobol is executable * corrected name * Corrected names * Adding debug statements * modified name in link step * Changing copy name * Removed print statements for debug * Removed previous changelog * Removed unused fragment * Removed test case * Add max_rc support for module zos_tso_command (#666) * rebuilding 565 * fixing pep8 issue * corrected not in code, added functional test to use maxrc * changing maxrc return codes as indicated * updated copy_rexx to incorporate max_rc * pep8 issue on an if statement * changed test to dump intermediate values temporarily * added max_rc to arg params to get it to pass through * removed extra output on test * Added changelog fragment. * Update 565-zos_tsocommand_maxrc.yml * PARTIAL correction based on reviewer's feedback. Will complete later today. * Partial push: change origrc to orig_rc and added documentation. Added experimental loop for multiline results. Changed error case to continue through the commands even after a failure (?!) * Changes based on review feedback: - rc is actual returned value - origrc/orig_rc removed - in a list of commands, first error (considering max_rc) stops the chain - while processing commands, a list of success/failures are kept for error state info - changed listds to a longer, more complete string in example * updated test case for max_rc to use longer, more complete listds command * Minor pep8 and yamllint changes * Updated string builder to use append and join, for efficiency. * Update changelog fragment to correspond to pull request number Signed-off-by: ddimatos <dimatos@gmail.com> * removed old fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Changed output of command-by-command to use max_rc instead of Max RC, for consistency. * clarifying failure message when user hasn't set a max_rc. * corrected date on copyright notice to include current year. * corrected copyright date on test case --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: Demetri <dimatos@gmail.com> * added changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * zos_copy forward port a test case for symbols on a volume #739 (#740) * added test case for symbols on a volume Signed-off-by: ddimatos <dimatos@gmail.com> * Changelog fragment added Signed-off-by: ddimatos <dimatos@gmail.com> * Update test case with added comments Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Volume Initialization Module (#654) * Adding ZOS_INIT plugins The module and action for ZOS_Init command, under zos_ickdsf_command. Designed for version 1.3.1, and example given references removed zos_ssh.py. * Create zos_ickdsf_command.rst Tentative documentation. Does not quite list every return, and program may change to work with overall zos core collection. * Update zos_ickdsf_command.py fixed to pass pep8 sanity test. * Update zos_ickdsf_command.py Edited zos_ickdsf_command.py in action folder to pass pep8 style check. * bugfix - undo hard-coded storagegroup param from command passed to mvs_raw Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update encoding and licensing info Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * edit descriptions, leaving in old to verify messaging with dfsms team Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove references to buildix Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * cleanup examples docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add return docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add some unit tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update dict names, some clean up Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update interface and docs with cleaner descriptions and define default values and required true/false where applicable Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove init param and bring out sub-options one level Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename module from zos_ickdsf_command to zos_ickdsf_init Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove files referring to zos_ickdsf_command; i forgot to untrack these in the previous commit Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove params from intitial definition of results dict which are not always present, remove unused return params, rename message to msg to match ibm_zos_core and ansible style Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add ignore missing gplv3 license warning from sanity testings to existing ignore files Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * style edits to pass sanity tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add in-depth task name to first example docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add minimal params functional test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * convert action plugin to module_util, remove IckdsfCommand and CommandInit class definitions and custom error classes, move convert method to module_util Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename rc param, update func test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add note about disabled verify_offline check Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * removed volume_address checks which can be handled by ansiblemodule parsing validation or ickdsf itself; changed function signature of convert method to include results dicts so it can pass it to fail_json method in the case of module failure Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add invalid volume_address func tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * change param names, organize mvscmd return output Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * consolidate negative func tests, add negative tests for vtoc_tracks param, start list for postive tests to implement Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add func tests for "verify_existing_volid" and "verify_no_data_sets_exist" Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add volid positive and negative func tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add mutual exclusion to no index/sms_managedw with func test, minor edits to comments * add func tests for index param Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add func test for vtoc_tracks param Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove 'addr_range', 'volid_prefix' and 'output_html' params; add example with ansible loop to init multiple serial volumes Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * convert 'content' return param to list for prettier playbook output Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add specific examples to docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add guard rail test with volser check set to ensure target volume is as expected Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * switch target test vol to user02, remove existing data sets in initial test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * start adding author names to docstring, edits of yaml for sanity test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename module to zos_volume_init Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove unit test suite which was not kept up to date with code restructure Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove old/outdated .rst file Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * bring up to speed with v1.5.0-beta.1 which is currently more recent than dev Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add sanity test 2.15 file Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * clean up trailing empty lines/space chars Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * more pep8 style Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * pull in more changes from latest dev branch Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * tweak tests/sanity/ignore-2.10.txt file Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename volume_address to address, rename verify_no_data_sets_exist to verify_volume_empty, rename verify_existing_volid to verify_volid, rename vtoc_tracks to vtoc_size, rename return param command to cmd, make suggested doc changes, switch off check_mode Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * resolve pep8 line too long issue Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add year to copyright, add gh user name to author, restyle true/false with code rather than italics, remove backslashes in docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add changelog fragment Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update zos_fetch test case to create data set instead of using one that gets wiped away by zos_volume_init test cases Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * address PR comments including pep8 styling, docstring changes including add seealso section, add details around authorized usage of ickdsf, some light wordsmithing, and remove cmd from return structure Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add backticks to other special chars in docs for consistency Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * on more formatting tweak to national chars in docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add new param to support tmp_hlq Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * extend tmphlq binding to sysprint(*), disable verbose mode for mvscmd call Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove references to betterargparser - it cannot be used in its current state as it is too restrictive on volume serial and qualifier args Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove soft language ie beware, please; reduce line lengths, fix a spelling error Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * reorganize test case code for more logical grouping and un-nesting from a for loop, also update copyright year that was missed Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update display command check to only check particular lines instead of entire output in test_volid_address_assigned_correctly which would always be positive when tested against the entire output Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: trevor-glassey <Trevor.Glassey@ibm.com> * Ported bugfixes for #664 and #678 from 1.5.0 (#743) * Ported bugfixes for #664 and #678 * Updated file normalization for sequential datasets * Fixed encoding when a remote source is used * Added encoding normalization for record length (#664) This is a port and tweak from the normalization found in the L2 support branch. Now the normalization applies for both local and remote source files. There is still work left to validate how this behaves when copying to partitioned datasets, and see if the normalization needs to handle directories as well. * Refactored normalization for USS files * Updated line endings normalization in PDSEs * Updated create_temp_with_lf_endings * Added tests for line endings normalization * Added changelog fragment * Updated changelog fragment * Updated copyright information * Added comments to record length tests * Added more context to the record length normalization * Add force (disp=shr) option to zos_lineinfile and update zos_blockinfile with the same locking test case. (#731) * Add DISP=SHR by function force to the user * Add helper function for force cases only for EOF inserts * Test cases added * Just to check in the change case * All test cases for force in zos_lineinfile working properly * Test cases for force blockinfile * Fixes for style of code * One identation error * One identation error * One identation error * Documentation error * All test cases for line infile and block infile * Comments * Change the stdout to assert with expected of ds * Error of test case * Fragment added * Solve typo errors * Update to remove uncessary comment Signed-off-by: ddimatos <dimatos@gmail.com> * Update changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update module with force comments Signed-off-by: ddimatos <dimatos@gmail.com> * Solve some test problems * Copyright year * Copyright year * Add cases for other ecodes * Add test for all encodings * Cover all encodes test cases with force * Blockinfile force added for other encodes and DS types --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@mbp-de-andre-2.gdl.mex.ibm.com> Co-authored-by: ddimatos <dimatos@gmail.com> * remove duplicate function (#753) * Update branch production branch Main with release v1.5.0 content (#756) (#758) * Bug fix to correct job log returining and other edge cases (#688) * Update zos_job_submit to handle some edge cases and return the job log nearly always * Linting correction * Add changelog fragement for pr 683 * Clean up comments in code * Update restructured text for modules and changelog fragment * Add support to check for security exception to job submit module * Add additonal logic for jobs that use typerun=scan * Update chnagelog fragment * Correct run on sentence --------- * Added additional `D SYMBOLS` example to zos_operator documentation (#729) * Update zos_operator with an additional example * Added change fragment --------- * Ported bugfixes for #664 and #678 * Updated file normalization for sequential datasets * Fixed encoding when a remote source is used * Added encoding normalization for record length (#664) This is a port and tweak from the normalization found in the L2 support branch. Now the normalization applies for both local and remote source files. There is still work left to validate how this behaves when copying to partitioned datasets, and see if the normalization needs to handle directories as well. * Refactored normalization for USS files * Updated line endings normalization in PDSEs * Updated create_temp_with_lf_endings * Added tests for line endings normalization * Added changelog fragment * Add copy lib member test case * Corrected changelog fragment * Add conflict resolution * Update 641-copy-loadlib-member.yml * zos_copy forward port a test case for symbols on a volume (#739) * Add a test case for symbols using an existing volume on our test system * Add changelog fragment * Update 739-zos_copy-volume-symbol-test.yml Update description to use symbols overs special chars. * Update test case with added comments --------- * bring in updates to zos_gather_facts RETURN docstring (#744) * Solve problem 619 about set mode on dest file (#746) * Solve problem 619 about set mode on dest file * Add fragment of 746 PR --------- * Merge release tasks for v1.5.0 into release branch (#752) * Updated metadata * Updated copyright information * Updated copyright in README * Generated changelog * Added previous versions to changelog * Changed a minor_change fragment to bugfix * Updated release notes * Update dependency finder to allow for all tests minus skip tests * Update release notes and changelog with full date formats --------- * remove changelog fragments no longer needed (#754) --------- Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: Ivan Moreno <iamorenosoto@gmail.com> Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> Co-authored-by: Rich Parker <richp405@gmail.com> Co-authored-by: ketankelkar <ktnklkr@gmail.com> Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> * Merge master to dev for 1.6.0 beta.1 (#763) * Update branch production branch Main with release v1.5.0 content (#756) * Bug fix to correct job log returining and other edge cases (#688) * Update zos_job_submit to handle some edge cases and return the job log nearly always Signed-off-by: ddimatos <dimatos@gmail.com> * Linting correction Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragement for pr 683 Signed-off-by: ddimatos <dimatos@gmail.com> * Clean up comments in code Signed-off-by: ddimatos <dimatos@gmail.com> * Update restructured text for modules and changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Add support to check for security exception to job submit module Signed-off-by: ddimatos <dimatos@gmail.com> * Add additonal logic for jobs that use typerun=scan Signed-off-by: ddimatos <dimatos@gmail.com> * Update chnagelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Correct run on sentence Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Added additional `D SYMBOLS` example to zos_operator documentation (#729) * Update zos_operator with an additional example Signed-off-by: ddimatos <dimatos@gmail.com> * Added change fragment Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Ported bugfixes for #664 and #678 * Updated file normalization for sequential datasets * Fixed encoding when a remote source is used * Added encoding normalization for record length (#664) This is a port and tweak from the normalization found in the L2 support branch. Now the normalization applies for both local and remote source files. There is still work left to validate how this behaves when copying to partitioned datasets, and see if the normalization needs to handle directories as well. * Refactored normalization for USS files * Updated line endings normalization in PDSEs * Updated create_temp_with_lf_endings * Added tests for line endings normalization * Added changelog fragment * Add copy lib member test case * Corrected changelog fragment * Add conflict resolution * Update 641-copy-loadlib-member.yml * zos_copy forward port a test case for symbols on a volume (#739) * Add a test case for symbols using an existing volume on our test system Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update 739-zos_copy-volume-symbol-test.yml Update description to use symbols overs special chars. * Update test case with added comments Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * bring in updates to zos_gather_facts RETURN docstring (#744) Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * Solve problem 619 about set mode on dest file (#746) * Solve problem 619 about set mode on dest file * Add fragment of 746 PR --------- Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> * Merge release tasks for v1.5.0 into release branch (#752) * Updated metadata * Updated copyright information * Updated copyright in README * Generated changelog * Added previous versions to changelog * Changed a minor_change fragment to bugfix * Updated release notes * Update dependency finder to allow for all tests minus skip tests Signed-off-by: ddimatos <dimatos@gmail.com> * Update release notes and changelog with full date formats Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: ddimatos <dimatos@gmail.com> * remove changelog fragments no longer needed (#754) Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: Ivan Moreno <iamorenosoto@gmail.com> Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> Co-authored-by: Rich Parker <richp405@gmail.com> Co-authored-by: ketankelkar <ktnklkr@gmail.com> Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> * v1.6.0-beta.1 release (#759) * Added tagging of converted files * Updated tests for zos_encode * Added restoration of PDS/PDSE members when module fails zos_copy can now track which members got overwritten or newly added to a partitioned data set, and restore them accordingly. This commit includes a refactorization of the copy_to_pdse method to simplify the restoration work. * Fixed cleanup of dest when module fails * Removed exception used for debugging * Added pytest markers * solved pep8 issue * Added more information to error when restoration fails * Update make file with new features to manage the nodes (#563) * Update make file with new features to manage the nodes Signed-off-by: ddimatos <dimatos@gmail.com> * Correct typo Signed-off-by: ddimatos <dimatos@gmail.com> * updated makefile to allow for no password in the clear and added a new host Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Fix a bug that failed to copy the cert to the managed node (#577) Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Added apropiate error message in zos_lineinfile when src is not found * Better error message * Added fragment for changelog * Update git issue slection field (#593) * Update git issue slection field * Added some additional future versions * Added rule to ignore python 2.7 compile not supporting f strings * Corrected rule in ignore file * Update 584-zos_lineinfile-error-message.yml * Added missing fragments for issues 309 and 408 * update make file and related artifacts with recent zVM changes (#598) Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Expand what is ingored for venv to all venv's (#613) Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Ansible-core versions with updated ignore file pass (#615) * Validated that ansible-core versions with updated ignore file pass locally Signed-off-by: ddimatos <dimatos@gmail.com> * remove redundant entry on last line Signed-off-by: ddimatos <dimatos@gmail.com> * remove redundant entry on last line Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Update make file to read exported var VENV else default to 'venv' (#614) * Update make file to read exported var VENV else default to 'venv' Signed-off-by: ddimatos <dimatos@gmail.com> * Correct the default from defaultValue to 'venv' Signed-off-by: ddimatos <dimatos@gmail.com> * Bugfix/558/zos copy backup dev (#609) * Create emergency backup only if force is set to False and dest exists * Added Changelog fragment * Updated imports ansible.module_utils._text to ansible.module_utils.common.text.converters (#602) * Updated import in zos_fetch * Updated import in zos_job_submit * Updated import in module_utils/data_set * Updated import in module_utils/System * Updated import in zos_copy * Updated import in zos_fetch * Added changelog fragment * Update enhancement-518-text-converter-import.yml * Updates the makefile and related scripts for the latest mounts (#628) * Update makefile coments and use --ignore for pytest Signed-off-by: ddimatos <dimatos@gmail.com> * lexicographical order targets Signed-off-by: ddimatos <dimatos@gmail.com> * Update makefile and artifacts to support new mounts Signed-off-by: ddimatos <dimatos@gmail.com> * Removing encrypted versions of the scripts, no longer senstive content Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * update profile created for mount points Signed-off-by: ddimatos <dimatos@gmail.com> * Correct f-string usage to remain 2.7 compatible (#659) * Correct f-string usage to remain 2.7 compatible Signed-off-by: ddimatos <dimatos@gmail.com> * Update changelog fragments Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Updated shell scripts for development tooling Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Update collab template Signed-off-by: ddimatos <dimatos@gmail.com> * Update collab template Signed-off-by: ddimatos <dimatos@gmail.com> * Update collab template Signed-off-by: ddimatos <dimatos@gmail.com> * Template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Template updates Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * changed zos_job_query to allow multi/embedded wildcard job_values. job.py/_get_job_status now uses fnmatch to test for wildcarded job_names. added test looking for a wildcard job name. added basic changelog text as 'enhancement'. * expanded wildcard support to job_id as well as job_name corrected error-handling issue in query routine playbook tests complete * cleaned up pep8 issues * Bug fix to correct job log returining and other edge cases (#683) * Update zos_job_submit to handle some edge cases and return the job log nearly always Signed-off-by: ddimatos <dimatos@gmail.com> * Linting correction Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragement for pr 683 Signed-off-by: ddimatos <dimatos@gmail.com> * Clean up comments in code Signed-off-by: ddimatos <dimatos@gmail.com> * Update restructured text for modules and changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Add support to check for security exception to job submit module Signed-off-by: ddimatos <dimatos@gmail.com> * Add additonal logic for jobs that use typerun=scan Signed-off-by: ddimatos <dimatos@gmail.com> * Update chnagelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update zos_job_submit.py Grammar change and correction. --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Added uss_tag_encoding function * Fixing linter issues * removed extraneous comment on query, eliminated unused variable * responding to reviewer comments * Expanded query tests to run a hello jcl file to make sure we are searching for a job id that exists. * Expanded documentation in code to highlight asterisk availability. Added examples showing multi-asterisk search. * Corrected 2 documentation errors * Change to documentation text (indent on multi line string?) * Still trying to get documentation to pass * Looks like '---' was killing documentation block. * Update zos_blockinfile with complex examples (#727) * Update zos_blockinfile with complex examples Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragement Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Added additional D SYMBOLS example to zos_operator documentation (#730) * Update zos_operator with an additional example Signed-off-by: ddimatos <dimatos@gmail.com> * Added changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * zos_gather_facts - add sample output to RETURN docstring (#722) * zos_gather_facts - add sample output to RETURN docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add changelog fragment Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * zos_gather_facts RETURN docstring - add sample sysplex_name and lpar_name values Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * Update generated module doc Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: ddimatos <dimatos@gmail.com> * 574find (#668) * change to allow find loop to continue when one particular data set element is not found This has been validated with a zos-check-find.yaml playbook * added functional test that includes non-existant pds entry, which should be skipped over * added changelog fragment. * zos_blockinfile , can quotes in content can be supported (#680) * Verify coomand of ZOAU support the doble quotes and get better output of message * Verify coomand of ZOAU support the doble quotes and get better output of message * Restore to the one tyme function solving TypeError * Test about cases with quotes supported * Solve comments * Comments in the changelog * Adjust test for working accord the text * A needed space * All in structured * Comments solved * Better order --------- Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> Co-authored-by: Demetri <dimatos@gmail.com> * zos_copy mode is applied to the destination directory, a deviation from the communtiy module behavior. (#723) * Verify coomand of ZOAU support the doble quotes and get better output of message * Verify coomand of ZOAU support the doble quotes and get better output of message * Restore to the one tyme function solving TypeError * Test about cases with quotes supported * Solve comments * Comments in the changelog * Adjust test for working accord the text * Solve the dest functional mode set for the applied to destination directory * Identation and spaces * To work well * To work well * To work well * To work well * Changelogs added * Solved the fragment test and separte the cases --------- Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> * corrected job test case that wanted to extract job id. * changed call to zos_job_query in the functional test. * zos_data_set - add force parameter to enable member delete via disp shr (#718) * add force parameter to enable member delete via disp shr Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update link to PR instead of issue Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * fix minor mistake in func test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update DOCstring for missed items highlighted in PR review Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update copyright years in data set module util Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * Update changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update to doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> * Update to doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> * Update to doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> * update doc, rst Signed-off-by: ddimatos <dimatos@gmail.com> * Update doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> * Added handler for job not found edge cases (None not iterable errors) * corrected pep8 issue (bad indent) * removed tracking text from error/not found messages. * Update zos_job_query.py * Add and automate a load lib test case for module zos_copy (#640) * Modified if statement * Added changelog fragment * Corrected statements Corrected PR in changelog fix lint issue * Adding test case on the work * Added assertion to loadlib test Changed cobol src Added newlines * Correcting cobol src * Removed unnecessary comments * Removed deprint statements for debug * Update 601-copy-loadlib-member.yml * Reduce space primary * Make sure cobol is executable * corrected name * Corrected names * Adding debug statements * modified name in link step * Changing copy name * Removed print statements for debug * Removed previous changelog * Removed unused fragment * Removed test case * Add max_rc support for module zos_tso_command (#666) * rebuilding 565 * fixing pep8 issue * corrected not in code, added functional test to use maxrc * changing maxrc return codes as indicated * updated copy_rexx to incorporate max_rc * pep8 issue on an if statement * changed test to dump intermediate values temporarily * added max_rc to arg params to get it to pass through * removed extra output on test * Added changelog fragment. * Update 565-zos_tsocommand_maxrc.yml * PARTIAL correction based on reviewer's feedback. Will complete later today. * Partial push: change origrc to orig_rc and added documentation. Added experimental loop for multiline results. Changed error case to continue through the commands even after a failure (?!) * Changes based on review feedback: - rc is actual returned value - origrc/orig_rc removed - in a list of commands, first error (considering max_rc) stops the chain - while processing commands, a list of success/failures are kept for error state info - changed listds to a longer, more complete string in example * updated test case for max_rc to use longer, more complete listds command * Minor pep8 and yamllint changes * Updated string builder to use append and join, for efficiency. * Update changelog fragment to correspond to pull request number Signed-off-by: ddimatos <dimatos@gmail.com> * removed old fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Changed output of command-by-command to use max_rc instead of Max RC, for consistency. * clarifying failure message when user hasn't set a max_rc. * corrected date on copyright notice to include current year. * corrected copyright date on test case --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: Demetri <dimatos@gmail.com> * added changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * zos_copy forward port a test case for symbols on a volume #739 (#740) * added test case for symbols on a volume Signed-off-by: ddimatos <dimatos@gmail.com> * Changelog fragment added Signed-off-by: ddimatos <dimatos@gmail.com> * Update test case with added comments Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Volume Initialization Module (#654) * Adding ZOS_INIT plugins The module and action for ZOS_Init command, under zos_ickdsf_command. Designed for version 1.3.1, and example given references removed zos_ssh.py. * Create zos_ickdsf_command.rst Tentative documentation. Does not quite list every return, and program may change to work with overall zos core collection. * Update zos_ickdsf_command.py fixed to pass pep8 sanity test. * Update zos_ickdsf_command.py Edited zos_ickdsf_command.py in action folder to pass pep8 style check. * bugfix - undo hard-coded storagegroup param from command passed to mvs_raw Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update encoding and licensing info Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * edit descriptions, leaving in old to verify messaging with dfsms team Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove references to buildix Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * cleanup examples docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add return docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add some unit tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update dict names, some clean up Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update interface and docs with cleaner descriptions and define default values and required true/false where applicable Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove init param and bring out sub-options one level Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename module from zos_ickdsf_command to zos_ickdsf_init Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove files referring to zos_ickdsf_command; i forgot to untrack these in the previous commit Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove params from intitial definition of results dict which are not always present, remove unused return params, rename message to msg to match ibm_zos_core and ansible style Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add ignore missing gplv3 license warning from sanity testings to existing ignore files Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * style edits to pass sanity tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add in-depth task name to first example docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add minimal params functional test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * convert action plugin to module_util, remove IckdsfCommand and CommandInit class definitions and custom error classes, move convert method to module_util Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename rc param, update func test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add note about disabled verify_offline check Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * removed volume_address checks which can be handled by ansiblemodule parsing validation or ickdsf itself; changed function signature of convert method to include results dicts so it can pass it to fail_json method in the case of module failure Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add invalid volume_address func tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * change param names, organize mvscmd return output Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * consolidate negative func tests, add negative tests for vtoc_tracks param, start list for postive tests to implement Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add func tests for "verify_existing_volid" and "verify_no_data_sets_exist" Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add volid positive and negative func tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add mutual exclusion to no index/sms_managedw with func test, minor edits to comments * add func tests for index param Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add func test for vtoc_tracks param Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove 'addr_range', 'volid_prefix' and 'output_html' params; add example with ansible loop to init multiple serial volumes Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * convert 'content' return param to list for prettier playbook output Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add specific examples to docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add guard rail test with volser check set to ensure target volume is as expected Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * switch target test vol to user02, remove existing data sets in initial test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * start adding author names to docstring, edits of yaml for sanity test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename module to zos_volume_init Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove unit test suite which was not kept up to date with code restructure Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove old/outdated .rst file Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * bring up to speed with v1.5.0-beta.1 which is currently more recent than dev Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add sanity test 2.15 file Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * clean up trailing empty lines/space chars Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * more pep8 style Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * pull in more changes from latest dev branch Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * tweak tests/sanity/ignore-2.10.txt file Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename volume_address to address, rename verify_no_data_sets_exist to verify_volume_empty, rename verify_existing_volid to verify_volid, rename vtoc_tracks to vtoc_size, rename return param command to cmd, make suggested doc changes, switch off check_mode Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * resolve pep8 line too long issue Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add year to copyright, add gh user name to author, restyle true/false with code rather than italics, remove backslashes in docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add changelog fragment Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update zos_fetch test case to create data set instead of using one that gets wiped away by zos_volume_init test cases Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * address PR comments including pep8 styling, docstring changes including add seealso section, add details around authorized usage of ickdsf, some light wordsmithing, and remove cmd from return structure Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add backticks to other special chars in docs for consistency Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * on more formatting tweak to national chars in docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add new param to support tmp_hlq Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * extend tmphlq binding to sysprint(*), disable verbose mode for mvscmd call Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove references to betterargparser - it cannot be used in its current state as it is too restrictive on volume serial and qualifier args Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove soft language ie beware, please; reduce line lengths, fix a spelling error Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * reorganize test case code for more logical grouping and un-nesting from a for loop, also update copyright year that was missed Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update display command check to only check particular lines instead of entire output in test_volid_address_assigned_correctly which would always be positive when tested against the entire output Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: trevor-glassey <Trevor.Glassey@ibm.com> * Ported bugfixes for #664 and #678 from 1.5.0 (#743) * Ported bugfixes for #664 and #678 * Updated file normalization for sequential datasets * Fixed encoding when a remote source is used * Added encoding normalization for record length (#664) This is a port and tweak from the normalization found in the L2 support branch. Now the normalization applies for both local and remote source files. There is still work left to validate how this behaves when copying to partitioned datasets, and see if the normalization needs to handle directories as well. * Refactored normalization for USS files * Updated line endings normalization in PDSEs * Updated create_temp_with_lf_endings * Added tests for line endings normalization * Added changelog fragment * Updated changelog fragment * Updated copyright information * Added comments to record length tests * Added more context to the record length normalization * Add force (disp=shr) option to zos_lineinfile and update zos_blockinfile with the same locking test case. (#731) * Add DISP=SHR by function force to the user * Add helper function for force cases only for EOF inserts * Test cases added * Just to check in the change case * All test cases for force in zos_lineinfile working properly * Test cases for force blockinfile * Fixes for style of code * One identation error * One identation error * One identation error * Documentation error * All test cases for line infile and block infile * Comments * Change the stdout to assert with expected of ds * Error of test case * Fragment added * Solve typo errors * Update to remove uncessary comment Signed-off-by: ddimatos <dimatos@gmail.com> * Update changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update module with force comments Signed-off-by: ddimatos <dimatos@gmail.com> * Solve some test problems * Copyright year * Copyright year * Add cases for other ecodes * Add test for all encodings * Cover all encodes test cases with force * Blockinfile force added for other encodes and DS types --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@mbp-de-andre-2.gdl.mex.ibm.com> Co-authored-by: ddimatos <dimatos@gmail.com> * remove duplicate function (#753) * Update branch production branch Main with release v1.5.0 content (#756) (#758) * Bug fix to correct job log returining and other edge cases (#688) * Update zos_job_submit to handle some edge cases and return the job log nearly always * Linting correction * Add changelog fragement for pr 683 * Clean up comments in code * Update restructured text for modules and changelog fragment * Add support to check for security exception to job submit module * Add additonal logic for jobs that use typerun=scan * Update chnagelog fragment * Correct run on sentence --------- * Added additional `D SYMBOLS` example to zos_operator documentation (#729) * Update zos_operator with an additional example * Added change fragment --------- * Ported bugfixes for #664 and #678 * Updated file normalization for sequential datasets * Fixed encoding when a remote source is used * Added encoding normalization for record length (#664) This is a port and tweak from the normalization found in the L2 support branch. Now the normalization applies for both local and remote source files. There is still work left to validate how this behaves when copying to partitioned datasets, and see if the normalization needs to handle directories as well. * Refactored normalization for USS files * Updated line endings normalization in PDSEs * Updated create_temp_with_lf_endings * Added tests for line endings normalization * Added changelog fragment * Add copy lib member test case * Corrected changelog fragment * Add conflict resolution * Update 641-copy-loadlib-member.yml * zos_copy forward port a test case for symbols on a volume (#739) * Add a test case for symbols using an existing volume on our test system * Add changelog fragment * Update 739-zos_copy-volume-symbol-test.yml Update description to use symbols overs special chars. * Update test case with added comments --------- * bring in updates to zos_gather_facts RETURN docstring (#744) * Solve problem 619 about set mode on dest file (#746) * Solve problem 619 about set mode on dest file * Add fragment of 746 PR --------- * Merge release tasks for v1.5.0 into release branch (#752) * Updated metadata * Updated copyright information * Updated copyright in README * Generated changelog * Added previous versions to changelog * Changed a minor_change fragment to bugfix * Updated release notes * Update dependency finder to allow for all tests minus skip tests * Update release notes and changelog with full date formats --------- * remove changelog fragments no longer needed (#754) --------- Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: Ivan Moreno <iamorenosoto@gmail.com> Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> Co-authored-by: Rich Parker <richp405@gmail.com> Co-authored-by: ketankelkar <ktnklkr@gmail.com> Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> * update galaxy and meta/ files Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update copyright years Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update rst doc files Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add 1.6.0-beta.1 release summary fragment Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * generate changelog Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add release notes Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add additional stylizing Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * Delete fragements once changelog is generated Signed-off-by: ddimatos <dimatos@gmail.com> * update stylizing around *none* in zos_volume_init docs Signed-off-by: Ketan… * Staging v1.7.0 beta.2 (#939) * Enhancement/866 archive (#930) * Added action plugin zos_unarchive * Added zos_archive changes * Added zos_unarchive changes * Added zos_archive tests changes * Added test zos_unarchive changes * Added zos_archive changes * fixed pep8 issues * Changed source to src in docs * Added correct copyright year * Updated docs * Added changelog fragments * Updated docs * Updated galaxy.yml * Updated meta * Updated docs * Added zos_gather_facts rst * Added changelog * Added release notes * Changed variable name to avoid shadowing import * Delete 930-archive-post-beta.yml * Delete v1.7.0-beta.2_summary.yml * Staging v1.7.0 merge to main (#1019) * Galaxy 1.7 updates Signed-off-by: ddimatos <dimatos@gmail.com> * Update meta runtime to support ansible-core 2.14 or later Signed-off-by: ddimatos <dimatos@gmail.com> * Update ibm_zos_core_meta.yml with updated version Signed-off-by: ddimatos <dimatos@gmail.com> * Update readme to align to supported ansible versions and new urls Signed-off-by: ddimatos <dimatos@gmail.com> * Added additional sanity ignore files to the exclude list Signed-off-by: ddimatos <dimatos@gmail.com> * Added additional sanity ignore files to the exclude list for ansible-lint. Signed-off-by: ddimatos <dimatos@gmail.com> * Update copyright yrs for source files that were overlooked Signed-off-by: ddimatos <dimatos@gmail.com> * Remove requirements from module doc, rely on offerings minimum requirements, also zoau 1.2.1 never was supported Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog summary for 1.7 Signed-off-by: ddimatos <dimatos@gmail.com> * Adding generated antsibull-changelog release changelog and artifacts Signed-off-by: ddimatos <dimatos@gmail.com> * Remove v1.7.0_summary, its no longer needed Signed-off-by: ddimatos <dimatos@gmail.com> * Update release notes for ac 1.7.0 Signed-off-by: ddimatos <dimatos@gmail.com> * Remove unsupported collection versions requiring a version of zoau that is EOS Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Staging v1.8.0 beta.1 (#1037) * Corrected rule in ignore file * Update 584-zos_lineinfile-error-message.yml * Added missing fragments for issues 309 and 408 * update make file and related artifacts with recent zVM changes (#598) Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Expand what is ingored for venv to all venv's (#613) Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Ansible-core versions with updated ignore file pass (#615) * Validated that ansible-core versions with updated ignore file pass locally Signed-off-by: ddimatos <dimatos@gmail.com> * remove redundant entry on last line Signed-off-by: ddimatos <dimatos@gmail.com> * remove redundant entry on last line Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Update make file to read exported var VENV else default to 'venv' (#614) * Update make file to read exported var VENV else default to 'venv' Signed-off-by: ddimatos <dimatos@gmail.com> * Correct the default from defaultValue to 'venv' Signed-off-by: ddimatos <dimatos@gmail.com> * Bugfix/558/zos copy backup dev (#609) * Create emergency backup only if force is set to False and dest exists * Added Changelog fragment * Updated imports ansible.module_utils._text to ansible.module_utils.common.text.converters (#602) * Updated import in zos_fetch * Updated import in zos_job_submit * Updated import in module_utils/data_set * Updated import in module_utils/System * Updated import in zos_copy * Updated import in zos_fetch * Added changelog fragment * Update enhancement-518-text-converter-import.yml * Updates the makefile and related scripts for the latest mounts (#628) * Update makefile coments and use --ignore for pytest Signed-off-by: ddimatos <dimatos@gmail.com> * lexicographical order targets Signed-off-by: ddimatos <dimatos@gmail.com> * Update makefile and artifacts to support new mounts Signed-off-by: ddimatos <dimatos@gmail.com> * Removing encrypted versions of the scripts, no longer senstive content Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * update profile created for mount points Signed-off-by: ddimatos <dimatos@gmail.com> * Correct f-string usage to remain 2.7 compatible (#659) * Correct f-string usage to remain 2.7 compatible Signed-off-by: ddimatos <dimatos@gmail.com> * Update changelog fragments Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Updated shell scripts for development tooling Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Update collab template Signed-off-by: ddimatos <dimatos@gmail.com> * Update collab template Signed-off-by: ddimatos <dimatos@gmail.com> * Update collab template Signed-off-by: ddimatos <dimatos@gmail.com> * Template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Template updates Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * changed zos_job_query to allow multi/embedded wildcard job_values. job.py/_get_job_status now uses fnmatch to test for wildcarded job_names. added test looking for a wildcard job name. added basic changelog text as 'enhancement'. * expanded wildcard support to job_id as well as job_name corrected error-handling issue in query routine playbook tests complete * cleaned up pep8 issues * Bug fix to correct job log returining and other edge cases (#683) * Update zos_job_submit to handle some edge cases and return the job log nearly always Signed-off-by: ddimatos <dimatos@gmail.com> * Linting correction Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragement for pr 683 Signed-off-by: ddimatos <dimatos@gmail.com> * Clean up comments in code Signed-off-by: ddimatos <dimatos@gmail.com> * Update restructured text for modules and changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Add support to check for security exception to job submit module Signed-off-by: ddimatos <dimatos@gmail.com> * Add additonal logic for jobs that use typerun=scan Signed-off-by: ddimatos <dimatos@gmail.com> * Update chnagelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update zos_job_submit.py Grammar change and correction. --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Added uss_tag_encoding function * Fixing linter issues * removed extraneous comment on query, eliminated unused variable * responding to reviewer comments * Expanded query tests to run a hello jcl file to make sure we are searching for a job id that exists. * Expanded documentation in code to highlight asterisk availability. Added examples showing multi-asterisk search. * Corrected 2 documentation errors * Change to documentation text (indent on multi line string?) * Still trying to get documentation to pass * Looks like '---' was killing documentation block. * Update zos_blockinfile with complex examples (#727) * Update zos_blockinfile with complex examples Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragement Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Added additional D SYMBOLS example to zos_operator documentation (#730) * Update zos_operator with an additional example Signed-off-by: ddimatos <dimatos@gmail.com> * Added changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * zos_gather_facts - add sample output to RETURN docstring (#722) * zos_gather_facts - add sample output to RETURN docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add changelog fragment Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * zos_gather_facts RETURN docstring - add sample sysplex_name and lpar_name values Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * Update generated module doc Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: ddimatos <dimatos@gmail.com> * 574find (#668) * change to allow find loop to continue when one particular data set element is not found This has been validated with a zos-check-find.yaml playbook * added functional test that includes non-existant pds entry, which should be skipped over * added changelog fragment. * zos_blockinfile , can quotes in content can be supported (#680) * Verify coomand of ZOAU support the doble quotes and get better output of message * Verify coomand of ZOAU support the doble quotes and get better output of message * Restore to the one tyme function solving TypeError * Test about cases with quotes supported * Solve comments * Comments in the changelog * Adjust test for working accord the text * A needed space * All in structured * Comments solved * Better order --------- Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> Co-authored-by: Demetri <dimatos@gmail.com> * zos_copy mode is applied to the destination directory, a deviation from the communtiy module behavior. (#723) * Verify coomand of ZOAU support the doble quotes and get better output of message * Verify coomand of ZOAU support the doble quotes and get better output of message * Restore to the one tyme function solving TypeError * Test about cases with quotes supported * Solve comments * Comments in the changelog * Adjust test for working accord the text * Solve the dest functional mode set for the applied to destination directory * Identation and spaces * To work well * To work well * To work well * To work well * Changelogs added * Solved the fragment test and separte the cases --------- Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> * corrected job test case that wanted to extract job id. * changed call to zos_job_query in the functional test. * zos_data_set - add force parameter to enable member delete via disp shr (#718) * add force parameter to enable member delete via disp shr Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update link to PR instead of issue Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * fix minor mistake in func test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update DOCstring for missed items highlighted in PR review Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update copyright years in data set module util Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * Update changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update to doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> * Update to doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> * Update to doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> * update doc, rst Signed-off-by: ddimatos <dimatos@gmail.com> * Update doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> * Added handler for job not found edge cases (None not iterable errors) * corrected pep8 issue (bad indent) * removed tracking text from error/not found messages. * Update zos_job_query.py * Add and automate a load lib test case for module zos_copy (#640) * Modified if statement * Added changelog fragment * Corrected statements Corrected PR in changelog fix lint issue * Adding test case on the work * Added assertion to loadlib test Changed cobol src Added newlines * Correcting cobol src * Removed unnecessary comments * Removed deprint statements for debug * Update 601-copy-loadlib-member.yml * Reduce space primary * Make sure cobol is executable * corrected name * Corrected names * Adding debug statements * modified name in link step * Changing copy name * Removed print statements for debug * Removed previous changelog * Removed unused fragment * Removed test case * Add max_rc support for module zos_tso_command (#666) * rebuilding 565 * fixing pep8 issue * corrected not in code, added functional test to use maxrc * changing maxrc return codes as indicated * updated copy_rexx to incorporate max_rc * pep8 issue on an if statement * changed test to dump intermediate values temporarily * added max_rc to arg params to get it to pass through * removed extra output on test * Added changelog fragment. * Update 565-zos_tsocommand_maxrc.yml * PARTIAL correction based on reviewer's feedback. Will complete later today. * Partial push: change origrc to orig_rc and added documentation. Added experimental loop for multiline results. Changed error case to continue through the commands even after a failure (?!) * Changes based on review feedback: - rc is actual returned value - origrc/orig_rc removed - in a list of commands, first error (considering max_rc) stops the chain - while processing commands, a list of success/failures are kept for error state info - changed listds to a longer, more complete string in example * updated test case for max_rc to use longer, more complete listds command * Minor pep8 and yamllint changes * Updated string builder to use append and join, for efficiency. * Update changelog fragment to correspond to pull request number Signed-off-by: ddimatos <dimatos@gmail.com> * removed old fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Changed output of command-by-command to use max_rc instead of Max RC, for consistency. * clarifying failure message when user hasn't set a max_rc. * corrected date on copyright notice to include current year. * corrected copyright date on test case --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: Demetri <dimatos@gmail.com> * added changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * zos_copy forward port a test case for symbols on a volume #739 (#740) * added test case for symbols on a volume Signed-off-by: ddimatos <dimatos@gmail.com> * Changelog fragment added Signed-off-by: ddimatos <dimatos@gmail.com> * Update test case with added comments Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Volume Initialization Module (#654) * Adding ZOS_INIT plugins The module and action for ZOS_Init command, under zos_ickdsf_command. Designed for version 1.3.1, and example given references removed zos_ssh.py. * Create zos_ickdsf_command.rst Tentative documentation. Does not quite list every return, and program may change to work with overall zos core collection. * Update zos_ickdsf_command.py fixed to pass pep8 sanity test. * Update zos_ickdsf_command.py Edited zos_ickdsf_command.py in action folder to pass pep8 style check. * bugfix - undo hard-coded storagegroup param from command passed to mvs_raw Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update encoding and licensing info Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * edit descriptions, leaving in old to verify messaging with dfsms team Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove references to buildix Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * cleanup examples docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add return docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add some unit tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update dict names, some clean up Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update interface and docs with cleaner descriptions and define default values and required true/false where applicable Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove init param and bring out sub-options one level Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename module from zos_ickdsf_command to zos_ickdsf_init Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove files referring to zos_ickdsf_command; i forgot to untrack these in the previous commit Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove params from intitial definition of results dict which are not always present, remove unused return params, rename message to msg to match ibm_zos_core and ansible style Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add ignore missing gplv3 license warning from sanity testings to existing ignore files Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * style edits to pass sanity tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add in-depth task name to first example docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add minimal params functional test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * convert action plugin to module_util, remove IckdsfCommand and CommandInit class definitions and custom error classes, move convert method to module_util Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename rc param, update func test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add note about disabled verify_offline check Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * removed volume_address checks which can be handled by ansiblemodule parsing validation or ickdsf itself; changed function signature of convert method to include results dicts so it can pass it to fail_json method in the case of module failure Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add invalid volume_address func tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * change param names, organize mvscmd return output Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * consolidate negative func tests, add negative tests for vtoc_tracks param, start list for postive tests to implement Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add func tests for "verify_existing_volid" and "verify_no_data_sets_exist" Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add volid positive and negative func tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add mutual exclusion to no index/sms_managedw with func test, minor edits to comments * add func tests for index param Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add func test for vtoc_tracks param Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove 'addr_range', 'volid_prefix' and 'output_html' params; add example with ansible loop to init multiple serial volumes Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * convert 'content' return param to list for prettier playbook output Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add specific examples to docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add guard rail test with volser check set to ensure target volume is as expected Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * switch target test vol to user02, remove existing data sets in initial test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * start adding author names to docstring, edits of yaml for sanity test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename module to zos_volume_init Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove unit test suite which was not kept up to date with code restructure Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove old/outdated .rst file Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * bring up to speed with v1.5.0-beta.1 which is currently more recent than dev Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add sanity test 2.15 file Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * clean up trailing empty lines/space chars Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * more pep8 style Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * pull in more changes from latest dev branch Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * tweak tests/sanity/ignore-2.10.txt file Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename volume_address to address, rename verify_no_data_sets_exist to verify_volume_empty, rename verify_existing_volid to verify_volid, rename vtoc_tracks to vtoc_size, rename return param command to cmd, make suggested doc changes, switch off check_mode Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * resolve pep8 line too long issue Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add year to copyright, add gh user name to author, restyle true/false with code rather than italics, remove backslashes in docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add changelog fragment Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update zos_fetch test case to create data set instead of using one that gets wiped away by zos_volume_init test cases Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * address PR comments including pep8 styling, docstring changes including add seealso section, add details around authorized usage of ickdsf, some light wordsmithing, and remove cmd from return structure Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add backticks to other special chars in docs for consistency Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * on more formatting tweak to national chars in docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add new param to support tmp_hlq Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * extend tmphlq binding to sysprint(*), disable verbose mode for mvscmd call Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove references to betterargparser - it cannot be used in its current state as it is too restrictive on volume serial and qualifier args Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove soft language ie beware, please; reduce line lengths, fix a spelling error Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * reorganize test case code for more logical grouping and un-nesting from a for loop, also update copyright year that was missed Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update display command check to only check particular lines instead of entire output in test_volid_address_assigned_correctly which would always be positive when tested against the entire output Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: trevor-glassey <Trevor.Glassey@ibm.com> * Ported bugfixes for #664 and #678 from 1.5.0 (#743) * Ported bugfixes for #664 and #678 * Updated file normalization for sequential datasets * Fixed encoding when a remote source is used * Added encoding normalization for record length (#664) This is a port and tweak from the normalization found in the L2 support branch. Now the normalization applies for both local and remote source files. There is still work left to validate how this behaves when copying to partitioned datasets, and see if the normalization needs to handle directories as well. * Refactored normalization for USS files * Updated line endings normalization in PDSEs * Updated create_temp_with_lf_endings * Added tests for line endings normalization * Added changelog fragment * Updated changelog fragment * Updated copyright information * Added comments to record length tests * Added more context to the record length normalization * Add force (disp=shr) option to zos_lineinfile and update zos_blockinfile with the same locking test case. (#731) * Add DISP=SHR by function force to the user * Add helper function for force cases only for EOF inserts * Test cases added * Just to check in the change case * All test cases for force in zos_lineinfile working properly * Test cases for force blockinfile * Fixes for style of code * One identation error * One identation error * One identation error * Documentation error * All test cases for line infile and block infile * Comments * Change the stdout to assert with expected of ds * Error of test case * Fragment added * Solve typo errors * Update to remove uncessary comment Signed-off-by: ddimatos <dimatos@gmail.com> * Update changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update module with force comments Signed-off-by: ddimatos <dimatos@gmail.com> * Solve some test problems * Copyright year * Copyright year * Add cases for other ecodes * Add test for all encodings * Cover all encodes test cases with force * Blockinfile force added for other encodes and DS types --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@mbp-de-andre-2.gdl.mex.ibm.com> Co-authored-by: ddimatos <dimatos@gmail.com> * remove duplicate function (#753) * Update branch production branch Main with release v1.5.0 content (#756) (#758) * Bug fix to correct job log returining and other edge cases (#688) * Update zos_job_submit to handle some edge cases and return the job log nearly always * Linting correction * Add changelog fragement for pr 683 * Clean up comments in code * Update restructured text for modules and changelog fragment * Add support to check for security exception to job submit module * Add additonal logic for jobs that use typerun=scan * Update chnagelog fragment * Correct run on sentence --------- * Added additional `D SYMBOLS` example to zos_operator documentation (#729) * Update zos_operator with an additional example * Added change fragment --------- * Ported bugfixes for #664 and #678 * Updated file normalization for sequential datasets * Fixed encoding when a remote source is used * Added encoding normalization for record length (#664) This is a port and tweak from the normalization found in the L2 support branch. Now the normalization applies for both local and remote source files. There is still work left to validate how this behaves when copying to partitioned datasets, and see if the normalization needs to handle directories as well. * Refactored normalization for USS files * Updated line endings normalization in PDSEs * Updated create_temp_with_lf_endings * Added tests for line endings normalization * Added changelog fragment * Add copy lib member test case * Corrected changelog fragment * Add conflict resolution * Update 641-copy-loadlib-member.yml * zos_copy forward port a test case for symbols on a volume (#739) * Add a test case for symbols using an existing volume on our test system * Add changelog fragment * Update 739-zos_copy-volume-symbol-test.yml Update description to use symbols overs special chars. * Update test case with added comments --------- * bring in updates to zos_gather_facts RETURN docstring (#744) * Solve problem 619 about set mode on dest file (#746) * Solve problem 619 about set mode on dest file * Add fragment of 746 PR --------- * Merge release tasks for v1.5.0 into release branch (#752) * Updated metadata * Updated copyright information * Updated copyright in README * Generated changelog * Added previous versions to changelog * Changed a minor_change fragment to bugfix * Updated release notes * Update dependency finder to allow for all tests minus skip tests * Update release notes and changelog with full date formats --------- * remove changelog fragments no longer needed (#754) --------- Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: Ivan Moreno <iamorenosoto@gmail.com> Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> Co-authored-by: Rich Parker <richp405@gmail.com> Co-authored-by: ketankelkar <ktnklkr@gmail.com> Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> * Merge master to dev for 1.6.0 beta.1 (#763) * Update branch production branch Main with release v1.5.0 content (#756) * Bug fix to correct job log returining and other edge cases (#688) * Update zos_job_submit to handle some edge cases and return the job log nearly always Signed-off-by: ddimatos <dimatos@gmail.com> * Linting correction Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragement for pr 683 Signed-off-by: ddimatos <dimatos@gmail.com> * Clean up comments in code Signed-off-by: ddimatos <dimatos@gmail.com> * Update restructured text for modules and changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Add support to check for security exception to job submit module Signed-off-by: ddimatos <dimatos@gmail.com> * Add additonal logic for jobs that use typerun=scan Signed-off-by: ddimatos <dimatos@gmail.com> * Update chnagelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Correct run on sentence Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Added additional `D SYMBOLS` example to zos_operator documentation (#729) * Update zos_operator with an additional example Signed-off-by: ddimatos <dimatos@gmail.com> * Added change fragment Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Ported bugfixes for #664 and #678 * Updated file normalization for sequential datasets * Fixed encoding when a remote source is used * Added encoding normalization for record length (#664) This is a port and tweak from the normalization found in the L2 support branch. Now the normalization applies for both local and remote source files. There is still work left to validate how this behaves when copying to partitioned datasets, and see if the normalization needs to handle directories as well. * Refactored normalization for USS files * Updated line endings normalization in PDSEs * Updated create_temp_with_lf_endings * Added tests for line endings normalization * Added changelog fragment * Add copy lib member test case * Corrected changelog fragment * Add conflict resolution * Update 641-copy-loadlib-member.yml * zos_copy forward port a test case for symbols on a volume (#739) * Add a test case for symbols using an existing volume on our test system Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update 739-zos_copy-volume-symbol-test.yml Update description to use symbols overs special chars. * Update test case with added comments Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * bring in updates to zos_gather_facts RETURN docstring (#744) Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * Solve problem 619 about set mode on dest file (#746) * Solve problem 619 about set mode on dest file * Add fragment of 746 PR --------- Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> * Merge release tasks for v1.5.0 into release branch (#752) * Updated metadata * Updated copyright information * Updated copyright in README * Generated changelog * Added previous versions to changelog * Changed a minor_change fragment to bugfix * Updated release notes * Update dependency finder to allow for all tests minus skip tests Signed-off-by: ddimatos <dimatos@gmail.com> * Update release notes and changelog with full date formats Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: ddimatos <dimatos@gmail.com> * remove changelog fragments no longer needed (#754) Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: Ivan Moreno <iamorenosoto@gmail.com> Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> Co-authored-by: Rich Parker <richp405@gmail.com> Co-authored-by: ketankelkar <ktnklkr@gmail.com> Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> * v1.6.0-beta.1 release (#759) * Added tagging of converted files * Updated tests for zos_encode * Added restoration of PDS/PDSE members when module fails zos_copy can now track which members got overwritten or newly added to a partitioned data set, and restore them accordingly. This commit includes a refactorization of the copy_to_pdse method to simplify the restoration work. * Fixed cleanup of dest when module fails * Removed exception used for debugging * Added pytest markers * solved pep8 issue * Added more information to error when restoration fails * Update make file with new features to manage the nodes (#563) * Update make file with new features to manage the nodes Signed-off-by: ddimatos <dimatos@gmail.com> * Correct typo Signed-off-by: ddimatos <dimatos@gmail.com> * updated makefile to allow for no password in the clear and added a new host Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Fix a bug that failed to copy the cert to the managed node (#577) Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Added apropiate error message in zos_lineinfile when src is not found * Better error message * Added fragment for changelog * Update git issue slection field (#593) * Update git issue slection field * Added some additional future versions * Added rule to ignore python 2.7 compile not supporting f strings * Corrected rule in ignore file * Update 584-zos_lineinfile-error-message.yml * Added missing fragments for issues 309 and 408 * update make file and related artifacts with recent zVM changes (#598) Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Expand what is ingored for venv to all venv's (#613) Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Ansible-core versions with updated ignore file pass (#615) * Validated that ansible-core versions with updated ignore file pass locally Signed-off-by: ddimatos <dimatos@gmail.com> * remove redundant entry on last line Signed-off-by: ddimatos <dimatos@gmail.com> * remove redundant entry on last line Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Update make file to read exported var VENV else default to 'venv' (#614) * Update make file to read exported var VENV else default to 'venv' Signed-off-by: ddimatos <dimatos@gmail.com> * Correct the default from defaultValue to 'venv' Signed-off-by: ddimatos <dimatos@gmail.com> * Bugfix/558/zos copy backup dev (#609) * Create emergency backup only if force is set to False and dest exists * Added Changelog fragment * Updated imports ansible.module_utils._text to ansible.module_utils.common.text.converters (#602) * Updated import in zos_fetch * Updated import in zos_job_submit * Updated import in module_utils/data_set * Updated import in module_utils/System * Updated import in zos_copy * Updated import in zos_fetch * Added changelog fragment * Update enhancement-518-text-converter-import.yml * Updates the makefile and related scripts for the latest mounts (#628) * Update makefile coments and use --ignore for pytest Signed-off-by: ddimatos <dimatos@gmail.com> * lexicographical order targets Signed-off-by: ddimatos <dimatos@gmail.com> * Update makefile and artifacts to support new mounts Signed-off-by: ddimatos <dimatos@gmail.com> * Removing encrypted versions of the scripts, no longer senstive content Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * update profile created for mount points Signed-off-by: ddimatos <dimatos@gmail.com> * Correct f-string usage to remain 2.7 compatible (#659) * Correct f-string usage to remain 2.7 compatible Signed-off-by: ddimatos <dimatos@gmail.com> * Update changelog fragments Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Updated shell scripts for development tooling Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Update collab template Signed-off-by: ddimatos <dimatos@gmail.com> * Update collab template Signed-off-by: ddimatos <dimatos@gmail.com> * Update collab template Signed-off-by: ddimatos <dimatos@gmail.com> * Template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Template updates Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * changed zos_job_query to allow multi/embedded wildcard job_values. job.py/_get_job_status now uses fnmatch to test for wildcarded job_names. added test looking for a wildcard job name. added basic changelog text as 'enhancement'. * expanded wildcard support to job_id as well as job_name corrected error-handling issue in query routine playbook tests complete * cleaned up pep8 issues * Bug fix to correct job log returining and other edge cases (#683) * Update zos_job_submit to handle some edge cases and return the job log nearly always Signed-off-by: ddimatos <dimatos@gmail.com> * Linting correction Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragement for pr 683 Signed-off-by: ddimatos <dimatos@gmail.com> * Clean up comments in code Signed-off-by: ddimatos <dimatos@gmail.com> * Update restructured text for modules and changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Add support to check for security exception to job submit module Signed-off-by: ddimatos <dimatos@gmail.com> * Add additonal logic for jobs that use typerun=scan Signed-off-by: ddimatos <dimatos@gmail.com> * Update chnagelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update zos_job_submit.py Grammar change and correction. --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Added uss_tag_encoding function * Fixing linter issues * removed extraneous comment on query, eliminated unused variable * responding to reviewer comments * Expanded query tests to run a hello jcl file to make sure we are searching for a job id that exists. * Expanded documentation in code to highlight asterisk availability. Added examples showing multi-asterisk search. * Corrected 2 documentation errors * Change to documentation text (indent on multi line string?) * Still trying to get documentation to pass * Looks like '---' was killing documentation block. * Update zos_blockinfile with complex examples (#727) * Update zos_blockinfile with complex examples Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragement Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Added additional D SYMBOLS example to zos_operator documentation (#730) * Update zos_operator with an additional example Signed-off-by: ddimatos <dimatos@gmail.com> * Added changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * zos_gather_facts - add sample output to RETURN docstring (#722) * zos_gather_facts - add sample output to RETURN docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add changelog fragment Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * zos_gather_facts RETURN docstring - add sample sysplex_name and lpar_name values Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * Update generated module doc Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: ddimatos <dimatos@gmail.com> * 574find (#668) * change to allow find loop to continue when one particular data set element is not found This has been validated with a zos-check-find.yaml playbook * added functional test that includes non-existant pds entry, which should be skipped over * added changelog fragment. * zos_blockinfile , can quotes in content can be supported (#680) * Verify coomand of ZOAU support the doble quotes and get better output of message * Verify coomand of ZOAU support the doble quotes and get better output of message * Restore to the one tyme function solving TypeError * Test about cases with quotes supported * Solve comments * Comments in the changelog * Adjust test for working accord the text * A needed space * All in structured * Comments solved * Better order --------- Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> Co-authored-by: Demetri <dimatos@gmail.com> * zos_copy mode is applied to the destination directory, a deviation from the communtiy module behavior. (#723) * Verify coomand of ZOAU support the doble quotes and get better output of message * Verify coomand of ZOAU support the doble quotes and get better output of message * Restore to the one tyme function solving TypeError * Test about cases with quotes supported * Solve comments * Comments in the changelog * Adjust test for working accord the text * Solve the dest functional mode set for the applied to destination directory * Identation and spaces * To work well * To work well * To work well * To work well * Changelogs added * Solved the fragment test and separte the cases --------- Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> * corrected job test case that wanted to extract job id. * changed call to zos_job_query in the functional test. * zos_data_set - add force parameter to enable member delete via disp shr (#718) * add force parameter to enable member delete via disp shr Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update link to PR instead of issue Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * fix minor mistake in func test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update DOCstring for missed items highlighted in PR review Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update copyright years in data set module util Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * Update changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update to doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> * Update to doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> * Update to doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> * update doc, rst Signed-off-by: ddimatos <dimatos@gmail.com> * Update doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> * Added handler for job not found edge cases (None not iterable errors) * corrected pep8 issue (bad indent) * removed tracking text from error/not found messages. * Update zos_job_query.py * Add and automate a load lib test case for module zos_copy (#640) * Modified if statement * Added changelog fragment * Corrected statements Corrected PR in changelog fix lint issue * Adding test case on the work * Added assertion to loadlib test Changed cobol src Added newlines * Correcting cobol src * Removed unnecessary comments * Removed deprint statements for debug * Update 601-copy-loadlib-member.yml * Reduce space primary * Make sure cobol is executable * corrected name * Corrected names * Adding debug statements * modified name in link step * Changing copy name * Removed print statements for debug * Removed previous changelog * Removed unused fragment * Removed test case * Add max_rc support for module zos_tso_command (#666) * rebuilding 565 * fixing pep8 issue * corrected not in code, added functional test to use maxrc * changing maxrc return codes as indicated * updated copy_rexx to incorporate max_rc * pep8 issue on an if statement * changed test to dump intermediate values temporarily * added max_rc to arg params to get it to pass through * removed extra output on test * Added changelog fragment. * Update 565-zos_tsocommand_maxrc.yml * PARTIAL correction based on reviewer's feedback. Will complete later today. * Partial push: change origrc to orig_rc and added documentation. Added experimental loop for multiline results. Changed error case to continue through the commands even after a failure (?!) * Changes based on review feedback: - rc is actual returned value - origrc/orig_rc removed - in a list of commands, first error (considering max_rc) stops the chain - while processing commands, a list of success/failures are kept for error state info - changed listds to a longer, more complete string in example * updated test case for max_rc to use longer, more complete listds command * Minor pep8 and yamllint changes * Updated string builder to use append and join, for efficiency. * Update changelog fragment to correspond to pull request number Signed-off-by: ddimatos <dimatos@gmail.com> * removed old fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Changed output of command-by-command to use max_rc instead of Max RC, for consistency. * clarifying failure message when user hasn't set a max_rc. * corrected date on copyright notice to include current year. * corrected copyright date on test case --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: Demetri <dimatos@gmail.com> * added changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * zos_copy forward port a test case for symbols on a volume #739 (#740) * added test case for symbols on a volume Signed-off-by: ddimatos <dimatos@gmail.com> * Changelog fragment added Signed-off-by: ddimatos <dimatos@gmail.com> * Update test case with added comments Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Volume Initialization Module (#654) * Adding ZOS_INIT plugins The module and action for ZOS_Init command, under zos_ickdsf_command. Designed for version 1.3.1, and example given references removed zos_ssh.py. * Create zos_ickdsf_command.rst Tentative documentation. Does not quite list every return, and program may change to work with overall zos core collection. * Update zos_ickdsf_command.py fixed to pass pep8 sanity test. * Update zos_ickdsf_command.py Edited zos_ickdsf_command.py in action folder to pass pep8 style check. * bugfix - undo hard-coded storagegroup param from command passed to mvs_raw Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update encoding and licensing info Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * edit descriptions, leaving in old to verify messaging with dfsms team Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove references to buildix Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * cleanup examples docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add return docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add some unit tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update dict names, some clean up Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update interface and docs with cleaner descriptions and define default values and required true/false where applicable Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove init param and bring out sub-options one level Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename module from zos_ickdsf_command to zos_ickdsf_init Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove files referring to zos_ickdsf_command; i forgot to untrack these in the previous commit Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove params from intitial definition of results dict which are not always present, remove unused return params, rename message to msg to match ibm_zos_core and ansible style Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add ignore missing gplv3 license warning from sanity testings to existing ignore files Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * style edits to pass sanity tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add in-depth task name to first example docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add minimal params functional test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * convert action plugin to module_util, remove IckdsfCommand and CommandInit class definitions and custom error classes, move convert method to module_util Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename rc param, update func test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add note about disabled verify_offline check Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * removed volume_address checks which can be handled by ansiblemodule parsing validation or ickdsf itself; changed function signature of convert method to include results dicts so it can pass it to fail_json method in the case of module failure Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add invalid volume_address func tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * change param names, organize mvscmd return output Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * consolidate negative func tests, add negative tests for vtoc_tracks param, start list for postive tests to implement Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add func tests for "verify_existing_volid" and "verify_no_data_sets_exist" Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add volid positive and negative func tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add mutual exclusion to no index/sms_managedw with func test, minor edits to comments * add func tests for index param Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add func test for vtoc_tracks param Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove 'addr_range', 'volid_prefix' and 'output_html' params; add example with ansible loop to init multiple serial volumes Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * convert 'content' return param to list for prettier playbook output Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add specific examples to docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add guard rail test with volser check set to ensure target volume is as expected Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * switch target test vol to user02, remove existing data sets in initial test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * start adding author names to docstring, edits of yaml for sanity test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename module to zos_volume_init Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove unit test suite which was not kept up to date with code restructure Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove old/outdated .rst file Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * bring up to speed with v1.5.0-beta.1 which is currently more recent than dev Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add sanity test 2.15 file Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * clean up trailing empty lines/space chars Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * more pep8 style Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * pull in more changes from latest dev branch Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * tweak tests/sanity/ignore-2.10.txt file Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename volume_address to address, rename verify_no_data_sets_exist to verify_volume_empty, rename verify_existing_volid to verify_volid, rename vtoc_tracks to vtoc_size, rename return param command to cmd, make suggested doc changes, switch off check_mode Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * resolve pep8 line too long issue Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add year to copyright, add gh user name to author, restyle true/false with code rather than italics, remove backslashes in docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add changelog fragment Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update zos_fetch test case to create data set instead of using one that gets wiped away by zos_volume_init test cases Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * address PR comments including pep8 styling, docstring changes including add seealso section, add details around authorized usage of ickdsf, some light wordsmithing, and remove cmd from return structure Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add backticks to other special chars in docs for consistency Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * on more formatting tweak to national chars in docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add new param to support tmp_hlq Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * extend tmphlq binding to sysprint(*), disable verbose mode for mvscmd call Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove references to betterargparser - it cannot be used in its current state as it is too restrictive on volume serial and qualifier args Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove soft language ie beware, please; reduce line lengths, fix a spelling error Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * reorganize test case code for more logical grouping and un-nesting from a for loop, also update copyright year that was missed Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update display command check to only check particular lines instead of entire output in test_volid_address_assigned_correctly which would always be positive when tested against the entire output Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: trevor-glassey <Trevor.Glassey@ibm.com> * Ported bugfixes for #664 and #678 from 1.5.0 (#743) * Ported bugfixes for #664 and #678 * Updated file normalization for sequential datasets * Fixed encoding when a remote source is used * Added encoding normalization for record length (#664) This is a port and tweak from the normalization found in the L2 support branch. Now the normalization applies for both local and remote source files. There is still work left to validate how this behaves when copying to partitioned datasets, and see if the normalization needs to handle directories as well. * Refactored normalization for USS files * Updated line endings normalization in PDSEs * Updated create_temp_with_lf_endings * Added tests for line endings normalization * Added changelog fragment * Updated changelog fragment * Updated copyright information * Added comments to record length tests * Added more context to the record length normalization * Add force (disp=shr) option to zos_lineinfile and update zos_blockinfile with the same locking test case. (#731) * Add DISP=SHR by function force to the user * Add helper function for force cases only for EOF inserts * Test cases added * Just to check in the change case * All test cases for force in zos_lineinfile working properly * Test cases for force blockinfile * Fixes for style of code * One identation error * One identation error * One identation error * Documentation error * All test cases for line infile and block infile * Comments * Change the stdout to assert with expected of ds * Error of test case * Fragment added * Solve typo errors * Update to remove uncessary comment Signed-off-by: ddimatos <dimatos@gmail.com> * Update changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update module with force comments Signed-off-by: ddimatos <dimatos@gmail.com> * Solve some test problems * Copyright year * Copyright year * Add cases for other ecodes * Add test for all encodings * Cover all encodes test cases with force * Blockinfile force added for other encodes and DS types --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@mbp-de-andre-2.gdl.mex.ibm.com> Co-authored-by: ddimatos <dimatos@gmail.com> * remove duplicate function (#753) * Update branch production branch Main with release v1.5.0 content (#756) (#758) * Bug fix to correct job log returining and other edge cases (#688) * Update zos_job_submit to handle some edge cases and return the job log nearly always * Linting correction * Add changelog fragement for pr 683 * Clean up comments in code * Update restructured text for modules and changelog fragment * Add support to check for security exception to job submit module * Add additonal logic for jobs that use typerun=scan * Update chnagelog fragment * Correct run on sentence --------- * Added additional `D SYMBOLS` example to zos_operator documentation (#729) * Update zos_operator with an additional example * Added change fragment --------- * Ported bugfixes for #664 and #678 * Updated file normalization for sequential datasets * Fixed encoding when a remote source is used * Added encoding normalization for record length (#664) This is a port and tweak from the normalization found in the L2 support branch. Now the normalization applies for both local and remote source files. There is still work left to validate how this behaves when copying to partitioned datasets, and see if the normalization needs to handle directories as well. * Refactored normalization for USS files * Updated line endings normalization in PDSEs * Updated create_temp_with_lf_endings * Added tests for line endings normalization * Added changelog fragment * Add copy lib member test case * Corrected changelog fragment * Add conflict resolution * Update 641-copy-loadlib-member.yml * zos_copy forward port a test case for symbols on a volume (#739) * Add a test case for symbols using an existing volume on our test system * Add changelog fragment * Update 739-zos_copy-volume-symbol-test.yml Update description to use symbols overs special chars. * Update test case with added comments --------- * bring in updates to zos_gather_facts RETURN docstring (#744) * Solve problem 619 about set mode on dest file (#746) * Solve problem 619 about set mode on dest file * Add fragment of 746 PR --------- * Merge release tasks for v1.5.0 into release branch (#752) * Updated metadata * Updated copyright information * Updated copyright in README * Generated changelog * Added previous versions to changelog * Changed a minor_change fragment to bugfix * Updated release notes * Update dependency finder to allow for all tests minus skip tests * Update release notes and changelog with full date formats --------- * remove changelog fragments no longer needed (#754) --------- Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: Ivan Moreno <iamorenosoto@gmail.com> Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> Co-authored-by: Rich Parker <richp405@gmail.com> Co-authored-by: ketankelkar <ktnklkr@gmail.com> Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> * update galaxy and meta/ files Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update copyright years Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update rst doc files Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add 1.6.0-beta.1 release summary fragment Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * generate changelog Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add release notes Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add additional stylizing Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * Delete fragements once changelog is generated Signed-off-by: ddimatos <dimatos@gmail.com> * update stylizing around *none* in zos_volume_init docs Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: Ivan Moreno <iamorenosoto@gmail.com> Co-authored-by: Oscar Fernando Flores Garcia <fernandofloresdev@gmail.com> Co-authored-by: Demetri <dimatos@gmail.com> Co-authored-by: Rich Parker <richp405@gmail.com> Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> Co-authored-by: trevor-glassey <Trevor.Glassey@ibm.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@mbp-de-andre-2.gdl.mex.ibm.com> * Update ibm_zos_core_meta.yml --------- Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: Ivan Moreno <iamorenosoto@gmail.com> Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> Co-authored-by: Rich Parker <richp405@gmail.com> Co-authored-by: ketankelkar <ktnklkr@gmail.com> Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> Co-authored-by: trevor-glassey <Trevor.Glassey@ibm.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@mbp-de-andre-2.gdl.mex.ibm.com> * Bugfix/619/mode set for files applied test case (#757) * Add test case for copy d… * Merge staging v1.8.0 into main branch (#1093) * [v1.8.0][Backport] Clean temporary data sets created during XMIT unarchive operation (#1054) * Clean temporary data sets created during XMIT unarchive operation (#1049) * Added a temp cleanup * Added changelog * Modified changelog * Added removal of src if remote_src is False Signed-off-by: Fernando Flores <fernandofloresdev@gmail.com> * Modified changelog fragments --------- Signed-off-by: Fernando Flores <fernandofloresdev@gmail.com> * Update 1049-xmit-temporary-data-sets.yml modified PR number --------- Signed-off-by: Fernando Flores <fernandofloresdev@gmail.com> * Cherry picked v1.8.0 (#1063) * Bug 1041 zos submit job honor return output literally (#1058) * initial commit to pass return_output to job_output. * corrected fragment name to match branch * tweaked data set test to show result values if positive test fails * removed trace in zos_data_set, and added trace output to job_submit * removed extra text from functional testing. * put in correct PR number in changelog fragment. * changed trivial to minor_changes, added documentation to dd_scan in job:job_output. * 1043 bug title zos operator is passing wrong value to zoauopercmd (#1044) * corrected kwarg index value from 'wait_arg' to 'wait' Also corrected true/false issue in zoaq * Added and updated changelog. * update PR number in changelog fragment * changed test from \$ to \\$ to eliminate warning * added blocking test to maks sure minimum wait is reached in zoau>1.2.4.5 * removed the else condition from the blocking test, since it is not needed. * corrected tense grammer in changelog fragment * corrected capitalization of ZOAU in changelog fragment. * updated changelog to point to the backport PR * [v1.8.0] [Backport] [zos_script] remote_tmp for zos_script (#1068) * Enabler/1024/remote_tmp for zos_script (#1060) * Changed tmp_path for Ansible's remote_tmp * Remove tmp_path from module's options * Update module documentation * Remove tmp_path test case * Update zos_script's RST file * Add changelog fragment * Updated module examples * Update changelog fragment * [v1.8.0][zos_job_submit] Removes tmp files left behind by zos_job_submit (#1070) * Ensure proper cleanup for ansiballz * Added proper removal of AnsiballZ * [v1.8.0][zos_copy][backport] File wrongly modified after second copy (#1069) * [zos_copy] Files corrupted after second copy (#1064) * Initial change to replace shutil.copy * Added fix for corrupted directory copies * Added changelog fragment * Modified docstring and fixed copy_tree * Added punctiation * Added copystat * Added set mode for dirs * Update 1064-corruped-second-copy.yml * Updated changelog * [v1.8.0] [backport] [Documentation] [zos_tso_command] Add REXX exec example (#1072) * [Documentation] [zos_tso_command] Add REXX exec example (#1065) * Add REXX exec example * Add fragment * Update module documentation * Fix PR link * Reword example task name * Updated REXX example * Update changelog fragment * Update RST file --------- Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> * [v1.8.0] [Backport] [zos_copy] mvs to non existent mvs copy verify destination attrs match (#1067) * Add changes * Add fragment * Modify fragment * Modify fragment * [zos_copy] fix for executables copied from local fail with iconv error (#1079) * Added fix for executables copied from local and test * Added changelog * update link in managed_node doc (#1089) * update link in managed_node doc Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * Updated docs regarding managed node Signed-off-by: ddimatos <dimatos@gmail.com> * Doc fragment Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: ddimatos <dimatos@gmail.com> * Merge staging-v1.8.0-tasks into staging-v1.8.0 (#1090) * Modified galaxy version * mofidied meta * Update copyright year * Generated module docs * Created changelog * Removed changelog fragments * Updated changelog and release notes * Fixed newline sequences * Update CHANGELOG.rst * Update CHANGELOG.rst * Corrected release notes --------- Signed-off-by: Fernando Flores <fernandofloresdev@gmail.com> Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: Rich Parker <richp405@gmail.com> Co-authored-by: Ivan Moreno <iamorenosoto@gmail.com> Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> Co-authored-by: ketankelkar <ktnklkr@gmail.com> Co-authored-by: ddimatos <dimatos@gmail.com> * Merge Staging release v1.9.0 beta.1 into main (#1205) * Add max_rc support for module zos_tso_command (#666) * rebuilding 565 * fixing pep8 issue * corrected not in code, added functional test to use maxrc * changing maxrc return codes as indicated * updated copy_rexx to incorporate max_rc * pep8 issue on an if statement * changed test to dump intermediate values temporarily * added max_rc to arg params to get it to pass through * removed extra output on test * Added changelog fragment. * Update 565-zos_tsocommand_maxrc.yml * PARTIAL correction based on reviewer's feedback. Will complete later today. * Partial push: change origrc to orig_rc and added documentation. Added experimental loop for multiline results. Changed error case to continue through the commands even after a failure (?!) * Changes based on review feedback: - rc is actual returned value - origrc/orig_rc removed - in a list of commands, first error (considering max_rc) stops the chain - while processing commands, a list of success/failures are kept for error state info - changed listds to a longer, more complete string in example * updated test case for max_rc to use longer, more complete listds command * Minor pep8 and yamllint changes * Updated string builder to use append and join, for efficiency. * Update changelog fragment to correspond to pull request number Signed-off-by: ddimatos <dimatos@gmail.com> * removed old fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Changed output of command-by-command to use max_rc instead of Max RC, for consistency. * clarifying failure message when user hasn't set a max_rc. * corrected date on copyright notice to include current year. * corrected copyright date on test case --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: Demetri <dimatos@gmail.com> * added changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * zos_copy forward port a test case for symbols on a volume #739 (#740) * added test case for symbols on a volume Signed-off-by: ddimatos <dimatos@gmail.com> * Changelog fragment added Signed-off-by: ddimatos <dimatos@gmail.com> * Update test case with added comments Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Volume Initialization Module (#654) * Adding ZOS_INIT plugins The module and action for ZOS_Init command, under zos_ickdsf_command. Designed for version 1.3.1, and example given references removed zos_ssh.py. * Create zos_ickdsf_command.rst Tentative documentation. Does not quite list every return, and program may change to work with overall zos core collection. * Update zos_ickdsf_command.py fixed to pass pep8 sanity test. * Update zos_ickdsf_command.py Edited zos_ickdsf_command.py in action folder to pass pep8 style check. * bugfix - undo hard-coded storagegroup param from command passed to mvs_raw Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update encoding and licensing info Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * edit descriptions, leaving in old to verify messaging with dfsms team Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove references to buildix Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * cleanup examples docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add return docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add some unit tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update dict names, some clean up Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update interface and docs with cleaner descriptions and define default values and required true/false where applicable Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove init param and bring out sub-options one level Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename module from zos_ickdsf_command to zos_ickdsf_init Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove files referring to zos_ickdsf_command; i forgot to untrack these in the previous commit Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove params from intitial definition of results dict which are not always present, remove unused return params, rename message to msg to match ibm_zos_core and ansible style Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add ignore missing gplv3 license warning from sanity testings to existing ignore files Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * style edits to pass sanity tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add in-depth task name to first example docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add minimal params functional test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * convert action plugin to module_util, remove IckdsfCommand and CommandInit class definitions and custom error classes, move convert method to module_util Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename rc param, update func test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add note about disabled verify_offline check Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * removed volume_address checks which can be handled by ansiblemodule parsing validation or ickdsf itself; changed function signature of convert method to include results dicts so it can pass it to fail_json method in the case of module failure Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add invalid volume_address func tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * change param names, organize mvscmd return output Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * consolidate negative func tests, add negative tests for vtoc_tracks param, start list for postive tests to implement Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add func tests for "verify_existing_volid" and "verify_no_data_sets_exist" Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add volid positive and negative func tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add mutual exclusion to no index/sms_managedw with func test, minor edits to comments * add func tests for index param Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add func test for vtoc_tracks param Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove 'addr_range', 'volid_prefix' and 'output_html' params; add example with ansible loop to init multiple serial volumes Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * convert 'content' return param to list for prettier playbook output Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add specific examples to docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add guard rail test with volser check set to ensure target volume is as expected Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * switch target test vol to user02, remove existing data sets in initial test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * start adding author names to docstring, edits of yaml for sanity test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename module to zos_volume_init Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove unit test suite which was not kept up to date with code restructure Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove old/outdated .rst file Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * bring up to speed with v1.5.0-beta.1 which is currently more recent than dev Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add sanity test 2.15 file Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * clean up trailing empty lines/space chars Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * more pep8 style Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * pull in more changes from latest dev branch Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * tweak tests/sanity/ignore-2.10.txt file Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename volume_address to address, rename verify_no_data_sets_exist to verify_volume_empty, rename verify_existing_volid to verify_volid, rename vtoc_tracks to vtoc_size, rename return param command to cmd, make suggested doc changes, switch off check_mode Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * resolve pep8 line too long issue Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add year to copyright, add gh user name to author, restyle true/false with code rather than italics, remove backslashes in docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add changelog fragment Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update zos_fetch test case to create data set instead of using one that gets wiped away by zos_volume_init test cases Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * address PR comments including pep8 styling, docstring changes including add seealso section, add details around authorized usage of ickdsf, some light wordsmithing, and remove cmd from return structure Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add backticks to other special chars in docs for consistency Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * on more formatting tweak to national chars in docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add new param to support tmp_hlq Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * extend tmphlq binding to sysprint(*), disable verbose mode for mvscmd call Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove references to betterargparser - it cannot be used in its current state as it is too restrictive on volume serial and qualifier args Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove soft language ie beware, please; reduce line lengths, fix a spelling error Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * reorganize test case code for more logical grouping and un-nesting from a for loop, also update copyright year that was missed Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update display command check to only check particular lines instead of entire output in test_volid_address_assigned_correctly which would always be positive when tested against the entire output Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: trevor-glassey <Trevor.Glassey@ibm.com> * Ported bugfixes for #664 and #678 from 1.5.0 (#743) * Ported bugfixes for #664 and #678 * Updated file normalization for sequential datasets * Fixed encoding when a remote source is used * Added encoding normalization for record length (#664) This is a port and tweak from the normalization found in the L2 support branch. Now the normalization applies for both local and remote source files. There is still work left to validate how this behaves when copying to partitioned datasets, and see if the normalization needs to handle directories as well. * Refactored normalization for USS files * Updated line endings normalization in PDSEs * Updated create_temp_with_lf_endings * Added tests for line endings normalization * Added changelog fragment * Updated changelog fragment * Updated copyright information * Added comments to record length tests * Added more context to the record length normalization * Add force (disp=shr) option to zos_lineinfile and update zos_blockinfile with the same locking test case. (#731) * Add DISP=SHR by function force to the user * Add helper function for force cases only for EOF inserts * Test cases added * Just to check in the change case * All test cases for force in zos_lineinfile working properly * Test cases for force blockinfile * Fixes for style of code * One identation error * One identation error * One identation error * Documentation error * All test cases for line infile and block infile * Comments * Change the stdout to assert with expected of ds * Error of test case * Fragment added * Solve typo errors * Update to remove uncessary comment Signed-off-by: ddimatos <dimatos@gmail.com> * Update changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update module with force comments Signed-off-by: ddimatos <dimatos@gmail.com> * Solve some test problems * Copyright year * Copyright year * Add cases for other ecodes * Add test for all encodings * Cover all encodes test cases with force * Blockinfile force added for other encodes and DS types --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@mbp-de-andre-2.gdl.mex.ibm.com> Co-authored-by: ddimatos <dimatos@gmail.com> * remove duplicate function (#753) * Update branch production branch Main with release v1.5.0 content (#756) (#758) * Bug fix to correct job log returining and other edge cases (#688) * Update zos_job_submit to handle some edge cases and return the job log nearly always * Linting correction * Add changelog fragement for pr 683 * Clean up comments in code * Update restructured text for modules and changelog fragment * Add support to check for security exception to job submit module * Add additonal logic for jobs that use typerun=scan * Update chnagelog fragment * Correct run on sentence --------- * Added additional `D SYMBOLS` example to zos_operator documentation (#729) * Update zos_operator with an additional example * Added change fragment --------- * Ported bugfixes for #664 and #678 * Updated file normalization for sequential datasets * Fixed encoding when a remote source is used * Added encoding normalization for record length (#664) This is a port and tweak from the normalization found in the L2 support branch. Now the normalization applies for both local and remote source files. There is still work left to validate how this behaves when copying to partitioned datasets, and see if the normalization needs to handle directories as well. * Refactored normalization for USS files * Updated line endings normalization in PDSEs * Updated create_temp_with_lf_endings * Added tests for line endings normalization * Added changelog fragment * Add copy lib member test case * Corrected changelog fragment * Add conflict resolution * Update 641-copy-loadlib-member.yml * zos_copy forward port a test case for symbols on a volume (#739) * Add a test case for symbols using an existing volume on our test system * Add changelog fragment * Update 739-zos_copy-volume-symbol-test.yml Update description to use symbols overs special chars. * Update test case with added comments --------- * bring in updates to zos_gather_facts RETURN docstring (#744) * Solve problem 619 about set mode on dest file (#746) * Solve problem 619 about set mode on dest file * Add fragment of 746 PR --------- * Merge release tasks for v1.5.0 into release branch (#752) * Updated metadata * Updated copyright information * Updated copyright in README * Generated changelog * Added previous versions to changelog * Changed a minor_change fragment to bugfix * Updated release notes * Update dependency finder to allow for all tests minus skip tests * Update release notes and changelog with full date formats --------- * remove changelog fragments no longer needed (#754) --------- Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: Ivan Moreno <iamorenosoto@gmail.com> Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> Co-authored-by: Rich Parker <richp405@gmail.com> Co-authored-by: ketankelkar <ktnklkr@gmail.com> Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> * Merge master to dev for 1.6.0 beta.1 (#763) * Update branch production branch Main with release v1.5.0 content (#756) * Bug fix to correct job log returining and other edge cases (#688) * Update zos_job_submit to handle some edge cases and return the job log nearly always Signed-off-by: ddimatos <dimatos@gmail.com> * Linting correction Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragement for pr 683 Signed-off-by: ddimatos <dimatos@gmail.com> * Clean up comments in code Signed-off-by: ddimatos <dimatos@gmail.com> * Update restructured text for modules and changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Add support to check for security exception to job submit module Signed-off-by: ddimatos <dimatos@gmail.com> * Add additonal logic for jobs that use typerun=scan Signed-off-by: ddimatos <dimatos@gmail.com> * Update chnagelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Correct run on sentence Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Added additional `D SYMBOLS` example to zos_operator documentation (#729) * Update zos_operator with an additional example Signed-off-by: ddimatos <dimatos@gmail.com> * Added change fragment Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Ported bugfixes for #664 and #678 * Updated file normalization for sequential datasets * Fixed encoding when a remote source is used * Added encoding normalization for record length (#664) This is a port and tweak from the normalization found in the L2 support branch. Now the normalization applies for both local and remote source files. There is still work left to validate how this behaves when copying to partitioned datasets, and see if the normalization needs to handle directories as well. * Refactored normalization for USS files * Updated line endings normalization in PDSEs * Updated create_temp_with_lf_endings * Added tests for line endings normalization * Added changelog fragment * Add copy lib member test case * Corrected changelog fragment * Add conflict resolution * Update 641-copy-loadlib-member.yml * zos_copy forward port a test case for symbols on a volume (#739) * Add a test case for symbols using an existing volume on our test system Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update 739-zos_copy-volume-symbol-test.yml Update description to use symbols overs special chars. * Update test case with added comments Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * bring in updates to zos_gather_facts RETURN docstring (#744) Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * Solve problem 619 about set mode on dest file (#746) * Solve problem 619 about set mode on dest file * Add fragment of 746 PR --------- Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> * Merge release tasks for v1.5.0 into release branch (#752) * Updated metadata * Updated copyright information * Updated copyright in README * Generated changelog * Added previous versions to changelog * Changed a minor_change fragment to bugfix * Updated release notes * Update dependency finder to allow for all tests minus skip tests Signed-off-by: ddimatos <dimatos@gmail.com> * Update release notes and changelog with full date formats Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: ddimatos <dimatos@gmail.com> * remove changelog fragments no longer needed (#754) Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: Ivan Moreno <iamorenosoto@gmail.com> Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> Co-authored-by: Rich Parker <richp405@gmail.com> Co-authored-by: ketankelkar <ktnklkr@gmail.com> Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> * v1.6.0-beta.1 release (#759) * Added tagging of converted files * Updated tests for zos_encode * Added restoration of PDS/PDSE members when module fails zos_copy can now track which members got overwritten or newly added to a partitioned data set, and restore them accordingly. This commit includes a refactorization of the copy_to_pdse method to simplify the restoration work. * Fixed cleanup of dest when module fails * Removed exception used for debugging * Added pytest markers * solved pep8 issue * Added more information to error when restoration fails * Update make file with new features to manage the nodes (#563) * Update make file with new features to manage the nodes Signed-off-by: ddimatos <dimatos@gmail.com> * Correct typo Signed-off-by: ddimatos <dimatos@gmail.com> * updated makefile to allow for no password in the clear and added a new host Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Fix a bug that failed to copy the cert to the managed node (#577) Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Added apropiate error message in zos_lineinfile when src is not found * Better error message * Added fragment for changelog * Update git issue slection field (#593) * Update git issue slection field * Added some additional future versions * Added rule to ignore python 2.7 compile not supporting f strings * Corrected rule in ignore file * Update 584-zos_lineinfile-error-message.yml * Added missing fragments for issues 309 and 408 * update make file and related artifacts with recent zVM changes (#598) Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Expand what is ingored for venv to all venv's (#613) Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Ansible-core versions with updated ignore file pass (#615) * Validated that ansible-core versions with updated ignore file pass locally Signed-off-by: ddimatos <dimatos@gmail.com> * remove redundant entry on last line Signed-off-by: ddimatos <dimatos@gmail.com> * remove redundant entry on last line Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Update make file to read exported var VENV else default to 'venv' (#614) * Update make file to read exported var VENV else default to 'venv' Signed-off-by: ddimatos <dimatos@gmail.com> * Correct the default from defaultValue to 'venv' Signed-off-by: ddimatos <dimatos@gmail.com> * Bugfix/558/zos copy backup dev (#609) * Create emergency backup only if force is set to False and dest exists * Added Changelog fragment * Updated imports ansible.module_utils._text to ansible.module_utils.common.text.converters (#602) * Updated import in zos_fetch * Updated import in zos_job_submit * Updated import in module_utils/data_set * Updated import in module_utils/System * Updated import in zos_copy * Updated import in zos_fetch * Added changelog fragment * Update enhancement-518-text-converter-import.yml * Updates the makefile and related scripts for the latest mounts (#628) * Update makefile coments and use --ignore for pytest Signed-off-by: ddimatos <dimatos@gmail.com> * lexicographical order targets Signed-off-by: ddimatos <dimatos@gmail.com> * Update makefile and artifacts to support new mounts Signed-off-by: ddimatos <dimatos@gmail.com> * Removing encrypted versions of the scripts, no longer senstive content Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * update profile created for mount points Signed-off-by: ddimatos <dimatos@gmail.com> * Correct f-string usage to remain 2.7 compatible (#659) * Correct f-string usage to remain 2.7 compatible Signed-off-by: ddimatos <dimatos@gmail.com> * Update changelog fragments Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Updated shell scripts for development tooling Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Update collab template Signed-off-by: ddimatos <dimatos@gmail.com> * Update collab template Signed-off-by: ddimatos <dimatos@gmail.com> * Update collab template Signed-off-by: ddimatos <dimatos@gmail.com> * Template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Template updates Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * changed zos_job_query to allow multi/embedded wildcard job_values. job.py/_get_job_status now uses fnmatch to test for wildcarded job_names. added test looking for a wildcard job name. added basic changelog text as 'enhancement'. * expanded wildcard support to job_id as well as job_name corrected error-handling issue in query routine playbook tests complete * cleaned up pep8 issues * Bug fix to correct job log returining and other edge cases (#683) * Update zos_job_submit to handle some edge cases and return the job log nearly always Signed-off-by: ddimatos <dimatos@gmail.com> * Linting correction Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragement for pr 683 Signed-off-by: ddimatos <dimatos@gmail.com> * Clean up comments in code Signed-off-by: ddimatos <dimatos@gmail.com> * Update restructured text for modules and changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Add support to check for security exception to job submit module Signed-off-by: ddimatos <dimatos@gmail.com> * Add additonal logic for jobs that use typerun=scan Signed-off-by: ddimatos <dimatos@gmail.com> * Update chnagelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update zos_job_submit.py Grammar change and correction. --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Added uss_tag_encoding function * Fixing linter issues * removed extraneous comment on query, eliminated unused variable * responding to reviewer comments * Expanded query tests to run a hello jcl file to make sure we are searching for a job id that exists. * Expanded documentation in code to highlight asterisk availability. Added examples showing multi-asterisk search. * Corrected 2 documentation errors * Change to documentation text (indent on multi line string?) * Still trying to get documentation to pass * Looks like '---' was killing documentation block. * Update zos_blockinfile with complex examples (#727) * Update zos_blockinfile with complex examples Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragement Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Added additional D SYMBOLS example to zos_operator documentation (#730) * Update zos_operator with an additional example Signed-off-by: ddimatos <dimatos@gmail.com> * Added changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * zos_gather_facts - add sample output to RETURN docstring (#722) * zos_gather_facts - add sample output to RETURN docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add changelog fragment Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * zos_gather_facts RETURN docstring - add sample sysplex_name and lpar_name values Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * Update generated module doc Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: ddimatos <dimatos@gmail.com> * 574find (#668) * change to allow find loop to continue when one particular data set element is not found This has been validated with a zos-check-find.yaml playbook * added functional test that includes non-existant pds entry, which should be skipped over * added changelog fragment. * zos_blockinfile , can quotes in content can be supported (#680) * Verify coomand of ZOAU support the doble quotes and get better output of message * Verify coomand of ZOAU support the doble quotes and get better output of message * Restore to the one tyme function solving TypeError * Test about cases with quotes supported * Solve comments * Comments in the changelog * Adjust test for working accord the text * A needed space * All in structured * Comments solved * Better order --------- Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> Co-authored-by: Demetri <dimatos@gmail.com> * zos_copy mode is applied to the destination directory, a deviation from the communtiy module behavior. (#723) * Verify coomand of ZOAU support the doble quotes and get better output of message * Verify coomand of ZOAU support the doble quotes and get better output of message * Restore to the one tyme function solving TypeError * Test about cases with quotes supported * Solve comments * Comments in the changelog * Adjust test for working accord the text * Solve the dest functional mode set for the applied to destination directory * Identation and spaces * To work well * To work well * To work well * To work well * Changelogs added * Solved the fragment test and separte the cases --------- Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> * corrected job test case that wanted to extract job id. * changed call to zos_job_query in the functional test. * zos_data_set - add force parameter to enable member delete via disp shr (#718) * add force parameter to enable member delete via disp shr Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update link to PR instead of issue Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * fix minor mistake in func test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update DOCstring for missed items highlighted in PR review Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update copyright years in data set module util Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * Update changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update to doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> * Update to doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> * Update to doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> * update doc, rst Signed-off-by: ddimatos <dimatos@gmail.com> * Update doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> * Added handler for job not found edge cases (None not iterable errors) * corrected pep8 issue (bad indent) * removed tracking text from error/not found messages. * Update zos_job_query.py * Add and automate a load lib test case for module zos_copy (#640) * Modified if statement * Added changelog fragment * Corrected statements Corrected PR in changelog fix lint issue * Adding test case on the work * Added assertion to loadlib test Changed cobol src Added newlines * Correcting cobol src * Removed unnecessary comments * Removed deprint statements for debug * Update 601-copy-loadlib-member.yml * Reduce space primary * Make sure cobol is executable * corrected name * Corrected names * Adding debug statements * modified name in link step * Changing copy name * Removed print statements for debug * Removed previous changelog * Removed unused fragment * Removed test case * Add max_rc support for module zos_tso_command (#666) * rebuilding 565 * fixing pep8 issue * corrected not in code, added functional test to use maxrc * changing maxrc return codes as indicated * updated copy_rexx to incorporate max_rc * pep8 issue on an if statement * changed test to dump intermediate values temporarily * added max_rc to arg params to get it to pass through * removed extra output on test * Added changelog fragment. * Update 565-zos_tsocommand_maxrc.yml * PARTIAL correction based on reviewer's feedback. Will complete later today. * Partial push: change origrc to orig_rc and added documentation. Added experimental loop for multiline results. Changed error case to continue through the commands even after a failure (?!) * Changes based on review feedback: - rc is actual returned value - origrc/orig_rc removed - in a list of commands, first error (considering max_rc) stops the chain - while processing commands, a list of success/failures are kept for error state info - changed listds to a longer, more complete string in example * updated test case for max_rc to use longer, more complete listds command * Minor pep8 and yamllint changes * Updated string builder to use append and join, for efficiency. * Update changelog fragment to correspond to pull request number Signed-off-by: ddimatos <dimatos@gmail.com> * removed old fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Changed output of command-by-command to use max_rc instead of Max RC, for consistency. * clarifying failure message when user hasn't set a max_rc. * corrected date on copyright notice to include current year. * corrected copyright date on test case --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: Demetri <dimatos@gmail.com> * added changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * zos_copy forward port a test case for symbols on a volume #739 (#740) * added test case for symbols on a volume Signed-off-by: ddimatos <dimatos@gmail.com> * Changelog fragment added Signed-off-by: ddimatos <dimatos@gmail.com> * Update test case with added comments Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Volume Initialization Module (#654) * Adding ZOS_INIT plugins The module and action for ZOS_Init command, under zos_ickdsf_command. Designed for version 1.3.1, and example given references removed zos_ssh.py. * Create zos_ickdsf_command.rst Tentative documentation. Does not quite list every return, and program may change to work with overall zos core collection. * Update zos_ickdsf_command.py fixed to pass pep8 sanity test. * Update zos_ickdsf_command.py Edited zos_ickdsf_command.py in action folder to pass pep8 style check. * bugfix - undo hard-coded storagegroup param from command passed to mvs_raw Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update encoding and licensing info Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * edit descriptions, leaving in old to verify messaging with dfsms team Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove references to buildix Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * cleanup examples docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add return docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add some unit tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update dict names, some clean up Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update interface and docs with cleaner descriptions and define default values and required true/false where applicable Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove init param and bring out sub-options one level Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename module from zos_ickdsf_command to zos_ickdsf_init Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove files referring to zos_ickdsf_command; i forgot to untrack these in the previous commit Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove params from intitial definition of results dict which are not always present, remove unused return params, rename message to msg to match ibm_zos_core and ansible style Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add ignore missing gplv3 license warning from sanity testings to existing ignore files Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * style edits to pass sanity tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add in-depth task name to first example docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add minimal params functional test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * convert action plugin to module_util, remove IckdsfCommand and CommandInit class definitions and custom error classes, move convert method to module_util Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename rc param, update func test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add note about disabled verify_offline check Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * removed volume_address checks which can be handled by ansiblemodule parsing validation or ickdsf itself; changed function signature of convert method to include results dicts so it can pass it to fail_json method in the case of module failure Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add invalid volume_address func tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * change param names, organize mvscmd return output Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * consolidate negative func tests, add negative tests for vtoc_tracks param, start list for postive tests to implement Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add func tests for "verify_existing_volid" and "verify_no_data_sets_exist" Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add volid positive and negative func tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add mutual exclusion to no index/sms_managedw with func test, minor edits to comments * add func tests for index param Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add func test for vtoc_tracks param Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove 'addr_range', 'volid_prefix' and 'output_html' params; add example with ansible loop to init multiple serial volumes Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * convert 'content' return param to list for prettier playbook output Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add specific examples to docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add guard rail test with volser check set to ensure target volume is as expected Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * switch target test vol to user02, remove existing data sets in initial test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * start adding author names to docstring, edits of yaml for sanity test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename module to zos_volume_init Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove unit test suite which was not kept up to date with code restructure Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove old/outdated .rst file Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * bring up to speed with v1.5.0-beta.1 which is currently more recent than dev Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add sanity test 2.15 file Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * clean up trailing empty lines/space chars Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * more pep8 style Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * pull in more changes from latest dev branch Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * tweak tests/sanity/ignore-2.10.txt file Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename volume_address to address, rename verify_no_data_sets_exist to verify_volume_empty, rename verify_existing_volid to verify_volid, rename vtoc_tracks to vtoc_size, rename return param command to cmd, make suggested doc changes, switch off check_mode Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * resolve pep8 line too long issue Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add year to copyright, add gh user name to author, restyle true/false with code rather than italics, remove backslashes in docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add changelog fragment Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update zos_fetch test case to create data set instead of using one that gets wiped away by zos_volume_init test cases Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * address PR comments including pep8 styling, docstring changes including add seealso section, add details around authorized usage of ickdsf, some light wordsmithing, and remove cmd from return structure Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add backticks to other special chars in docs for consistency Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * on more formatting tweak to national chars in docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add new param to support tmp_hlq Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * extend tmphlq binding to sysprint(*), disable verbose mode for mvscmd call Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove references to betterargparser - it cannot be used in its current state as it is too restrictive on volume serial and qualifier args Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove soft language ie beware, please; reduce line lengths, fix a spelling error Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * reorganize test case code for more logical grouping and un-nesting from a for loop, also update copyright year that was missed Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update display command check to only check particular lines instead of entire output in test_volid_address_assigned_correctly which would always be positive when tested against the entire output Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: trevor-glassey <Trevor.Glassey@ibm.com> * Ported bugfixes for #664 and #678 from 1.5.0 (#743) * Ported bugfixes for #664 and #678 * Updated file normalization for sequential datasets * Fixed encoding when a remote source is used * Added encoding normalization for record length (#664) This is a port and tweak from the normalization found in the L2 support branch. Now the normalization applies for both local and remote source files. There is still work left to validate how this behaves when copying to partitioned datasets, and see if the normalization needs to handle directories as well. * Refactored normalization for USS files * Updated line endings normalization in PDSEs * Updated create_temp_with_lf_endings * Added tests for line endings normalization * Added changelog fragment * Updated changelog fragment * Updated copyright information * Added comments to record length tests * Added more context to the record length normalization * Add force (disp=shr) option to zos_lineinfile and update zos_blockinfile with the same locking test case. (#731) * Add DISP=SHR by function force to the user * Add helper function for force cases only for EOF inserts * Test cases added * Just to check in the change case * All test cases for force in zos_lineinfile working properly * Test cases for force blockinfile * Fixes for style of code * One identation error * One identation error * One identation error * Documentation error * All test cases for line infile and block infile * Comments * Change the stdout to assert with expected of ds * Error of test case * Fragment added * Solve typo errors * Update to remove uncessary comment Signed-off-by: ddimatos <dimatos@gmail.com> * Update changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update module with force comments Signed-off-by: ddimatos <dimatos@gmail.com> * Solve some test problems * Copyright year * Copyright year * Add cases for other ecodes * Add test for all encodings * Cover all encodes test cases with force * Blockinfile force added for other encodes and DS types --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@mbp-de-andre-2.gdl.mex.ibm.com> Co-authored-by: ddimatos <dimatos@gmail.com> * remove duplicate function (#753) * Update branch production branch Main with release v1.5.0 content (#756) (#758) * Bug fix to correct job log returining and other edge cases (#688) * Update zos_job_submit to handle some edge cases and return the job log nearly always * Linting correction * Add changelog fragement for pr 683 * Clean up comments in code * Update restructured text for modules and changelog fragment * Add support to check for security exception to job submit module * Add additonal logic for jobs that use typerun=scan * Update chnagelog fragment * Correct run on sentence --------- * Added additional `D SYMBOLS` example to zos_operator documentation (#729) * Update zos_operator with an additional example * Added change fragment --------- * Ported bugfixes for #664 and #678 * Updated file normalization for sequential datasets * Fixed encoding when a remote source is used * Added encoding normalization for record length (#664) This is a port and tweak from the normalization found in the L2 support branch. Now the normalization applies for both local and remote source files. There is still work left to validate how this behaves when copying to partitioned datasets, and see if the normalization needs to handle directories as well. * Refactored normalization for USS files * Updated line endings normalization in PDSEs * Updated create_temp_with_lf_endings * Added tests for line endings normalization * Added changelog fragment * Add copy lib member test case * Corrected changelog fragment * Add conflict resolution * Update 641-copy-loadlib-member.yml * zos_copy forward port a test case for symbols on a volume (#739) * Add a test case for symbols using an existing volume on our test system * Add changelog fragment * Update 739-zos_copy-volume-symbol-test.yml Update description to use symbols overs special chars. * Update test case with added comments --------- * bring in updates to zos_gather_facts RETURN docstring (#744) * Solve problem 619 about set mode on dest file (#746) * Solve problem 619 about set mode on dest file * Add fragment of 746 PR --------- * Merge release tasks for v1.5.0 into release branch (#752) * Updated metadata * Updated copyright information * Updated copyright in README * Generated changelog * Added previous versions to changelog * Changed a minor_change fragment to bugfix * Updated release notes * Update dependency finder to allow for all tests minus skip tests * Update release notes and changelog with full date formats --------- * remove changelog fragments no longer needed (#754) --------- Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: Ivan Moreno <iamorenosoto@gmail.com> Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> Co-authored-by: Rich Parker <richp405@gmail.com> Co-authored-by: ketankelkar <ktnklkr@gmail.com> Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> * update galaxy and meta/ files Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update copyright years Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update rst doc files Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add 1.6.0-beta.1 release summary fragment Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * generate changelog Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add release notes Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add additional stylizing Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * Delete fragements once changelog is generated Signed-off-by: ddimatos <dimatos@gmail.com> * update stylizing around *none* in zos_volume_init docs Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: Ivan Moreno <iamorenosoto@gmail.com> Co-authored-by: Oscar Fernando Flores Garcia <fernandofloresdev@gmail.com> Co-authored-by: Demetri <dimatos@gmail.com> Co-authored-by: Rich Parker <richp405@gmail.com> Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> Co-authored-by: trevor-glassey <Trevor.Glassey@ibm.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@mbp-de-andre-2.gdl.mex.ibm.com> * Update ibm_zos_core_meta.yml --------- Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: Ivan Moreno <iamorenosoto@gmail.com> Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> Co-authored-by: Rich Parker <richp405@gmail.com> Co-authored-by: ketankelkar <ktnklkr@gmail.com> Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> Co-authored-by: trevor-glassey <Trevor.Glassey@ibm.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@mbp-de-andre-2.gdl.mex.ibm.com> * Bugfix/619/mode set for files applied test case (#757) * Add test case for copy dest file * Add comments * Add test for folders * Adjust spaces * Changes for ensure consistency for all tests * Changes of name and clean creations --------- Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> * Bugfix/381/failed when the job name was null or not found (#747) * Add the verbose for failed when job name was null or not found * Adjust message for what we can get * Whitespaces move * Add code from dev * Ecode utility as is in dev * Year for copyright * Case for having both the jod_id and job_name * Ecode utils functions not in my branch * Add final line ecode * Add fragment * Delete encode function two times, adjust job message and change the fragment * Change variable name for one more descriptive * Restore encode and change one word * Encode * bugfixes * Set up as dev * Better fragment --------- Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> * Bugfix/660/zos operator reported failure caused by unrelated error response messages (#762) * Add options * Add transparency on the response and test cases * Solve spaces * Add validation to append * Fragment Added * Adjust fail_json on non_zero response * Identation mistakes solved * Solve last idenation problem * Replace prior tooling (makefile) that aidded the development workflow with a new 'ac' command. (#766) * Make file mount script helper Signed-off-by: ddimatos <dimatos@gmail.com> * Comments to mount script Signed-off-by: ddimatos <dimatos@gmail.com> * Staged updated scripts for makefile usage Signed-off-by: ddimatos <dimatos@gmail.com> * Update mount scripts for use with makefile Signed-off-by: ddimatos <dimatos@gmail.com> * updates to correct mounts and add function to mounts-datasets Signed-off-by: ddimatos <dimatos@gmail.com> * adding completed new ac command files for development Signed-off-by: ddimatos <dimatos@gmail.com> * update ignore to more specific with venv Signed-off-by: ddimatos <dimatos@gmail.com> * Correcting ignore to allow for venv.sh Signed-off-by: ddimatos <dimatos@gmail.com> * moved logic that checks for info.env to venv.sh Signed-off-by: ddimatos <dimatos@gmail.com> * Adding changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Fix a path issue when calling venv.sh Signed-off-by: ddimatos <dimatos@gmail.com> * Fixes issue not being able to run all tests, fixes issue with content being written to collections folder Signed-off-by: ddimatos <dimatos@gmail.com> * Support zSH and update scp to fall back to legacy scp protocal Signed-off-by: ddimatos <dimatos@gmail.com> * Update ac with password usage Signed-off-by: ddimatos <dimatos@gmail.com> * Update ac with password usage Signed-off-by: ddimatos <dimatos@gmail.com> * Fix incorrect message and remove the cd's before and after ac-test Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * 347 new query fields (#778) * changing job.py to return 7 more fields, and for zos_job_query to pass them through * corrected testing to pull all new values through this assumes zoau 1.2.3 and z/OS at least 2.4 need to test older zoau to make sure this will still work * Added zoau version testing import to job.py so it won't reach for non-existent members. * pep8 and lint required changes * changed test to see if it will pass unit testing * Modified test_zos_data_set_func to skip HFS test if zOS > 02.04 * changed OS test for hfs usage * corrected usage of 'hosts'... removed the definition in prior edit. * changing OS version checker * corrected string extraction for OS version checker * added delete shell to 196/197 (finally of cat/uncat test) removed success message from 830 (version test logic) * removed the mvscmdauth call, as it coincides with some new test failures. * added changed=false back into testing of job_query * correction of zos->zoau name in comments. * Missing fragment in PR 778 New query fields (#780) * added fragment for pr 778 * Added changelog fragment query new fields Added changelog fragment query new fields * Update 778-query-new-fields.yml * Update docs with ansible/ansible-core version, AAP and fix the dated git issue templates (#771) * Doc vesion updates Signed-off-by: ddimatos <dimatos@gmail.com> * Repository template updates and future proofing Signed-off-by: ddimatos <dimatos@gmail.com> * Changelog fragment added Signed-off-by: ddimatos <dimatos@gmail.com> * Formatting corrections for release notes Signed-off-by: ddimatos <dimatos@gmail.com> * Upate issue templates with newer version of software Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Update ac command supporting files (#789) * Update ac command supporting files Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Update zos_data_set module member description Signed-off-by: ddimatos <dimatos@gmail.com> * Add recently changed module doc from prior commits Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Adding fix for uncataloged vsam and non-vsam data sets Signed-off-by: ddimatos <dimatos@gmail.com> * Encode files recursively and test case for keep behavior. (#772) * Bring the jinja2 solution to dev and add test case * Add fragment * Solve problem z/OS 2.5 HFS * Declaration error solve * Need to check the validation with HFS * Ensure validating z/OS work with HFS * Change inecesary changes and fragments q * Return all test cases to normal * Return all test cases to normal * Create the local test case * Add local test case and change test case to be acurate * Get better cleanup of test-case * Update test_zos_data_set_func.py Equalize test mount func * Update ac to support a single test (#793) * Update ac to support a single test Signed-off-by: ddimatos <dimatos@gmail.com> * Changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update test description Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Return the dynamically created destination attributes (#773) * First iteration to get dynamic values * Spaces and lines rectified * Add validation and extra variable to ensure consistency * Whitespaces * Change imports in test_zos_mount_func * Update test_zos_fetch_func imports * Update all imports for pipelines runs * Revert "Update all imports for pipelines runs" This reverts commit 1b370a2ba3c0001c316e0121ddab82ae7cc6d75d. Return one commit * Update data_set.py imports * Revert "Update data_set.py imports" This reverts commit 37561b0a12e04faaee8307a5541b71469dbe721d. * Update data_set imports * Update data_set imports * Update data_set imports * Restore import * Restore the imports * Add fragment * Solve a typo * Solve z/OS 2.5 HFS * Solve declaration error * Solve HFS and solution by now * Ensure HFS working with HFS * Better working on HFS testing problems * Change to cover many cases and add test * Modified changelog, corrected typos and shortemed file name * Delete 773-Return-the-dynamically-created-destintation-attributres.yaml * Update test_zos_data_set_func.py * Add documentation * Adjust spaces * Solve spaces in documentation * Solve problems on spaces in documentation * Adjust fragment and add validation for vsams * Better redaction to documentation * Solve spaces * Change documentation of code and collection * Change words in documentation --------- Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> * Updated ac command to clean up the collections directory Signed-off-by: ddimatos <dimatos@gmail.com> * Fixes the issue of parts of a vsam cluster remaining behind and allows user to correctly delete DS not in cat Signed-off-by: ddimatos <dimatos@gmail.com> * Update module doc to explain data set deltion for given volume Signed-off-by: ddimatos <dimatos@gmail.com> * Update module doc to explain data set deltion for given volumegit Signed-off-by: ddimatos <dimatos@gmail.com> * Unbound local var fix Signed-off-by: ddimatos <dimatos@gmail.com> * added changelog fragments Signed-off-by: ddimatos <dimatos@gmail.com> * Lint corrections Signed-off-by: ddimatos <dimatos@gmail.com> * Update changelog based on PR feedback Signed-off-by: ddimatos <dimatos@gmail.com> * Increase ansible supported version to 2.15 Signed-off-by: ddimatos <dimatos@gmail.com> * remove unused imports Signed-off-by: ddimatos <dimatos@gmail.com> * Added 2.16 ignore since our pipeline supports devel which is at this time 2.16 Signed-off-by: ddimatos <dimatos@gmail.com> * Change the line for the functional one (#805) * Add ansible-lint tooling added (#812) * Add ansible-lint tooling Signed-off-by: ddimatos <dimatos@gmail.com> * add changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * missing hyphen from command doc Signed-off-by: ddimatos <dimatos@gmail.com> * Update ac command with doc corrections Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * 439 addf (#821) * initial changes to support F format * adding F option, added basic test looking for failure during ensure-present * added print_results to a failing uncatalog test. * adding more preint_result statements to track down cataloging issue * removed other print statements, added one back (cataloging is just plain finicky) * corrected volume name on new test * removed extra print statement from test code. Added Changelog fragment. * Expanded test case to try 1 of each record format creation. Added mention of 'F' into the documentation of record_format in dataset.py * Bugfix/769/mode option does not behave the same way that it does in the community module (#795) * First suggestion * Add files to be overwriten to the files to be changed * Add functionality to test case to ensure behaivour * Add test case for keep behaivour * Delete test repetition * Delete test case from other branch * Change test cases to ensure works as ansible module * Add fragment and change variable names for clarity * Get better test case and comments * Restore test --------- Co-authored-by: Demetri <dimatos@gmail.com> * bugfix/823/Return destination attributes had hardcoded type and record format (#824) * Add solution * Add fragment * Bufix: Fixes zos_copy and zos_fetch deprecation msg for using _play_context.verbosity (#806) * Add new test case for verbosity check Signed-off-by: ddimatos <dimatos@gmail.com> * Update zos_copy tests to support display.verbosity and nested encoding Signed-off-by: ddimatos <dimatos@gmail.com> * Update test framewor to provide support for adhoc module calls Signed-off-by: ddimatos <dimatos@gmail.com> * Update zos_fetch plugin to use the display.verbosity Signed-off-by: ddimatos <dimatos@gmail.com> * Lint correction Signed-off-by: ddimatos <dimatos@gmail.com> * Changlog fragments Signed-off-by: ddimatos <dimatos@gmail.com> * Update test with verbosity Signed-off-by: ddimatos <dimatos@gmail.com> * Change from shell to raw module usage Signed-off-by: ddimatos <dimatos@gmail.com> * remove verbosity from test Signed-off-by: ddimatos <dimatos@gmail.com> * correct indentation Signed-off-by: ddimatos <dimatos@gmail.com> * update changelog fragement Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> * Stagging v1.6.0 merge into dev (#832) * Update branch produc… * Remove changelog fragments not needed left in main Signed-off-by: ddimatos <dimatos@gmail.com> * Release v1.9.0 into Main (#1306) * Added 2.16 ignore since our pipeline supports devel which is at this time 2.16 Signed-off-by: ddimatos <dimatos@gmail.com> * Change the line for the functional one (#805) * Add ansible-lint tooling added (#812) * Add ansible-lint tooling Signed-off-by: ddimatos <dimatos@gmail.com> * add changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * missing hyphen from command doc Signed-off-by: ddimatos <dimatos@gmail.com> * Update ac command with doc corrections Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * 439 addf (#821) * initial changes to support F format * adding F option, added basic test looking for failure during ensure-present * added print_results to a failing uncatalog test. * adding more preint_result statements to track down cataloging issue * removed other print statements, added one back (cataloging is just plain finicky) * corrected volume name on new test * removed extra print statement from test code. Added Changelog fragment. * Expanded test case to try 1 of each record format creation. Added mention of 'F' into the documentation of record_format in dataset.py * Bugfix/769/mode option does not behave the same way that it does in the community module (#795) * First suggestion * Add files to be overwriten to the files to be changed * Add functionality to test case to ensure behaivour * Add test case for keep behaivour * Delete test repetition * Delete test case from other branch * Change test cases to ensure works as ansible module * Add fragment and change variable names for clarity * Get better test case and comments * Restore test --------- Co-authored-by: Demetri <dimatos@gmail.com> * bugfix/823/Return destination attributes had hardcoded type and record format (#824) * Add solution * Add fragment * Bufix: Fixes zos_copy and zos_fetch deprecation msg for using _play_context.verbosity (#806) * Add new test case for verbosity check Signed-off-by: ddimatos <dimatos@gmail.com> * Update zos_copy tests to support display.verbosity and nested encoding Signed-off-by: ddimatos <dimatos@gmail.com> * Update test framewor to provide support for adhoc module calls Signed-off-by: ddimatos <dimatos@gmail.com> * Update zos_fetch plugin to use the display.verbosity Signed-off-by: ddimatos <dimatos@gmail.com> * Lint correction Signed-off-by: ddimatos <dimatos@gmail.com> * Changlog fragments Signed-off-by: ddimatos <dimatos@gmail.com> * Update test with verbosity Signed-off-by: ddimatos <dimatos@gmail.com> * Change from shell to raw module usage Signed-off-by: ddimatos <dimatos@gmail.com> * remove verbosity from test Signed-off-by: ddimatos <dimatos@gmail.com> * correct indentation Signed-off-by: ddimatos <dimatos@gmail.com> * update changelog fragement Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> * Stagging v1.6.0 merge into dev (#832) * Update branch production branch Main with release v1.5.0 content (#756) * Bug fix to correct job log returining and other edge cases (#688) * Update zos_job_submit to handle some edge cases and return the job log nearly always Signed-off-by: ddimatos <dimatos@gmail.com> * Linting correction Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragement for pr 683 Signed-off-by: ddimatos <dimatos@gmail.com> * Clean up comments in code Signed-off-by: ddimatos <dimatos@gmail.com> * Update restructured text for modules and changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Add support to check for security exception to job submit module Signed-off-by: ddimatos <dimatos@gmail.com> * Add additonal logic for jobs that use typerun=scan Signed-off-by: ddimatos <dimatos@gmail.com> * Update chnagelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Correct run on sentence Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Added additional `D SYMBOLS` example to zos_operator documentation (#729) * Update zos_operator with an additional example Signed-off-by: ddimatos <dimatos@gmail.com> * Added change fragment Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Ported bugfixes for #664 and #678 * Updated file normalization for sequential datasets * Fixed encoding when a remote source is used * Added encoding normalization for record length (#664) This is a port and tweak from the normalization found in the L2 support branch. Now the normalization applies for both local and remote source files. There is still work left to validate how this behaves when copying to partitioned datasets, and see if the normalization needs to handle directories as well. * Refactored normalization for USS files * Updated line endings normalization in PDSEs * Updated create_temp_with_lf_endings * Added tests for line endings normalization * Added changelog fragment * Add copy lib member test case * Corrected changelog fragment * Add conflict resolution * Update 641-copy-loadlib-member.yml * zos_copy forward port a test case for symbols on a volume (#739) * Add a test case for symbols using an existing volume on our test system Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update 739-zos_copy-volume-symbol-test.yml Update description to use symbols overs special chars. * Update test case with added comments Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * bring in updates to zos_gather_facts RETURN docstring (#744) Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * Solve problem 619 about set mode on dest file (#746) * Solve problem 619 about set mode on dest file * Add fragment of 746 PR --------- Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> * Merge release tasks for v1.5.0 into release branch (#752) * Updated metadata * Updated copyright information * Updated copyright in README * Generated changelog * Added previous versions to changelog * Changed a minor_change fragment to bugfix * Updated release notes * Update dependency finder to allow for all tests minus skip tests Signed-off-by: ddimatos <dimatos@gmail.com> * Update release notes and changelog with full date formats Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: ddimatos <dimatos@gmail.com> * remove changelog fragments no longer needed (#754) Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: Ivan Moreno <iamorenosoto@gmail.com> Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> Co-authored-by: Rich Parker <richp405@gmail.com> Co-authored-by: ketankelkar <ktnklkr@gmail.com> Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> * v1.6.0-beta.1 release (#759) * Added tagging of converted files * Updated tests for zos_encode * Added restoration of PDS/PDSE members when module fails zos_copy can now track which members got overwritten or newly added to a partitioned data set, and restore them accordingly. This commit includes a refactorization of the copy_to_pdse method to simplify the restoration work. * Fixed cleanup of dest when module fails * Removed exception used for debugging * Added pytest markers * solved pep8 issue * Added more information to error when restoration fails * Update make file with new features to manage the nodes (#563) * Update make file with new features to manage the nodes Signed-off-by: ddimatos <dimatos@gmail.com> * Correct typo Signed-off-by: ddimatos <dimatos@gmail.com> * updated makefile to allow for no password in the clear and added a new host Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Fix a bug that failed to copy the cert to the managed node (#577) Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Added apropiate error message in zos_lineinfile when src is not found * Better error message * Added fragment for changelog * Update git issue slection field (#593) * Update git issue slection field * Added some additional future versions * Added rule to ignore python 2.7 compile not supporting f strings * Corrected rule in ignore file * Update 584-zos_lineinfile-error-message.yml * Added missing fragments for issues 309 and 408 * update make file and related artifacts with recent zVM changes (#598) Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Expand what is ingored for venv to all venv's (#613) Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Ansible-core versions with updated ignore file pass (#615) * Validated that ansible-core versions with updated ignore file pass locally Signed-off-by: ddimatos <dimatos@gmail.com> * remove redundant entry on last line Signed-off-by: ddimatos <dimatos@gmail.com> * remove redundant entry on last line Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Update make file to read exported var VENV else default to 'venv' (#614) * Update make file to read exported var VENV else default to 'venv' Signed-off-by: ddimatos <dimatos@gmail.com> * Correct the default from defaultValue to 'venv' Signed-off-by: ddimatos <dimatos@gmail.com> * Bugfix/558/zos copy backup dev (#609) * Create emergency backup only if force is set to False and dest exists * Added Changelog fragment * Updated imports ansible.module_utils._text to ansible.module_utils.common.text.converters (#602) * Updated import in zos_fetch * Updated import in zos_job_submit * Updated import in module_utils/data_set * Updated import in module_utils/System * Updated import in zos_copy * Updated import in zos_fetch * Added changelog fragment * Update enhancement-518-text-converter-import.yml * Updates the makefile and related scripts for the latest mounts (#628) * Update makefile coments and use --ignore for pytest Signed-off-by: ddimatos <dimatos@gmail.com> * lexicographical order targets Signed-off-by: ddimatos <dimatos@gmail.com> * Update makefile and artifacts to support new mounts Signed-off-by: ddimatos <dimatos@gmail.com> * Removing encrypted versions of the scripts, no longer senstive content Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * update profile created for mount points Signed-off-by: ddimatos <dimatos@gmail.com> * Correct f-string usage to remain 2.7 compatible (#659) * Correct f-string usage to remain 2.7 compatible Signed-off-by: ddimatos <dimatos@gmail.com> * Update changelog fragments Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Updated shell scripts for development tooling Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Update collab template Signed-off-by: ddimatos <dimatos@gmail.com> * Update collab template Signed-off-by: ddimatos <dimatos@gmail.com> * Update collab template Signed-off-by: ddimatos <dimatos@gmail.com> * Template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Template updates Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * changed zos_job_query to allow multi/embedded wildcard job_values. job.py/_get_job_status now uses fnmatch to test for wildcarded job_names. added test looking for a wildcard job name. added basic changelog text as 'enhancement'. * expanded wildcard support to job_id as well as job_name corrected error-handling issue in query routine playbook tests complete * cleaned up pep8 issues * Bug fix to correct job log returining and other edge cases (#683) * Update zos_job_submit to handle some edge cases and return the job log nearly always Signed-off-by: ddimatos <dimatos@gmail.com> * Linting correction Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragement for pr 683 Signed-off-by: ddimatos <dimatos@gmail.com> * Clean up comments in code Signed-off-by: ddimatos <dimatos@gmail.com> * Update restructured text for modules and changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Add support to check for security exception to job submit module Signed-off-by: ddimatos <dimatos@gmail.com> * Add additonal logic for jobs that use typerun=scan Signed-off-by: ddimatos <dimatos@gmail.com> * Update chnagelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update zos_job_submit.py Grammar change and correction. --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Added uss_tag_encoding function * Fixing linter issues * removed extraneous comment on query, eliminated unused variable * responding to reviewer comments * Expanded query tests to run a hello jcl file to make sure we are searching for a job id that exists. * Expanded documentation in code to highlight asterisk availability. Added examples showing multi-asterisk search. * Corrected 2 documentation errors * Change to documentation text (indent on multi line string?) * Still trying to get documentation to pass * Looks like '---' was killing documentation block. * Update zos_blockinfile with complex examples (#727) * Update zos_blockinfile with complex examples Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragement Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Added additional D SYMBOLS example to zos_operator documentation (#730) * Update zos_operator with an additional example Signed-off-by: ddimatos <dimatos@gmail.com> * Added changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * zos_gather_facts - add sample output to RETURN docstring (#722) * zos_gather_facts - add sample output to RETURN docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add changelog fragment Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * zos_gather_facts RETURN docstring - add sample sysplex_name and lpar_name values Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * Update generated module doc Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: ddimatos <dimatos@gmail.com> * 574find (#668) * change to allow find loop to continue when one particular data set element is not found This has been validated with a zos-check-find.yaml playbook * added functional test that includes non-existant pds entry, which should be skipped over * added changelog fragment. * zos_blockinfile , can quotes in content can be supported (#680) * Verify coomand of ZOAU support the doble quotes and get better output of message * Verify coomand of ZOAU support the doble quotes and get better output of message * Restore to the one tyme function solving TypeError * Test about cases with quotes supported * Solve comments * Comments in the changelog * Adjust test for working accord the text * A needed space * All in structured * Comments solved * Better order --------- Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> Co-authored-by: Demetri <dimatos@gmail.com> * zos_copy mode is applied to the destination directory, a deviation from the communtiy module behavior. (#723) * Verify coomand of ZOAU support the doble quotes and get better output of message * Verify coomand of ZOAU support the doble quotes and get better output of message * Restore to the one tyme function solving TypeError * Test about cases with quotes supported * Solve comments * Comments in the changelog * Adjust test for working accord the text * Solve the dest functional mode set for the applied to destination directory * Identation and spaces * To work well * To work well * To work well * To work well * Changelogs added * Solved the fragment test and separte the cases --------- Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> * corrected job test case that wanted to extract job id. * changed call to zos_job_query in the functional test. * zos_data_set - add force parameter to enable member delete via disp shr (#718) * add force parameter to enable member delete via disp shr Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update link to PR instead of issue Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * fix minor mistake in func test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update DOCstring for missed items highlighted in PR review Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update copyright years in data set module util Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * Update changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update to doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> * Update to doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> * Update to doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> * update doc, rst Signed-off-by: ddimatos <dimatos@gmail.com> * Update doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> * Added handler for job not found edge cases (None not iterable errors) * corrected pep8 issue (bad indent) * removed tracking text from error/not found messages. * Update zos_job_query.py * Add and automate a load lib test case for module zos_copy (#640) * Modified if statement * Added changelog fragment * Corrected statements Corrected PR in changelog fix lint issue * Adding test case on the work * Added assertion to loadlib test Changed cobol src Added newlines * Correcting cobol src * Removed unnecessary comments * Removed deprint statements for debug * Update 601-copy-loadlib-member.yml * Reduce space primary * Make sure cobol is executable * corrected name * Corrected names * Adding debug statements * modified name in link step * Changing copy name * Removed print statements for debug * Removed previous changelog * Removed unused fragment * Removed test case * Add max_rc support for module zos_tso_command (#666) * rebuilding 565 * fixing pep8 issue * corrected not in code, added functional test to use maxrc * changing maxrc return codes as indicated * updated copy_rexx to incorporate max_rc * pep8 issue on an if statement * changed test to dump intermediate values temporarily * added max_rc to arg params to get it to pass through * removed extra output on test * Added changelog fragment. * Update 565-zos_tsocommand_maxrc.yml * PARTIAL correction based on reviewer's feedback. Will complete later today. * Partial push: change origrc to orig_rc and added documentation. Added experimental loop for multiline results. Changed error case to continue through the commands even after a failure (?!) * Changes based on review feedback: - rc is actual returned value - origrc/orig_rc removed - in a list of commands, first error (considering max_rc) stops the chain - while processing commands, a list of success/failures are kept for error state info - changed listds to a longer, more complete string in example * updated test case for max_rc to use longer, more complete listds command * Minor pep8 and yamllint changes * Updated string builder to use append and join, for efficiency. * Update changelog fragment to correspond to pull request number Signed-off-by: ddimatos <dimatos@gmail.com> * removed old fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Changed output of command-by-command to use max_rc instead of Max RC, for consistency. * clarifying failure message when user hasn't set a max_rc. * corrected date on copyright notice to include current year. * corrected copyright date on test case --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: Demetri <dimatos@gmail.com> * added changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * zos_copy forward port a test case for symbols on a volume #739 (#740) * added test case for symbols on a volume Signed-off-by: ddimatos <dimatos@gmail.com> * Changelog fragment added Signed-off-by: ddimatos <dimatos@gmail.com> * Update test case with added comments Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Volume Initialization Module (#654) * Adding ZOS_INIT plugins The module and action for ZOS_Init command, under zos_ickdsf_command. Designed for version 1.3.1, and example given references removed zos_ssh.py. * Create zos_ickdsf_command.rst Tentative documentation. Does not quite list every return, and program may change to work with overall zos core collection. * Update zos_ickdsf_command.py fixed to pass pep8 sanity test. * Update zos_ickdsf_command.py Edited zos_ickdsf_command.py in action folder to pass pep8 style check. * bugfix - undo hard-coded storagegroup param from command passed to mvs_raw Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update encoding and licensing info Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * edit descriptions, leaving in old to verify messaging with dfsms team Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove references to buildix Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * cleanup examples docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add return docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add some unit tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update dict names, some clean up Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update interface and docs with cleaner descriptions and define default values and required true/false where applicable Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove init param and bring out sub-options one level Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename module from zos_ickdsf_command to zos_ickdsf_init Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove files referring to zos_ickdsf_command; i forgot to untrack these in the previous commit Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove params from intitial definition of results dict which are not always present, remove unused return params, rename message to msg to match ibm_zos_core and ansible style Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add ignore missing gplv3 license warning from sanity testings to existing ignore files Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * style edits to pass sanity tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add in-depth task name to first example docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add minimal params functional test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * convert action plugin to module_util, remove IckdsfCommand and CommandInit class definitions and custom error classes, move convert method to module_util Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename rc param, update func test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add note about disabled verify_offline check Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * removed volume_address checks which can be handled by ansiblemodule parsing validation or ickdsf itself; changed function signature of convert method to include results dicts so it can pass it to fail_json method in the case of module failure Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add invalid volume_address func tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * change param names, organize mvscmd return output Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * consolidate negative func tests, add negative tests for vtoc_tracks param, start list for postive tests to implement Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add func tests for "verify_existing_volid" and "verify_no_data_sets_exist" Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add volid positive and negative func tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add mutual exclusion to no index/sms_managedw with func test, minor edits to comments * add func tests for index param Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add func test for vtoc_tracks param Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove 'addr_range', 'volid_prefix' and 'output_html' params; add example with ansible loop to init multiple serial volumes Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * convert 'content' return param to list for prettier playbook output Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add specific examples to docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add guard rail test with volser check set to ensure target volume is as expected Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * switch target test vol to user02, remove existing data sets in initial test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * start adding author names to docstring, edits of yaml for sanity test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename module to zos_volume_init Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove unit test suite which was not kept up to date with code restructure Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove old/outdated .rst file Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * bring up to speed with v1.5.0-beta.1 which is currently more recent than dev Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add sanity test 2.15 file Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * clean up trailing empty lines/space chars Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * more pep8 style Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * pull in more changes from latest dev branch Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * tweak tests/sanity/ignore-2.10.txt file Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename volume_address to address, rename verify_no_data_sets_exist to verify_volume_empty, rename verify_existing_volid to verify_volid, rename vtoc_tracks to vtoc_size, rename return param command to cmd, make suggested doc changes, switch off check_mode Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * resolve pep8 line too long issue Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add year to copyright, add gh user name to author, restyle true/false with code rather than italics, remove backslashes in docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add changelog fragment Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update zos_fetch test case to create data set instead of using one that gets wiped away by zos_volume_init test cases Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * address PR comments including pep8 styling, docstring changes including add seealso section, add details around authorized usage of ickdsf, some light wordsmithing, and remove cmd from return structure Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add backticks to other special chars in docs for consistency Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * on more formatting tweak to national chars in docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add new param to support tmp_hlq Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * extend tmphlq binding to sysprint(*), disable verbose mode for mvscmd call Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove references to betterargparser - it cannot be used in its current state as it is too restrictive on volume serial and qualifier args Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove soft language ie beware, please; reduce line lengths, fix a spelling error Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * reorganize test case code for more logical grouping and un-nesting from a for loop, also update copyright year that was missed Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update display command check to only check particular lines instead of entire output in test_volid_address_assigned_correctly which would always be positive when tested against the entire output Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: trevor-glassey <Trevor.Glassey@ibm.com> * Ported bugfixes for #664 and #678 from 1.5.0 (#743) * Ported bugfixes for #664 and #678 * Updated file normalization for sequential datasets * Fixed encoding when a remote source is used * Added encoding normalization for record length (#664) This is a port and tweak from the normalization found in the L2 support branch. Now the normalization applies for both local and remote source files. There is still work left to validate how this behaves when copying to partitioned datasets, and see if the normalization needs to handle directories as well. * Refactored normalization for USS files * Updated line endings normalization in PDSEs * Updated create_temp_with_lf_endings * Added tests for line endings normalization * Added changelog fragment * Updated changelog fragment * Updated copyright information * Added comments to record length tests * Added more context to the record length normalization * Add force (disp=shr) option to zos_lineinfile and update zos_blockinfile with the same locking test case. (#731) * Add DISP=SHR by function force to the user * Add helper function for force cases only for EOF inserts * Test cases added * Just to check in the change case * All test cases for force in zos_lineinfile working properly * Test cases for force blockinfile * Fixes for style of code * One identation error * One identation error * One identation error * Documentation error * All test cases for line infile and block infile * Comments * Change the stdout to assert with expected of ds * Error of test case * Fragment added * Solve typo errors * Update to remove uncessary comment Signed-off-by: ddimatos <dimatos@gmail.com> * Update changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update module with force comments Signed-off-by: ddimatos <dimatos@gmail.com> * Solve some test problems * Copyright year * Copyright year * Add cases for other ecodes * Add test for all encodings * Cover all encodes test cases with force * Blockinfile force added for other encodes and DS types --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@mbp-de-andre-2.gdl.mex.ibm.com> Co-authored-by: ddimatos <dimatos@gmail.com> * remove duplicate function (#753) * Update branch production branch Main with release v1.5.0 content (#756) (#758) * Bug fix to correct job log returining and other edge cases (#688) * Update zos_job_submit to handle some edge cases and return the job log nearly always * Linting correction * Add changelog fragement for pr 683 * Clean up comments in code * Update restructured text for modules and changelog fragment * Add support to check for security exception to job submit module * Add additonal logic for jobs that use typerun=scan * Update chnagelog fragment * Correct run on sentence --------- * Added additional `D SYMBOLS` example to zos_operator documentation (#729) * Update zos_operator with an additional example * Added change fragment --------- * Ported bugfixes for #664 and #678 * Updated file normalization for sequential datasets * Fixed encoding when a remote source is used * Added encoding normalization for record length (#664) This is a port and tweak from the normalization found in the L2 support branch. Now the normalization applies for both local and remote source files. There is still work left to validate how this behaves when copying to partitioned datasets, and see if the normalization needs to handle directories as well. * Refactored normalization for USS files * Updated line endings normalization in PDSEs * Updated create_temp_with_lf_endings * Added tests for line endings normalization * Added changelog fragment * Add copy lib member test case * Corrected changelog fragment * Add conflict resolution * Update 641-copy-loadlib-member.yml * zos_copy forward port a test case for symbols on a volume (#739) * Add a test case for symbols using an existing volume on our test system * Add changelog fragment * Update 739-zos_copy-volume-symbol-test.yml Update description to use symbols overs special chars. * Update test case with added comments --------- * bring in updates to zos_gather_facts RETURN docstring (#744) * Solve problem 619 about set mode on dest file (#746) * Solve problem 619 about set mode on dest file * Add fragment of 746 PR --------- * Merge release tasks for v1.5.0 into release branch (#752) * Updated metadata * Updated copyright information * Updated copyright in README * Generated changelog * Added previous versions to changelog * Changed a minor_change fragment to bugfix * Updated release notes * Update dependency finder to allow for all tests minus skip tests * Update release notes and changelog with full date formats --------- * remove changelog fragments no longer needed (#754) --------- Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: Ivan Moreno <iamorenosoto@gmail.com> Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> Co-authored-by: Rich Parker <richp405@gmail.com> Co-authored-by: ketankelkar <ktnklkr@gmail.com> Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> * update galaxy and meta/ files Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update copyright years Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update rst doc files Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add 1.6.0-beta.1 release summary fragment Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * generate changelog Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add release notes Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add additional stylizing Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * Delete fragements once changelog is generated Signed-off-by: ddimatos <dimatos@gmail.com> * update stylizing around *none* in zos_volume_init docs Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: Ivan Moreno <iamorenosoto@gmail.com> Co-authored-by: Oscar Fernando Flores Garcia <fernandofloresdev@gmail.com> Co-authored-by: Demetri <dimatos@gmail.com> Co-authored-by: Rich Parker <richp405@gmail.com> Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> Co-authored-by: trevor-glassey <Trevor.Glassey@ibm.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@mbp-de-andre-2.gdl.mex.ibm.com> * update zoau version checker and add unit testing Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * Bugfix/769/1.6/zos copy does not overwrite permission on overwriten copy as comunity module (#790) * Change function behaivour, variables names and add test case to ensure result * Change test assertion to ensure wokrs as ansible module * Change test for the HSF * Add fragment * More readable test and better coments * add changelog fragment for zoau version checker bugfix (#800) Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * Add ansible-lint tooling added (#813) * Add ansible-lint tooling Signed-off-by: ddimatos <dimatos@gmail.com> Conflicts: ac galaxy.yml meta/runtime.yml Changes to be committed: new file: .ansible-lint new file: ac modified: galaxy.yml modified: meta/runtime.yml * Changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Removing file brought it in by cherry pick Signed-off-by: ddimatos <dimatos@gmail.com> * Added missing keyword 'build_ignore' Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Bugfix for #807 - zos_copy ignores encoding for binary files (#810) * Updated normalization condition * Added test cases for bugfix * Added changelog fragment * Updated changelog fragment * Update zos_data_set member description created (#816) * Update zos_data_set module member description Signed-off-by: ddimatos <dimatos@gmail.com> * Adding fix for uncataloged vsam and non-vsam data sets Signed-off-by: ddimatos <dimatos@gmail.com> * Fixes the issue of parts of a vsam cluster remaining behind and allows user to correctly delete DS not in cat Signed-off-by: ddimatos <dimatos@gmail.com> * Update module doc to explain data set deltion for given volume Signed-off-by: ddimatos <dimatos@gmail.com> * Unbound local var fix Signed-off-by: ddimatos <dimatos@gmail.com> * Lint corrections Signed-off-by: ddimatos <dimatos@gmail.com> * remove unused imports Signed-off-by: ddimatos <dimatos@gmail.com> * Added 2.16 ignore since our pipeline supports devel which is at this time 2.16 Signed-off-by: ddimatos <dimatos@gmail.com> * Update module doc to explain data set deltion for given volumegit Signed-off-by: ddimatos <dimatos@gmail.com> * Added changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update grammar issue Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Bugfix: Fixes zos_copy and zos_fetch deprecation msg for using _play_context.verbosity (#814) * Add new test case for verbosity check Signed-off-by: ddimatos <dimatos@gmail.com> * Update zos_copy tests to support display.verbosity and nested encoding Signed-off-by: ddimatos <dimatos@gmail.com> Conflicts: tests/functional/modules/test_zos_copy_func.py Changes to be committed: deleted: tests/functional/modules/test_module_display.py modified: tests/functional/modules/test_zos_copy_func.py * Update test framewor to provide support for adhoc module calls Signed-off-by: ddimatos <dimatos@gmail.com> * Update zos_fetch plugin to use the display.verbosity Signed-off-by: ddimatos <dimatos@gmail.com> * Lint correction Signed-off-by: ddimatos <dimatos@gmail.com> * Update test with verbosity Signed-off-by: ddimatos <dimatos@gmail.com> Conflicts: tests/functional/modules/test_zos_copy_func.py Changes to be committed: modified: tests/functional/modules/test_zos_copy_func.py * Change from shell to raw module usage Signed-off-by: ddimatos <dimatos@gmail.com> Conflicts: tests/functional/modules/test_zos_copy_func.py Changes to be committed: modified: tests/functional/modules/test_zos_copy_func.py * changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Changelog fragment change Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Release tasks v1.6.0 (#829) * Galaxy for 1.6 Signed-off-by: ddimatos <dimatos@gmail.com> * Update ansible-core meta runtime Signed-off-by: ddimatos <dimatos@gmail.com> * Update collections private meta Signed-off-by: ddimatos <dimatos@gmail.com> * Missing trailing empty line Signed-off-by: ddimatos <dimatos@gmail.com> * Update readme with volume init info Signed-off-by: ddimatos <dimatos@gmail.com> * Update lint to match galaxy build_ignore Signed-off-by: ddimatos <dimatos@gmail.com> * Changelog summary Signed-off-by: ddimatos <dimatos@gmail.com> * Added changelog Signed-off-by: ddimatos <dimatos@gmail.com> * Minor manual CHANGELOG updates Signed-off-by: ddimatos <dimatos@gmail.com> * Relase notes updated Signed-off-by: ddimatos <dimatos@gmail.com> * Update build_ignore to skip importer_result.json Signed-off-by: ddimatos <dimatos@gmail.com> * update galaxy build_ignore Signed-off-by: ddimatos <dimatos@gmail.com> * update galaxy build_ignore Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Clean up changelog fragements no longer need post releasing 1.6 Signed-off-by: ddimatos <dimatos@gmail.com> * delete older profile, not needed Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: Ivan Moreno <iamorenosoto@gmail.com> Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> Co-authored-by: Rich Parker <richp405@gmail.com> Co-authored-by: ketankelkar <ktnklkr@gmail.com> Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> Co-authored-by: trevor-glassey <Trevor.Glassey@ibm.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@mbp-de-andre-2.gdl.mex.ibm.com> * Bug fix, zos_copy returns an error message when a concurrent copy fails (#794) * Test case to validate bug does not happen * First iteration for solutions * First proposal to validate_disposition * Remove unecesary test * Solvin unecesary code * Cover all cases with bug or false positive * Add test case to ensure behaviour * Get the better version of test case * Add fragment * Solve identation * Solve identation * Solve identation * Solve error in cleanup folders * Change function name * Change variables names * Solve wrote and write * Update changelog entry Signed-off-by: ddimatos <dimatos@gmail.com> * Better verbose and function name * Better message * Solve certification tests * Clearer and eficient version * continuation line over-indented solve * continuation line over-indented solve --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: ddimatos <dimatos@gmail.com> * Enhancement/428/jinja2 template support (#667) * Added Jinja2 support for single files in zos_copy * Fixed cleanup of rendered templates * Added templating for directories * Fixed encoding conversion for directories (#616) While working on adding Jinja2 support for directories, I noticed that files that were in subdirectories would show garbage in a shell, which reminded me of issue #616. After implementing their suggestion, files now show the rendered templates. * Disabled rendering for remote sources * Enhanced exception handling for templates * Added encoding normalization for templates * Added templating tests to zos_copy * Added templating support to zos_job_submit * Fixed missing newline * Added failure when using templates with remote sources * Fixed encoding normalization * Fixed sanity tests issues * Added Jinja2 tests to zos_job_submit * Updated template test for zos_job_submit * Fixed template environment creation * Refactored template environment creation * Fixed initial size computation for local files * Fixed merge mistakes * Updated description for trim_blocks * Updated docs * Added test for Jinja2 loops * Added changelog fragment * Removed duplicated function * Changed try-except blocks used in action plugins * Updated template docs with variable precedence * Changed dictionary update in action plugin * Added another encoding for template tests * Fixed import errors in template.py * Fixed import error in Python 2.7 * Fixed bandit issue * Fixed template cleanup * Updated documentation --------- Co-authored-by: Demetri <dimatos@gmail.com> * Module zos_unarchive & zos_archive (#755) * Initial commit * Created template for zos_unarchive * Initial boiler plate for unarchive * Added unarchive handler selection * Added file/data set search * Adding AMATERSE unpacking * Added support for terse unarchive * Remove zos_archive module from the branch * Adding RECEIVE/XMIT support * Added temporary dataset removal * Adding RECEIVE as an mvscmd * Add RECEIVE using mvs_cmd * Add unpacked datasets display * Added display of unpacked targets support for multiple data sets * Added alias to options * Added include/ exclude options * Added include for xmit and terse * Modified include for all * Adding volume selection for SMS managed * Added list support for MVS archives * Removed unused var * Add force support for mvs data sets * Add archive listing for tar, bz2, and gz * Add unarchive all for tar, gz and bz2 * Added include/exclude support for tar, gz, bz2 * Add mvs_cmd amaterse * Modify mvs_cmd call * Add archive.py * Add latest zos_archive * Refactor mvs_cmd * Remove comments * Adding tests for zos_archive * Added unzip * Added arcroot logic and tarfile * Added changed logic * lastest changes * Multiple changes to zos_archive and zos_unarchive and its tests * Added support for pax * added list tests * Created action plugin for zos_unarchive * Add support for remove * Adding tests for exclusion list USS * Added tests * Add dest_data_set info * Adding multiple test cases and updated docs * Added test for mode * Removed unused stuff * Modified XMIT command * Added expanded paths support for mvs data sets * Added ugly multiple data set tests * Added various new tests for mvs data sets * Added new tests and default dest data set * Added default dest data set * Fixed pep8 issues * Added docs * Added docs * Fixed various sanity issues * removed abc.ABC * Added filters * modified fdict * polish test cases * Added tests * Added record length change * Fixed record length error * fix pylint issue * Add env cleaning in when terse or xmit fails * Moved locking tests below * Added tests for multiple data set in unarchive and modified test archive and unarchive names * Added tests for zos_unarchive * Added replace into zos_data_set invocation * Added replace * Added docs * Added * Added docs * Cleaned docs * Added permission change in zos_unarchive * Added mode support for USS files * Added is_binary option * Added zos_archive and zos_unarchive rst * Updated docs * Updated docs * Updated docs * Removed debug statements * fix pylint issues * Added get restored data sets * Removed replace flag * Added lock process removal * Removed print statements and blank lines * Removed print statements * Removed unused imports * added missing test * Completed uncompleted doc string * Fixed a bug that removed the source when not needed * Fixed pep8 issue * Added removal in correct place * fixed a bug that left behind tmp data sets * Added changes for deleting tmp data sets + pr code review comments * Added a notes and seealso section in the docs * Changed name to xmit_log_data_set * Added comments * Added comments about format * Added more description in terse_pack * Added mode description * Added description for tmp_hlq * Added description for remove * Added 'a' into some statements * Modified dest_state description * Updated option descriptions * Changed badzip error msg * changed adrdssu multiple data set error message * Added tso xmit error message * changed adrdssu multiple data set error message * modified description msg * Updated path to src and changed multiple docs * Fixed module doc issues * Removed unused error and modified error messages * Changed parameter result get * Updated docs in zos_unarchive * Updates docs * Updated docs * Modified mode * Changed src in action * Updated include docs * Updated include docs * Updated docs * Enhanced is_binary support * Fixed dest_data_set issue * Changed docs * Added changes to compute space primary * Added changes to compute space primary * Changed binary test case * Updated docs and test cases * Added xmit_log_data_set docs * Modified dest description * Updated docs * Updated docs * Updated docs * Updated docs * Updated docs * Updated docs * Updated docs * Updated docs * Updaded expanded sources * Updated docs * Updated docs * Updated docs * Updated docs and added dest_data_set * Added dest_data_set * Added primary_space * Fixed pep8 * updated dest_volumes comments * added xmit_log_data_set docs * Updated docs about mode * Added is binary comment * Removed is_binary * Removed is_binary from source * Updated targets list * Updated force docs * Updated the force docs * Updated exclude docs * Updated message for dest in zos_archive * Changed dest to required: true * Removed exclusion_patterns * Updated group in zos_archive * Corrected mode * Updated wording in owner * Updated src * Updated docs * Updated template * Fixed sanity issues * Updated PDS/PDSE to mayus * fixed mem creation * Fixed space type issue * Modifed test for des_data_set * fixed test * Improved dest_data_set * updated docs * updated dest_data_set type * Added latest docs * Removed emergency backup and auto-recovery features. (#896) * Removed emergency backup and auto-recovery features. Initial changelog fragment. * corrected changelog with PR # * Update changelog fragement Signed-off-by: ddimatos <dimatos@gmail.com> * removed 2 unused routines: restore_backup and erase_backup. --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: ddimatos <dimatos@gmail.com> * Enhance/839/add field to job query (#841) * Initial change to manage 1.2.4 column changes for job listing * Improved documentation, code validated with playbooks. * Updated changelog fragment with PR * corrected duplicate entry in output documentation * Changes to comments and field names as per PR 841 Update to in-code documentation Expansion of changelog fragment * correction to datetime processing * fixing sample data * Enhancement/850/Redesining test cases to be clearer (#840) * Remove duplicates and unnecesary declaration from lineinfile test * Delete blockninfile repeat and unecesary cases and finishing lineinfile * Solve test do not pass * Summary USS test case lineinfile * Clean ds tests general * Finishing clear lineinfile * Lineinfile clean proposal * Clean lineinfile * Finishing the clean of lineinfile and blockinfile * Lineinfile USS fully cleaned * Lineinfile clean * Clean blocinfile test * Structure for all test cases * Add fragment * Finall version without dictionaries * Add expected to variables encoding test case and simplify names * Close lineinfile * Remove the unnecesary marks * Get better encoding tests * Get better encoding tests * Remove encodings of datasets * Add encoding for ds * Functional tso command test cases currently can not be run concurrently#880 (#895) * Remove all dependent test and summary in one * Add fragment * Solve multiple commands and text about the explanation of testing * Change variables to be accurate * Remove import do not used * Add comments * Solve typos and writting --------- Co-authored-by: ketankelkar <ktnklkr@gmail.com> * Update make file doc generation with pre and post scripts for a subset of modules. (#906) * Update make file and add scripts to correct doc gen Signed-off-by: ddimatos <dimatos@gmail.com> * Update doc with missing definition of timestamp Signed-off-by: ddimatos <dimatos@gmail.com> * Correct doc to remove colon to prevent doc gen warning Signed-off-by: ddimatos <dimatos@gmail.com> * Update doc base on minor module doc changes Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Enhance/911/Improve-job-query-performance (#911) * Initial change to manage 1.2.4 column changes for job listing * Improved documentation, code validated with playbooks. * Updated changelog fragment with PR * corrected duplicate entry in output documentation * Changes to comments and field names as per PR 841 Update to in-code documentation Expansion of changelog fragment * correction to datetime processing * fixing sample data * changed job to pass column options to disable the program_name column tested against fresh zoau build (881) * removed 'testing' comment * updated re-request call to use the kwargs function. note: this is for record-keeping... about to rebuild this section * Modified call chain in job:status to not pull dd's, making it faster added new 'don't get dd' variable in get_job_output internal * corrected pep8 issues (spaces and parentheses) * Addressing requested changes: eliminate double negative Added new changelog fragment, corrected link to PR Updated return documentation to show dependancy on zoau version * Correct grammar in changelog Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: ddimatos <dimatos@gmail.com> * Update Readme with new collection content Signed-off-by: ddimatos <dimatos@gmail.com> * Update copyright yrs Signed-off-by: ddimatos <dimatos@gmail.com> * Generated doc updates Signed-off-by: ddimatos <dimatos@gmail.com> * Update module doc to clear the boolean value comes back as 1 or 0 Signed-off-by: ddimatos <dimatos@gmail.com> * Move a fragment that was outside the changlog folder Signed-off-by: ddimatos <dimatos@gmail.com> * Changelog fragment lint error corrections and summary added Signed-off-by: ddimatos <dimatos@gmail.com> * Changelog generated Signed-off-by: ddimatos <dimatos@gmail.com> * release notes Signed-off-by: ddimatos <dimatos@gmail.com> * Delete changelog fragments Signed-off-by: ddimatos <dimatos@gmail.com> * Update test with string match Signed-off-by: ddimatos <dimatos@gmail.com> * Unused changed variable, found by flake8 Signed-off-by: ddimatos <dimatos@gmail.com> * Unused 'normalize_line_endings' functiion found by flake8 Signed-off-by: ddimatos <dimatos@gmail.com> * Correct flake8, import 'path' from line 18 shadowed by loop variable Signed-off-by: ddimatos <dimatos@gmail.com> * Correct comment starting at a new line Signed-off-by: ddimatos <dimatos@gmail.com> * Updated new script copyright year Signed-off-by: ddimatos <dimatos@gmail.com> * Fix release notes formatting Signed-off-by: ddimatos <dimatos@gmail.com> * Bugfix/583/zos lineinfile does not behave community (#916) * First iteration * Clean test apf_func * Add test case to validate change in ZOAU 1.2.4 * Change test case for the new change * Change zos_job_query test accord to ZOAU 1.2.4 * Restore test as dev * Return test to originals * Return job_query as original * Add fragment * Add test case for DS and change fragments * Solve check of testing for DS * Change logic of tests * Deprecate debug=true in zos_blockinfile and set as_json=true (#904) * Change debug for as_json option * Add fragment --------- Co-authored-by: ketankelkar <ktnklkr@gmail.com> * Add test case to validate response come back complete (#918) * Add test case to validate response come back complete * Add fragment --------- Co-authored-by: ketankelkar <ktnklkr@gmail.com> * Remove conditional unnecessary (#934) * Remove conditional unecesary * Add fragment * Correct the conditional * v1.7.0 beta.2 into dev (#953) * Staging v1.7.0 beta.1 (#915) * Added tagging of converted files * Updated tests for zos_encode * Added restoration of PDS/PDSE members when module fails zos_copy can now track which members got overwritten or newly added to a partitioned data set, and restore them accordingly. This commit includes a refactorization of the copy_to_pdse method to simplify the restoration work. * Fixed cleanup of dest when module fails * Removed exception used for debugging * Added pytest markers * solved pep8 issue * Added more information to error when restoration fails * Update make file with new features to manage the nodes (#563) * Update make file with new features to manage the nodes Signed-off-by: ddimatos <dimatos@gmail.com> * Correct typo Signed-off-by: ddimatos <dimatos@gmail.com> * updated makefile to allow for no password in the clear and added a new host Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Fix a bug that failed to copy the cert to the managed node (#577) Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Added apropiate error message in zos_lineinfile when src is not found * Better error message * Added fragment for changelog * Update git issue slection field (#593) * Update git issue slection field * Added some additional future versions * Added rule to ignore python 2.7 compile not supporting f strings * Corrected rule in ignore file * Update 584-zos_lineinfile-error-message.yml * Added missing fragments for issues 309 and 408 * update make file and related artifacts with recent zVM changes (#598) Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Expand what is ingored for venv to all venv's (#613) Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Ansible-core versions with updated ignore file pass (#615) * Validated that ansible-core versions with updated ignore file pass locally Signed-off-by: ddimatos <dimatos@gmail.com> * remove redundant entry on last line Signed-off-by: ddimatos <dimatos@gmail.com> * remove redundant entry on last line Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Update make file to read exported var VENV else default to 'venv' (#614) * Update make file to read exported var VENV else default to 'venv' Signed-off-by: ddimatos <dimatos@gmail.com> * Correct the default from defaultValue to 'venv' Signed-off-by: ddimatos <dimatos@gmail.com> * Bugfix/558/zos copy backup dev (#609) * Create emergency backup only if force is set to False and dest exists * Added Changelog fragment * Updated imports ansible.module_utils._text to ansible.module_utils.common.text.converters (#602) * Updated import in zos_fetch * Updated import in zos_job_submit * Updated import in module_utils/data_set * Updated import in module_utils/System * Updated import in zos_copy * Updated import in zos_fetch * Added changelog fragment * Update enhancement-518-text-converter-import.yml * Updates the makefile and related scripts for the latest mounts (#628) * Update makefile coments and use --ignore for pytest Signed-off-by: ddimatos <dimatos@gmail.com> * lexicographical order targets Signed-off-by: ddimatos <dimatos@gmail.com> * Update makefile and artifacts to support new mounts Signed-off-by: ddimatos <dimatos@gmail.com> * Removing encrypted versions of the scripts, no longer senstive content Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * update profile created for mount points Signed-off-by: ddimatos <dimatos@gmail.com> * Correct f-string usage to remain 2.7 compatible (#659) * Correct f-string usage to remain 2.7 compatible Signed-off-by: ddimatos <dimatos@gmail.com> * Update changelog fragments Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Updated shell scripts for development tooling Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> *… * Merge release v1.10.0-beta.1 into 'main' (#1509) * Added PR# to changelog tweaked description of new feature in both affected functions. * added changelog for ticket * Modified versions in bug issue template to avoid users picking a non-existing version Signed-off-by: Fernando Flores <fernandofloresdev@gmail.com> * Added zoau_api_version logic to check for 1.2.5 or later as a condition for wait_arg Added mention of this to documentation of interface * corrected pep8 errors * removed redundant changelog fragment * Changed Enhancements to minor_changes * push updated module doc Signed-off-by: ddimatos <dimatos@gmail.com> * Enabler/validate path join (#962) * Added real path fetch to base * Sec changes * Updated changelog and template * Added validation to zos_copy and zos_fetch Signed-off-by: Fernando Flores <fernandofloresdev@gmail.com> * Corrected positional argument * Added validation changes Signed-off-by: Fernando Flores <fernandofloresdev@gmail.com> * Removed f-string * Fixed path join for copy_to_file --------- Signed-off-by: Fernando Flores <fernandofloresdev@gmail.com> * Removed 'wait' as an option, and pass wait_arg=true to zoau * Added latest to allow member copy when disp=shr (#980) * Added latest to allow member copy when disp=shr * Added changelog fragment * Added new force option and test for locked data sets non VSAM * Fixed pep8 issue * Added new option force lock * Modified test case with new option * Added force option * Added doc and warning * Updated changelog fragment * Update 980-zos-copy-disp-shr.yml * Updated changelog fragment * Removed unused comments Signed-off-by: Fernando Flores <fernandofloresdev@gmail.com> * Added message * Added force_lock to all CopyHandlers * Modified test case * Changed use of dataset vs data set --------- Signed-off-by: Fernando Flores <fernandofloresdev@gmail.com> * Changed operator_action_query to wait=false time = 5 Renamed vague variable name to "use_wait_arg" Reflected changes and 1.2.5 dependancy in the changelog fragment * Enhancement/423/zos copy add data set member alias support (#1014) * add aliases option and enable text-based member copy w alias to an existing pds Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * disable alias included in data set member listing when collecting src members from pds Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * switch 'alias' option introduced in zoau1.2.5 to '-H' flag available in zoau1.2.4. also enable alias copying of executables Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * implement aliases for copy to/from USS, add guard rail for non-executable copy to USS with aliases Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add exception handler for executable PDS copy, handle non-existent library pds for executable USS src, add error message for PDS copy attempt to USS file Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * clean up init functions, break up long lines Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * refactor executable member to member copy for alias work, this commit refactors some helpers which break a select few loadlib tests, but those will be refactored in upcoming comimts Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * refactor and expand test_copy_pds_loadlib_member_to_uss test case to copy to a new loadlib Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add aliases error raised check to text-based pds member copy to uss Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add aliases error raised check to text-based pds member copy to uss Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * parametrize cobol program to pass in custom output string, create helper method around running and validating loadlib pgms, refactor executable tests to use helper method, add helper method to create loadlib w multiple members, add test case for loadlib to loadlib copy w and w/o aliases. Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add test case for copying entire loadlib to uss dir and then to another loadlib. refactor other loadlib test case to reduce loc Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add new pytest markers for aliases and loadlib test cases Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * pull in a sneak preview version of bug #920 addressed in PR #968 which adds LIBRARY as a valid value to the dest_data_set option Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * alter placement of aliases option to go after executable options Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add docs and examples for aliases option Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * minor tweaks to doc Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * addres santiy check issues Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * missed a sanity check issue Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add changelog fragment Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * resolve remaining merge conflicts Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * resolve pep8 style issues Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * cleanup spacing issue in examples docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add updated rst file Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add comments for explaning logic/code flow around full pds copy Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * New module zos_script (#961) * First version of the action plugin and module doc * Added remote execution * Fixed handling and cleanup of temp files * Fixed mode setting for scripts * Fixed undefined variable error in action plugin * Fixed chdir when running command * Added creates and removes options * Changed encoding validation * Filled out docs for the module * Added examples * Filled out RETURN block for module * Enhanced error messages * Generated docs for zos_script * Added first tests for zos_script * Reordered args * Added mode check to remote script test * Fixed template rendering * Fixed tests * Added tests for error handling and templates * Fixed a sanity error when returning a failure JSON * Updated ignore files * Updated module docs * Updated repository templates for issues * Fixed whitespace in docs * Updated tmp_path description * Updated notes in documentation * Removed use of local_charset * Removed private args This commit finishes the work needed to remove two sanity tests exceptions. * Fixed permissions for remote scripts * Updated module documentation * Updated documentation for tmp_path * main change to version checker for consolidation * Removed old version checker/shell call, added changelog entry * Changed location of duration value set, so it will always be populated. * found another edge case where duration was not being returned. * added tolerance to test on max_rc, where duration is not always returned. * Staging v1.7.0 merge to main (#1019) (#1023) * Galaxy 1.7 updates * Update meta runtime to support ansible-core 2.14 or later * Update ibm_zos_core_meta.yml with updated version * Update readme to align to supported ansible versions and new urls * Added additional sanity ignore files to the exclude list * Added additional sanity ignore files to the exclude list for ansible-lint. * Update copyright yrs for source files that were overlooked * Remove requirements from module doc, rely on offerings minimum requirements, also zoau 1.2.1 never was supported * Add changelog summary for 1.7 * Adding generated antsibull-changelog release changelog and artifacts * Remove v1.7.0_summary, its no longer needed * Update release notes for ac 1.7.0 * Remove unsupported collection versions requiring a version of zoau that is EOS --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> * Enabler/validate path join part 2 (#1029) * Added real path fetch to base * Sec changes * Updated changelog and template * Added validation to zos_copy and zos_fetch Signed-off-by: Fernando Flores <fernandofloresdev@gmail.com> * Corrected positional argument * Added validation changes Signed-off-by: Fernando Flores <fernandofloresdev@gmail.com> * Removed f-string * Fixed path join for copy_to_file * Added validation function to template * Added new files * Added changelog fragment --------- Signed-off-by: Fernando Flores <fernandofloresdev@gmail.com> * Add known issues doc for utf8 issues (#1035) * Add known issues doc for utf8 issues Signed-off-by: ddimatos <dimatos@gmail.com> * Updated changelog fragement to remove colon usage Signed-off-by: ddimatos <dimatos@gmail.com> * Corrected changelog errors Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * [Enhancement] [zos_copy] Add support for ASA control chars (#1028) * Added ASA support when copying from datasets with control chars * Added ASA support when copying from USS to sequential data sets * Added ASA support when copying from USS to PDS/E * Re-enabled copy from datasets to USS * Added copy from non-ASA data sets to seq ASA ones * Added copy from non-ASA data sets to partitioned ones * Added allocation of ASA destination data sets * Added first version of ASA compatibility validation * Added validations for asa_text * Added asa_text documentation * Fixed record_length issue when creating ASA data sets * Fixed record_length issue * Added asa_text example * Added first test * Added DBRM doc to zos_copy module (#1025) * Added DBRM doc to zos_copy module Signed-off-by: ddimatos <dimatos@gmail.com> * Removed an extra space from doc Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Added more tests * Added changelog fragment * Updated RST files * Removed unused import * Fixed merge * Added another validation * Updated docs * Changed use of tsocmd for mvscmd * Fixed merge error * Fixed bug with VSAM sources * Fixed bug with VSAM destinations --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: Demetri <dimatos@gmail.com> * Merge staging-v1.8.0-beta.1 back to dev (#1040) * Resolved merge conflicts from cherrypick * Delete changelogs * Fix sending a local archive into remote fails (#1045) * Added test * Added test for local archive and then remote unarchive * Added changelog * Removed format fixture * Ensure tempfile cleanup * Clean temporary data sets created during XMIT unarchive operation (#1049) * Added a temp cleanup * Added changelog * Modified changelog * Added removal of src if remote_src is False Signed-off-by: Fernando Flores <fernandofloresdev@gmail.com> * Modified changelog fragments --------- Signed-off-by: Fernando Flores <fernandofloresdev@gmail.com> * Enabler/1002/test_collections_on_ansible core 2_16 (#1053) * Add sanity ignore * Move dependencyfinder * Revert "Move dependencyfinder" This reverts commit 2bbbc5adffe94b32dd6d1af12f7c7cace93cca94. * Modify dependecyfinder * Modify dependecyfinder * Modify dependecyfinder * Add fragment * Modify fragment * [Enabler] [zos_mvs_raw] Remove Try, Except, Pass from code (#1051) * Added action inside exception to avoid pass * Added action inside exception to avoid pass * Added changelog * Update sanity test ignore (#1048) * Fixed Sanity ignore #6 and reduce use of Sanity issues 8-9 * Remove unused import * Remove ignore * Add fragment * Modify fragment * Change fragment * Update 1048-Update_sanity_tests_ignore.yml * Change ignore 2_dot_14 --------- Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> * Update_sanity_ignore 2_16 (#1056) * Update sanity ignore 2_16 * Add fragment * Change fragment * [Enabler] [module_utils/dd_statement.py] Changed try except pass to except specific DatasetDeleteError exception (#1052) * Changed try except pass to except specific class * Added changelog * Update 1052-try-except-pass-dd-statement.yml * Update dd_statement.py updated copyright year * Removed run command use of subprocess from encode defaults infavor of using AnsibleModule run command (#1055) * Removed run command use of subprocess from encode defaults infavor of using ansible module * Added changelog * [1.9.0] zos_apf remove try expect pass to better exception handling (#1036) * Removed except pass * Added empty strings * Added changelog * Corrected changelog * Modified if statement to honor current behavior * Update 1036-apf-try-except.yml * Update 1036-apf-try-except.yml * [v1.9.0] Replace randint to fix "Standard pseudo-random generators ... " bandit warning (#1016) * Changed random member name generation to choices * Modified choices in module_utils/data_set and blockinfile test * Added changelog fragment * Remove randint import * Removed unused imports * 1043 bug title zos operator is passing wrong value to zoauopercmd (#1044) * corrected kwarg index value from 'wait_arg' to 'wait' Also corrected true/false issue in zoaq * Added and updated changelog. * update PR number in changelog fragment * changed test from \$ to \\$ to eliminate warning * added blocking test to maks sure minimum wait is reached in zoau>1.2.4.5 * removed the else condition from the blocking test, since it is not needed. * corrected tense grammer in changelog fragment * corrected capitalization of ZOAU in changelog fragment. * Bug 1041 zos submit job honor return output literally (#1058) * initial commit to pass return_output to job_output. * corrected fragment name to match branch * tweaked data set test to show result values if positive test fails * removed trace in zos_data_set, and added trace output to job_submit * removed extra text from functional testing. * put in correct PR number in changelog fragment. * changed trivial to minor_changes, added documentation to dd_scan in job:job_output. * Enabler/1024/remote_tmp for zos_script (#1060) * Changed tmp_path for Ansible's remote_tmp * Remove tmp_path from module's options * Update module documentation * Remove tmp_path test case * Update zos_script's RST file * Add changelog fragment * Updated module examples * [zos_copy] Files corrupted after second copy (#1064) * Initial change to replace shutil.copy * Added fix for corrupted directory copies * Added changelog fragment * Modified docstring and fixed copy_tree * Added punctiation * Added copystat * Added set mode for dirs * Update 1064-corruped-second-copy.yml * [Documentation] [zos_tso_command] Add REXX exec example (#1065) * Add REXX exec example * Add fragment * Update module documentation * Fix PR link * Reword example task name * Updated REXX example * Mvs to non existent mvs copy destination attrs match up (#1066) * Fixed error * Fix identation * Add fragment * Modify fragment * Modify fragment * Modify fragment * Bugfix/374/module zos mvs raw errors with long multi line quoted string in content field (#1057) * Add function of write content * Push easy soultion for two cases * Fix identation and more issues * Fix identation and more issues * Solve error of null * Add validation comments and separete the code * Add fragment * Modify logics * Return overthink * Add explanation for the user and change logic * Add explanation for the user and change logic * Change documentation * Change fragment * Better error message, better documentation and fragment * Get better mesages * Change the logic * Change documentation * Change logic * Add scape to # * Check failing * Check failing * Add valid scapes * Update zos_mvs_raw fragment and module doc Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: ddimatos <dimatos@gmail.com> * Remove tarfile.exctractall and zipfile.extractall in favor of individual member extraction for sanity (#1077) * Implemented extract_all function * Added changelog * Removed commented lines * Update ac tool version of pyyaml to avoid wheel issue Signed-off-by: ddimatos <dimatos@gmail.com> * Changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Fixed sanity issues with zos_mvs_raw (#1084) * [bugfix][v1.9.0][zos_unarchive]action plugin does not clean up remote temporary files after completion (#1073) * Push solution * Add fragment * Modify logic for fails and ensure works * Change bug * Chamge fragment * Chamge fragment * Add comma --------- Co-authored-by: Demetri <dimatos@gmail.com> * [v1.9.0] Bugfix/837/missing ZOAU imports (#1042) * Added new missing import handler to zos_operator * Added new import handler to job util and zos_job_query * Added ZOAU check to zos_operator * Fixed sanity test issue * Fixed Python 2.7 sanity issue * Add changelog fragment --------- Co-authored-by: Demetri <dimatos@gmail.com> * [Enhancement][zos_job_submit] Modify error messages to be clearer (#1074) * Improved error messages * Updated failure logic --------- Co-authored-by: Demetri <dimatos@gmail.com> * update link in managed_node doc (#1089) * update link in managed_node doc Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * Updated docs regarding managed node Signed-off-by: ddimatos <dimatos@gmail.com> * Doc fragment Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: ddimatos <dimatos@gmail.com> * First check of solution * Add clear solution * Return local * Add test ignores * Add ignore to 2.16 * Optimize the encoding * Remove unused import * Return encoded * Add encoding * Remove unused encoded * Modified logic to remove tmp files if they exist and not only if location is local * Added temp_file null definition * Update 1091-Update_undocumented_argument_and_import_exception.yml * Merge release v1.8.0 into dev branch (#1095) * Staging v1.7.0 beta.1 (#915) * Added tagging of converted files * Updated tests for zos_encode * Added restoration of PDS/PDSE members when module fails zos_copy can now track which members got overwritten or newly added to a partitioned data set, and restore them accordingly. This commit includes a refactorization of the copy_to_pdse method to simplify the restoration work. * Fixed cleanup of dest when module fails * Removed exception used for debugging * Added pytest markers * solved pep8 issue * Added more information to error when restoration fails * Update make file with new features to manage the nodes (#563) * Update make file with new features to manage the nodes Signed-off-by: ddimatos <dimatos@gmail.com> * Correct typo Signed-off-by: ddimatos <dimatos@gmail.com> * updated makefile to allow for no password in the clear and added a new host Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Fix a bug that failed to copy the cert to the managed node (#577) Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Added apropiate error message in zos_lineinfile when src is not found * Better error message * Added fragment for changelog * Update git issue slection field (#593) * Update git issue slection field * Added some additional future versions * Added rule to ignore python 2.7 compile not supporting f strings * Corrected rule in ignore file * Update 584-zos_lineinfile-error-message.yml * Added missing fragments for issues 309 and 408 * update make file and related artifacts with recent zVM changes (#598) Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Expand what is ingored for venv to all venv's (#613) Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Ansible-core versions with updated ignore file pass (#615) * Validated that ansible-core versions with updated ignore file pass locally Signed-off-by: ddimatos <dimatos@gmail.com> * remove redundant entry on last line Signed-off-by: ddimatos <dimatos@gmail.com> * remove redundant entry on last line Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Update make file to read exported var VENV else default to 'venv' (#614) * Update make file to read exported var VENV else default to 'venv' Signed-off-by: ddimatos <dimatos@gmail.com> * Correct the default from defaultValue to 'venv' Signed-off-by: ddimatos <dimatos@gmail.com> * Bugfix/558/zos copy backup dev (#609) * Create emergency backup only if force is set to False and dest exists * Added Changelog fragment * Updated imports ansible.module_utils._text to ansible.module_utils.common.text.converters (#602) * Updated import in zos_fetch * Updated import in zos_job_submit * Updated import in module_utils/data_set * Updated import in module_utils/System * Updated import in zos_copy * Updated import in zos_fetch * Added changelog fragment * Update enhancement-518-text-converter-import.yml * Updates the makefile and related scripts for the latest mounts (#628) * Update makefile coments and use --ignore for pytest Signed-off-by: ddimatos <dimatos@gmail.com> * lexicographical order targets Signed-off-by: ddimatos <dimatos@gmail.com> * Update makefile and artifacts to support new mounts Signed-off-by: ddimatos <dimatos@gmail.com> * Removing encrypted versions of the scripts, no longer senstive content Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * update profile created for mount points Signed-off-by: ddimatos <dimatos@gmail.com> * Correct f-string usage to remain 2.7 compatible (#659) * Correct f-string usage to remain 2.7 compatible Signed-off-by: ddimatos <dimatos@gmail.com> * Update changelog fragments Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Updated shell scripts for development tooling Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Update collab template Signed-off-by: ddimatos <dimatos@gmail.com> * Update collab template Signed-off-by: ddimatos <dimatos@gmail.com> * Update collab template Signed-off-by: ddimatos <dimatos@gmail.com> * Template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Template updates Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * changed zos_job_query to allow multi/embedded wildcard job_values. job.py/_get_job_status now uses fnmatch to test for wildcarded job_names. added test looking for a wildcard job name. added basic changelog text as 'enhancement'. * expanded wildcard support to job_id as well as job_name corrected error-handling issue in query routine playbook tests complete * cleaned up pep8 issues * Bug fix to correct job log returining and other edge cases (#683) * Update zos_job_submit to handle some edge cases and return the job log nearly always Signed-off-by: ddimatos <dimatos@gmail.com> * Linting correction Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragement for pr 683 Signed-off-by: ddimatos <dimatos@gmail.com> * Clean up comments in code Signed-off-by: ddimatos <dimatos@gmail.com> * Update restructured text for modules and changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Add support to check for security exception to job submit module Signed-off-by: ddimatos <dimatos@gmail.com> * Add additonal logic for jobs that use typerun=scan Signed-off-by: ddimatos <dimatos@gmail.com> * Update chnagelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update zos_job_submit.py Grammar change and correction. --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Added uss_tag_encoding function * Fixing linter issues * removed extraneous comment on query, eliminated unused variable * responding to reviewer comments * Expanded query tests to run a hello jcl file to make sure we are searching for a job id that exists. * Expanded documentation in code to highlight asterisk availability. Added examples showing multi-asterisk search. * Corrected 2 documentation errors * Change to documentation text (indent on multi line string?) * Still trying to get documentation to pass * Looks like '---' was killing documentation block. * Update zos_blockinfile with complex examples (#727) * Update zos_blockinfile with complex examples Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragement Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Added additional D SYMBOLS example to zos_operator documentation (#730) * Update zos_operator with an additional example Signed-off-by: ddimatos <dimatos@gmail.com> * Added changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * zos_gather_facts - add sample output to RETURN docstring (#722) * zos_gather_facts - add sample output to RETURN docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add changelog fragment Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * zos_gather_facts RETURN docstring - add sample sysplex_name and lpar_name values Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * Update generated module doc Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: ddimatos <dimatos@gmail.com> * 574find (#668) * change to allow find loop to continue when one particular data set element is not found This has been validated with a zos-check-find.yaml playbook * added functional test that includes non-existant pds entry, which should be skipped over * added changelog fragment. * zos_blockinfile , can quotes in content can be supported (#680) * Verify coomand of ZOAU support the doble quotes and get better output of message * Verify coomand of ZOAU support the doble quotes and get better output of message * Restore to the one tyme function solving TypeError * Test about cases with quotes supported * Solve comments * Comments in the changelog * Adjust test for working accord the text * A needed space * All in structured * Comments solved * Better order --------- Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> Co-authored-by: Demetri <dimatos@gmail.com> * zos_copy mode is applied to the destination directory, a deviation from the communtiy module behavior. (#723) * Verify coomand of ZOAU support the doble quotes and get better output of message * Verify coomand of ZOAU support the doble quotes and get better output of message * Restore to the one tyme function solving TypeError * Test about cases with quotes supported * Solve comments * Comments in the changelog * Adjust test for working accord the text * Solve the dest functional mode set for the applied to destination directory * Identation and spaces * To work well * To work well * To work well * To work well * Changelogs added * Solved the fragment test and separte the cases --------- Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> * corrected job test case that wanted to extract job id. * changed call to zos_job_query in the functional test. * zos_data_set - add force parameter to enable member delete via disp shr (#718) * add force parameter to enable member delete via disp shr Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update link to PR instead of issue Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * fix minor mistake in func test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update DOCstring for missed items highlighted in PR review Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update copyright years in data set module util Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * Update changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update to doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> * Update to doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> * Update to doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> * update doc, rst Signed-off-by: ddimatos <dimatos@gmail.com> * Update doc and examples Signed-off-by: ddimatos <dimatos@gmail.com> * Added handler for job not found edge cases (None not iterable errors) * corrected pep8 issue (bad indent) * removed tracking text from error/not found messages. * Update zos_job_query.py * Add and automate a load lib test case for module zos_copy (#640) * Modified if statement * Added changelog fragment * Corrected statements Corrected PR in changelog fix lint issue * Adding test case on the work * Added assertion to loadlib test Changed cobol src Added newlines * Correcting cobol src * Removed unnecessary comments * Removed deprint statements for debug * Update 601-copy-loadlib-member.yml * Reduce space primary * Make sure cobol is executable * corrected name * Corrected names * Adding debug statements * modified name in link step * Changing copy name * Removed print statements for debug * Removed previous changelog * Removed unused fragment * Removed test case * Add max_rc support for module zos_tso_command (#666) * rebuilding 565 * fixing pep8 issue * corrected not in code, added functional test to use maxrc * changing maxrc return codes as indicated * updated copy_rexx to incorporate max_rc * pep8 issue on an if statement * changed test to dump intermediate values temporarily * added max_rc to arg params to get it to pass through * removed extra output on test * Added changelog fragment. * Update 565-zos_tsocommand_maxrc.yml * PARTIAL correction based on reviewer's feedback. Will complete later today. * Partial push: change origrc to orig_rc and added documentation. Added experimental loop for multiline results. Changed error case to continue through the commands even after a failure (?!) * Changes based on review feedback: - rc is actual returned value - origrc/orig_rc removed - in a list of commands, first error (considering max_rc) stops the chain - while processing commands, a list of success/failures are kept for error state info - changed listds to a longer, more complete string in example * updated test case for max_rc to use longer, more complete listds command * Minor pep8 and yamllint changes * Updated string builder to use append and join, for efficiency. * Update changelog fragment to correspond to pull request number Signed-off-by: ddimatos <dimatos@gmail.com> * removed old fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Changed output of command-by-command to use max_rc instead of Max RC, for consistency. * clarifying failure message when user hasn't set a max_rc. * corrected date on copyright notice to include current year. * corrected copyright date on test case --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: Demetri <dimatos@gmail.com> * added changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * zos_copy forward port a test case for symbols on a volume #739 (#740) * added test case for symbols on a volume Signed-off-by: ddimatos <dimatos@gmail.com> * Changelog fragment added Signed-off-by: ddimatos <dimatos@gmail.com> * Update test case with added comments Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Volume Initialization Module (#654) * Adding ZOS_INIT plugins The module and action for ZOS_Init command, under zos_ickdsf_command. Designed for version 1.3.1, and example given references removed zos_ssh.py. * Create zos_ickdsf_command.rst Tentative documentation. Does not quite list every return, and program may change to work with overall zos core collection. * Update zos_ickdsf_command.py fixed to pass pep8 sanity test. * Update zos_ickdsf_command.py Edited zos_ickdsf_command.py in action folder to pass pep8 style check. * bugfix - undo hard-coded storagegroup param from command passed to mvs_raw Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update encoding and licensing info Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * edit descriptions, leaving in old to verify messaging with dfsms team Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove references to buildix Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * cleanup examples docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add return docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add some unit tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update dict names, some clean up Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update interface and docs with cleaner descriptions and define default values and required true/false where applicable Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove init param and bring out sub-options one level Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename module from zos_ickdsf_command to zos_ickdsf_init Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove files referring to zos_ickdsf_command; i forgot to untrack these in the previous commit Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove params from intitial definition of results dict which are not always present, remove unused return params, rename message to msg to match ibm_zos_core and ansible style Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add ignore missing gplv3 license warning from sanity testings to existing ignore files Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * style edits to pass sanity tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add in-depth task name to first example docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add minimal params functional test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * convert action plugin to module_util, remove IckdsfCommand and CommandInit class definitions and custom error classes, move convert method to module_util Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename rc param, update func test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add note about disabled verify_offline check Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * removed volume_address checks which can be handled by ansiblemodule parsing validation or ickdsf itself; changed function signature of convert method to include results dicts so it can pass it to fail_json method in the case of module failure Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add invalid volume_address func tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * change param names, organize mvscmd return output Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * consolidate negative func tests, add negative tests for vtoc_tracks param, start list for postive tests to implement Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add func tests for "verify_existing_volid" and "verify_no_data_sets_exist" Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add volid positive and negative func tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add mutual exclusion to no index/sms_managedw with func test, minor edits to comments * add func tests for index param Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add func test for vtoc_tracks param Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove 'addr_range', 'volid_prefix' and 'output_html' params; add example with ansible loop to init multiple serial volumes Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * convert 'content' return param to list for prettier playbook output Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add specific examples to docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add guard rail test with volser check set to ensure target volume is as expected Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * switch target test vol to user02, remove existing data sets in initial test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * start adding author names to docstring, edits of yaml for sanity test Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename module to zos_volume_init Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove unit test suite which was not kept up to date with code restructure Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove old/outdated .rst file Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * bring up to speed with v1.5.0-beta.1 which is currently more recent than dev Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add sanity test 2.15 file Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * clean up trailing empty lines/space chars Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * more pep8 style Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * pull in more changes from latest dev branch Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * tweak tests/sanity/ignore-2.10.txt file Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename volume_address to address, rename verify_no_data_sets_exist to verify_volume_empty, rename verify_existing_volid to verify_volid, rename vtoc_tracks to vtoc_size, rename return param command to cmd, make suggested doc changes, switch off check_mode Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * resolve pep8 line too long issue Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add year to copyright, add gh user name to author, restyle true/false with code rather than italics, remove backslashes in docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add changelog fragment Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update zos_fetch test case to create data set instead of using one that gets wiped away by zos_volume_init test cases Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * address PR comments including pep8 styling, docstring changes including add seealso section, add details around authorized usage of ickdsf, some light wordsmithing, and remove cmd from return structure Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add backticks to other special chars in docs for consistency Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * on more formatting tweak to national chars in docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add new param to support tmp_hlq Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * extend tmphlq binding to sysprint(*), disable verbose mode for mvscmd call Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove references to betterargparser - it cannot be used in its current state as it is too restrictive on volume serial and qualifier args Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove soft language ie beware, please; reduce line lengths, fix a spelling error Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * reorganize test case code for more logical grouping and un-nesting from a for loop, also update copyright year that was missed Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update display command check to only check particular lines instead of entire output in test_volid_address_assigned_correctly which would always be positive when tested against the entire output Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: trevor-glassey <Trevor.Glassey@ibm.com> * Ported bugfixes for #664 and #678 from 1.5.0 (#743) * Ported bugfixes for #664 and #678 * Updated file normalization for sequential datasets * Fixed encoding when a remote source is used * Added encoding normalization for record length (#664) This is a port and tweak from the normalization found in the L2 support branch. Now the normalization applies for both local and remote source files. There is still work left to validate how this behaves when copying to partitioned datasets, and see if the normalization needs to handle directories as well. * Refactored normalization for USS files * Updated line endings normalization in PDSEs * Updated create_temp_with_lf_endings * Added tests for line endings normalization * Added changelog fragment * Updated changelog fragment * Updated copyright information * Added comments to record length tests * Added more context to the record length normalization * Add force (disp=shr) option to zos_lineinfile and update zos_blockinfile with the same locking test case. (#731) * Add DISP=SHR by function force to the user * Add helper function for force cases only for EOF inserts * Test cases added * Just to check in the change case * All test cases for force in zos_lineinfile working properly * Test cases for force blockinfile * Fixes for style of code * One identation error * One identation error * One identation error * Documentation error * All test cases for line infile and block infile * Comments * Change the stdout to assert with expected of ds * Error of test case * Fragment added * Solve typo errors * Update to remove uncessary comment Signed-off-by: ddimatos <dimatos@gmail.com> * Update changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update module with force comments Signed-off-by: ddimatos <dimatos@gmail.com> * Solve some test problems * Copyright year * Copyright year * Add cases for other ecodes * Add test for all encodings * Cover all encodes test cases with force * Blockinfile force added for other encodes and DS types --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@mbp-de-andre-2.gdl.mex.ibm.com> Co-authored-by: ddimatos <dimatos@gmail.com> * remove duplicate function (#753) * Update branch production branch Main with release v1.5.0 content (#756) (#758) * Bug fix to correct job log returining and other edge cases (#688) * Update zos_job_submit to handle some edge cases and return the job log nearly always * Linting correction * Add changelog fragement for pr 683 * Clean up comments in code * Update restructured text for modules and changelog fragment * Add support to check for security exception to job submit module * Add additonal logic for jobs that use typerun=scan * Update chnagelog fragment * Correct run on sentence --------- * Added additional `D SYMBOLS` example to zos_operator documentation (#729) * Update zos_operator with an additional example * Added change fragment --------- * Ported bugfixes for #664 and #678 * Updated file normalization for sequential datasets * Fixed encoding when a remote source is used * Added encoding normalization for record length (#664) This is a port and tweak from the normalization found in the L2 support branch. Now the normalization applies for both local and remote source files. There is still work left to validate how this behaves when copying to partitioned datasets, and see if the normalization needs to handle directories as well. * Refactored normalization for USS files * Updated line endings normalization in PDSEs * Updated create_temp_with_lf_endings * Added tests for line endings normalization * Added changelog fragment * Add copy lib member test case * Corrected changelog fragment * Add conflict resolution * Update 641-copy-loadlib-member.yml * zos_copy forward port a test case for symbols on a volume (#739) * Add a test case for symbols using an existing volume on our test system * Add changelog fragment * Update 739-zos_copy-volume-symbol-test.yml Update description to use symbols overs special chars. * Update test case with added comments --------- * bring in updates to zos_gather_facts RETURN docstring (#744) * Solve problem 619 about set mode on dest file (#746) * Solve problem 619 about set mode on dest file * Add fragment of 746 PR --------- * Merge release tasks for v1.5.0 into release branch (#752) * Updated metadata * Updated copyright information * Updated copyright in README * Generated changelog * Added previous versions to changelog * Changed a minor_change fragment to bugfix * Updated release notes * Update dependency finder to allow for all tests minus skip tests * Update release notes and changelog with full date formats --------- * remove changelog fragments no longer needed (#754) --------- Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: Ivan Moreno <iamorenosoto@gmail.com> Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> Co-authored-by: Rich Parker <richp405@gmail.com> Co-authored-by: ketankelkar <ktnklkr@gmail.com> Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> * Merge master to dev for 1.6.0 beta.1 (#763) * Update branch production branch Main with release v1.5.0 content (#756) * Bug fix to correct job log returining and other edge cases (#688) * Update zos_job_submit to handle some edge cases and return the job log nearly always Signed-off-by: ddimatos <dimatos@gmail.com> * Linting correction Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragement for pr 683 Signed-off-by: ddimatos <dimatos@gmail.com> * Clean up comments in code Signed-off-by: ddimatos <dimatos@gmail.com> * Update restructured text for modules and changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Add support to check for security exception to job submit module Signed-off-by: ddimatos <dimatos@gmail.com> * Add additonal logic for jobs that use typerun=scan Signed-off-by: ddimatos <dimatos@gmail.com> * Update chnagelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Correct run on sentence Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Added additional `D SYMBOLS` example to zos_operator documentation (#729) * Update zos_operator with an additional example Signed-off-by: ddimatos <dimatos@gmail.com> * Added change fragment Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Ported bugfixes for #664 and #678 * Updated file normalization for sequential datasets * Fixed encoding when a remote source is used * Added encoding normalization for record length (#664) This is a port and tweak from the normalization found in the L2 support branch. Now the normalization applies for both local and remote source files. There is still work left to validate how this behaves when copying to partitioned datasets, and see if the normalization needs to handle directories as well. * Refactored normalization for USS files * Updated line endings normalization in PDSEs * Updated create_temp_with_lf_endings * Added tests for line endings normalization * Added changelog fragment * Add copy lib member test case * Corrected changelog fragment * Add conflict resolution * Update 641-copy-loadlib-member.yml * zos_copy forward port a test case for symbols on a volume (#739) * Add a test case for symbols using an existing volume on our test system Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update 739-zos_copy-volume-symbol-test.yml Update description to use symbols overs special chars. * Update test case with added comments Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * bring in updates to zos_gather_facts RETURN docstring (#744) Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * Solve problem 619 about set mode on dest file (#746) * Solve problem 619 about set mode on dest file * Add fragment of 746 PR --------- Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> * Merge release tasks for v1.5.0 into release branch (#752) * Updated metadata * Updated copyright information * Updated copyright in README * Generated changelog * Added previous versions to changelog * Changed a minor_change fragment to bugfix * Updated release notes * Update dependency finder to allow for all tests minus skip tests Signed-off-by: ddimatos <dimatos@gmail.com> * Update release notes and changelog with full date formats Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: ddimatos <dimatos@gmail.com> * remove changelog fragments no longer needed (#754) Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Co-authored-by: Ivan Moreno <iamorenosoto@gmail.com> Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> Co-authored-by: Rich Parker <richp405@gmail.com> Co-authored-by: ketankelkar <ktnklkr@gmail.com> Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> * v1.6.0-beta.1 release (#759) * Added tagging of converted files * Updated tests for zos_encode * Added restoration of PDS/PDSE members when module fails zos_copy can now track which members got overwritten or newly added to a partitioned data set, and restore them accordingly. This commit includes a refactorization of the copy_to_pdse method to simplify the restoration work. * Fixed cleanup of dest when module fails * Removed exception used for debugging * Added pytest markers * solved pep8 issue * Added more information to error when restoration fails * Update make file with new features to manage the nodes (#563) * Update make file with new features to manage the nodes Signed-off-by: ddimatos <dimatos@gmail.com> * Correct typo Signed-off-by: ddimatos <dimatos@gmail.com> * updated makefile to allow for no password in the clear and added a new host Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Fix a bug that failed to copy the cert to the managed node (#577) Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Added apropiate error message in zos_lineinfile when src is not found * Better error message * Added fragment for changelog * Update git issue slection field (#593) * Update git issue slection field * Added some additional future versions * Added rule to ignore python 2.7 compile not supporting f strings * Corrected rule in ignore file * Update 584-zos_lineinfile-error-message.yml * Added missing fragments for issues 309 and 408 * update make file and related artifacts with recent zVM changes (#598) Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Expand what is ingored for venv to all venv's (#613) Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Ansible-core versions with updated ignore file pass (#615) * Validated that ansible-core versions with updated ignore file pass locally Signed-off-by: ddimatos <dimatos@gmail.com> * remove redundant entry on last line Signed-off-by: ddimatos <dimatos@gmail.com> * remove redundant entry on last line Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> * Update make file to read exported var VENV else default to 'venv' (#614) * Update make file to read exported var VENV else default to 'venv' Signed-off-by: ddimatos <dimatos@gmail.com> * Correct the default from defaultValue to 'venv' Signed-off-by: ddimatos <dimatos@gmail.com> * Bugfix/558/zos copy backup dev (#609) * Create emergency backup only if force is set to False and dest exists * Added Changelog fragment * Updated imports ansible.module_utils._text to ansible.module_utils.common.text.converters (#602) * Updated import in zos_fetch * Updated import in zos_job_submit * Updated import in module_utils/data_set * Updated import in module_utils/System * Updated import in zos_copy * Updated import in zos_fetch * Added changelog fragment * Update enhancement-518-text-converter-import.yml * Updates the makefile and related scripts for the latest mounts (#628) * Update makefile coments and use --ignore for pytest Signed-off-by: ddimatos <dimatos@gmail.com> * lexicographical order targets Signed-off-by: ddimatos <dimatos@gmail.com> * Update makefile and artifacts to support new mounts Signed-off-by: ddimatos <dimatos@gmail.com> * Removing encrypted versions of the scripts, no longer senstive content Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * update profile created for mount points Signed-off-by: ddimatos <dimatos@gmail.com> * Correct f-string usage to remain 2.7 compatible (#659) * Correct f-string usage to remain 2.7 compatible Signed-off-by: ddimatos <dimatos@gmail.com> * Update changelog fragments Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Updated shell scripts for development tooling Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Add issue template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Update collab template Signed-off-by: ddimatos <dimatos@gmail.com> * Update collab template Signed-off-by: ddimatos <dimatos@gmail.com> * Update collab template Signed-off-by: ddimatos <dimatos@gmail.com> * Template updates Signed-off-by: ddimatos <dimatos@gmail.com> * Template updates Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * add udpated checkbox to collab template Signed-off-by: ddimatos <dimatos@gmail.com> * changed zos_job_query to allow multi/embedded wildcard job_values. job.py/_get_job_status now uses fnmatch to test for wildcarded job_names. added test looking for a wildcard job name. added basic changelog text as 'enhancement'. * expanded wildcard support to job_id as well as job_name corrected error-handling issue in query routine playbook tests complete * cleaned up pep8 issues * Bug fix to correct job log returining and other edge cases (#683) * Update zos_job_submit to handle some edge cases and return the job log nearly always Signed-off-by: ddimatos <dimatos@gmail.com> * Linting correction Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragement for pr 683 Signed-off-by: ddimatos <dimatos@gmail.com> * Clean up comments in code Signed-off-by: ddimatos <dimatos@gmail.com> * Update restructured text for modules and changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Add support to check for security exception to job submit module Signed-off-by: ddimatos <dimatos@gmail.com> * Add additonal logic for jobs that use typerun=scan Signed-off-by: ddimatos <dimatos@gmail.com> * Update chnagelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Update zos_job_submit.py Grammar change and correction. --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Added uss_tag_encoding function * Fixing linter issues * removed extraneous comment on query, eliminated unused variable * responding to reviewer comments * Expanded query tests to run a hello jcl file to make sure we are searching for a job id that exists. * Expanded documentation in code to highlight asterisk availability. Added examples showing multi-asterisk search. * Corrected 2 documentation errors * Change to documentation text (indent on multi line string?) * Still trying to get documentation to pass * Looks like '---' was killing documentation block. * Update zos_blockinfile with complex examples (#727) * Update zos_blockinfile with complex examples Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragement Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * Added additional D SYMBOLS example to zos_operator documentation (#730) * Update zos_operator with an additional example Signed-off-by: ddimatos <dimatos@gmail.com> * Added changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> * zos_gather_facts - add sample output to RETURN docstring (#722) * zos_gather_facts - add sample output to RETURN docstring Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add changelog fragment Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * zos_gather_facts RETURN docstring - add sample sysplex_name and lpar_name values Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * Update generated module doc Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: ddimatos <dimatos@gmail.com> * 574find (#668) * change to allow find loop to continue when one particular data set element is not found This has been validated with a zos-check-find.yaml playbook * added functional test that includes non-existant pds entry, which should be skipped over * added changelog fragment. * zos_blockinfile , can quotes in content can be supported (#680) * Verify coomand of ZOAU support the doble quotes and get better output of message * Verify coomand of ZOAU support the doble quotes and get better output of message * Restore to the one tyme function solving TypeError * Test about cases with quotes supported * Solve comments * Comments in the changelog * Adjust test for working accord the text * A needed space * All in structured * Comments solved * Better order --------- Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> Co-authored-by: Demetri <dimatos@gmail.com> * zos_copy mode is applied to the destination directory, a deviation from the communtiy module behavior. (#723) * Verify coomand of ZOAU support the doble quotes and get better output of message * Verify coomand of ZOAU support the doble quotes and get better output of message * Restore to the one tyme function solving TypeError * Test about cases with quotes supported * Solve comments * Comments in the changelog * Adjust test for working accord the text * Solve the dest functional mode set for the applied to destination directory * Identation and spaces * To work well * To work well * To work well * To work well * Changelogs added * Solved the fragment test and separte the cases --------- Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> * corrected job test case that wanted to extract job id. * changed call to zos_job_query in the functional test. * zos_data_set - add force parameter to enable member delete via disp shr (#718) * add force parameter to enable member delete via disp shr Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update link to PR instead of issue Signed-off-by: Ketan Kelkar <ktnklkr@gma… * Change description for creating an enabler Signed-off-by: ddimatos <dimatos@gmail.com> * Fix readme support matrix link Signed-off-by: ddimatos <dimatos@gmail.com> * Update module docs Signed-off-by: ddimatos <dimatos@gmail.com> * update copyright Signed-off-by: ddimatos <dimatos@gmail.com> * Update release notes to reflect 1.9.1 Signed-off-by: ddimatos <dimatos@gmail.com> * Revert change from dev to remove the verification in the error Signed-off-by: ddimatos <dimatos@gmail.com> * remove unsued print Signed-off-by: ddimatos <dimatos@gmail.com> * Remove usued global var Signed-off-by: ddimatos <dimatos@gmail.com> * Merge errors corrected Signed-off-by: ddimatos <dimatos@gmail.com> * Removed the unused code snippet Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> Signed-off-by: Fernando Flores <fernandofloresdev@gmail.com> Co-authored-by: Ivan Moreno <iamorenosoto@gmail.com> Co-authored-by: Oscar Fernando Flores Garcia <fernandofloresdev@gmail.com> Co-authored-by: Rich Parker <richp405@gmail.com> Co-authored-by: ketankelkar <ktnklkr@gmail.com> Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@MacBook-Pro-de-Andre-2.local> Co-authored-by: trevor-glassey <Trevor.Glassey@ibm.com> Co-authored-by: Andre Marcel Gutierrez Benitez <andre@mbp-de-andre-2.gdl.mex.ibm.com> Co-authored-by: André Marcel Gutiérrez Benítez <amgutierrezbenitez@hotmail.com> Co-authored-by: IsaacVRey <isaac.vega.rey@gmail.com> --- .github/ISSUE_TEMPLATE/bug_issue.yml | 19 +- .../ISSUE_TEMPLATE/collaboration_issue.yml | 19 +- .github/ISSUE_TEMPLATE/doc_issue.yml | 13 +- .github/ISSUE_TEMPLATE/module_issue.yml | 2 +- CHANGELOG.rst | 417 +++++++++- README.md | 301 +++++-- changelogs/.plugin-cache.yaml | 2 +- changelogs/changelog.yaml | 167 ++++ docs/scripts/pre-template.sh | 2 +- docs/source/index.rst | 11 +- docs/source/modules/zos_apf.rst | 8 +- docs/source/modules/zos_apf.rst-e | 318 -------- docs/source/modules/zos_archive.rst | 2 +- docs/source/modules/zos_backup_restore.rst | 6 +- docs/source/modules/zos_copy.rst | 2 +- docs/source/modules/zos_data_set.rst | 161 +++- docs/source/modules/zos_encode.rst | 5 +- docs/source/modules/zos_lineinfile.rst | 5 +- docs/source/modules/zos_mount.rst | 15 +- docs/source/modules/zos_mvs_raw.rst | 117 +-- docs/source/modules/zos_operator.rst | 10 + .../modules/zos_operator_action_query.rst | 2 +- docs/source/modules/zos_unarchive.rst | 4 +- docs/source/modules/zos_volume_init.rst | 12 +- docs/source/plugins.rst | 3 +- docs/source/reference/community.rst | 17 + docs/source/reference/documentation.rst | 18 + docs/source/release_notes.rst | 760 +++++------------- docs/source/requirements-single.rst | 11 +- .../source/resources/releases_maintenance.rst | 98 +++ docs/source/resources/resources.rst | 3 +- meta/ibm_zos_core_meta.yml | 2 +- plugins/action/zos_copy.py | 3 +- plugins/action/zos_fetch.py | 2 +- plugins/action/zos_job_submit.py | 2 +- plugins/action/zos_ping.py | 2 +- plugins/action/zos_unarchive.py | 2 +- plugins/filter/wtor.py | 6 +- plugins/module_utils/encode.py | 2 +- plugins/module_utils/ickdsf.py | 2 +- plugins/module_utils/import_handler.py | 2 +- plugins/module_utils/job.py | 6 +- plugins/module_utils/mvs_cmd.py | 2 +- plugins/module_utils/template.py | 2 +- plugins/module_utils/validation.py | 2 +- plugins/module_utils/zoau_version_checker.py | 2 +- plugins/module_utils/zos_mvs_raw.py | 2 +- plugins/modules/zos_apf.py | 8 +- plugins/modules/zos_archive.py | 4 +- plugins/modules/zos_backup_restore.py | 6 +- plugins/modules/zos_blockinfile.py | 4 +- plugins/modules/zos_copy.py | 4 +- plugins/modules/zos_data_set.py | 14 +- plugins/modules/zos_encode.py | 7 +- plugins/modules/zos_fetch.py | 2 +- plugins/modules/zos_gather_facts.py | 2 +- plugins/modules/zos_job_output.py | 2 +- plugins/modules/zos_job_query.py | 2 +- plugins/modules/zos_lineinfile.py | 5 +- plugins/modules/zos_mount.py | 17 +- plugins/modules/zos_mvs_raw.py | 107 ++- plugins/modules/zos_operator_action_query.py | 4 +- plugins/modules/zos_ping.rexx | 2 +- plugins/modules/zos_script.py | 2 +- plugins/modules/zos_unarchive.py | 6 +- plugins/modules/zos_volume_init.py | 14 +- scripts/mounts.env | 6 +- scripts/requirements-2.12.env | 4 +- scripts/requirements-2.13.env | 2 +- scripts/requirements-2.14.env | 2 +- scripts/requirements-2.15.env | 4 +- scripts/requirements-2.16.env | 2 +- scripts/venv.sh | 2 +- tests/functional/modules/test_zos_apf_func.py | 4 +- .../modules/test_zos_archive_func.py | 2 +- .../functional/modules/test_zos_copy_func.py | 3 +- .../modules/test_zos_data_set_func.py | 3 +- .../modules/test_zos_encode_func.py | 2 +- .../functional/modules/test_zos_fetch_func.py | 2 +- .../functional/modules/test_zos_find_func.py | 2 +- .../modules/test_zos_gather_facts_func.py | 2 +- .../modules/test_zos_job_output_func.py | 2 +- .../modules/test_zos_job_submit_func.py | 3 - .../modules/test_zos_lineinfile_func.py | 8 +- .../functional/modules/test_zos_mount_func.py | 2 +- .../modules/test_zos_mvs_raw_func.py | 2 +- .../test_zos_operator_action_query_func.py | 2 +- .../modules/test_zos_operator_func.py | 2 +- .../modules/test_zos_tso_command_func.py | 1 - .../modules/test_zos_unarchive_func.py | 4 +- tests/helpers/volumes.py | 2 +- 91 files changed, 1540 insertions(+), 1313 deletions(-) delete mode 100644 docs/source/modules/zos_apf.rst-e create mode 100644 docs/source/reference/community.rst create mode 100644 docs/source/reference/documentation.rst create mode 100644 docs/source/resources/releases_maintenance.rst diff --git a/.github/ISSUE_TEMPLATE/bug_issue.yml b/.github/ISSUE_TEMPLATE/bug_issue.yml index 9395c85b1..dba9db3b8 100644 --- a/.github/ISSUE_TEMPLATE/bug_issue.yml +++ b/.github/ISSUE_TEMPLATE/bug_issue.yml @@ -39,19 +39,12 @@ body: - v1.11.0-beta.1 - v1.10.0 - v1.10.0-beta.1 - - v1.9.0 - - v1.9.0-beta.1 - - v1.8.0 (default) + - v1.9.0 (default) + - v1.8.0 - v1.7.0 - v1.6.0 - v1.5.0 - - v1.4.1 - - v1.3.6 - - v1.3.5 - - v1.3.3 - - v1.3.1 - - v1.3.0 - default: 8 + default: 6 validations: required: true - type: dropdown @@ -70,10 +63,6 @@ body: - v1.2.4 - v1.2.3 - v1.2.2 - - v1.2.1 - - v1.2.0 - - v1.1.1 - - v1.0.3 default: 5 validations: required: true @@ -88,8 +77,6 @@ body: - v3.12.x - v3.11.x (default) - v3.10.x - - v3.9.x - - v3.8.x default: 2 validations: required: true diff --git a/.github/ISSUE_TEMPLATE/collaboration_issue.yml b/.github/ISSUE_TEMPLATE/collaboration_issue.yml index fb8ff3a00..0dbf462bd 100644 --- a/.github/ISSUE_TEMPLATE/collaboration_issue.yml +++ b/.github/ISSUE_TEMPLATE/collaboration_issue.yml @@ -48,19 +48,12 @@ body: - v1.11.0-beta.1 - v1.10.0 - v1.10.0-beta.1 - - v1.9.0 - - v1.9.0-beta.1 - - v1.8.0 (default) + - v1.9.0 (default) + - v1.8.0 - v1.7.0 - v1.6.0 - v1.5.0 - - v1.4.1 - - v1.3.6 - - v1.3.5 - - v1.3.3 - - v1.3.1 - - v1.3.0 - default: 8 + default: 6 validations: required: false - type: dropdown @@ -79,10 +72,6 @@ body: - v1.2.4 - v1.2.3 - v1.2.2 - - v1.2.1 - - v1.2.0 - - v1.1.1 - - v1.0.3 default: 5 validations: required: false @@ -97,8 +86,6 @@ body: - v3.12.x - v3.11.x (default) - v3.10.x - - v3.9.x - - v3.8.x default: 2 validations: required: false diff --git a/.github/ISSUE_TEMPLATE/doc_issue.yml b/.github/ISSUE_TEMPLATE/doc_issue.yml index dcc6dfda2..b7de4a490 100644 --- a/.github/ISSUE_TEMPLATE/doc_issue.yml +++ b/.github/ISSUE_TEMPLATE/doc_issue.yml @@ -40,19 +40,12 @@ body: - v1.11.0-beta.1 - v1.10.0 - v1.10.0-beta.1 - - v1.9.0 - - v1.9.0-beta.1 - - v1.8.0 (default) + - v1.9.0 (default) + - v1.8.0 - v1.7.0 - v1.6.0 - v1.5.0 - - v1.4.1 - - v1.3.6 - - v1.3.5 - - v1.3.3 - - v1.3.1 - - v1.3.0 - default: 8 + default: 6 validations: required: false - type: dropdown diff --git a/.github/ISSUE_TEMPLATE/module_issue.yml b/.github/ISSUE_TEMPLATE/module_issue.yml index 7723b85f1..773ebbc90 100644 --- a/.github/ISSUE_TEMPLATE/module_issue.yml +++ b/.github/ISSUE_TEMPLATE/module_issue.yml @@ -32,6 +32,6 @@ body: - As a < type of user >, I want < some goal > so that < some reason >. Examples: - As a z/OS System Admin, I can grow zFS aggregates with Ansible so that my data sets don't fill up. - - As a Junior developer, I want to be able to zip and unzip arives using Ansible, so that I don't have to perform operations elsewhere. + - As a Junior developer, I want to be able to zip and unzip archives using Ansible, so that I don't have to perform operations elsewhere. validations: required: false diff --git a/CHANGELOG.rst b/CHANGELOG.rst index d2f69d546..74556dc14 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -1,9 +1,412 @@ -============================== -ibm.ibm_zos_core Release Notes -============================== +================================ +ibm.ibm\_zos\_core Release Notes +================================ .. contents:: Topics +v1.10.0-beta.1 +============== + +Release Summary +--------------- + +Release Date: '2024-05-08' +This changelog describes all changes made to the modules and plugins included +in this collection. The release date is the date the changelog is created. +For additional details such as required dependencies and availability review +the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__ + +Minor Changes +------------- + +- zos_apf - Enhanced error messages when an exception is caught. (https://github.com/ansible-collections/ibm_zos_core/pull/1204). +- zos_backup_restore - Add tmp_hlq option to the user interface to override the default high level qualifier (HLQ) for temporary and backup. (https://github.com/ansible-collections/ibm_zos_core/pull/1265). +- zos_copy - Documented `group` and `owner` options. (https://github.com/ansible-collections/ibm_zos_core/pull/1307). +- zos_copy - Improve zos_copy performance when copying multiple members from one PDS/E to another. (https://github.com/ansible-collections/ibm_zos_core/pull/1183). + +Breaking Changes / Porting Guide +-------------------------------- + +- zos_archive - option ``terse_pack`` no longer accepts uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). +- zos_archive - suboption ``record_format`` of ``dest_data_set`` no longer accepts uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). +- zos_archive - suboption ``space_type`` of ``dest_data_set`` no longer accepts uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). +- zos_archive - suboption ``type`` of ``dest_data_set`` no longer accepts uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). +- zos_backup_restore - option ``space_type`` no longer accepts uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). +- zos_copy - suboption ``record_format`` of ``dest_data_set`` no longer accepts uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). +- zos_copy - suboption ``space_type`` of ``dest_data_set`` no longer accepts uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). +- zos_copy - suboption ``type`` of ``dest_data_set`` no longer accepts uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). +- zos_data_set - option ``record_format`` no longer accepts uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). +- zos_data_set - option ``space_type`` no longer accepts uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). +- zos_data_set - option ``type`` no longer accepts uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). +- zos_data_set - options inside ``batch`` no longer accept uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). +- zos_job_submit - option ``location`` no longer accepts uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). +- zos_mount - option ``automove`` no longer accepts uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). +- zos_mount - option ``fs_type`` no longer accepts uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). +- zos_mount - option ``mount_opts`` no longer accepts uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). +- zos_mount - option ``tag_untagged`` no longer accepts uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). +- zos_mount - option ``unmount_opts`` no longer accepts uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). +- zos_mvs_raw - options inside ``dd_concat`` no longer accept uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). +- zos_mvs_raw - suboption ``record_format`` of ``dd_data_set`` no longer accepts uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). +- zos_mvs_raw - suboption ``record_format`` of ``dd_unix`` no longer accepts uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). +- zos_mvs_raw - suboption ``space_type`` of ``dd_data_set`` no longer accepts uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). +- zos_mvs_raw - suboption ``type`` of ``dd_data_set`` no longer accepts uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). +- zos_mvs_raw - suboptions ``disposition_normal`` and ``disposition_abnormal`` of ``dd_data_set`` no longer accept ``catlg`` and ``uncatlg`` as choices. This also applies when defining a ``dd_data_set`` inside ``dd_concat``. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). +- zos_unarchive - suboption ``record_format`` of ``dest_data_set`` no longer accepts uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). +- zos_unarchive - suboption ``space_type`` of ``dest_data_set`` no longer accepts uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). +- zos_unarchive - suboption ``type`` of ``dest_data_set`` no longer accepts uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + +Bugfixes +-------- + +- module_utils/job.py - job output containing non-printable characters would crash modules. Fix now handles the error gracefully and returns a message to the user inside `content` of the `ddname` that failed. (https://github.com/ansible-collections/ibm_zos_core/pull/1261). +- zos_apf - List option only returned one data set. Fix now returns the list of retrieved data sets. (https://github.com/ansible-collections/ibm_zos_core/pull/1204). +- zos_blockinfile - Using double quotation marks inside a block resulted in a false positive result with ZOAU 1.3. Fix now handles this special case to avoid false negatives. (https://github.com/ansible-collections/ibm_zos_core/pull/1340). +- zos_find - Filter size failed if a PDS/E matched the pattern. Fix now gets the correct size for PDS/Es. (https://github.com/ansible-collections/ibm_zos_core/pull/1443). +- zos_job_submit - Was ignoring the default value for location=DATA_SET, now when location is not specified it will default to DATA_SET. (https://github.com/ansible-collections/ibm_zos_core/pull/1220). +- zos_job_submit - when the argument max_rc was different than 0 the changed response returned as false. Fix now return a changed response as true when the rc is not 0 and max_rc is above or equal to the value of the job. (https://github.com/ansible-collections/ibm_zos_core/pull/1345). +- zos_mvs_raw - The module ignored the value of `tmp_hlq` option when creating temporary data sets. Fix now honors the value if provided and uses it as High Level Qualifier for temporary data sets created during the module execution. (https://github.com/ansible-collections/ibm_zos_core/pull/1320). + +v1.9.0 +====== + +Release Summary +--------------- + +Release Date: '2024-03-11' +This changelog describes all changes made to the modules and plugins included +in this collection. The release date is the date the changelog is created. +For additional details such as required dependencies and availability review +the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__ + +Major Changes +------------- + +- zos_job_submit - when job statuses were read, were limited to AC (active), CC (completed normally), ABEND (ended abnormally) and ? (error unknown), SEC (security error), JCLERROR (job had a jcl error). Now the additional statuses are supported, CANCELLED (job was cancelled), CAB (converter abend), CNV (converter error), SYS (system failure) and FLU (job was flushed). (https://github.com/ansible-collections/ibm_zos_core/pull/1283). + +Minor Changes +------------- + +- zos_apf - Improves exception handling if there is a failure parsing the command response when operation selected is list. (https://github.com/ansible-collections/ibm_zos_core/pull/1036). +- zos_copy - Improve zos_copy performance when copying multiple members from one PDS/E to another. (https://github.com/ansible-collections/ibm_zos_core/pull/1176). +- zos_job_output - When passing a job ID and owner the module take as mutually exclusive. Change now allows the use of a job ID and owner at the same time. (https://github.com/ansible-collections/ibm_zos_core/pull/1078). +- zos_job_submit - Improve error messages in zos_job_submit to be clearer. (https://github.com/ansible-collections/ibm_zos_core/pull/1074). +- zos_job_submit - The module had undocumented parameter and uses as temporary file when the location of the file is LOCAL. Change now uses the same name as the src for the temporary file removing the addition of tmp_file to the arguments. (https://github.com/ansible-collections/ibm_zos_core/pull/1091). +- zos_job_submit - The module handling ZOAU import errors obscured the original traceback when an import error ocurred. Fix now passes correctly the context to the user. (https://github.com/ansible-collections/ibm_zos_core/pull/1091). +- zos_mvs_raw - when using the dd_input content option for instream-data, if the content was not properly indented according to the program which is generally a blank in columns 1 & 2, those columns would be truncated. Now, when setting instream-data, the module will ensure that all lines contain a blank in columns 1 and 2 and add blanks when not present while retaining a maximum length of 80 columns for any line. This is true for all content types; string, list of strings and when using a YAML block indicator. (https://github.com/ansible-collections/ibm_zos_core/pull/1057). - zos_mvs_raw - no examples were included with the module that demonstrated using a YAML block indicator, this now includes examples using a YAML block indicator. +- zos_tso_command - add example for executing explicitly a REXX script from a data set. (https://github.com/ansible-collections/ibm_zos_core/pull/1065). + +Bugfixes +-------- + +- module_utils/job.py - job output containing non-printable characters would crash modules. Fix now handles the error gracefully and returns a message to the user inside `content` of the `ddname` that failed. (https://github.com/ansible-collections/ibm_zos_core/pull/1288). +- zos_apf - When operation=list was selected and more than one data set entry was fetched, the module only returned one data set. Fix now returns the complete list. (https://github.com/ansible-collections/ibm_zos_core/pull/1236). +- zos_copy - When copying an executable data set with aliases and destination did not exist, destination data set was created with wrong attributes. Fix now creates destination data set with the same attributes as the source. (https://github.com/ansible-collections/ibm_zos_core/pull/1066). +- zos_copy - When performing a copy operation to an existing file, the copied file resulted in having corrupted contents. Fix now implements a workaround to not use the specific copy routine that corrupts the file contents. (https://github.com/ansible-collections/ibm_zos_core/pull/1064). +- zos_data_set - Fixes a small parsing bug in module_utils/data_set function which extracts volume serial(s) from a LISTCAT command output. Previously a leading '-' was left behind for volser strings under 6 chars. (https://github.com/ansible-collections/ibm_zos_core/pull/1247). +- zos_job_output - When passing a job ID or name less than 8 characters long, the module sent the full stack trace as the module's message. Change now allows the use of a shorter job ID or name, as well as wildcards. (https://github.com/ansible-collections/ibm_zos_core/pull/1078). +- zos_job_query - The module handling ZOAU import errors obscured the original traceback when an import error ocurred. Fix now passes correctly the context to the user. (https://github.com/ansible-collections/ibm_zos_core/pull/1042). +- zos_job_query - When passing a job ID or name less than 8 characters long, the module sent the full stack trace as the module's message. Change now allows the use of a shorter job ID or name, as well as wildcards. (https://github.com/ansible-collections/ibm_zos_core/pull/1078). +- zos_job_submit - Was ignoring the default value for location=DATA_SET, now when location is not specified it will default to DATA_SET. (https://github.com/ansible-collections/ibm_zos_core/pull/1120). +- zos_job_submit - when a JCL error occurred, the ret_code[msg_code] contained JCLERROR followed by an integer where the integer appeared to be a reason code when actually it is a multi line marker used to coordinate errors spanning more than one line. Now when a JCLERROR occurs, only the JCLERROR is returned for property ret_code[msg_code]. (https://github.com/ansible-collections/ibm_zos_core/pull/1283). +- zos_job_submit - when a response was returned, it contained an undocumented property; ret_code[msg_text]. Now when a response is returned, it correctly returns property ret_code[msg_txt]. (https://github.com/ansible-collections/ibm_zos_core/pull/1283). +- zos_job_submit - when typrun=copy was used in JCL it would fail the module with an improper message and error condition. While this case continues to be considered a failure, the message has been corrected and it fails under the condition that not enough time has been added to the modules execution. (https://github.com/ansible-collections/ibm_zos_core/pull/1283). +- zos_job_submit - when typrun=hold was used in JCL it would fail the module with an improper message and error condition. While this case continues to be considered a failure, the message has been corrected and it fails under the condition that not enough time has been added to the modules execution. (https://github.com/ansible-collections/ibm_zos_core/pull/1283). +- zos_job_submit - when typrun=jchhold was used in JCL it would fail the module with an improper message and error condition. While this case continues to be considered a failure, the message has been corrected and it fails under the condition that not enough time has been added to the modules execution. (https://github.com/ansible-collections/ibm_zos_core/pull/1283). +- zos_job_submit - when typrun=scan was used in JCL, it would fail the module. Now typrun=scan no longer fails the module and an appropriate message is returned with appropriate return code values. (https://github.com/ansible-collections/ibm_zos_core/pull/1283). +- zos_job_submit - when wait_time_s was used, the duration would run approximately 5 second longer than reported in the duration. Now the when duration is returned, it is the actual accounting from when the job is submitted to when the module reads the job output. (https://github.com/ansible-collections/ibm_zos_core/pull/1283). +- zos_operator - The module handling ZOAU import errors obscured the original traceback when an import error ocurred. Fix now passes correctly the context to the user. (https://github.com/ansible-collections/ibm_zos_core/pull/1042). +- zos_unarchive - Using a local file with a USS format option failed when sending to remote because dest_data_set option had an empty dictionary. Fix now leaves dest_data_set as None when using a USS format option. (https://github.com/ansible-collections/ibm_zos_core/pull/1045). +- zos_unarchive - When unarchiving USS files, the module left temporary files on the remote. Change now removes temporary files. (https://github.com/ansible-collections/ibm_zos_core/pull/1073). + +v1.8.0 +====== + +Release Summary +--------------- + +Release Date: '2023-12-08' +This changelog describes all changes made to the modules and plugins included +in this collection. The release date is the date the changelog is created. +For additional details such as required dependencies and availability review +the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__ + +Minor Changes +------------- + +- module_utils/template - Add validation into path joins to detect unauthorized path traversals. (https://github.com/ansible-collections/ibm_zos_core/pull/1029) +- zos_archive - Add validation into path joins to detect unauthorized path traversals. (https://github.com/ansible-collections/ibm_zos_core/pull/1029) +- zos_archive - Enhanced test cases to use test lines the same length of the record length. (https://github.com/ansible-collections/ibm_zos_core/pull/965) +- zos_copy - Add validation into path joins to detect unauthorized path traversals. (https://github.com/ansible-collections/ibm_zos_core/pull/962) +- zos_copy - Add new option `force_lock` that can copy into data sets that are already in use by other processes (DISP=SHR). User needs to use with caution because this is subject to race conditions and can lead to data loss. (https://github.com/ansible-collections/ibm_zos_core/pull/980). +- zos_copy - includes a new option `executable` that enables copying of executables such as load modules or program objects to both USS and partitioned data sets. When the `dest` option contains a non-existent data set, `zos_copy` will create a data set with the appropriate attributes for an executable. (https://github.com/ansible-collections/ibm_zos_core/pull/804) +- zos_copy - introduces a new option 'aliases' to enable preservation of member aliases when copying data to partitioned data sets (PDS) destinations from USS or other PDS sources. Copying aliases of text based members to/from USS is not supported. (https://github.com/ansible-collections/ibm_zos_core/pull/1014) +- zos_fetch - Add validation into path joins to detect unauthorized path traversals. (https://github.com/ansible-collections/ibm_zos_core/pull/962) +- zos_job_submit - Change action plugin call from copy to zos_copy. (https://github.com/ansible-collections/ibm_zos_core/pull/951) +- zos_job_submit - Previous code did not return output, but still requested job data from the target system. This changes to honor return_output=false by not querying the job dd segments at all. (https://github.com/ansible-collections/ibm_zos_core/pull/1063). +- zos_operator - Changed system to call 'wait=true' parameter to zoau call. Requires zoau 1.2.5 or later. (https://github.com/ansible-collections/ibm_zos_core/pull/976) +- zos_operator_action_query - Add a max delay of 5 seconds on each part of the operator_action_query. Requires zoau 1.2.5 or later. (https://github.com/ansible-collections/ibm_zos_core/pull/976) +- zos_script - Add support for remote_tmp from the Ansible configuration to setup where temporary files will be created, replacing the module option tmp_path. (https://github.com/ansible-collections/ibm_zos_core/pull/1068). +- zos_tso_command - Add example for executing explicitly a REXX script from a data set. (https://github.com/ansible-collections/ibm_zos_core/pull/1072). +- zos_unarchive - Add validation into path joins to detect unauthorized path traversals. (https://github.com/ansible-collections/ibm_zos_core/pull/1029) +- zos_unarchive - Enhanced test cases to use test lines the same length of the record length. (https://github.com/ansible-collections/ibm_zos_core/pull/965) + +Deprecated Features +------------------- + +- zos_blockinfile debug - is deprecated in favor of 'as_json' (https://github.com/ansible-collections/ibm_zos_core/pull/904). + +Bugfixes +-------- + +- zos_copy - Update option limit to include LIBRARY as dest_dataset/suboption value. Documentation updated to reflect this change. (https://github.com/ansible-collections/ibm_zos_core/pull/968). +- zos_copy - When copying an executable data set from controller to managed node, copy operation failed with an encoding error. Fix now avoids encoding when executable option is selected. (https://github.com/ansible-collections/ibm_zos_core/pull/1079). +- zos_copy - When copying an executable data set with aliases and destination did not exist, destination data set was created with wrong attributes. Fix now creates destination data set with the same attributes as the source. (https://github.com/ansible-collections/ibm_zos_core/pull/1067). +- zos_copy - When performing a copy operation to an existing file, the copied file resulted in having corrupted contents. Fix now implements a workaround to not use the specific copy routine that corrupts the file contents. (https://github.com/ansible-collections/ibm_zos_core/pull/1069). +- zos_job_submit - Temporary files were created in tmp directory. Fix now ensures the deletion of files every time the module run. (https://github.com/ansible-collections/ibm_zos_core/pull/951) +- zos_job_submit - The last line of the jcl was missing in the input. Fix now ensures the presence of the full input in job_submit. (https://github.com/ansible-collections/ibm_zos_core/pull/952) +- zos_lineinfile - A duplicate entry was made even if line was already present in the target file. Fix now prevents a duplicate entry if the line already exists in the target file. (https://github.com/ansible-collections/ibm_zos_core/pull/916) +- zos_operator - The last line of the operator was missing in the response of the module. The fix now ensures the presence of the full output of the operator. https://github.com/ansible-collections/ibm_zos_core/pull/918) +- zos_operator - The module was ignoring the wait time argument. The module now passes the wait time argument to ZOAU. (https://github.com/ansible-collections/ibm_zos_core/pull/1063). +- zos_operator_action_query - The module was ignoring the wait time argument. The module now passes the wait time argument to ZOAU. (https://github.com/ansible-collections/ibm_zos_core/pull/1063). +- zos_unarchive - When zos_unarchive fails during unpack either with xmit or terse it does not clean the temporary data sets created. Fix now removes the temporary data sets. (https://github.com/ansible-collections/ibm_zos_core/pull/1054). + +Known Issues +------------ + +- Several modules have reported UTF8 decoding errors when interacting with results that contain non-printable UTF8 characters in the response. This occurs when a module receives content that does not correspond to a UTF-8 value. These include modules `zos_job_submit`, `zos_job_output`, `zos_operator_action_query` but are not limited to this list. This will be addressed in `ibm_zos_core` version 1.10.0-beta.1. Each case is unique, some options to work around the error are below. - Specify that the ASA assembler option be enabled to instruct the assembler to use ANSI control characters instead of machine code control characters. - Add `ignore_errors:true` to the playbook task so the task error will not fail the playbook. - If the error is resulting from a batch job, add `ignore_errors:true` to the task and capture the output into a variable and extract the job ID with a regular expression and then use `zos_job_output` to display the DD without the non-printable character such as the DD `JESMSGLG`. (https://github.com/ansible-collections/ibm_zos_core/issues/677) (https://github.com/ansible-collections/ibm_zos_core/issues/776) (https://github.com/ansible-collections/ibm_zos_core/issues/972) +- With later versions of `ansible-core` used with `ibm_zos_core` collection a warning has started to appear "Module "ansible.builtin.command" returned non UTF-8 data in the JSON response" that is currently being reviewed. There are no recommendations at this point. (https://github.com/ansible-collections/ibm_zos_core/issues/983) + +New Modules +----------- + +- ibm.ibm_zos_core.zos_script - Run scripts in z/OS + +v1.7.0 +====== + +Release Summary +--------------- + +Release Date: '2023-10-09' +This changelog describes all changes made to the modules and plugins included +in this collection. The release date is the date the changelog is created. +For additional details such as required dependencies and availability review +the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__ + +Major Changes +------------- + +- zos_copy - Previously, backups were taken when force was set to false; whether or not a user specified this operation which caused allocation issues with space and permissions. This removes the automatic backup performed and reverts to the original logic in that backups must be initiated by the user. (https://github.com/ansible-collections/ibm_zos_core/pull/896) + +Minor Changes +------------- + +- Add support for Jinja2 templates in zos_copy and zos_job_submit when using local source files. (https://github.com/ansible-collections/ibm_zos_core/pull/667) +- zos_archive - If destination data set space is not provided then the module computes it based on the src list and/or expanded src list based on pattern provided. (https://github.com/ansible-collections/ibm_zos_core/pull/930). +- zos_archive - When xmit faces a space error in xmit operation because of dest or log data set are filled raises an appropriate error hint. (https://github.com/ansible-collections/ibm_zos_core/pull/930). +- zos_copy - Adds block_size, record_format, record_length, space_primary, space_secondary, space_type and type in the return output when the destination data set does not exist and has to be created by the module. (https://github.com/ansible-collections/ibm_zos_core/pull/773) +- zos_data_set - record format = 'F' has been added to support 'fixed' block records. This allows records that can use the entire block. (https://github.com/ansible-collections/ibm_zos_core/pull/821) +- zos_job_output - zoau added 'program_name' to their field output starting with v1.2.4. This enhancement checks for that version and passes the extra column through. (https://github.com/ansible-collections/ibm_zos_core/pull/841) +- zos_job_query - Adds new fields job_class, svc_class, priority, asid, creation_datetime, and queue_position to the return output when querying or submitting a job. Available when using ZOAU v1.2.3 or greater. (https://github.com/ansible-collections/ibm_zos_core/pull/778) +- zos_job_query - unnecessary calls were made to find a jobs DDs that incurred unnecessary overhead. This change removes those resulting in a performance increase in job related queries. (https://github.com/ansible-collections/ibm_zos_core/pull/911) +- zos_job_query - zoau added 'program_name' to their field output starting with v1.2.4. This enhancement checks for that version and passes the extra column through. (https://github.com/ansible-collections/ibm_zos_core/pull/841) +- zos_job_submit - zoau added 'program_name' to their field output starting with v1.2.4. This enhancement checks for that version and passes the extra column through. (https://github.com/ansible-collections/ibm_zos_core/pull/841) +- zos_unarchive - When copying to remote fails now a proper error message is displayed. (https://github.com/ansible-collections/ibm_zos_core/pull/930). +- zos_unarchive - When copying to remote if space_primary is not defined, then is defaulted to 5M. (https://github.com/ansible-collections/ibm_zos_core/pull/930). + +Bugfixes +-------- + +- module_utils - data_set.py - Reported a failure caused when cataloging a VSAM data set. Fix now corrects how VSAM data sets are cataloged. (https://github.com/ansible-collections/ibm_zos_core/pull/791). +- zos_archive - Module did not return the proper src state after archiving. Fix now displays the status of the src after the operation. (https://github.com/ansible-collections/ibm_zos_core/pull/930). +- zos_blockinfile - Test case generate a data set that was not correctly removed. Changes delete the correct data set not only member. (https://github.com/ansible-collections/ibm_zos_core/pull/840) +- zos_copy - Module returned the dynamic values created with the same dataset type and record format. Fix validate the correct dataset type and record format of target created. (https://github.com/ansible-collections/ibm_zos_core/pull/824) +- zos_copy - Reported a false positive such that the response would have `changed=true` when copying from a source (src) or destination (dest) data set that was in use (DISP=SHR). This change now displays an appropriate error message and returns `changed=false`. (https://github.com/ansible-collections/ibm_zos_core/pull/794). +- zos_copy - Reported a warning about the use of _play_context.verbosity.This change corrects the module action to prevent the warning message. (https://github.com/ansible-collections/ibm_zos_core/pull/806). +- zos_copy - Test case for recursive encoding directories reported a UTF-8 failure. This change ensures proper test coverage for nested directories and file permissions. (https://github.com/ansible-collections/ibm_zos_core/pull/806). +- zos_copy - Zos_copy did not encode inner content inside subdirectories once the source was copied to the destination. Fix now encodes all content in a source directory, including subdirectories. (https://github.com/ansible-collections/ibm_zos_core/pull/772). +- zos_copy - kept permissions on target directory when copy overwrote files. The fix now set permissions when mode is given. (https://github.com/ansible-collections/ibm_zos_core/pull/795) +- zos_data_set - Reported a failure caused when `present=absent` for a VSAM data set leaving behind cluster components. Fix introduces a new logical flow that will evaluate the volumes, compare it to the provided value and if necessary catalog and delete. (https://github.com/ansible-collections/ibm_zos_core/pull/791). +- zos_fetch - Reported a warning about the use of _play_context.verbosity.This change corrects the module action to prevent the warning message. (https://github.com/ansible-collections/ibm_zos_core/pull/806). +- zos_job_output - Error message did not specify the job not found. Fix now specifies the job_id or job_name being searched to ensure more information is given back to the user. (https://github.com/ansible-collections/ibm_zos_core/pull/747) +- zos_operator - Reported a failure caused by unrelated error response. Fix now gives a transparent response of the operator to avoid false negatives. (https://github.com/ansible-collections/ibm_zos_core/pull/762). + +New Modules +----------- + +- ibm.ibm_zos_core.zos_archive - Archive files and data sets on z/OS. +- ibm.ibm_zos_core.zos_unarchive - Unarchive files and data sets in z/OS. + +v1.9.0 +====== + +Release Summary +--------------- + +Release Date: '2024-03-11' +This changelog describes all changes made to the modules and plugins included +in this collection. The release date is the date the changelog is created. +For additional details such as required dependencies and availability review +the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__ + +Major Changes +------------- + +- zos_job_submit - when job statuses were read, were limited to AC (active), CC (completed normally), ABEND (ended abnormally) and ? (error unknown), SEC (security error), JCLERROR (job had a jcl error). Now the additional statuses are supported, CANCELLED (job was cancelled), CAB (converter abend), CNV (converter error), SYS (system failure) and FLU (job was flushed). (https://github.com/ansible-collections/ibm_zos_core/pull/1283). + +Minor Changes +------------- + +- zos_apf - Improves exception handling if there is a failure parsing the command response when operation selected is list. (https://github.com/ansible-collections/ibm_zos_core/pull/1036). +- zos_copy - Improve zos_copy performance when copying multiple members from one PDS/E to another. (https://github.com/ansible-collections/ibm_zos_core/pull/1176). +- zos_job_output - When passing a job ID and owner the module take as mutually exclusive. Change now allows the use of a job ID and owner at the same time. (https://github.com/ansible-collections/ibm_zos_core/pull/1078). +- zos_job_submit - Improve error messages in zos_job_submit to be clearer. (https://github.com/ansible-collections/ibm_zos_core/pull/1074). +- zos_job_submit - The module had undocumented parameter and uses as temporary file when the location of the file is LOCAL. Change now uses the same name as the src for the temporary file removing the addition of tmp_file to the arguments. (https://github.com/ansible-collections/ibm_zos_core/pull/1091). +- zos_job_submit - The module handling ZOAU import errors obscured the original traceback when an import error ocurred. Fix now passes correctly the context to the user. (https://github.com/ansible-collections/ibm_zos_core/pull/1091). +- zos_mvs_raw - when using the dd_input content option for instream-data, if the content was not properly indented according to the program which is generally a blank in columns 1 & 2, those columns would be truncated. Now, when setting instream-data, the module will ensure that all lines contain a blank in columns 1 and 2 and add blanks when not present while retaining a maximum length of 80 columns for any line. This is true for all content types; string, list of strings and when using a YAML block indicator. (https://github.com/ansible-collections/ibm_zos_core/pull/1057). - zos_mvs_raw - no examples were included with the module that demonstrated using a YAML block indicator, this now includes examples using a YAML block indicator. +- zos_tso_command - add example for executing explicitly a REXX script from a data set. (https://github.com/ansible-collections/ibm_zos_core/pull/1065). + +Bugfixes +-------- + +- module_utils/job.py - job output containing non-printable characters would crash modules. Fix now handles the error gracefully and returns a message to the user inside `content` of the `ddname` that failed. (https://github.com/ansible-collections/ibm_zos_core/pull/1288). +- zos_apf - When operation=list was selected and more than one data set entry was fetched, the module only returned one data set. Fix now returns the complete list. (https://github.com/ansible-collections/ibm_zos_core/pull/1236). +- zos_copy - When copying an executable data set with aliases and destination did not exist, destination data set was created with wrong attributes. Fix now creates destination data set with the same attributes as the source. (https://github.com/ansible-collections/ibm_zos_core/pull/1066). +- zos_copy - When performing a copy operation to an existing file, the copied file resulted in having corrupted contents. Fix now implements a workaround to not use the specific copy routine that corrupts the file contents. (https://github.com/ansible-collections/ibm_zos_core/pull/1064). +- zos_data_set - Fixes a small parsing bug in module_utils/data_set function which extracts volume serial(s) from a LISTCAT command output. Previously a leading '-' was left behind for volser strings under 6 chars. (https://github.com/ansible-collections/ibm_zos_core/pull/1247). +- zos_job_output - When passing a job ID or name less than 8 characters long, the module sent the full stack trace as the module's message. Change now allows the use of a shorter job ID or name, as well as wildcards. (https://github.com/ansible-collections/ibm_zos_core/pull/1078). +- zos_job_query - The module handling ZOAU import errors obscured the original traceback when an import error ocurred. Fix now passes correctly the context to the user. (https://github.com/ansible-collections/ibm_zos_core/pull/1042). +- zos_job_query - When passing a job ID or name less than 8 characters long, the module sent the full stack trace as the module's message. Change now allows the use of a shorter job ID or name, as well as wildcards. (https://github.com/ansible-collections/ibm_zos_core/pull/1078). +- zos_job_submit - Was ignoring the default value for location=DATA_SET, now when location is not specified it will default to DATA_SET. (https://github.com/ansible-collections/ibm_zos_core/pull/1120). +- zos_job_submit - when a JCL error occurred, the ret_code[msg_code] contained JCLERROR followed by an integer where the integer appeared to be a reason code when actually it is a multi line marker used to coordinate errors spanning more than one line. Now when a JCLERROR occurs, only the JCLERROR is returned for property ret_code[msg_code]. (https://github.com/ansible-collections/ibm_zos_core/pull/1283). +- zos_job_submit - when a response was returned, it contained an undocumented property; ret_code[msg_text]. Now when a response is returned, it correctly returns property ret_code[msg_txt]. (https://github.com/ansible-collections/ibm_zos_core/pull/1283). +- zos_job_submit - when typrun=copy was used in JCL it would fail the module with an improper message and error condition. While this case continues to be considered a failure, the message has been corrected and it fails under the condition that not enough time has been added to the modules execution. (https://github.com/ansible-collections/ibm_zos_core/pull/1283). +- zos_job_submit - when typrun=hold was used in JCL it would fail the module with an improper message and error condition. While this case continues to be considered a failure, the message has been corrected and it fails under the condition that not enough time has been added to the modules execution. (https://github.com/ansible-collections/ibm_zos_core/pull/1283). +- zos_job_submit - when typrun=jchhold was used in JCL it would fail the module with an improper message and error condition. While this case continues to be considered a failure, the message has been corrected and it fails under the condition that not enough time has been added to the modules execution. (https://github.com/ansible-collections/ibm_zos_core/pull/1283). +- zos_job_submit - when typrun=scan was used in JCL, it would fail the module. Now typrun=scan no longer fails the module and an appropriate message is returned with appropriate return code values. (https://github.com/ansible-collections/ibm_zos_core/pull/1283). +- zos_job_submit - when wait_time_s was used, the duration would run approximately 5 second longer than reported in the duration. Now the when duration is returned, it is the actual accounting from when the job is submitted to when the module reads the job output. (https://github.com/ansible-collections/ibm_zos_core/pull/1283). +- zos_operator - The module handling ZOAU import errors obscured the original traceback when an import error ocurred. Fix now passes correctly the context to the user. (https://github.com/ansible-collections/ibm_zos_core/pull/1042). +- zos_unarchive - Using a local file with a USS format option failed when sending to remote because dest_data_set option had an empty dictionary. Fix now leaves dest_data_set as None when using a USS format option. (https://github.com/ansible-collections/ibm_zos_core/pull/1045). +- zos_unarchive - When unarchiving USS files, the module left temporary files on the remote. Change now removes temporary files. (https://github.com/ansible-collections/ibm_zos_core/pull/1073). + +v1.8.0 +====== + +Release Summary +--------------- + +Release Date: '2023-12-08' +This changelog describes all changes made to the modules and plugins included +in this collection. The release date is the date the changelog is created. +For additional details such as required dependencies and availability review +the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__ + +Minor Changes +------------- + +- module_utils/template - Add validation into path joins to detect unauthorized path traversals. (https://github.com/ansible-collections/ibm_zos_core/pull/1029) +- zos_archive - Add validation into path joins to detect unauthorized path traversals. (https://github.com/ansible-collections/ibm_zos_core/pull/1029) +- zos_archive - Enhanced test cases to use test lines the same length of the record length. (https://github.com/ansible-collections/ibm_zos_core/pull/965) +- zos_copy - Add validation into path joins to detect unauthorized path traversals. (https://github.com/ansible-collections/ibm_zos_core/pull/962) +- zos_copy - Add new option `force_lock` that can copy into data sets that are already in use by other processes (DISP=SHR). User needs to use with caution because this is subject to race conditions and can lead to data loss. (https://github.com/ansible-collections/ibm_zos_core/pull/980). +- zos_copy - includes a new option `executable` that enables copying of executables such as load modules or program objects to both USS and partitioned data sets. When the `dest` option contains a non-existent data set, `zos_copy` will create a data set with the appropriate attributes for an executable. (https://github.com/ansible-collections/ibm_zos_core/pull/804) +- zos_copy - introduces a new option 'aliases' to enable preservation of member aliases when copying data to partitioned data sets (PDS) destinations from USS or other PDS sources. Copying aliases of text based members to/from USS is not supported. (https://github.com/ansible-collections/ibm_zos_core/pull/1014) +- zos_fetch - Add validation into path joins to detect unauthorized path traversals. (https://github.com/ansible-collections/ibm_zos_core/pull/962) +- zos_job_submit - Change action plugin call from copy to zos_copy. (https://github.com/ansible-collections/ibm_zos_core/pull/951) +- zos_job_submit - Previous code did not return output, but still requested job data from the target system. This changes to honor return_output=false by not querying the job dd segments at all. (https://github.com/ansible-collections/ibm_zos_core/pull/1063). +- zos_operator - Changed system to call 'wait=true' parameter to zoau call. Requires zoau 1.2.5 or later. (https://github.com/ansible-collections/ibm_zos_core/pull/976) +- zos_operator_action_query - Add a max delay of 5 seconds on each part of the operator_action_query. Requires zoau 1.2.5 or later. (https://github.com/ansible-collections/ibm_zos_core/pull/976) +- zos_script - Add support for remote_tmp from the Ansible configuration to setup where temporary files will be created, replacing the module option tmp_path. (https://github.com/ansible-collections/ibm_zos_core/pull/1068). +- zos_tso_command - Add example for executing explicitly a REXX script from a data set. (https://github.com/ansible-collections/ibm_zos_core/pull/1072). +- zos_unarchive - Add validation into path joins to detect unauthorized path traversals. (https://github.com/ansible-collections/ibm_zos_core/pull/1029) +- zos_unarchive - Enhanced test cases to use test lines the same length of the record length. (https://github.com/ansible-collections/ibm_zos_core/pull/965) + +Deprecated Features +------------------- + +- zos_blockinfile debug - is deprecated in favor of 'as_json' (https://github.com/ansible-collections/ibm_zos_core/pull/904). + +Bugfixes +-------- + +- zos_copy - Update option limit to include LIBRARY as dest_dataset/suboption value. Documentation updated to reflect this change. (https://github.com/ansible-collections/ibm_zos_core/pull/968). +- zos_copy - When copying an executable data set from controller to managed node, copy operation failed with an encoding error. Fix now avoids encoding when executable option is selected. (https://github.com/ansible-collections/ibm_zos_core/pull/1079). +- zos_copy - When copying an executable data set with aliases and destination did not exist, destination data set was created with wrong attributes. Fix now creates destination data set with the same attributes as the source. (https://github.com/ansible-collections/ibm_zos_core/pull/1067). +- zos_copy - When performing a copy operation to an existing file, the copied file resulted in having corrupted contents. Fix now implements a workaround to not use the specific copy routine that corrupts the file contents. (https://github.com/ansible-collections/ibm_zos_core/pull/1069). +- zos_job_submit - Temporary files were created in tmp directory. Fix now ensures the deletion of files every time the module run. (https://github.com/ansible-collections/ibm_zos_core/pull/951) +- zos_job_submit - The last line of the jcl was missing in the input. Fix now ensures the presence of the full input in job_submit. (https://github.com/ansible-collections/ibm_zos_core/pull/952) +- zos_lineinfile - A duplicate entry was made even if line was already present in the target file. Fix now prevents a duplicate entry if the line already exists in the target file. (https://github.com/ansible-collections/ibm_zos_core/pull/916) +- zos_operator - The last line of the operator was missing in the response of the module. The fix now ensures the presence of the full output of the operator. https://github.com/ansible-collections/ibm_zos_core/pull/918) +- zos_operator - The module was ignoring the wait time argument. The module now passes the wait time argument to ZOAU. (https://github.com/ansible-collections/ibm_zos_core/pull/1063). +- zos_operator_action_query - The module was ignoring the wait time argument. The module now passes the wait time argument to ZOAU. (https://github.com/ansible-collections/ibm_zos_core/pull/1063). +- zos_unarchive - When zos_unarchive fails during unpack either with xmit or terse it does not clean the temporary data sets created. Fix now removes the temporary data sets. (https://github.com/ansible-collections/ibm_zos_core/pull/1054). + +Known Issues +------------ + +- Several modules have reported UTF8 decoding errors when interacting with results that contain non-printable UTF8 characters in the response. This occurs when a module receives content that does not correspond to a UTF-8 value. These include modules `zos_job_submit`, `zos_job_output`, `zos_operator_action_query` but are not limited to this list. This will be addressed in `ibm_zos_core` version 1.10.0-beta.1. Each case is unique, some options to work around the error are below. - Specify that the ASA assembler option be enabled to instruct the assembler to use ANSI control characters instead of machine code control characters. - Add `ignore_errors:true` to the playbook task so the task error will not fail the playbook. - If the error is resulting from a batch job, add `ignore_errors:true` to the task and capture the output into a variable and extract the job ID with a regular expression and then use `zos_job_output` to display the DD without the non-printable character such as the DD `JESMSGLG`. (https://github.com/ansible-collections/ibm_zos_core/issues/677) (https://github.com/ansible-collections/ibm_zos_core/issues/776) (https://github.com/ansible-collections/ibm_zos_core/issues/972) +- With later versions of `ansible-core` used with `ibm_zos_core` collection a warning has started to appear "Module "ansible.builtin.command" returned non UTF-8 data in the JSON response" that is currently being reviewed. There are no recommendations at this point. (https://github.com/ansible-collections/ibm_zos_core/issues/983) + +New Modules +----------- + +- ibm.ibm_zos_core.zos_script - Run scripts in z/OS + +v1.7.0 +====== + +Release Summary +--------------- + +Release Date: '2023-10-09' +This changelog describes all changes made to the modules and plugins included +in this collection. The release date is the date the changelog is created. +For additional details such as required dependencies and availability review +the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__ + +Major Changes +------------- + +- zos_copy - Previously, backups were taken when force was set to false; whether or not a user specified this operation which caused allocation issues with space and permissions. This removes the automatic backup performed and reverts to the original logic in that backups must be initiated by the user. (https://github.com/ansible-collections/ibm_zos_core/pull/896) + +Minor Changes +------------- + +- Add support for Jinja2 templates in zos_copy and zos_job_submit when using local source files. (https://github.com/ansible-collections/ibm_zos_core/pull/667) +- zos_archive - If destination data set space is not provided then the module computes it based on the src list and/or expanded src list based on pattern provided. (https://github.com/ansible-collections/ibm_zos_core/pull/930). +- zos_archive - When xmit faces a space error in xmit operation because of dest or log data set are filled raises an appropriate error hint. (https://github.com/ansible-collections/ibm_zos_core/pull/930). +- zos_copy - Adds block_size, record_format, record_length, space_primary, space_secondary, space_type and type in the return output when the destination data set does not exist and has to be created by the module. (https://github.com/ansible-collections/ibm_zos_core/pull/773) +- zos_data_set - record format = 'F' has been added to support 'fixed' block records. This allows records that can use the entire block. (https://github.com/ansible-collections/ibm_zos_core/pull/821) +- zos_job_output - zoau added 'program_name' to their field output starting with v1.2.4. This enhancement checks for that version and passes the extra column through. (https://github.com/ansible-collections/ibm_zos_core/pull/841) +- zos_job_query - Adds new fields job_class, svc_class, priority, asid, creation_datetime, and queue_position to the return output when querying or submitting a job. Available when using ZOAU v1.2.3 or greater. (https://github.com/ansible-collections/ibm_zos_core/pull/778) +- zos_job_query - unnecessary calls were made to find a jobs DDs that incurred unnecessary overhead. This change removes those resulting in a performance increase in job related queries. (https://github.com/ansible-collections/ibm_zos_core/pull/911) +- zos_job_query - zoau added 'program_name' to their field output starting with v1.2.4. This enhancement checks for that version and passes the extra column through. (https://github.com/ansible-collections/ibm_zos_core/pull/841) +- zos_job_submit - zoau added 'program_name' to their field output starting with v1.2.4. This enhancement checks for that version and passes the extra column through. (https://github.com/ansible-collections/ibm_zos_core/pull/841) +- zos_unarchive - When copying to remote fails now a proper error message is displayed. (https://github.com/ansible-collections/ibm_zos_core/pull/930). +- zos_unarchive - When copying to remote if space_primary is not defined, then is defaulted to 5M. (https://github.com/ansible-collections/ibm_zos_core/pull/930). + +Bugfixes +-------- + +- module_utils - data_set.py - Reported a failure caused when cataloging a VSAM data set. Fix now corrects how VSAM data sets are cataloged. (https://github.com/ansible-collections/ibm_zos_core/pull/791). +- zos_archive - Module did not return the proper src state after archiving. Fix now displays the status of the src after the operation. (https://github.com/ansible-collections/ibm_zos_core/pull/930). +- zos_blockinfile - Test case generate a data set that was not correctly removed. Changes delete the correct data set not only member. (https://github.com/ansible-collections/ibm_zos_core/pull/840) +- zos_copy - Module returned the dynamic values created with the same dataset type and record format. Fix validate the correct dataset type and record format of target created. (https://github.com/ansible-collections/ibm_zos_core/pull/824) +- zos_copy - Reported a false positive such that the response would have `changed=true` when copying from a source (src) or destination (dest) data set that was in use (DISP=SHR). This change now displays an appropriate error message and returns `changed=false`. (https://github.com/ansible-collections/ibm_zos_core/pull/794). +- zos_copy - Reported a warning about the use of _play_context.verbosity.This change corrects the module action to prevent the warning message. (https://github.com/ansible-collections/ibm_zos_core/pull/806). +- zos_copy - Test case for recursive encoding directories reported a UTF-8 failure. This change ensures proper test coverage for nested directories and file permissions. (https://github.com/ansible-collections/ibm_zos_core/pull/806). +- zos_copy - Zos_copy did not encode inner content inside subdirectories once the source was copied to the destination. Fix now encodes all content in a source directory, including subdirectories. (https://github.com/ansible-collections/ibm_zos_core/pull/772). +- zos_copy - kept permissions on target directory when copy overwrote files. The fix now set permissions when mode is given. (https://github.com/ansible-collections/ibm_zos_core/pull/795) +- zos_data_set - Reported a failure caused when `present=absent` for a VSAM data set leaving behind cluster components. Fix introduces a new logical flow that will evaluate the volumes, compare it to the provided value and if necessary catalog and delete. (https://github.com/ansible-collections/ibm_zos_core/pull/791). +- zos_fetch - Reported a warning about the use of _play_context.verbosity.This change corrects the module action to prevent the warning message. (https://github.com/ansible-collections/ibm_zos_core/pull/806). +- zos_job_output - Error message did not specify the job not found. Fix now specifies the job_id or job_name being searched to ensure more information is given back to the user. (https://github.com/ansible-collections/ibm_zos_core/pull/747) +- zos_operator - Reported a failure caused by unrelated error response. Fix now gives a transparent response of the operator to avoid false negatives. (https://github.com/ansible-collections/ibm_zos_core/pull/762). + +New Modules +----------- + +- ibm.ibm_zos_core.zos_archive - Archive files and data sets on z/OS. +- ibm.ibm_zos_core.zos_unarchive - Unarchive files and data sets in z/OS. v1.9.0 ====== @@ -336,7 +739,6 @@ in this collection. The release date is the date the changelog is created. For additional details such as required dependencies and availability review the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__ - Bugfixes -------- @@ -360,7 +762,6 @@ in this collection. The release date is the date the changelog is created. For additional details such as required dependencies and availability review the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__ - Major Changes ------------- @@ -432,7 +833,6 @@ in this collection. The release date is the date the changelog is created. For additional details such as required dependencies and availability review the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__ - Minor Changes ------------- @@ -461,7 +861,6 @@ in this collection. For additional details such as required dependencies and availablity review the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__ - Bugfixes -------- @@ -486,7 +885,6 @@ in this collection. For additional details such as required dependencies and availablity review the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__ - Bugfixes -------- @@ -505,7 +903,6 @@ in this collection. For additional details such as required dependencies and availablity review the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__ - Bugfixes -------- @@ -540,7 +937,6 @@ the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ inventory and a variables configuration. - Automate software management with SMP/E Playbooks - Minor Changes ------------- @@ -619,7 +1015,6 @@ in this collection. For additional details such as required dependencies and availablity review the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__ - Minor Changes ------------- diff --git a/README.md b/README.md index b2345c118..9b33194eb 100644 --- a/README.md +++ b/README.md @@ -1,85 +1,220 @@ -IBM z/OS core collection -======================== - -The **IBM® z/OS® core collection**, also represented as -**ibm_zos_core** in this document, is part of the broader -initiative to bring Ansible Automation to IBM Z® through the offering -**Red Hat® Ansible Certified Content for IBM Z®**. The -**IBM z/OS core collection** supports automation tasks such as -creating data sets, submitting jobs, querying jobs, retrieving job output, -encoding data, fetching data sets, copying data sets, -executing operator commands, executing TSO commands, ping, -querying operator actions, APF authorizing libraries, -editing textual data in data sets or Unix System Services files, -finding data sets, backing up and restoring data sets and -volumes, mounting file systems, running z/OS programs without JCL, -running local and remote scripts on z/OS, initializing volumes, -archiving, unarchiving and templating with Jinja. - - -Red Hat Ansible Certified Content for IBM Z -=========================================== - -**Red Hat® Ansible Certified Content for IBM Z** provides the ability to -connect IBM Z® to clients' wider enterprise automation strategy through the -Ansible Automation Platform ecosystem. This enables development and operations -automation on Z through a seamless, unified workflow orchestration with -configuration management, provisioning, and application deployment in -one easy-to-use platform. - -The **IBM z/OS core collection** is following the -**Red Hat® Ansible Certified Content for IBM Z®** method of distributing -content. Collections will be developed in the open, and when content is ready -for use, it is released to -[Ansible Galaxy](https://galaxy.ansible.com/ui/) -for community adoption. Once contributors review community usage, feedback, -and are satisfied with the content published, the collection will then be -released to [Ansible Automation Hub](https://www.ansible.com/products/automation-hub) -as **certified** and **IBM supported** for -**Red Hat® Ansible Automation Platform subscribers**. - -For guides and reference, please review the [documentation](https://ibm.github.io/z_ansible_collections_doc/index.html). - -Features -======== -The **IBM z/OS core collection**, includes -[connection plugins](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/plugins.html#connection), -[action plugins](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/plugins.html#action), -[modules](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/modules.html), -[filters](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/filters.html), -and ansible-doc to automate tasks on z/OS. - -Ansible version compatibility -============================= -This collection has been tested against **Ansible Core** versions >=2.15. -The Ansible Core versions supported for this collection align to the -[ansible-core support matrix](https://docs.ansible.com/ansible/latest/reference_appendices/release_and_maintenance.html#ansible-core-support-matrix). Review the -[Ansible community changelogs](https://docs.ansible.com/ansible/latest/reference_appendices/release_and_maintenance.html#ansible-community-changelogs) for corresponding **Ansible community packages** -and **ansible-core**. - -For **Ansible Automation Platform** (AAP) users, review the -[Ansible Automation Platform Certified Content](https://access.redhat.com/support/articles/ansible-automation-platform-certified-content) -and [AAP Life Cycle](https://access.redhat.com/support/policy/updates/ansible-automation-platform) -for more more information on supported versions of Ansible. - -Other Dependencies -================== -This release of the **IBM z/OS core collection** requires the z/OS managed node have the following: -- [z/OS](https://www.ibm.com/docs/en/zos) -- [z/OS shell](https://www.ibm.com/support/knowledgecenter/en/SSLTBW_2.4.0/com.ibm.zos.v2r4.bpxa400/part1.htm). -- [IBM Open Enterprise SDK for Python](https://www.ibm.com/products/open-enterprise-python-zos) -- [IBM Z Open Automation Utilities](https://www.ibm.com/docs/en/zoau/1.2.x) -For specific dependency versions, please review the [release notes](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html) for the version of the IBM Ansible z/OS core installed. - -Copyright -========= -© Copyright IBM Corporation 2020-2024. - -License -======= -Some portions of this collection are licensed under [GNU General Public -License, Version 3.0](https://opensource.org/licenses/GPL-3.0), and -other portions of this collection are licensed under [Apache License, -Version 2.0](http://www.apache.org/licenses/LICENSE-2.0). +# IBM® z/OS® core collection +The **IBM z/OS core** collection enables Ansible to interact with z/OS Data Sets and USS files. The collection focuses on operating system fundamental operations such as managing encodings, creating data sets, and submitting jobs. + +## Description + +The **IBM z/OS core** collection is part of the **Red Hat® Ansible Certified Content for IBM Z®** offering that brings Ansible automation to IBM Z®. This collection brings forward the possibility to manage batch jobs, perform program authorizations, run operator operations, and execute both JES and MVS commands as well as execute shell, python, and REXX scripts. It supports data set creation, searching, copying, fetching, and encoding. It provides both archiving and unarchiving of data sets, initializing volumes, performing backups and supports Jinja templating. + +System programmers can enable pipelines to setup, tear down and deploy applications while system administrators can automate time consuming repetitive tasks inevitably freeing up their time. New z/OS users can find comfort in Ansible's familiarity and expedite their proficiency in record time. + +## Requirements + +Before you install the IBM z/OS core collection, you must configure a control node and managed node with a minimum set of requirements. +The following [table](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/resources/releases_maintenance.html) details the specific software requirements for the controller and managed node. + +### Ansible Controller + +This release of the collection requires **ansible-core >=2.15** (Ansible >=8.x), for additional requirements such as Python, review the [support matrix](https://docs.ansible.com/ansible/latest/reference_appendices/release_and_maintenance.html#ansible-core-support-matrix). + +### Managed Node + +This release of the collection requires the following +* [z/OS](https://www.ibm.com/docs/en/zos) V2R4 (or later) but prior to version V3R1. +* [z/OS shell](https://www.ibm.com/support/knowledgecenter/en/SSLTBW_2.4.0/com.ibm.zos.v2r4.bpxa400/part1.htm). +* [IBM Open Enterprise SDK for Python](https://www.ibm.com/products/open-enterprise-python-zos) 3.9 - 3.11. +* [IBM Z Open Automation Utilities](https://www.ibm.com/docs/en/zoau) 1.3.0 or later. + +## Installation + +Before using this collection, you need to install it with the Ansible Galaxy command-line tool: + +```sh +ansible-galaxy collection install ibm.ibm_zos_core +``` + +<p> </p> +You can also include it in a requirements.yml file and install it with `ansible-galaxy collection install -r requirements.yml`, using the format: + +```sh +collections: + - name: ibm.ibm_zos_core +``` + +<p> </p> +Note that if you install the collection from Ansible Galaxy, it will not be upgraded automatically when you upgrade the Ansible package. +To upgrade the collection to the latest available version, run the following command: + +```sh +ansible-galaxy collection install ibm.ibm_zos_core --upgrade +``` + +<p> </p> +You can also install a specific version of the collection, for example, if you need to downgrade when something is broken in the latest version (please report an issue in this repository). Use the following syntax to install version 1.0.0: + +```sh +ansible-galaxy collection install ibm.ibm_zos_core:1.0.0 +``` + +<p> </p> +You can also install a beta version of the collection. A beta version is only available on Galaxy and is only supported by the community until it is promoted to General Availability (GA). Use the following syntax to install a beta version: + +```sh +ansible-galaxy collection install ibm.ibm_zos_core:1.10.0-beta.1 +``` + +<p> </p> +As part of the installation, the collection [requirements](#Requirements) must be made available to Ansible through the use of [environment variables](https://github.com/IBM/z_ansible_collections_samples/blob/main/docs/share/zos_core/configuration_guide.md#environment-variables). The preferred configuration is to place the environment variables in `group_vars` and `host_vars`, you can find examples of this configuration under any [playbook project](https://github.com/IBM/z_ansible_collections_samples), for example, review the **data set** example [configuration](https://github.com/IBM/z_ansible_collections_samples/tree/main/zos_concepts/data_sets/data_set_basics#configuration) documentation. + +<p> </p> +If you are testing a configuration, it can be helpful to set the environment variables in a playbook, an example of that can be reviewed [here](https://github.com/ansible-collections/ibm_zos_core/discussions/657). + +<p> </p> +To learn more about the ZOAU Python wheel installation method, review the [documentation](https://www.ibm.com/docs/en/zoau/1.3.x?topic=installing-zoau#python-wheel-installation-method). + +<p> </p> +If the wheel is installed using the `--target` option, it will install the package into the specified target directory. The environment variable `PYTHONPATH` will have to be configured to where the packages is installed, e.g; `PYTHONPATH: /usr/zoau/wheels`. Using `--target` is recommended, else the wheel will be installed in Python's home directory which may not have write permissions or persist +after an update. + +<p> </p> +If the wheel is installed using the `--user` option, it will install the package into the user directory. The environment variable `PYTHONPATH` will have to be configured to where the packages is installed, e.g; `PYTHONPATH: /u/user`. + +Environment variables: + +```sh +PYZ: "path_to_python_installation_on_zos_target" +ZOAU: "path_to_zoau_installation_on_zos_target" +ZOAU_PYTHONPATH: "path_to_zoau_wheel_installation_directory" + +ansible_python_interpreter: "{{ PYZ }}/bin/python3" + +environment_vars: + _BPXK_AUTOCVT: "ON" + ZOAU_HOME: "{{ ZOAU }}" + PYTHONPATH: "{{ ZOAU_PYTHONPATH }}" + LIBPATH: "{{ ZOAU }}/lib:{{ PYZ }}/lib:/lib:/usr/lib:." + PATH: "{{ ZOAU }}/bin:{{ PYZ }}/bin:/bin:/var/bin" + _CEE_RUNOPTS: "FILETAG(AUTOCVT,AUTOTAG) POSIX(ON)" + _TAG_REDIR_ERR: "txt" + _TAG_REDIR_IN: "txt" + _TAG_REDIR_OUT: "txt" + LANG: "C" + PYTHONSTDINENCODING: "cp1047" +``` + +## Use Cases + +* Use Case Name: Add a new z/OS User + * Actors: + * Application Developer + * Description: + * An application developer can submit a new user request for the system admin to approve. + * Flow: + * Verify user does not exist; create home directory, password, and passphrase + * Create home directory and the user to the system + * Provide access to resource, add to system groups, and define an alias + * Create the users ISPROF data set + * Create user private data set, mount with persistence + * Generate email with login credentials +* Use Case Name: Automate certificate renewals + * Actors: + * System Admin + * Description: + * The system administrator can automate certificate renewals + * Flow: + * Setup, configure and run z/OS Health Checker to generate a report + * Search the Health Checker report for expiring certificates + * Renew expiring certificates + * Collect expiring certificate attributes and backup certificate + * Replicate certificate with a new label + * Generate signing request and sign new certificate + * Supersede the old with the new certificate + * Delete old certificate and relabel new certificate with previous certificate name +* Use Case Name: Provision a Liberty Profile Instance + * Actors: + * Application Developer + * Description: + * An application developer can provision an application runtime that accelerates the delivery of cloud-native applications. + * Flow: + * Create and mount a file system for the Liberty profile. + * Create a Liberty Profile instance with optional configurations. + * Enable z/OS authorized services for the Liberty profile. + * Start an angel process or a server process + +## Testing + +All releases, including betas will meet the following test criteria. + + * 100% success for [Functional](https://github.com/ansible-collections/ibm_zos_core/tree/dev/tests/functional) tests. + * 100% success for [Sanity](https://docs.ansible.com/ansible/latest/dev_guide/testing/sanity/index.html#all-sanity-tests) tests as part of [ansible-test](https://docs.ansible.com/ansible/latest/dev_guide/testing.html#run-sanity-tests). + * 100% success for [pyflakes](https://github.com/PyCQA/pyflakes/blob/main/README.rst). + * 100% success for [ansible-lint](https://ansible.readthedocs.io/projects/lint/) allowing only false positives. + +<p> </p> +This release of the collection was tested with following dependencies. + + * ansible-core v2.15.x + * Python 3.9.x + * IBM Open Enterprise SDK for Python 3.11.x + * IBM Z Open Automation Utilities (ZOAU) 1.3.0.x + * z/OS V2R5 + +This release introduces case sensitivity for option values and includes a porting guide in the [release notes](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html) to assist with which option values will need to be updated. + +## Contributing + +This community is not currently accepting contributions. However, we encourage you to open [git issues](https://github.com/ansible-collections/ibm_zos_core/issues) for bugs, comments or feature requests and check back periodically for when community contributions will be accepted in the near future. + +Review the [development docs](https://ibm.github.io/z_ansible_collections_doc/zhmc-ansible-modules/docs/source/development.html#development) to learn how you can create an environment and test the collections modules. + +## Communication + +If you would like to communicate with this community, you can do so through the following options. + + * GitHub [discussions](https://github.com/ansible-collections/ibm_zos_core/discussions). + * GitHub [issues](https://github.com/ansible-collections/ibm_zos_core/issues/new/choose). + * [Ansible Forum](https://forum.ansible.com/), please use the `zos` tag to ensure proper awareness. + * Discord [System Z Enthusiasts](https://forum.ansible.com/) room [ansible](https://discord.gg/nKC8F89v). + * Matrix Ansible room [ansible-zos](#ansible-zos:matrix.org). + * Ansible community Matrix [rooms](https://docs.ansible.com/ansible/latest/community/communication.html#general-channels). + +## Support + +As Red Hat Ansible [Certified Content](https://catalog.redhat.com/software/search?target_platforms=Red%20Hat%20Ansible%20Automation%20Platform), this collection is entitled to [support](https://access.redhat.com/support/) through [Ansible Automation Platform](https://www.redhat.com/en/technologies/management/ansible) (AAP). After creating a Red Hat support case, if it is determined the issue belongs to IBM, Red Hat will instruct you to create an [IBM support case](https://www.ibm.com/mysupport/s/createrecord/NewCase) and share the case number with Red Hat so that a collaboration can begin between Red Hat and IBM. + +<p> </p> +If a support case cannot be opened with Red Hat and the collection has been obtained either from [Galaxy](https://galaxy.ansible.com/ui/) or [GitHub](https://github.com/ansible-collections/ibm_zos_core), there is community support available at no charge. Community support is limited to the collection; community support does not include any of the Ansible Automation Platform components, [IBM Z Open Automation Utilities](https://www.ibm.com/docs/en/zoau), [IBM Open Enterprise SDK for Python](https://www.ibm.com/products/open-enterprise-python-zos) or [ansible-core](https://github.com/ansible/ansible). + +<p> </p> +The current supported versions of this collection can be found listed under the [release section](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html). + +## Release Notes and Roadmap + +The collection's cumulative release notes can be reviewed [here](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html). Note, some collections release before an ansible-core version reaches End of Life (EOL), thus the version of ansible-core that is supported must be a version that is currently supported. + +For AAP users, to see the supported ansible-core versions, review the [AAP Life Cycle](https://access.redhat.com/support/policy/updates/ansible-automation-platform). + +For Galaxy and GitHub users, to see the supported ansible-core versions, review the [ansible-core support matrix](https://docs.ansible.com/ansible/latest/reference_appendices/release_and_maintenance.html#ansible-core-support-matrix). + +<p> </p> +The collection's changelogs can be reviewed in the following table. + +| Version | ansible-core | Ansible | Status | +|---------|--------------|---------|----------------------------| +| 1.11.x | >=2.16.x | >=9.0.x | In development (unreleased)| +| [1.10.x](https://github.com/ansible-collections/ibm_zos_core/blob/v1.10.0-beta.1/CHANGELOG.rst) | >=2.15.x | >=8.0.x | In preview | +| [1.9.x](https://github.com/ansible-collections/ibm_zos_core/blob/v1.9.0/CHANGELOG.rst) | >=2.14.x | >=7.0.x | 05 Feb 2024 | +| [1.8.x](https://github.com/ansible-collections/ibm_zos_core/blob/v1.8.0/CHANGELOG.rst) | >=2.14.x | >=7.0.x | 13 Dec 2023 | +| [1.7.x](https://github.com/ansible-collections/ibm_zos_core/blob/v1.7.0/CHANGELOG.rst) | >=2.14.x | >=7.0.x | 10 Oct 2023 | +| [1.6.x](https://github.com/ansible-collections/ibm_zos_core/blob/v1.6.0/CHANGELOG.rst) | >=2.9.x | >=2.9.x | 28 June 2023 | +| [1.5.x](https://github.com/ansible-collections/ibm_zos_core/blob/v1.5.0/CHANGELOG.rst) | >=2.9.x | >=2.9.x | 25 April 2023 | + +## Related Information + +Example playbooks and use cases can be be found in the [z/OS playbook repository](https://github.com/IBM/z_ansible_collections_samples). +Supplemental content on getting started with Ansible, architecture and use cases is available [here](https://ibm.github.io/z_ansible_collections_doc/reference/helpful_links.html). + +## License Information + +Some portions of this collection are licensed under [GNU General Public License, Version 3.0](https://opensource.org/licenses/GPL-3.0), and other portions of this collection are licensed under [Apache License, Version 2.0](http://www.apache.org/licenses/LICENSE-2.0). See individual files for applicable licenses. \ No newline at end of file diff --git a/changelogs/.plugin-cache.yaml b/changelogs/.plugin-cache.yaml index 4e2979ebb..e5bd167b7 100644 --- a/changelogs/.plugin-cache.yaml +++ b/changelogs/.plugin-cache.yaml @@ -135,4 +135,4 @@ plugins: strategy: {} test: {} vars: {} -version: 1.9.0 +version: 1.10.0-beta.1 diff --git a/changelogs/changelog.yaml b/changelogs/changelog.yaml index a8404bf84..6e034e91c 100644 --- a/changelogs/changelog.yaml +++ b/changelogs/changelog.yaml @@ -78,6 +78,173 @@ releases: name: zos_tso_command namespace: '' release_date: '2022-06-07' + 1.10.0-beta.1: + changes: + breaking_changes: + - zos_archive - option ``terse_pack`` no longer accepts uppercase choices, users + should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_archive - suboption ``record_format`` of ``dest_data_set`` no longer accepts + uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_archive - suboption ``space_type`` of ``dest_data_set`` no longer accepts + uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_archive - suboption ``type`` of ``dest_data_set`` no longer accepts uppercase + choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_backup_restore - option ``space_type`` no longer accepts uppercase choices, + users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_copy - suboption ``record_format`` of ``dest_data_set`` no longer accepts + uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_copy - suboption ``space_type`` of ``dest_data_set`` no longer accepts + uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_copy - suboption ``type`` of ``dest_data_set`` no longer accepts uppercase + choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_data_set - option ``record_format`` no longer accepts uppercase choices, + users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_data_set - option ``space_type`` no longer accepts uppercase choices, + users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_data_set - option ``type`` no longer accepts uppercase choices, users + should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_data_set - options inside ``batch`` no longer accept uppercase choices, + users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_job_submit - option ``location`` no longer accepts uppercase choices, + users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_mount - option ``automove`` no longer accepts uppercase choices, users + should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_mount - option ``fs_type`` no longer accepts uppercase choices, users + should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_mount - option ``mount_opts`` no longer accepts uppercase choices, users + should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_mount - option ``tag_untagged`` no longer accepts uppercase choices, users + should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_mount - option ``unmount_opts`` no longer accepts uppercase choices, users + should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_mvs_raw - options inside ``dd_concat`` no longer accept uppercase choices, + users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_mvs_raw - suboption ``record_format`` of ``dd_data_set`` no longer accepts + uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_mvs_raw - suboption ``record_format`` of ``dd_unix`` no longer accepts + uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_mvs_raw - suboption ``space_type`` of ``dd_data_set`` no longer accepts + uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_mvs_raw - suboption ``type`` of ``dd_data_set`` no longer accepts uppercase + choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_mvs_raw - suboptions ``disposition_normal`` and ``disposition_abnormal`` + of ``dd_data_set`` no longer accept ``catlg`` and ``uncatlg`` as choices. + This also applies when defining a ``dd_data_set`` inside ``dd_concat``. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_unarchive - suboption ``record_format`` of ``dest_data_set`` no longer + accepts uppercase choices, users should replace them with lowercase ones. + (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_unarchive - suboption ``space_type`` of ``dest_data_set`` no longer accepts + uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_unarchive - suboption ``type`` of ``dest_data_set`` no longer accepts + uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + bugfixes: + - module_utils/job.py - job output containing non-printable characters would + crash modules. Fix now handles the error gracefully and returns a message + to the user inside `content` of the `ddname` that failed. (https://github.com/ansible-collections/ibm_zos_core/pull/1261). + - zos_apf - List option only returned one data set. Fix now returns the list + of retrieved data sets. (https://github.com/ansible-collections/ibm_zos_core/pull/1204). + - zos_blockinfile - Using double quotation marks inside a block resulted in + a false positive result with ZOAU 1.3. Fix now handles this special case to + avoid false negatives. (https://github.com/ansible-collections/ibm_zos_core/pull/1340). + - zos_find - Filter size failed if a PDS/E matched the pattern. Fix now gets + the correct size for PDS/Es. (https://github.com/ansible-collections/ibm_zos_core/pull/1443). + - zos_job_submit - Was ignoring the default value for location=DATA_SET, now + when location is not specified it will default to DATA_SET. (https://github.com/ansible-collections/ibm_zos_core/pull/1220). + - zos_job_submit - when the argument max_rc was different than 0 the changed + response returned as false. Fix now return a changed response as true when + the rc is not 0 and max_rc is above or equal to the value of the job. (https://github.com/ansible-collections/ibm_zos_core/pull/1345). + - zos_mvs_raw - The module ignored the value of `tmp_hlq` option when creating + temporary data sets. Fix now honors the value if provided and uses it as High + Level Qualifier for temporary data sets created during the module execution. + (https://github.com/ansible-collections/ibm_zos_core/pull/1320). + minor_changes: + - zos_apf - Enhanced error messages when an exception is caught. (https://github.com/ansible-collections/ibm_zos_core/pull/1204). + - zos_backup_restore - Add tmp_hlq option to the user interface to override + the default high level qualifier (HLQ) for temporary and backup. (https://github.com/ansible-collections/ibm_zos_core/pull/1265). + - zos_copy - Documented `group` and `owner` options. (https://github.com/ansible-collections/ibm_zos_core/pull/1307). + - zos_copy - Improve zos_copy performance when copying multiple members from + one PDS/E to another. (https://github.com/ansible-collections/ibm_zos_core/pull/1183). + release_summary: 'Release Date: ''2024-05-08'' + + This changelog describes all changes made to the modules and plugins included + + in this collection. The release date is the date the changelog is created. + + For additional details such as required dependencies and availability review + + the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__' + fragments: + - 1032-clean-job_submit-test.yml + - 1152-zos-lineinfile-remove-zos_copy-dependency.yml + - 1156-zos_archive-remove-zos_copy_dep.yml + - 1157-remove-zos-copy-from-zos-encode-tests.yml + - 1165-remove-zos-copy-dep-from-zos-fetch.yml + - 1167-remove-zos-copy-from-zos-blockinfile-tests.yml + - 1169-util-job-zoau-migration.yml + - 1179-remove-zos_encode-from_zos_lineinfile-tests.yml + - 1181-zoau-migration-zos_operator.yml + - 1182-migrate-module-utils-data-set.yml + - 1183-copy-members.yml + - 1184-remove-zos-fetch-dep-from-zos-copy.yml + - 1187-migrate-module-utils-copy.yml + - 1188-migrate-module_utils-backup.yml + - 1189-migrate-module_utils-encode.yml + - 1190-migrate-module_utils-dd_statement.yml + - 1196-zoau-migration-zos_gather_facts.yml + - 1202-doc-gen-script-portability.yml + - 1204-migrate-zos_apf.yml + - 1209-zoau-migration-zos_job_submit.yml + - 1215-Migrate_zos_operator_action_query.yml + - 1216-Validate_module_zos_job_output_migration.yml + - 1217-validate-job-query.yml + - 1218-migrate-zos_encode.yml + - 1220-bugfix-zos_job_submit-default_value.yml + - 1222-zoau-migration-zos_copy.yml + - 1227-migrate-zos_archive.yml + - 1228-zos_find-remove-zos_lineinfile_dep.yml + - 1229-migrate-zos_fetch.yml + - 1237-migrate-zos_mount.yml + - 1238-migrate-zos_unarchive.yml + - 1242-zoau-migration-zos_data_set.yml + - 1256_Migrate_zos_blockinfile_and_lineinfile.yml + - 1257-zoau-import-zos_apf.yml + - 1261-job-submit-non-utf8-chars.yml + - 1265_Migrate_zos_backup_restore.yml + - 1270-quick-fix-len-of-volumes-work-around.yml + - 1286-update-zos_archive-zos_unarchive-docs.yml + - 1295-doc-zos_ping-scp.yml + - 1298-Remove_local_charset_from_zos_fetch.yml + - 1307-update-sanity-zos_copy.yml + - 1320-Zos_mvs_raw_ignores_tmp_hlq.yml + - 1322-update-docstring-encode.yml + - 1331-update-docstring-ickdsf.yml + - 1332-update-docstring-import_handler.yml + - 1333-update-docstring-job.yml + - 1336-update-docstring-validation.yml + - 1340-Work_around_fix_false_positive.yml + - 1345-max_rc_more_than_0_doesn_not_put_change_as_true.yml + - 1347-update-docstring-zos_data_set.yml + - 1348-update-docstring-zos_encode.yml + - 1349-update-docstring-zos_fetch.yml + - 1350-update-docstring-zos_find.yml + - 1351-update-docstring-zos_gather_facts.yml + - 1352-update-docstring-zos_job_output.yml + - 1353-update-docstring-zos_job_query.yml + - 1354-update-docstring-zos_job_submit.yml + - 1355-update-docstring-zos_lineinfile.yml + - 1356-update-docstring-zos_mount.yml + - 1388-lowercase-choices.yml + - 1390-update-docstring-zos_script.yml + - 1391-update-docstring-zos_tso_command.yml + - 1392-update-docstring-zos_volume_init.yml + - 1393-update-docstring-zos_apf.yml + - 1394-Update_docstring-zos_operator_action_query.yml + - 1443-zos_find-filter-size.yml + - 692-changelog-lint-ac-tool.yml + - 971-bug-job_submit-can-stacktrace.yml + - 992-fix-sanity4to6.yml + - v1.10.0-beta.1_summary.yml + release_date: '2024-05-08' 1.2.1: changes: bugfixes: diff --git a/docs/scripts/pre-template.sh b/docs/scripts/pre-template.sh index 3a2ac16d4..4fb77f071 100755 --- a/docs/scripts/pre-template.sh +++ b/docs/scripts/pre-template.sh @@ -22,7 +22,7 @@ # and Jinja2 template, and later sphinx html. # This requries that the ansible collection be prebuilt so that it can find # the template.py within the collection (not within the git project). Thus run -# './ac --ac-build' before the make file that builds doc. +# './ac --ac-build' before the make file that builds doc. ################################################################################ template_doc_source=`ansible-config dump|grep DEFAULT_MODULE_PATH| cut -d'=' -f2|sed 's/[][]//g' | tr -d \'\" |sed 's/modules/doc_fragments\/template.py/g'` diff --git a/docs/source/index.rst b/docs/source/index.rst index c150d27c0..7cea15c03 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -90,10 +90,9 @@ ansible-doc to automate tasks on z/OS. community_guides license +.. toctree:: + :maxdepth: 1 + :caption: Reference - - - - - - + reference/documentation + reference/community diff --git a/docs/source/modules/zos_apf.rst b/docs/source/modules/zos_apf.rst index 73d616e76..265d3fff5 100644 --- a/docs/source/modules/zos_apf.rst +++ b/docs/source/modules/zos_apf.rst @@ -62,7 +62,7 @@ volume 1. The volume serial number. - 2. Six asterisks (\*\*\*\*\*\*), indicating that the system must use the volume serial number of the current system residence (SYSRES) volume. + 2. Six asterisks \ :literal:`\*\*\*\*\*\*`\ , indicating that the system must use the volume serial number of the current system residence (SYSRES) volume. 3. \*MCAT\*, indicating that the system must use the volume serial number of the volume containing the master catalog. @@ -189,7 +189,7 @@ batch 1. The volume serial number - 2. Six asterisks (\*\*\*\*\*\*), indicating that the system must use the volume serial number of the current system residence (SYSRES) volume. + 2. Six asterisks \ :literal:`\*\*\*\*\*\*`\ , indicating that the system must use the volume serial number of the current system residence (SYSRES) volume. 3. \*MCAT\*, indicating that the system must use the volume serial number of the volume containing the master catalog. @@ -225,7 +225,7 @@ Examples - name: Add a library (cataloged) to the APF list and persistence zos_apf: library: SOME.SEQUENTIAL.DATASET - force_dynamic: True + force_dynamic: true persistent: data_set_name: SOME.PARTITIONED.DATASET(MEM) - name: Remove a library from the APF list and persistence @@ -243,7 +243,7 @@ Examples batch: - library: SOME.SEQ.DS1 - library: SOME.SEQ.DS2 - sms: True + sms: true - library: SOME.SEQ.DS3 volume: T12345 - name: Print the APF list matching library pattern or volume serial number diff --git a/docs/source/modules/zos_apf.rst-e b/docs/source/modules/zos_apf.rst-e deleted file mode 100644 index ec8e6824c..000000000 --- a/docs/source/modules/zos_apf.rst-e +++ /dev/null @@ -1,318 +0,0 @@ - -:github_url: https://github.com/ansible-collections/ibm_zos_core/blob/dev/plugins/modules/zos_apf.py - -.. _zos_apf_module: - - -zos_apf -- Add or remove libraries to Authorized Program Facility (APF) -======================================================================= - - - -.. contents:: - :local: - :depth: 1 - - -Synopsis --------- -- Adds or removes libraries to Authorized Program Facility (APF). -- Manages APF statement persistent entries to a data set or data set member. -- Changes APF list format to "DYNAMIC" or "STATIC". -- Gets the current APF list entries. - - - - - -Parameters ----------- - - -library - The library name to be added or removed from the APF list. - - | **required**: False - | **type**: str - - -state - Ensure that the library is added \ :literal:`state=present`\ or removed \ :literal:`state=absent`\ . - - The APF list format has to be "DYNAMIC". - - | **required**: False - | **type**: str - | **default**: present - | **choices**: absent, present - - -force_dynamic - Will force the APF list format to "DYNAMIC" before adding or removing libraries. - - If the format is "STATIC", the format will be changed to "DYNAMIC". - - | **required**: False - | **type**: bool - | **default**: False - - -volume - The identifier for the volume containing the library specified in the \ :literal:`library`\ parameter. The values must be one the following. - - 1. The volume serial number. - - 2. Six asterisks (\*\*\*\*\*\*), indicating that the system must use the volume serial number of the current system residence (SYSRES) volume. - - 3. \*MCAT\*, indicating that the system must use the volume serial number of the volume containing the master catalog. - - If \ :literal:`volume`\ is not specified, \ :literal:`library`\ has to be cataloged. - - | **required**: False - | **type**: str - - -sms - Indicates that the library specified in the \ :literal:`library`\ parameter is managed by the storage management subsystem (SMS), and therefore no volume is associated with the library. - - If \ :literal:`sms=True`\ , \ :literal:`volume`\ value will be ignored. - - | **required**: False - | **type**: bool - | **default**: False - - -operation - Change APF list format to "DYNAMIC" \ :literal:`operation=set\_dynamic`\ or "STATIC" \ :literal:`operation=set\_static`\ - - Display APF list current format \ :literal:`operation=check\_format`\ - - Display APF list entries when \ :literal:`operation=list`\ \ :literal:`library`\ , \ :literal:`volume`\ and \ :literal:`sms`\ will be used as filters. - - If \ :literal:`operation`\ is not set, add or remove operation will be ignored. - - | **required**: False - | **type**: str - | **choices**: set_dynamic, set_static, check_format, list - - -tmp_hlq - Override the default high level qualifier (HLQ) for temporary and backup datasets. - - The default HLQ is the Ansible user used to execute the module and if that is not available, then the value \ :literal:`TMPHLQ`\ is used. - - | **required**: False - | **type**: str - - -persistent - Add/remove persistent entries to or from \ :emphasis:`data\_set\_name`\ - - \ :literal:`library`\ will not be persisted or removed if \ :literal:`persistent=None`\ - - | **required**: False - | **type**: dict - - - data_set_name - The data set name used for persisting or removing a \ :literal:`library`\ from the APF list. - - | **required**: True - | **type**: str - - - marker - The marker line template. - - \ :literal:`{mark}`\ will be replaced with "BEGIN" and "END". - - Using a custom marker without the \ :literal:`{mark}`\ variable may result in the block being repeatedly inserted on subsequent playbook runs. - - \ :literal:`{mark}`\ length may not exceed 72 characters. - - The timestamp (\<timestamp\>) used in the default marker follows the '+%Y%m%d-%H%M%S' date format - - | **required**: False - | **type**: str - | **default**: /* {mark} ANSIBLE MANAGED BLOCK <timestamp> */ - - - backup - Creates a backup file or backup data set for \ :emphasis:`data\_set\_name`\ , including the timestamp information to ensure that you retrieve the original APF list defined in \ :emphasis:`data\_set\_name`\ ". - - \ :emphasis:`backup\_name`\ can be used to specify a backup file name if \ :emphasis:`backup=true`\ . - - The backup file name will be return on either success or failure of module execution such that data can be retrieved. - - | **required**: False - | **type**: bool - | **default**: False - - - backup_name - Specify the USS file name or data set name for the destination backup. - - If the source \ :emphasis:`data\_set\_name`\ is a USS file or path, the backup\_name name must be a file or path name, and the USS file or path must be an absolute path name. - - If the source is an MVS data set, the backup\_name must be an MVS data set name. - - If the backup\_name is not provided, the default backup\_name will be used. If the source is a USS file or path, the name of the backup file will be the source file or path name appended with a timestamp. For example, \ :literal:`/path/file\_name.2020-04-23-08-32-29-bak.tar`\ . - - If the source is an MVS data set, it will be a data set with a random name generated by calling the ZOAU API. The MVS backup data set recovery can be done by renaming it. - - | **required**: False - | **type**: str - - - -batch - A list of dictionaries for adding or removing libraries. - - This is mutually exclusive with \ :literal:`library`\ , \ :literal:`volume`\ , \ :literal:`sms`\ - - Can be used with \ :literal:`persistent`\ - - | **required**: False - | **type**: list - | **elements**: dict - - - library - The library name to be added or removed from the APF list. - - | **required**: True - | **type**: str - - - volume - The identifier for the volume containing the library specified on the \ :literal:`library`\ parameter. The values must be one of the following. - - 1. The volume serial number - - 2. Six asterisks (\*\*\*\*\*\*), indicating that the system must use the volume serial number of the current system residence (SYSRES) volume. - - 3. \*MCAT\*, indicating that the system must use the volume serial number of the volume containing the master catalog. - - If \ :literal:`volume`\ is not specified, \ :literal:`library`\ has to be cataloged. - - | **required**: False - | **type**: str - - - sms - Indicates that the library specified in the \ :literal:`library`\ parameter is managed by the storage management subsystem (SMS), and therefore no volume is associated with the library. - - If true \ :literal:`volume`\ will be ignored. - - | **required**: False - | **type**: bool - | **default**: False - - - - - -Examples --------- - -.. code-block:: yaml+jinja - - - - name: Add a library to the APF list - zos_apf: - library: SOME.SEQUENTIAL.DATASET - volume: T12345 - - name: Add a library (cataloged) to the APF list and persistence - zos_apf: - library: SOME.SEQUENTIAL.DATASET - force_dynamic: True - persistent: - data_set_name: SOME.PARTITIONED.DATASET(MEM) - - name: Remove a library from the APF list and persistence - zos_apf: - state: absent - library: SOME.SEQUENTIAL.DATASET - volume: T12345 - persistent: - data_set_name: SOME.PARTITIONED.DATASET(MEM) - - name: Batch libraries with custom marker, persistence for the APF list - zos_apf: - persistent: - data_set_name: "SOME.PARTITIONED.DATASET(MEM)" - marker: "/* {mark} PROG001 USR0010 */" - batch: - - library: SOME.SEQ.DS1 - - library: SOME.SEQ.DS2 - sms: True - - library: SOME.SEQ.DS3 - volume: T12345 - - name: Print the APF list matching library pattern or volume serial number - zos_apf: - operation: list - library: SOME.SEQ.* - volume: T12345 - - name: Set the APF list format to STATIC - zos_apf: - operation: set_static - - - - -Notes ------ - -.. note:: - It is the playbook author or user's responsibility to ensure they have appropriate authority to the RACF® FACILITY resource class. A user is described as the remote user, configured either for the playbook or playbook tasks, who can also obtain escalated privileges to execute as root or another user. - - To add or delete the APF list entry for library libname, you must have UPDATE authority to the RACF® FACILITY resource class entity CSVAPF.libname, or there must be no FACILITY class profile that protects that entity. - - To change the format of the APF list to dynamic, you must have UPDATE authority to the RACF FACILITY resource class profile CSVAPF.MVS.SETPROG.FORMAT.DYNAMIC, or there must be no FACILITY class profile that protects that entity. - - To change the format of the APF list back to static, you must have UPDATE authority to the RACF FACILITY resource class profile CSVAPF.MVS.SETPROG.FORMAT.STATIC, or there must be no FACILITY class profile that protects that entity. - - - - - - - -Return Values -------------- - - -stdout - The stdout from ZOAU command apfadm. Output varies based on the type of operation. - - state\> stdout of the executed operator command (opercmd), "SETPROG" from ZOAU command apfadm - - operation\> stdout of operation options list\> Returns a list of dictionaries of APF list entries [{'vol': 'PP0L6P', 'ds': 'DFH.V5R3M0.CICS.SDFHAUTH'}, {'vol': 'PP0L6P', 'ds': 'DFH.V5R3M0.CICS.SDFJAUTH'}, ...] set\_dynamic\> Set to DYNAMIC set\_static\> Set to STATIC check\_format\> DYNAMIC or STATIC - - | **returned**: always - | **type**: str - -stderr - The error messages from ZOAU command apfadm - - | **returned**: always - | **type**: str - | **sample**: BGYSC1310E ADD Error: Dataset COMMON.LINKLIB volume COMN01 is already present in APF list. - -rc - The return code from ZOAU command apfadm - - | **returned**: always - | **type**: int - -msg - The module messages - - | **returned**: failure - | **type**: str - | **sample**: Parameter verification failed - -backup_name - Name of the backup file or data set that was created. - - | **returned**: if backup=true, always - | **type**: str - diff --git a/docs/source/modules/zos_archive.rst b/docs/source/modules/zos_archive.rst index 3249f3ba8..b900fdcdb 100644 --- a/docs/source/modules/zos_archive.rst +++ b/docs/source/modules/zos_archive.rst @@ -374,7 +374,7 @@ Examples name: terse format_options: terse_pack: "spack" - use_adrdssu: True + use_adrdssu: true # Use a pattern to store - name: Compress data set pattern using xmit diff --git a/docs/source/modules/zos_backup_restore.rst b/docs/source/modules/zos_backup_restore.rst index 6833279fa..e8216dd3e 100644 --- a/docs/source/modules/zos_backup_restore.rst +++ b/docs/source/modules/zos_backup_restore.rst @@ -241,7 +241,7 @@ Examples data_sets: include: user.** backup_name: /tmp/temp_backup.dzp - recover: yes + recover: true - name: Backup all datasets matching the pattern USER.** to data set MY.BACKUP.DZP, allocate 100MB for data sets used in backup process. @@ -271,7 +271,7 @@ Examples operation: backup backup_name: /tmp/temp_backup.dzp volume: MYVOL1 - full_volume: yes + full_volume: true space: 1 space_type: g @@ -314,7 +314,7 @@ Examples zos_backup_restore: operation: restore volume: MYVOL2 - full_volume: yes + full_volume: true backup_name: MY.BACKUP.DZP space: 1 space_type: g diff --git a/docs/source/modules/zos_copy.rst b/docs/source/modules/zos_copy.rst index 5ea5bf3ef..01647e010 100644 --- a/docs/source/modules/zos_copy.rst +++ b/docs/source/modules/zos_copy.rst @@ -635,7 +635,7 @@ Examples zos_copy: src: /path/to/foo.conf dest: /etc/foo.conf - mode: 0644 + mode: "0644" group: foo owner: bar diff --git a/docs/source/modules/zos_data_set.rst b/docs/source/modules/zos_data_set.rst index 3300c7d40..caed66ba9 100644 --- a/docs/source/modules/zos_data_set.rst +++ b/docs/source/modules/zos_data_set.rst @@ -59,6 +59,9 @@ state If \ :emphasis:`state=absent`\ and \ :emphasis:`volumes`\ is provided, and the data set is found in the catalog, the module compares the catalog volume attributes to the provided \ :emphasis:`volumes`\ . If the volume attributes are different, the cataloged data set will be uncataloged temporarily while the requested data set be deleted is cataloged. The module will catalog the original data set on completion, if the attempts to catalog fail, no action is taken. Module completes successfully with \ :emphasis:`changed=False`\ . + If \ :emphasis:`state=absent`\ and \ :emphasis:`type=gdg`\ and the GDG base has active generations the module will complete successfully with \ :emphasis:`changed=False`\ . To remove it option \ :emphasis:`force`\ needs to be used. If the GDG base does not have active generations the module will complete successfully with \ :emphasis:`changed=True`\ . + + If \ :emphasis:`state=present`\ and the data set does not exist on the managed node, create and catalog the data set, module completes successfully with \ :emphasis:`changed=True`\ . @@ -102,7 +105,7 @@ type | **required**: False | **type**: str | **default**: pds - | **choices**: ksds, esds, rrds, lds, seq, pds, pdse, library, basic, large, member, hfs, zfs + | **choices**: ksds, esds, rrds, lds, seq, pds, pdse, library, basic, large, member, hfs, zfs, gdg space_primary @@ -235,6 +238,74 @@ key_length | **type**: int +empty + Sets the \ :emphasis:`empty`\ attribute for Generation Data Groups. + + If false, removes only the oldest GDS entry when a new GDS is created that causes GDG limit to be exceeded. + + If true, removes all GDS entries from a GDG base when a new GDS is created that causes the GDG limit to be exceeded. + + Default is false. + + | **required**: False + | **type**: bool + + +extended + Sets the \ :emphasis:`extended`\ attribute for Generation Data Groups. + + If false, allow up to 255 generation data sets (GDSs) to be associated with the GDG. + + If true, allow up to 999 generation data sets (GDS) to be associated with the GDG. + + Default is false. + + | **required**: False + | **type**: bool + + +fifo + Sets the \ :emphasis:`fifo`\ attribute for Generation Data Groups. + + If false, the order is the newest GDS defined to the oldest GDS. This is the default value. + + If true, the order is the oldest GDS defined to the newest GDS. + + Default is false. + + | **required**: False + | **type**: bool + + +limit + Sets the \ :emphasis:`limit`\ attribute for Generation Data Groups. + + Specifies the maximum number, from 1 to 255(up to 999 if extended), of GDS that can be associated with the GDG being defined. + + \ :emphasis:`limit`\ is required when \ :emphasis:`type=gdg`\ . + + | **required**: False + | **type**: int + + +purge + Sets the \ :emphasis:`purge`\ attribute for Generation Data Groups. + + Specifies whether to override expiration dates when a generation data set (GDS) is rolled off and the \ :literal:`scratch`\ option is set. + + | **required**: False + | **type**: bool + + +scratch + Sets the \ :emphasis:`scratch`\ attribute for Generation Data Groups. + + Specifies what action is to be taken for a generation data set located on disk volumes when the data set is uncataloged from the GDG base as a result of EMPTY/NOEMPTY processing. + + | **required**: False + | **type**: bool + + volumes If cataloging a data set, \ :emphasis:`volumes`\ specifies the name of the volume(s) where the data set is located. @@ -285,7 +356,9 @@ force The \ :emphasis:`force=True`\ option enables sharing of data sets through the disposition \ :emphasis:`DISP=SHR`\ . - The \ :emphasis:`force=True`\ only applies to data set members when \ :emphasis:`state=absent`\ and \ :emphasis:`type=member`\ . + The \ :emphasis:`force=True`\ only applies to data set members when \ :emphasis:`state=absent`\ and \ :emphasis:`type=member`\ and when removing a GDG base with active generations. + + If \ :emphasis:`force=True`\ , \ :emphasis:`type=gdg`\ and \ :emphasis:`state=absent`\ it will force remove a GDG base with active generations. | **required**: False | **type**: bool @@ -375,7 +448,7 @@ batch | **required**: False | **type**: str | **default**: pds - | **choices**: ksds, esds, rrds, lds, seq, pds, pdse, library, basic, large, member, hfs, zfs + | **choices**: ksds, esds, rrds, lds, seq, pds, pdse, library, basic, large, member, hfs, zfs, gdg space_primary @@ -508,6 +581,74 @@ batch | **type**: int + empty + Sets the \ :emphasis:`empty`\ attribute for Generation Data Groups. + + If false, removes only the oldest GDS entry when a new GDS is created that causes GDG limit to be exceeded. + + If true, removes all GDS entries from a GDG base when a new GDS is created that causes the GDG limit to be exceeded. + + Default is false. + + | **required**: False + | **type**: bool + + + extended + Sets the \ :emphasis:`extended`\ attribute for Generation Data Groups. + + If false, allow up to 255 generation data sets (GDSs) to be associated with the GDG. + + If true, allow up to 999 generation data sets (GDS) to be associated with the GDG. + + Default is false. + + | **required**: False + | **type**: bool + + + fifo + Sets the \ :emphasis:`fifo`\ attribute for Generation Data Groups. + + If false, the order is the newest GDS defined to the oldest GDS. This is the default value. + + If true, the order is the oldest GDS defined to the newest GDS. + + Default is false. + + | **required**: False + | **type**: bool + + + limit + Sets the \ :emphasis:`limit`\ attribute for Generation Data Groups. + + Specifies the maximum number, from 1 to 255(up to 999 if extended), of GDS that can be associated with the GDG being defined. + + \ :emphasis:`limit`\ is required when \ :emphasis:`type=gdg`\ . + + | **required**: False + | **type**: int + + + purge + Sets the \ :emphasis:`purge`\ attribute for Generation Data Groups. + + Specifies whether to override expiration dates when a generation data set (GDS) is rolled off and the \ :literal:`scratch`\ option is set. + + | **required**: False + | **type**: bool + + + scratch + Sets the \ :emphasis:`scratch`\ attribute for Generation Data Groups. + + Specifies what action is to be taken for a generation data set located on disk volumes when the data set is uncataloged from the GDG base as a result of EMPTY/NOEMPTY processing. + + | **required**: False + | **type**: bool + + volumes If cataloging a data set, \ :emphasis:`volumes`\ specifies the name of the volume(s) where the data set is located. @@ -588,7 +729,7 @@ Examples space_type: m record_format: u record_length: 25 - replace: yes + replace: true - name: Attempt to replace a data set if it exists. If not found in the catalog, check if it is available on volume 222222, and catalog if found. zos_data_set: @@ -599,7 +740,7 @@ Examples record_format: u record_length: 25 volumes: "222222" - replace: yes + replace: true - name: Create an ESDS data set if it does not exist zos_data_set: @@ -634,7 +775,7 @@ Examples zos_data_set: name: someds.name.here(mydata) type: member - replace: yes + replace: true - name: Write a member to an existing PDS; do not replace if member exists zos_data_set: @@ -652,22 +793,22 @@ Examples name: someds.name.here(mydata) state: absent type: member - force: yes + force: true - name: Create multiple partitioned data sets and add one or more members to each zos_data_set: batch: - - name: someds.name.here1 + - name: someds.name.here1 type: pds space_primary: 5 space_type: m record_format: fb - replace: yes + replace: true - name: someds.name.here1(member1) type: member - name: someds.name.here2(member1) type: member - replace: yes + replace: true - name: someds.name.here2(member2) type: member diff --git a/docs/source/modules/zos_encode.rst b/docs/source/modules/zos_encode.rst index 68089a3a6..1ae892acc 100644 --- a/docs/source/modules/zos_encode.rst +++ b/docs/source/modules/zos_encode.rst @@ -143,8 +143,8 @@ Examples encoding: from: IBM-1047 to: ISO8859-1 - backup: yes - backup_compress: yes + backup: true + backup_compress: true - name: Convert file encoding from IBM-1047 to ISO8859-1 to a directory zos_encode: @@ -256,7 +256,6 @@ Examples - Notes ----- diff --git a/docs/source/modules/zos_lineinfile.rst b/docs/source/modules/zos_lineinfile.rst index 983e5ca0b..e8d0b0eb2 100644 --- a/docs/source/modules/zos_lineinfile.rst +++ b/docs/source/modules/zos_lineinfile.rst @@ -239,15 +239,14 @@ Examples src: /tmp/src/somefile regexp: '^(.*)User(\d+)m(.*)$' line: '\1APPUser\3' - backrefs: yes + backrefs: true - name: Add a line to a member while a task is in execution zos_lineinfile: src: SOME.PARTITIONED.DATA.SET(DATA) insertafter: EOF line: 'Should be a working test now' - force: True - + force: true diff --git a/docs/source/modules/zos_mount.rst b/docs/source/modules/zos_mount.rst index 9a30c5c91..5bd283453 100644 --- a/docs/source/modules/zos_mount.rst +++ b/docs/source/modules/zos_mount.rst @@ -320,8 +320,8 @@ Examples fs_type: zfs state: mounted persistent: - data_store: SYS1.PARMLIB(BPXPRMAA) - comment: For Tape2 project + data_store: SYS1.PARMLIB(BPXPRMAA) + comment: For Tape2 project - name: Mount a filesystem and record change in BPXPRMAA after backing up to BPXPRMAB. zos_mount: @@ -330,10 +330,10 @@ Examples fs_type: zfs state: mounted persistent: - data_store: SYS1.PARMLIB(BPXPRMAA) - backup: Yes - backup_name: SYS1.PARMLIB(BPXPRMAB) - comment: For Tape2 project + data_store: SYS1.PARMLIB(BPXPRMAA) + backup: true + backup_name: SYS1.PARMLIB(BPXPRMAB) + comment: For Tape2 project - name: Mount a filesystem ignoring uid/gid values. zos_mount: @@ -341,7 +341,7 @@ Examples path: /u/omvsadm/core fs_type: zfs state: mounted - allow_uid: no + allow_uid: false - name: Mount a filesystem asynchronously (don't wait for completion). zos_mount: @@ -380,7 +380,6 @@ Examples - Notes ----- diff --git a/docs/source/modules/zos_mvs_raw.rst b/docs/source/modules/zos_mvs_raw.rst index c0551786e..f48418264 100644 --- a/docs/source/modules/zos_mvs_raw.rst +++ b/docs/source/modules/zos_mvs_raw.rst @@ -1464,7 +1464,7 @@ Examples dd_name: sysprint data_set_name: mypgm.output.ds disposition: new - reuse: yes + reuse: true type: seq space_primary: 5 space_secondary: 1 @@ -1488,7 +1488,7 @@ Examples dd_name: sysprint data_set_name: mypgm.output.ds disposition: new - reuse: yes + reuse: true type: seq space_primary: 5 space_secondary: 1 @@ -1533,7 +1533,7 @@ Examples dd_name: sysprint data_set_name: mypgm.output.ds disposition: new - reuse: yes + reuse: true type: seq space_primary: 5 space_secondary: 1 @@ -1561,8 +1561,8 @@ Examples dd_name: sysprint data_set_name: mypgm.output.ds disposition: new - replace: yes - backup: yes + replace: true + backup: true type: seq space_primary: 5 space_secondary: 1 @@ -1633,7 +1633,7 @@ Examples - name: Take a set of data sets and write them to an archive. zos_mvs_raw: program_name: adrdssu - auth: yes + auth: true dds: - dd_data_set: dd_name: archive @@ -1649,7 +1649,7 @@ Examples - name: Merge two sequential data sets and write them to new data set zos_mvs_raw: program_name: sort - auth: no + auth: false parm: "MSGPRT=CRITICAL,LIST" dds: - dd_data_set: @@ -1680,7 +1680,7 @@ Examples files. zos_mvs_raw: pgm: idcams - auth: yes + auth: true dds: - dd_concat: dd_name: sysprint @@ -1697,57 +1697,56 @@ Examples dd_name: sysin content: " LISTCAT ENTRIES('SYS1.*')" - - name: Drop the contents of input dataset into output dataset - using REPRO command. + - name: Drop the contents of input dataset into output dataset using REPRO command. zos_mvs_raw: pgm: idcams - auth: yes + auth: true dds: - - dd_data_set: - dd_name: INPUT - data_set_name: myhlq.ds1.input - - dd_data_set: - dd_name: OUTPUT - data_set_name: myhlq.ds1.output - - dd_input: - dd_name: sysin - content: | + - dd_data_set: + dd_name: INPUT + data_set_name: myhlq.ds1.input + - dd_data_set: + dd_name: OUTPUT + data_set_name: myhlq.ds1.output + - dd_input: + dd_name: sysin + content: | " REPRO - INFILE(INPUT) - OUTFILE(OUTPUT)" - - dd_output: - dd_name: sysprint - return_content: - type: text - - - name: Define a cluster using a literal block style indicator - with a 2 space indentation. - zos_mvs_raw: - program_name: idcams - auth: yes - dds: - - dd_output: - dd_name: sysprint - return_content: - type: text - - dd_input: - dd_name: sysin - content: |2 - DEFINE CLUSTER - - (NAME(ANSIBLE.TEST.VSAM) - - CYL(10 10) - - FREESPACE(20 20) - - INDEXED - - KEYS(32 0) - - NOERASE - - NONSPANNED - - NOREUSE - - SHAREOPTIONS(3 3) - - SPEED - - UNORDERED - - RECORDSIZE(4086 32600) - - VOLUMES(222222) - - UNIQUE) + - dd_output: + dd_name: sysprint + return_content: + type: text + + - name: Define a cluster using a literal block style indicator + with a 2 space indentation. + zos_mvs_raw: + program_name: idcams + auth: true + dds: + - dd_output: + dd_name: sysprint + return_content: + type: text + - dd_input: + dd_name: sysin + content: 2 + DEFINE CLUSTER - + (NAME(ANSIBLE.TEST.VSAM) - + CYL(10 10) - + FREESPACE(20 20) - + INDEXED - + KEYS(32 0) - + NOERASE - + NONSPANNED - + NOREUSE - + SHAREOPTIONS(3 3) - + SPEED - + UNORDERED - + RECORDSIZE(4086 32600) - + VOLUMES(222222) - + UNIQUE) @@ -1843,3 +1842,15 @@ backups | **type**: str +stdout + The stdout from a USS command or MVS command, if applicable. + + | **returned**: always + | **type**: str + +stderr + The stderr of a USS command or MVS command, if applicable. + + | **returned**: failure + | **type**: str + diff --git a/docs/source/modules/zos_operator.rst b/docs/source/modules/zos_operator.rst index ff1e5fe87..6509ac286 100644 --- a/docs/source/modules/zos_operator.rst +++ b/docs/source/modules/zos_operator.rst @@ -33,6 +33,10 @@ cmd For example, change the command "...,P='DSN3EPX,-DBC1,S'" to "...,P=''DSN3EPX,-DBC1,S'' ". + If the command contains any special characters ($, &, etc), they must be escaped using double backslashes like \\\\\\$. + + For example, to display job by job name the command would be \ :literal:`cmd:"\\\\$dj''HELLO''"`\ + | **required**: True | **type**: str @@ -92,6 +96,12 @@ Examples +Notes +----- + +.. note:: + Commands may need to use specific prefixes like $, they can be discovered by issuing the following command \ :literal:`D OPDATA,PREFIX`\ . + diff --git a/docs/source/modules/zos_operator_action_query.rst b/docs/source/modules/zos_operator_action_query.rst index a03a17fdc..b7956c8b8 100644 --- a/docs/source/modules/zos_operator_action_query.rst +++ b/docs/source/modules/zos_operator_action_query.rst @@ -128,7 +128,7 @@ Examples system: mv29 message_filter: filter: ^.*IMS.*$ - use_regex: yes + use_regex: true diff --git a/docs/source/modules/zos_unarchive.rst b/docs/source/modules/zos_unarchive.rst index a53747d6c..ed6a26a8f 100644 --- a/docs/source/modules/zos_unarchive.rst +++ b/docs/source/modules/zos_unarchive.rst @@ -392,8 +392,8 @@ Examples format: name: xmit format_options: - use_adrdssu: True - list: True + use_adrdssu: true + list: true diff --git a/docs/source/modules/zos_volume_init.rst b/docs/source/modules/zos_volume_init.rst index 25a0897b9..a2b6f25ab 100644 --- a/docs/source/modules/zos_volume_init.rst +++ b/docs/source/modules/zos_volume_init.rst @@ -159,14 +159,14 @@ Examples zos_volume_init: address: "1234" volid: "DEMO01" - sms_managed: no + sms_managed: false - name: Initialize non-SMS managed target volume with all the default options and override the default high level qualifier (HLQ). zos_volume_init: address: 1234 volid: DEMO01 - sms_managed: no + sms_managed: false tmp_hlq: TESTUSR - name: Initialize a new SMS managed DASD volume with new volume serial 'e8d8' with 30 track VTOC, an index, as long as @@ -175,12 +175,12 @@ Examples zos_volume_init: address: e8d8 vtoc_size: 30 - index: yes - sms_managed: yes + index: true + sms_managed: true volid: ine8d8 verify_volid: ine8d8 - verify_volume_empty: yes - verify_offline: no + verify_volume_empty: true + verify_offline: false - name: Initialize 3 new DASD volumes (0901, 0902, 0903) for use on a z/OS system as 'DEMO01', 'DEMO02', 'DEMO03' using Ansible loops. diff --git a/docs/source/plugins.rst b/docs/source/plugins.rst index ef0f6c183..3c8858f47 100644 --- a/docs/source/plugins.rst +++ b/docs/source/plugins.rst @@ -21,7 +21,7 @@ user action is required, this documentation is reference only. * `zos_job_submit`_: Used to submit a job from the controller to the z/OS manage node. * `zos_ping`_: Used to transfer the modules REXX source to the z/OS managed node. * `zos_script`_: Used to transfer scripts from the controller to the z/OS manage node. -* `_zos_unarchive`_: Used to transfer archives from the controller to the z/OS manage node. +* `zos_unarchive`_: Used to transfer archives from the controller to the z/OS manage node. .. _zos_copy: modules/zos_copy.html @@ -35,3 +35,4 @@ user action is required, this documentation is reference only. modules/zos_script.html .. _zos_unarchive: modules/zos_unarchive.html + diff --git a/docs/source/reference/community.rst b/docs/source/reference/community.rst new file mode 100644 index 000000000..41bdbe9b0 --- /dev/null +++ b/docs/source/reference/community.rst @@ -0,0 +1,17 @@ +.. ........................................................................... +.. © Copyright IBM Corporation 2024 . +.. ........................................................................... + +============ +Contributing +============ + +Contributing to collections as a member of the open source community varies for +each collection. Although the collections come together as a unified solution, +each offering operates on its own; therefore, review the individual collections to learn +how to contribute. + +.. toctree:: + :maxdepth: 1 + + z/OS core </../community_guides> \ No newline at end of file diff --git a/docs/source/reference/documentation.rst b/docs/source/reference/documentation.rst new file mode 100644 index 000000000..9e16806b3 --- /dev/null +++ b/docs/source/reference/documentation.rst @@ -0,0 +1,18 @@ +.. ........................................................................... +.. © Copyright IBM Corporation 2024 . +.. ........................................................................... + +============= +Documentation +============= + +In addition to the common reference material included in Helpful Links, +each collection in the **Red Hat® Ansible Certified Content for IBM Z** +includes supplementary documentation specific to the collection. Examples of +such documentation include Web Services APIs, guidelines for development and +testing the modules, offering-specific reading, etc. + +.. toctree:: + :maxdepth: 1 + + z/OS core <../resources/resources> diff --git a/docs/source/release_notes.rst b/docs/source/release_notes.rst index 7c2c3a929..111dc3d05 100644 --- a/docs/source/release_notes.rst +++ b/docs/source/release_notes.rst @@ -1,11 +1,166 @@ .. ........................................................................... -.. © Copyright IBM Corporation 2020, 2024 . +.. © Copyright IBM Corporation 2020, 2024 . .. ........................................................................... ======== Releases ======== +Version 1.10.0-beta.1 +===================== + +Major Changes +------------- + +- Starting with IBM Ansible z/OS core version 1.10.x, ZOAU version 1.3.0 will be required. +- Starting with IBM Ansible z/OS core version 1.10.x, all module options are case sensitive, + review the porting guide for specifics. +- The README has been updated with a new template. +- The **Reference** section has been renamed to **Requirements** and now includes a support matrix. + +Minor Changes +------------- + +- ``zos_apf`` - Enhanced error messages when an exception is caught. +- ``zos_backup_restore`` - Added option **tmp_hlq** to the user module to override the default high level qualifier (HLQ) for temporary and backup data sets. +- ``zos_copy`` - Documented module options `group` and `owner`. + +Bugfixes +-------- + +- ``zos_apf`` - Option **list** previously only returned one data set, now it returns a list of retrieved data sets. +- ``zos_blockinfile`` - Option **block** when containing double double quotation marks results in a task failure (failed=True); now the module handles this case to avoid failure. +- ``zos_find`` - Option **size** failed if a PDS/E matched the pattern, now filtering on utilized size for a PDS/E is supported. + +- ``zos_job_submit`` + + - Did not default to **location=DATA_SET** when no location was defined, now the location defaults to DATA_SET. + - Option **max_rc** previously did not influence a modules status, now the option value influences the tasks failure status. + +- ``zos_mvs_raw`` - Option **tmp_hlq** when creating temporary data sets was previously ignored, now the option honors the High Level Qualifier for temporary data sets created during the module execution. + +Porting Guide +------------- + +This section discusses the behavioral changes between ``ibm_zos_core`` v1.9.0 and ``ibm_zos_core`` v1.10.0-beta.1. +It is intended to assist in updating your playbooks so this collection will continue to work. + +- ``zos_archive`` + + - option **terse_pack** no longer accepts uppercase choices, users should replace them with lowercase ones. + - suboption **record_format** of **dest_data_set** no longer accepts uppercase choices, users should replace them with lowercase ones. + - suboption **space_type** of **dest_data_set** no longer accepts uppercase choices, users should replace them with lowercase ones. + - suboption **type** of **dest_data_set** no longer accepts uppercase choices, users should replace them with lowercase ones. + +- ``zos_backup_restore`` - option **space_type** no longer accepts uppercase choices, users should replace them with lowercase ones. + +- ``zos_copy`` + + - suboption **record_format** of **dest_data_set** no longer accepts uppercase choices, users should replace them with lowercase ones. + - suboption **space_type** of **dest_data_set** no longer accepts uppercase choices, users should replace them with lowercase ones. + - suboption **type** of **dest_data_set** no longer accepts uppercase choices, users should replace them with lowercase ones. + +- ``zos_data_set`` + + - option **record_format** no longer accepts uppercase choices, users should replace them with lowercase ones. + - option **space_type** no longer accepts uppercase choices, users should replace them with lowercase ones. + - option **type** no longer accepts uppercase choices, users should replace them with lowercase ones. + - options inside **batch** no longer accept uppercase choices, users should replace them with lowercase ones. + +- ``zos_job_submit`` - option **location** no longer accepts uppercase choices, users should replace them with lowercase ones. + +- ``zos_mount`` + + - option **automove** no longer accepts uppercase choices, users should replace them with lowercase ones. + - option **fs_type** no longer accepts uppercase choices, users should replace them with lowercase ones. + - option **mount_opts** no longer accepts uppercase choices, users should replace them with lowercase ones. + - option **tag_untagged** no longer accepts uppercase choices, users should replace them with lowercase ones. + - option **unmount_opts** no longer accepts uppercase choices, users should replace them with lowercase ones. + +- ``zos_mvs_raw`` + + - options inside **dd_concat** no longer accept uppercase choices, users should replace them with lowercase ones. + - suboption **record_format** of **dd_data_set** no longer accepts uppercase choices, users should replace them with lowercase ones. + - suboption **record_format** of **dd_unix** no longer accepts uppercase choices, users should replace them with lowercase ones. + - suboption **space_type** of **dd_data_set** no longer accepts uppercase choices, users should replace them with lowercase ones. + - suboption **type** of **dd_data_set** no longer accepts uppercase choices, users should replace them with lowercase ones. + - suboptions **disposition_normal** and **disposition_abnormal** of **dd_data_set** no longer accept **catlg** and **uncatlg** as choices. This also applies when defining a **dd_data_set** inside **dd_concat**. + +- ``zos_unarchive`` + + - suboption **record_format** of **dest_data_set** no longer accepts uppercase choices, users should replace them with lowercase ones. + - suboption **space_type** of **dest_data_set** no longer accepts uppercase choices, users should replace them with lowercase ones. + - suboption **type** of **dest_data_set** no longer accepts uppercase choices, users should replace them with lowercase ones. + +Availability +------------ + +* `Galaxy`_ +* `GitHub`_ + +Requirements +------------ + +The IBM z/OS core collection has several dependencies, please review the `z/OS core support matrix`_ to understand both the +controller and z/OS managed node dependencies. + +Known Issues +------------ + +- ``zos_job_submit`` - when submitting JCL, the response value returned for **byte_count** is incorrect. + +- ``zos_job_submit``, ``zos_job_output``, ``zos_operator_action_query`` - encounters UTF-8 decoding errors when interacting with results that contain non-printable UTF-8 characters in the response. + + - If you encounter this, some options are to: + + - Specify that the ASA assembler option be enabled to instruct the assembler to use ANSI control characters instead of machine code control characters. + - Ignore module errors by using **ignore_errors:true** for a specific playbook task. + - If the error is resulting from a batch job, add **ignore_errors:true** to the task and capture the output into a registered variable to extract the + job ID with a regular expression. Then use ``zos_job_output`` to display the DD without the non-printable character such as the DD **JESMSGLG**. + - If the error is the result of a batch job, set option **return_output** to false so that no DDs are read which could contain the non-printable UTF-8 characters. + +Version 1.9.1 +============= + +Bugfixes +-------- + +- ``zos_find`` - Option size failed if a PDS/E matched the pattern, now filtering on utilized size for a PDS/E is supported. +- ``zos_mvs_raw`` - Option **tmp_hlq** when creating temporary data sets was previously ignored, now the option honors the High Level Qualifier for temporary data sets created during the module execution. + +Availability +------------ + +* `Automation Hub`_ +* `Galaxy`_ +* `GitHub`_ + +Requirements +------------ + +The IBM z/OS core collection has several dependencies, please review the `z/OS core support matrix`_ to understand both the +controller and z/OS managed node dependencies. + +Known Issues +------------ + +- ``zos_job_submit`` - when setting 'location' to 'LOCAL' and not specifying the from and to encoding, the modules defaults are not read leaving the file in its original encoding; explicitly set the encodings instead of relying on the default. +- ``zos_job_submit`` - when submitting JCL, the response value returned for **byte_count** is incorrect. + +- ``zos_job_submit``, ``zos_job_output``, ``zos_operator_action_query`` - encounters UTF-8 decoding errors when interacting with results that contain non-printable UTF-8 characters in the response. This has been addressed in this release and corrected with **ZOAU version 1.2.5.6** or later. + + - If the appropriate level of ZOAU can not be installed, some options are to: + + - Specify that the ASA assembler option be enabled to instruct the assembler to use ANSI control characters instead of machine code control characters. + - Ignore module errors by using **ignore_errors:true** for a specific playbook task. + - If the error is resulting from a batch job, add **ignore_errors:true** to the task and capture the output into a registered variable to extract the + job ID with a regular expression. Then use ``zos_job_output`` to display the DD without the non-printable character such as the DD **JESMSGLG**. + - If the error is the result of a batch job, set option **return_output** to false so that no DDs are read which could contain the non-printable UTF-8 characters. + +- ``zos_data_set`` - An undocumented option **size** was defined in module **zos_data_set**, this has been removed to satisfy collection certification, use the intended and documented **space_primary** option. + +- In the past, choices could be defined in either lower or upper case. Now, only the case that is identified in the docs can be set, this is so that the collection can continue to maintain certified status. + Version 1.9.0 ============= @@ -125,13 +280,11 @@ Availability * `Galaxy`_ * `GitHub`_ -Reference ---------- +Requirements +------------ -* Supported by `z/OS®`_ V2R4 (or later) but prior to version V3R1 -* Supported by the `z/OS® shell`_ -* Supported by `IBM Open Enterprise SDK for Python`_ `3.10`_ - `3.12`_ -* Supported by IBM `Z Open Automation Utilities 1.2.5`_ (or later) but prior to version 1.3. +The IBM z/OS core collection has several dependencies, please review the `z/OS core support matrix`_ to understand both the +controller and z/OS managed node dependencies. Version 1.8.0 ============= @@ -211,13 +364,11 @@ Availability * `Galaxy`_ * `GitHub`_ -Reference ---------- +Requirements +------------ -* Supported by `z/OS®`_ V2R4 (or later) but prior to version V3R1 -* Supported by the `z/OS® shell`_ -* Supported by `IBM Open Enterprise SDK for Python`_ `3.9`_ - `3.11`_ -* Supported by IBM `Z Open Automation Utilities 1.2.4`_ (or later) but prior to version 1.3. +The IBM z/OS core collection has several dependencies, please review the `z/OS core support matrix`_ to understand both the +controller and z/OS managed node dependencies. Version 1.7.0 ============= @@ -278,13 +429,11 @@ Availability * `Galaxy`_ * `GitHub`_ -Reference ---------- +Requirements +------------ -* Supported by `z/OS®`_ V2R4 (or later) but prior to version V3R1 -* Supported by the `z/OS® shell`_ -* Supported by `IBM Open Enterprise SDK for Python`_ `3.9`_ - `3.11`_ -* Supported by IBM `Z Open Automation Utilities 1.2.3`_ (or later) but prior to version 1.3. +The IBM z/OS core collection has several dependencies, please review the `z/OS core support matrix`_ to understand both the +controller and z/OS managed node dependencies. Version 1.6.0 ============= @@ -341,13 +490,11 @@ Availability * `Galaxy`_ * `GitHub`_ -Reference ---------- +Requirements +------------ -* Supported by `z/OS®`_ V2R4 (or later) but prior to version V3R1 -* Supported by the `z/OS® shell`_ -* Supported by `IBM Open Enterprise SDK for Python`_ `3.9`_ - `3.11`_ -* Supported by IBM `Z Open Automation Utilities 1.2.2`_ (or later) but prior to version 1.3. +The IBM z/OS core collection has several dependencies, please review the `z/OS core support matrix`_ to understand both the +controller and z/OS managed node dependencies. Version 1.5.0 ============= @@ -457,562 +604,11 @@ Availability * `Galaxy`_ * `GitHub`_ -Reference ---------- - -* Supported by `z/OS®`_ V2R4 (or later) but prior to version V3R1 -* Supported by the `z/OS® shell`_ -* Supported by `IBM Open Enterprise SDK for Python`_ `3.9`_ - `3.11`_ -* Supported by IBM `Z Open Automation Utilities 1.2.2`_ (or later) but prior to version 1.3. - -Version 1.4.1 -============= - -Bug fixes ---------- - -* ``zos_copy`` - - * Copy failed from a loadlib member to another loadlib member. Fix - now looks for error in stdout in the if statement to use -X option. - * Fixes a bug where files not encoded in IBM-1047 would trigger an - error while computing the record length for a new destination dataset. - * Fixes a bug where the code for fixing an issue with newlines in - files. - * fixed wrongful creation of destination backups when module option - `force` is true, creating emergency backups meant to restore the system to - its initial state in case of a module failure only when force is false. - * fixes a bug where the computed record length for a new destination - dataset would include newline characters. - -* ``zos_job_query`` - - * fixes a bug where a boolean was not being properly compared. - -Availability ------------- - -* `Automation Hub`_ -* `Galaxy`_ -* `GitHub`_ - -Reference ---------- - -* Supported by `z/OS®`_ V2R4 (or later) but prior to version V3R1 -* Supported by the `z/OS® shell`_ -* Supported by `IBM Open Enterprise SDK for Python`_ `3.9`_ -* Supported by IBM `Z Open Automation Utilities 1.1.0`_ and - `Z Open Automation Utilities 1.1.1`_ - -Version 1.4.0 -============= - -* Modules - - * ``zos_mount`` can manage mount operations for a - z/OS UNIX System Services (USS) file system data set. - -* Plugins - - * ``zos_ssh`` connection plugin has been removed from this release and is no - longer a dependency for the ``zos_ping`` module. - -* Bug fixes and enhancements - - * Modules - - * ``zos_copy`` - - * introduced an updated creation policy referred to as precedence rules - that if `dest_data_set` is set, it will take precedence. If - `dest` is an empty data set, the empty data set will be written with the - expectation its attributes satisfy the copy. If no precedent rule - has been exercised, `dest` will be created with the same attributes of - `src`. - * introduced new computation capabilities that if `dest` is a nonexistent - data set, the attributes assigned will depend on the type of `src`. If - `src` is a USS file, `dest` will have a Fixed Block (FB) record format - and the remaining attributes will be computed. If `src` is binary, - `dest` will have a Fixed Block (FB) record format with a record length - of 80, block size of 32760, and the remaining attributes will be - computed. - * enhanced the force option when `force=true` and the remote file or - data set `dest`` is NOT empty, the `dest` will be deleted and recreated - with the `src` data set attributes, otherwise it will be recreated with - the `dest` data set attributes. - * was enhanced for when `src` is a directory and ends with "/", - the contents of it will be copied into the root of `dest`. It it doesn't - end with "/", the directory itself will be copied. - * option `dest_dataset` has been deprecated and removed in favor - of the new option `dest_data_set`. - * fixes a bug that when a directory is copied from the controller to the - managed node and a mode is set, the mode is applied to the directory - on the managed node. If the directory being copied contains files and - mode is set, mode will only be applied to the files being copied not the - pre-existing files. - * fixes a bug that did not create a data set on the specified volume. - * fixes a bug where a number of attributes were not an option when using - `dest_data_set`. - * fixes a bug where options were not defined in the module - argument spec that will result in error when running `ansible-core` - v2.11 and using options `force` or `mode`. - * was enhanced to support the ``ansible.builtin.ssh`` connection options; - for further reference refer to the `SSH plugin`_ documentation. - * was enhanced to take into account the record length when the - source is a USS file and the destination is a data set with a record - length. This is done by inspecting the destination data set attributes - and using these attributes to create a new data set. - * was updated with the capabilities to define destination data sets from - within the ``zos_copy`` module. In the case where you are copying to - data set destination that does not exist, you can now do so using the - new ``zos_copy`` module option ``destination_dataset``. - - * ``zos_operator`` - - * enhanced to allow for MVS operator `SET` command, `SET` is - equivalent to the abbreviated `T` command. - - * ``zos_mount`` fixed option `tag_ccsid` to correctly allow for type int. - - * ``module_utils`` - - * jobs.py - fixes a utility used by module `zos_job_output` that would - truncate the DD content. - - * ``zos_ping`` was enhanced to remove the need for the ``zos_ssh`` - connection plugin dependency. - - * ``zos_fetch`` was enhanced to support the ``ansible.builtin.ssh`` - connection options; for further reference refer to the - `SSH plugin`_ documentation. - - * ``zos_job_output`` - - * was updated to correct possible truncated responses for - the **ddname** content. This would occur for jobs with very large amounts - of content from a **ddname**. - * was enhanced to to include the completion code (CC) for each individual - jop step as part of the ``ret_code`` response. - - * ``zos_job_query`` - - * was enhanced to support a 7 digit job number ID for when there are - greater than 99,999 jobs in the history. - * was enhanced to handle when an invalid job ID or job name is used with - the module and returns a proper response. - - * ``zos_job_submit`` - - * was enhanced to fail fast when a submitted job fails instead of waiting - a predetermined time. - * was enhanced to check for 'JCL ERROR' when jobs are submitted and result - in a proper module response. - - * ``zos_operator_action_query`` response messages were improved with more - diagnostic information in the event an error is encountered. - -* Deprecated or removed - - * ``zos_copy`` module option **destination_dataset** has been renamed to - **dest_data_set**. - * ``zos_ssh`` connection plugin has been removed, it is no longer required. - Remove all playbook references, ie ``connection: ibm.ibm_zos_core.zos_ssh``. - * ``zos_ssh`` connection plugin has been removed, it is no longer required. - You must remove the zos_ssh connection plugin from all playbooks that - reference the plugin, for example connection: ibm.ibm_zos_core.zos_ssh. - * ``zos_copy`` module option **model_ds** has been removed. The model_ds logic - is now automatically managed and data sets are either created based on the - ``src`` data set or overridden by the new option ``destination_dataset``. - * ``zos_copy`` and ``zos_fetch`` option **sftp_port** has been deprecated. To - set the SFTP port, use the supported options in the ``ansible.builtin.ssh`` - plugin. Refer to the `SSH port`_ option to configure the port used during - the modules SFTP transport. - -* Documentation - - * Noteworthy documentation updates have been made to: - - * ``zos_copy`` and ``zos_fetch`` about Co:Z SFTP support. - * ``zos_mvs_raw`` removed a duplicate example. - * all action plugins are documented - * update hyperlinks embedded in documentation. - * ``zos_operator`` to explains how to use single quotes in operator commands. - -Availability ------------- - -* `Automation Hub`_ -* `Galaxy`_ -* `GitHub`_ - -Reference ---------- - -* Supported by `z/OS®`_ V2R4 (or later) but prior to version V3R1 -* Supported by the `z/OS® shell`_ -* Supported by `IBM Open Enterprise SDK for Python`_ `3.8`_` - `3.9`_ -* Supported by IBM `Z Open Automation Utilities 1.1.0`_ and - `Z Open Automation Utilities 1.1.1`_ - -Known Issues ------------- - -* If a playbook includes the deprecated ``zos_ssh`` connection plugin, for - example ``connection: ibm.ibm_zos_core.zos_ssh``, it will - encounter this error which can corrected by safely removing the plugin: - - .. code-block:: - - "msg": "the connection plugin 'ibm.ibm_zos_core.zos_ssh' was not found" - -* When using the ``zos_ssh`` plugin with **Ansible 2.11** and earlier versions - of this collection, you will encounter the exception: - - .. code-block:: - - AttributeError: module 'ansible.constants' has no attribute 'ANSIBLE_SSH_CONTROL_PATH_DIR'. - - This is resolved in this release by deprecating the ``zos_ssh`` connection - plugin and removing all ``connection: ibm.ibm_zos_core.zos_ssh`` references - from playbooks. -* When using module ``zos_copy`` and option ``force`` with ansible versions - greater than **Ansbile 2.10** and earlier versions of this collection, an - unsupported option exception would occur. This is resolved in this release. -* When using the ``zos_copy`` or ``zos_fetch`` modules in earlier versions of - this collection without 'passwordless' SSH configured such that you are using - ``--ask-pass`` or passing an ``ansible_password`` in a configuration; during - the playbook execution a second password prompt for SFTP would appear pausing - the playbook execution. This is resolved in this release. -* When using the ``zos_copy`` or ``zos_fetch`` modules, if you tried to use - Ansible connection options such as ``host_key_checking`` or ``port``, they - were not included as part of the modules execution. This is resolved in this - release by ensuring compatibility with the ``ansible.builtin.ssh`` plugin - options. Refer to the `SSH plugin`_ documentation to enable supported options. -* Known issues for modules can be found in the **Notes** section of a modules - documentation. - - -Deprecation Notices -------------------- -Features and functions are marked as deprecated when they are enhanced and an -alternative is available. In most cases, the deprecated item will remain -available unless the deprecated function interferes with the offering. -Deprecated functions are no longer supported, and will be removed in a future -release. - -.. _SSH plugin: - https://docs.ansible.com/ansible/latest/collections/ansible/builtin/ssh_connection.html - -.. _SSH port: - https://docs.ansible.com/ansible/latest/collections/ansible/builtin/ssh_connection.html#parameter-port - -Version 1.3.6 -============= - -What's New ----------- - -* Bug Fixes - - * Modules - - * ``zos_copy`` fixes a bug that when a directory is copied from the - controller to the managed node and a mode is set, the mode is now applied - to the directory on the controller. If the directory being copied contains - files and mode is set, mode will only be applied to the files being copied - not the pre-existing files. - * ``zos_copy`` - fixes a bug where options were not defined in the module - argument spec that will result in error when running `ansible-core` v2.11 - and using options `force` or `mode`. - * ``zos_copy`` - was enhanced for when `src` is a directory and ends with "/", - the contents of it will be copied into the root of `dest`. It it doesn't - end with "/", the directory itself will be copied. - * ``zos_fetch`` - fixes a bug where an option was not defined in the module - argument spec that will result in error when running `ansible-core` v2.11 - and using option `encoding`. - * ``zos_job_submit`` - fixes a bug where an option was not defined in the - module argument spec that will result in error when running - `ansible-core` v2.11 and using option `encoding`. - * ``jobs.py`` - fixes a utility used by module `zos_job_output` that would - truncate the DD content. - * ``zos_ssh`` connection plugin was updated to correct a bug that causes - an `ANSIBLE_SSH_CONTROL_PATH_DIR` attribute error only when using - ansible-core v2.11. - -Availability ------------- - -* `Automation Hub`_ -* `Galaxy`_ -* `GitHub`_ - -Reference ---------- - -* Supported by `z/OS®`_ V2R4 (or later) but prior to version V3R1 -* Supported by the `z/OS® shell`_ -* Supported by `IBM Open Enterprise SDK for Python`_ v3.8.2 - - `IBM Open Enterprise SDK for Python`_ v3.9.5 -* Supported by IBM `Z Open Automation Utilities 1.1.0`_ and - `Z Open Automation Utilities 1.1.1`_ - -Version 1.3.5 -============= - -What's New ----------- - -* Bug Fixes - - * Modules - - * ``zos_ssh`` connection plugin was updated to correct a bug in Ansible that - would result in playbook task ``retries`` overriding the SSH connection - ``retries``. This is resolved by renaming the ``zos_ssh`` option - ``retries`` to ``reconnection_retries``. The update addresses users of - ``ansible-core`` v2.9 which continues to use ``retries`` and users of - ``ansible-core`` v2.11 or later which uses ``reconnection_retries``. This - also resolves a bug in the connection that referenced a deprecated - constant. - * ``zos_job_output`` fixes a bug that returned all ddname's when a specific - ddname was provided. Now a specific ddname can be returned and all others - ignored. - * ``zos_copy`` fixes a bug that would not copy subdirectories. If the source - is a directory with sub directories, all sub directories will now be copied. - -Availability +Requirements ------------ -* `Automation Hub`_ -* `Galaxy`_ -* `GitHub`_ - -Reference ---------- - -* Supported by `z/OS®`_ V2R4 (or later) but prior to version V3R1 -* Supported by the `z/OS® shell`_ -* Supported by `IBM Open Enterprise SDK for Python`_ 3.8.2 or later -* Supported by IBM `Z Open Automation Utilities 1.1.0`_ and - `Z Open Automation Utilities 1.1.1`_ - -Version 1.3.3 -============= - -What's New ----------- - -* Bug Fixes - - * Modules - - * ``zos_copy`` was updated to correct deletion of all temporary files and - unwarranted deletes. - - * When the module would complete, a cleanup routine did not take into - account that other processes had open temporary files and thus would - error when trying to remove them. - * When the module would copy a directory (source) from USS to another - USS directory (destination), any files currently in the destination - would be deleted. - The modules behavior has changed such that files are no longer deleted - unless the ``force`` option is set to ``true``. When ``force=true``, - copying files or a directory to a USS destination will continue if it - encounters existing files or directories and overwrite any - corresponding files. - * ``zos_job_query`` was updated to correct a boolean condition that always - evaluated to "CANCELLED". - - * When querying jobs that are either **CANCELLED** or have **FAILED**, - they were always treated as **CANCELLED**. - -Availability ------------- - -* `Automation Hub`_ -* `Galaxy`_ -* `GitHub`_ - -Reference ---------- - -* Supported by `z/OS®`_ V2R4 (or later) but prior to version V3R1 -* Supported by the `z/OS® shell`_ -* Supported by `IBM Open Enterprise SDK for Python`_ 3.8.2 or later -* Supported by IBM `Z Open Automation Utilities 1.1.0`_ and - `Z Open Automation Utilities 1.1.1`_ - -Version 1.3.1 -============= - -What's New ----------- - -* Bug Fixes - - * Modules - - * Connection plugin ``zos_ssh`` was updated to prioritize the execution of - modules written in REXX over other implementations such is the case for - ``zos_ping``. - * ``zos_ping`` was updated to support Automation Hub documentation - generation. - -Availability ------------- - -* `Automation Hub`_ -* `Galaxy`_ -* `GitHub`_ - -Reference ---------- - -* Supported by `z/OS®`_ V2R4 (or later) but prior to version V3R1 -* Supported by the `z/OS® shell`_ -* Supported by `IBM Open Enterprise SDK for Python`_ 3.8.2 or later -* Supported by IBM `Z Open Automation Utilities 1.1.0`_ and - `Z Open Automation Utilities 1.1.1`_ - -Known issues ------------- - -* Modules - - * When executing programs using ``zos_mvs_raw``, you may encounter errors - that originate in the implementation of the programs. Two such known issues - are noted below of which one has been addressed with an APAR. - - #. ``zos_mvs_raw`` module execution fails when invoking - Database Image Copy 2 Utility or Database Recovery Utility in conjunction - with FlashCopy or Fast Replication. - #. ``zos_mvs_raw`` module execution fails when invoking DFSRRC00 with parm - "UPB,PRECOMP", "UPB, POSTCOMP" or "UPB,PRECOMP,POSTCOMP". This issue is - addressed by APAR PH28089. - -Version 1.3.0 -============= - -What's New ----------- - -* Modules - - * ``zos_apf`` - Add or remove libraries to and from Authorized Program Facility (APF). - * ``zos_backup_restore`` - Backup and restore data sets and volumes. - * ``zos_blockinfile`` - Manage block of multi-line textual data on z/OS. - * ``zos_find`` - Find matching data sets. - * ``zos_data_set`` - added support to allocate and format zFS data sets - * ``zos_operator`` - supports new options **wait** and **wait_time_s** such - that you can specify that ``zos_operator`` wait the full **wait_time_s** or - return as soon as the first operator command executes. - * All modules support relative paths and remove choice case sensitivity. - -* Bug Fixes - - * Modules - - * Action plugin ``zos_copy`` was updated to support Python 2.7. - * Module ``zos_copy`` was updated to fail gracefully when a it - encounters a non-zero return code. - * Module ``zos_copy`` was updated to support copying data set members that - are program objects to a PDSE. Prior to this update, copying data set - members would yield an error: - **FSUM8976 Error writing <src_data_set_member> to PDSE member - <dest_data_set_member>** - * Job utility is an internal library used by several modules. It has been - updated to use a custom written parsing routine capable of handling - special characters to prevent job related reading operations from failing - when a special character is encountered. - * Module ``zos_job_submit`` was updated to remove all trailing **\r** from - jobs that are submitted from the controller. - * Module ``zos_job_submit`` referenced a non-existent option and was - corrected to **wait_time_s**. - * Module ``zos_tso_command`` support was added for when the command output - contained special characters. - - * Playbooks - - * Playbook `zos_operator_basics.yaml`_ - has been updated to use `end` in the WTO reply over the previous use of - `cancel`. Using `cancel` is not a valid reply and results in an execution - error. - -* Playbooks - - * In each release, we continue to expand on use cases and deliver them as - playbooks in the `playbook repository`_ that can be easily tailored to any - system. - - * Authorize and - `synchronize APF authorized libraries on z/OS from a configuration file cloned from GitHub`_ - * Automate program execution with - `copy, sort and fetch data sets on z/OS playbook`_. - * Automate user management with add, remove, grant permission, - generate passwords, create zFS, mount zFS and send email - notifications when deployed to Ansible Tower or AWX with the - `manage z/OS Users Using Ansible`_ playbook. - * Use the `configure Python and ZOAU Installation`_ playbook to scan the - **z/OS** target to find the latest supported configuration and generate - `inventory`_ and a `variables`_ configuration. - * Automate software management with `SMP/E Playbooks`_ - * All playbooks have been updated to use our temporary data set feature - to avoid any concurrent data set name problems. - * In the prior release, all sample playbooks previously included with the - collection were migrated to the `playbook repository`_. The - `playbook repository`_ categorizes playbooks into **z/OS concepts** and - **topics**, it also covers `playbook configuration`_ as well as provide - additional community content such as **blogs** and where to open - `support tickets`_ for the playbooks. - -* Documentation - - * All documentation related to `playbook configuration`_ has been - migrated to the `playbook repository`_. Each playbook contains a README - that explains what configurations must be made to run a sample playbook. - * We have been carefully reviewing our users feedback and over time we have - compiled a list of information that we feel would help everyone and have - released this information in our new `FAQs`_. - * Learn about the latest features and experience them before you try - them through the blogs that discuss playbooks, modules, and use cases: - - * `Running Batch Jobs on z/OS using Ansible`_ details how - to write and execute batch jobs without having to deal with JCL. - - * `z/OS User Management With Ansible`_ explains all about the user management - playbook and its optional integration into AWX. - -Availability ------------- - -* `Galaxy`_ -* `GitHub`_ - -Reference ---------- - -* Supported by `z/OS®`_ V2R4 (or later) but prior to version V3R1 -* Supported by the `z/OS® shell`_ -* Supported by `IBM Open Enterprise SDK for Python`_ 3.8.2 or later -* Supported by IBM `Z Open Automation Utilities 1.1.0`_ and - `Z Open Automation Utilities 1.1.1`_ - -Known issues ------------- - -* Modules - - * When executing programs using ``zos_mvs_raw``, you may encounter errors - that originate in the implementation of the programs. Two such known issues - are noted below of which one has been addressed with an APAR. - - #. ``zos_mvs_raw`` module execution fails when invoking - Database Image Copy 2 Utility or Database Recovery Utility in conjunction - with FlashCopy or Fast Replication. - #. ``zos_mvs_raw`` module execution fails when invoking DFSRRC00 with parm - "UPB,PRECOMP", "UPB, POSTCOMP" or "UPB,PRECOMP,POSTCOMP". This issue is - addressed by APAR PH28089. +The IBM z/OS core collection has several dependencies, please review the `z/OS core support matrix`_ to understand both the +controller and z/OS managed node dependencies. .. ............................................................................. .. Global Links @@ -1035,6 +631,8 @@ Known issues https://www.ibm.com/docs/en/python-zos/3.11 .. _3.12: https://www.ibm.com/docs/en/python-zos/3.12 +.. _Z Open Automation Utilities: + https://www.ibm.com/docs/en/zoau/latest .. _Z Open Automation Utilities 1.1.0: https://www.ibm.com/docs/en/zoau/1.1.x .. _Z Open Automation Utilities 1.1.1: @@ -1047,6 +645,8 @@ Known issues https://www.ibm.com/docs/en/zoau/1.2.x .. _Z Open Automation Utilities 1.2.5: https://www.ibm.com/docs/en/zoau/1.2.x +.. _Z Open Automation Utilities 1.3.0: + https://www.ibm.com/docs/en/zoau/1.3.x .. _z/OS® shell: https://www.ibm.com/support/knowledgecenter/en/SSLTBW_2.4.0/com.ibm.zos.v2r4.bpxa400/part1.htm .. _z/OS®: @@ -1059,6 +659,8 @@ Known issues https://www.ibm.com/docs/en/zos .. _FAQs: https://ibm.github.io/z_ansible_collections_doc/faqs/faqs.html +.. _z/OS core support matrix: + https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/build/html/resources/releases_maintenance.html .. ............................................................................. .. Playbook Links diff --git a/docs/source/requirements-single.rst b/docs/source/requirements-single.rst index e31c9636a..c75c78f85 100644 --- a/docs/source/requirements-single.rst +++ b/docs/source/requirements-single.rst @@ -2,7 +2,7 @@ .. Auto generated restructured text . .. ........................................................................... .. ........................................................................... -.. © Copyright IBM Corporation 2020 . +.. © Copyright IBM Corporation 2024 . .. ........................................................................... ============ @@ -32,7 +32,6 @@ The managed z/OS node is the host that is managed by Ansible, as identified in the Ansible inventory. For the **IBM z/OS core collection** to manage the z/OS node, some dependencies are required to be installed on z/OS such as: -* `z/OS`_ * `z/OS`_ * `z/OS OpenSSH`_ * `z/OS® shell`_ @@ -44,20 +43,20 @@ some dependencies are required to be installed on z/OS such as: Each release of the IBM z/OS core collection depends on specific dependency versions. For information on the dependencies or the versions, review the - `release notes`_ reference section. + `release notes`_ reference section. z/OS shell ---------- Currently, only the `z/OS® shell`_ is supported. Using ``ansible_shell_executable`` to change the default shell is discouraged. Shells such as ``bash`` are not supported -because it handles the reading and writing of untagged files differently. +because it handles the reading and writing of untagged files differently. Open Enterprise SDK for Python ------------------------------ -The **IBM z/OS core collection** requires that the **IBM Open Enterprise SDK for Python** -be installed on z/OS. +The **IBM z/OS core collection** requires that the **IBM Open Enterprise SDK for Python** +be installed on z/OS. **Installation** diff --git a/docs/source/resources/releases_maintenance.rst b/docs/source/resources/releases_maintenance.rst new file mode 100644 index 000000000..acb0e6559 --- /dev/null +++ b/docs/source/resources/releases_maintenance.rst @@ -0,0 +1,98 @@ +.. ........................................................................... +.. © Copyright IBM Corporation 2024 . +.. ........................................................................... + +======================== +Releases and maintenance +======================== + +This table describes the collections release dates, dependency versions and End of Life dates (EOL). + +The ``ibm_zos_core`` collection is developed and released on a flexible release cycle; generally, each quarter +a beta is released followed by a GA version. Occasionally, the cycle may be extended to properly implement and +test larger changes before a new release is made available. + +End of Life for this collection is generally a 2-year cycle unless a dependency reaches EOL prior to the 2 years. +For example, if a collection has released and its dependency reaches EOL 1 year later, then the collection will EOL +at the same time as the dependency, 1 year later. + +These are the component versions available when the collection was made generally available (GA). The underlying +component version is likely to change as it reaches EOL, thus components must be a version that is +currently supported. + +For example, if a collection releases with a minimum version of ``ansible-core`` 2.14.0 (Ansible 7.0) and later this +enters into EOL, then a newer supported version of ``ansible-core`` (Ansible) must be selected. When choosing a newer +``ansible-core`` (Ansible) version, review the `ansible-core support matrix`_ to select the appropriate dependencies. +This is important to note, different releases of ``ansible-core`` can require newer controller and managed node +dependencies such as is the case with Python. + +If the controller is Ansible Automation Platform (AAP), review the `Red Hat Ansible Automation Platform Life Cycle`_ +to select a supported AAP version. + +For IBM product lifecycle information, you can search for products using a product name, version or ID. For example, +to view IBM's **Open Enterprise SDK for Python** lifecycle, search on product ID `5655-PYT`_, and for **Z Open Automation Utilities**, +search on product ID `5698-PA1`_. + +Support Matrix +============== ++---------+----------------------------+---------------------------------------------------+---------------+---------------+ +| Version | Controller | Managed Node | GA | End of Life | ++=========+============================+===================================================+===============+===============+ +| 1.10.x |- `ansible-core`_ >=2.15.x |- `z/OS`_ V2R4 - V2Rx | In preview | TBD | +| |- `Ansible`_ >=8.0.x |- `z/OS shell`_ | | | +| |- `AAP`_ >=2.4 |- IBM `Open Enterprise SDK for Python`_ | | | +| | |- IBM `Z Open Automation Utilities`_ >=1.3.0 | | | ++---------+----------------------------+---------------------------------------------------+---------------+---------------+ +| 1.9.x |- `ansible-core`_ >=2.14 |- `z/OS`_ V2R4 - V2Rx | 05 Feb 2024 | 30 April 2025 | +| |- `Ansible`_ >=7.0.x |- `z/OS shell`_ | | | +| |- `AAP`_ >=2.3 |- IBM `Open Enterprise SDK for Python`_ | | | +| | |- IBM `Z Open Automation Utilities`_ 1.2.5 - 1.2.x | | | ++---------+----------------------------+---------------------------------------------------+---------------+---------------+ +| 1.8.x |- `ansible-core`_ >=2.14 |- `z/OS`_ V2R4 - V2Rx | 13 Dec 2023 | 30 April 2025 | +| |- `Ansible`_ >=7.0.x |- `z/OS shell`_ | | | +| |- `AAP`_ >=2.3 |- IBM `Open Enterprise SDK for Python`_ | | | +| | |- IBM `Z Open Automation Utilities`_ 1.2.4 - 1.2.x | | | ++---------+----------------------------+---------------------------------------------------+---------------+---------------+ +| 1.7.x |- `ansible-core`_ >=2.14 |- `z/OS`_ V2R4 - V2Rx | 10 Oct 2023 | 30 April 2025 | +| |- `Ansible`_ >=7.0.x |- `z/OS shell`_ | | | +| |- `AAP`_ >=2.3 |- IBM `Open Enterprise SDK for Python`_ | | | +| | |- IBM `Z Open Automation Utilities`_ 1.2.3 - 1.2.x | | | ++---------+----------------------------+---------------------------------------------------+---------------+---------------+ +| 1.6.x |- `ansible-core`_ >=2.9.x |- `z/OS`_ V2R3 - V2Rx | 28 June 2023 | 30 April 2025 | +| |- `Ansible`_ >=2.9.x |- `z/OS shell`_ | | | +| |- `AAP`_ >=1.2 |- IBM `Open Enterprise SDK for Python`_ | | | +| | |- IBM `Z Open Automation Utilities`_ 1.2.2 - 1.2.x | | | ++---------+----------------------------+---------------------------------------------------+---------------+---------------+ +| 1.5.x |- `ansible-core`_ >=2.9.x |- `z/OS`_ V2R3 - V2Rx | 25 April 2023 | 25 April 2025 | +| |- `Ansible`_ >=2.9.x |- `z/OS shell`_ | | | +| |- `AAP`_ >=1.2 |- IBM `Open Enterprise SDK for Python`_ | | | +| | |- IBM `Z Open Automation Utilities`_ 1.2.2 - 1.2.x | | | ++---------+----------------------------+---------------------------------------------------+---------------+---------------+ + +.. ............................................................................. +.. Global Links +.. ............................................................................. +.. _ansible-core support matrix: + https://docs.ansible.com/ansible/latest/reference_appendices/release_and_maintenance.html#ansible-core-support-matrix +.. _AAP: + https://access.redhat.com/support/policy/updates/ansible-automation-platform +.. _Red Hat Ansible Automation Platform Life Cycle: + https://access.redhat.com/support/policy/updates/ansible-automation-platform +.. _Automation Hub: + https://www.ansible.com/products/automation-hub +.. _Open Enterprise SDK for Python: + https://www.ibm.com/products/open-enterprise-python-zos +.. _Z Open Automation Utilities: + https://www.ibm.com/docs/en/zoau/latest +.. _z/OS shell: + https://www.ibm.com/support/knowledgecenter/en/SSLTBW_2.4.0/com.ibm.zos.v2r4.bpxa400/part1.htm +.. _z/OS: + https://www.ibm.com/docs/en/zos +.. _5655-PYT: + https://www.ibm.com/support/pages/lifecycle/search?q=5655-PYT +.. _5698-PA1: + https://www.ibm.com/support/pages/lifecycle/search?q=5698-PA1 +.. _ansible-core: + https://docs.ansible.com/ansible/latest/reference_appendices/release_and_maintenance.html#ansible-core-support-matrix +.. _Ansible: + https://docs.ansible.com/ansible/latest/reference_appendices/release_and_maintenance.html#ansible-core-support-matrix \ No newline at end of file diff --git a/docs/source/resources/resources.rst b/docs/source/resources/resources.rst index 8b5951948..8bdb16a6c 100644 --- a/docs/source/resources/resources.rst +++ b/docs/source/resources/resources.rst @@ -1,5 +1,5 @@ .. ........................................................................... -.. © Copyright IBM Corporation 2020, 2021 . +.. © Copyright IBM Corporation 2024 . .. ........................................................................... ========= @@ -10,3 +10,4 @@ Resources :maxdepth: 1 character_set + releases_maintenance diff --git a/meta/ibm_zos_core_meta.yml b/meta/ibm_zos_core_meta.yml index 7e24bc280..9b4dfde5e 100644 --- a/meta/ibm_zos_core_meta.yml +++ b/meta/ibm_zos_core_meta.yml @@ -7,4 +7,4 @@ managed_requirements: - name: "Z Open Automation Utilities" version: - - "1.3.0" + - ">=1.3.0" diff --git a/plugins/action/zos_copy.py b/plugins/action/zos_copy.py index e3ea36dc8..62bde96bf 100644 --- a/plugins/action/zos_copy.py +++ b/plugins/action/zos_copy.py @@ -1,4 +1,4 @@ -# Copyright (c) IBM Corporation 2019-2023 +# Copyright (c) IBM Corporation 2019, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -245,7 +245,6 @@ def run(self, tmp=None, task_vars=None): original_src = task_args.get("src") if original_src: if not remote_src: - base_name = os.path.basename(original_src) if original_src.endswith("/"): src = temp_path + "/" else: diff --git a/plugins/action/zos_fetch.py b/plugins/action/zos_fetch.py index 611922bf3..56232f34f 100644 --- a/plugins/action/zos_fetch.py +++ b/plugins/action/zos_fetch.py @@ -1,4 +1,4 @@ -# Copyright (c) IBM Corporation 2019 - 2024 +# Copyright (c) IBM Corporation 2019, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/plugins/action/zos_job_submit.py b/plugins/action/zos_job_submit.py index 8e06c340b..67047b648 100644 --- a/plugins/action/zos_job_submit.py +++ b/plugins/action/zos_job_submit.py @@ -1,4 +1,4 @@ -# Copyright (c) IBM Corporation 2019, 2020, 2023 +# Copyright (c) IBM Corporation 2019, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/plugins/action/zos_ping.py b/plugins/action/zos_ping.py index 9d644d389..b3b2b328a 100644 --- a/plugins/action/zos_ping.py +++ b/plugins/action/zos_ping.py @@ -1,6 +1,6 @@ # (c) 2012, Michael DeHaan <michael.dehaan@gmail.com> # Copyright (c) 2017 Ansible Project -# Copyright IBM Corporation 2020, 2021, 2022 +# Copyright IBM Corporation 2020, 2022 # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import (absolute_import, division, print_function) diff --git a/plugins/action/zos_unarchive.py b/plugins/action/zos_unarchive.py index ed508bcf0..b0a1fa466 100644 --- a/plugins/action/zos_unarchive.py +++ b/plugins/action/zos_unarchive.py @@ -1,4 +1,4 @@ -# Copyright (c) IBM Corporation 2023 +# Copyright (c) IBM Corporation 2023, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/plugins/filter/wtor.py b/plugins/filter/wtor.py index 17b530218..483fbdb73 100644 --- a/plugins/filter/wtor.py +++ b/plugins/filter/wtor.py @@ -1,4 +1,4 @@ -# Copyright (c) IBM Corporation 2020 +# Copyright (c) IBM Corporation 2020, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -54,8 +54,8 @@ - name: Evaluate if there are any existing dump messages matching 'IEE094D SPECIFY OPERAND' assert: that: - - is_specify_operand is defined - - bool_zos_operator_action_continue + - is_specify_operand is defined + - bool_zos_operator_action_continue success_msg: "Found 'IEE094D SPECIFY OPERAND' message." fail_msg: "Did not find 'IEE094D SPECIFY OPERAND' message." """ diff --git a/plugins/module_utils/encode.py b/plugins/module_utils/encode.py index f68a8ab77..83e9746c0 100644 --- a/plugins/module_utils/encode.py +++ b/plugins/module_utils/encode.py @@ -1,4 +1,4 @@ -# Copyright (c) IBM Corporation 2020 - 2024 +# Copyright (c) IBM Corporation 2020, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/plugins/module_utils/ickdsf.py b/plugins/module_utils/ickdsf.py index 436750c21..7081e2163 100644 --- a/plugins/module_utils/ickdsf.py +++ b/plugins/module_utils/ickdsf.py @@ -1,4 +1,4 @@ -# Copyright (c) IBM Corporation 2023 +# Copyright (c) IBM Corporation 2023, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/plugins/module_utils/import_handler.py b/plugins/module_utils/import_handler.py index 507dd2f65..7b5031216 100644 --- a/plugins/module_utils/import_handler.py +++ b/plugins/module_utils/import_handler.py @@ -1,4 +1,4 @@ -# Copyright (c) IBM Corporation 2020 +# Copyright (c) IBM Corporation 2020, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/plugins/module_utils/job.py b/plugins/module_utils/job.py index c25789030..8d9ac3a5c 100644 --- a/plugins/module_utils/job.py +++ b/plugins/module_utils/job.py @@ -385,7 +385,7 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=T try: list_of_dds = jobs.list_dds(entry.job_id) - except exceptions.DDQueryException as err: + except exceptions.DDQueryException: is_dd_query_exception = True # Check if the Job has JESJCL, if not, its in the JES INPUT queue, thus wait the full wait_time_s. @@ -406,7 +406,7 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=T list_of_dds = jobs.list_dds(entry.job_id) is_jesjcl = True if search_dictionaries("dd_name", "JESJCL", list_of_dds) else False is_job_error_status = True if entry.status in JOB_ERROR_STATUSES else False - except exceptions.DDQueryException as err: + except exceptions.DDQueryException: is_dd_query_exception = True continue @@ -463,7 +463,7 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=T single_dd["step_name"], single_dd["dd_name"] ) - except (UnicodeDecodeError, JSONDecodeError, TypeError, KeyError) as e: + except (UnicodeDecodeError, JSONDecodeError, TypeError, KeyError): tmpcont = ( "Non-printable UTF-8 characters were present in this output. " "Please access it from the job log." diff --git a/plugins/module_utils/mvs_cmd.py b/plugins/module_utils/mvs_cmd.py index 7307ff300..49511d725 100644 --- a/plugins/module_utils/mvs_cmd.py +++ b/plugins/module_utils/mvs_cmd.py @@ -1,4 +1,4 @@ -# Copyright (c) IBM Corporation 2020 - 2023 +# Copyright (c) IBM Corporation 2020, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/plugins/module_utils/template.py b/plugins/module_utils/template.py index a2a24c34f..a40f4dbc8 100644 --- a/plugins/module_utils/template.py +++ b/plugins/module_utils/template.py @@ -1,4 +1,4 @@ -# Copyright (c) IBM Corporation 2022, 2023 +# Copyright (c) IBM Corporation 2022, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/plugins/module_utils/validation.py b/plugins/module_utils/validation.py index fe41c0a01..a645d3362 100644 --- a/plugins/module_utils/validation.py +++ b/plugins/module_utils/validation.py @@ -1,4 +1,4 @@ -# Copyright (c) IBM Corporation 2023 +# Copyright (c) IBM Corporation 2023, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/plugins/module_utils/zoau_version_checker.py b/plugins/module_utils/zoau_version_checker.py index 442bf831a..459044950 100644 --- a/plugins/module_utils/zoau_version_checker.py +++ b/plugins/module_utils/zoau_version_checker.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2022 - 2023 +# Copyright (c) IBM Corporation 2022, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/plugins/module_utils/zos_mvs_raw.py b/plugins/module_utils/zos_mvs_raw.py index ba74ca38a..5afe05c50 100644 --- a/plugins/module_utils/zos_mvs_raw.py +++ b/plugins/module_utils/zos_mvs_raw.py @@ -1,4 +1,4 @@ -# Copyright (c) IBM Corporation 2020, 2023 +# Copyright (c) IBM Corporation 2020, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/plugins/modules/zos_apf.py b/plugins/modules/zos_apf.py index 664b2e493..c9cc8ba6b 100644 --- a/plugins/modules/zos_apf.py +++ b/plugins/modules/zos_apf.py @@ -59,7 +59,7 @@ - The identifier for the volume containing the library specified in the C(library) parameter. The values must be one the following. - 1. The volume serial number. - - 2. Six asterisks (******), indicating that the system must use the + - 2. Six asterisks C(******), indicating that the system must use the volume serial number of the current system residence (SYSRES) volume. - 3. *MCAT*, indicating that the system must use the volume serial number of the volume containing the master catalog. @@ -176,7 +176,7 @@ specified on the C(library) parameter. The values must be one of the following. - 1. The volume serial number - - 2. Six asterisks (******), indicating that the system must use the + - 2. Six asterisks C(******), indicating that the system must use the volume serial number of the current system residence (SYSRES) volume. - 3. *MCAT*, indicating that the system must use the volume serial @@ -221,7 +221,7 @@ - name: Add a library (cataloged) to the APF list and persistence zos_apf: library: SOME.SEQUENTIAL.DATASET - force_dynamic: True + force_dynamic: true persistent: data_set_name: SOME.PARTITIONED.DATASET(MEM) - name: Remove a library from the APF list and persistence @@ -239,7 +239,7 @@ batch: - library: SOME.SEQ.DS1 - library: SOME.SEQ.DS2 - sms: True + sms: true - library: SOME.SEQ.DS3 volume: T12345 - name: Print the APF list matching library pattern or volume serial number diff --git a/plugins/modules/zos_archive.py b/plugins/modules/zos_archive.py index e046a3f9e..713685bf9 100644 --- a/plugins/modules/zos_archive.py +++ b/plugins/modules/zos_archive.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2023 - 2024 +# Copyright (c) IBM Corporation 2023, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -357,7 +357,7 @@ name: terse format_options: terse_pack: "spack" - use_adrdssu: True + use_adrdssu: true # Use a pattern to store - name: Compress data set pattern using xmit diff --git a/plugins/modules/zos_backup_restore.py b/plugins/modules/zos_backup_restore.py index 1bb0d8977..0c814637e 100644 --- a/plugins/modules/zos_backup_restore.py +++ b/plugins/modules/zos_backup_restore.py @@ -223,7 +223,7 @@ data_sets: include: user.** backup_name: /tmp/temp_backup.dzp - recover: yes + recover: true - name: Backup all datasets matching the pattern USER.** to data set MY.BACKUP.DZP, allocate 100MB for data sets used in backup process. @@ -253,7 +253,7 @@ operation: backup backup_name: /tmp/temp_backup.dzp volume: MYVOL1 - full_volume: yes + full_volume: true space: 1 space_type: g @@ -296,7 +296,7 @@ zos_backup_restore: operation: restore volume: MYVOL2 - full_volume: yes + full_volume: true backup_name: MY.BACKUP.DZP space: 1 space_type: g diff --git a/plugins/modules/zos_blockinfile.py b/plugins/modules/zos_blockinfile.py index 775809230..e10ef522e 100644 --- a/plugins/modules/zos_blockinfile.py +++ b/plugins/modules/zos_blockinfile.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2020, 2023 +# Copyright (c) IBM Corporation 2020, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -497,7 +497,7 @@ def execute_dmod(src, block, marker, force, encoding, state, module, ins_bef=Non cmd = "dmod -b {0} {1} {2} {3}".format(force, encoding, marker, opts) else: - cmd = """dmod -b {0} {1} {2} "//d" {4}""".format(force, encoding, marker, src) + cmd = """dmod -b {0} {1} {2} {3}""".format(force, encoding, marker, src) rc, stdout, stderr = module.run_command(cmd) cmd = clean_command(cmd) diff --git a/plugins/modules/zos_copy.py b/plugins/modules/zos_copy.py index 4333a75b6..38a1542b5 100644 --- a/plugins/modules/zos_copy.py +++ b/plugins/modules/zos_copy.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019 - 2024 +# Copyright (c) IBM Corporation 2019, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -512,7 +512,7 @@ zos_copy: src: /path/to/foo.conf dest: /etc/foo.conf - mode: 0644 + mode: "0644" group: foo owner: bar diff --git a/plugins/modules/zos_data_set.py b/plugins/modules/zos_data_set.py index b85e97aea..d3ef4e8d1 100644 --- a/plugins/modules/zos_data_set.py +++ b/plugins/modules/zos_data_set.py @@ -674,7 +674,7 @@ space_type: m record_format: u record_length: 25 - replace: yes + replace: true - name: Attempt to replace a data set if it exists. If not found in the catalog, check if it is available on volume 222222, and catalog if found. zos_data_set: @@ -685,7 +685,7 @@ record_format: u record_length: 25 volumes: "222222" - replace: yes + replace: true - name: Create an ESDS data set if it does not exist zos_data_set: @@ -720,7 +720,7 @@ zos_data_set: name: someds.name.here(mydata) type: member - replace: yes + replace: true - name: Write a member to an existing PDS; do not replace if member exists zos_data_set: @@ -738,22 +738,22 @@ name: someds.name.here(mydata) state: absent type: member - force: yes + force: true - name: Create multiple partitioned data sets and add one or more members to each zos_data_set: batch: - - name: someds.name.here1 + - name: someds.name.here1 type: pds space_primary: 5 space_type: m record_format: fb - replace: yes + replace: true - name: someds.name.here1(member1) type: member - name: someds.name.here2(member1) type: member - replace: yes + replace: true - name: someds.name.here2(member2) type: member diff --git a/plugins/modules/zos_encode.py b/plugins/modules/zos_encode.py index 243abb2d9..e9afa4994 100644 --- a/plugins/modules/zos_encode.py +++ b/plugins/modules/zos_encode.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019 - 2024 +# Copyright (c) IBM Corporation 2019, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -140,8 +140,8 @@ encoding: from: IBM-1047 to: ISO8859-1 - backup: yes - backup_compress: yes + backup: true + backup_compress: true - name: Convert file encoding from IBM-1047 to ISO8859-1 to a directory zos_encode: @@ -249,7 +249,6 @@ encoding: from: ISO8859-1 to: IBM-1047 - """ RETURN = r""" diff --git a/plugins/modules/zos_fetch.py b/plugins/modules/zos_fetch.py index fda237768..8b4d4809d 100644 --- a/plugins/modules/zos_fetch.py +++ b/plugins/modules/zos_fetch.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019 - 2024 +# Copyright (c) IBM Corporation 2019, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/plugins/modules/zos_gather_facts.py b/plugins/modules/zos_gather_facts.py index a9df42a49..eb7699cdb 100644 --- a/plugins/modules/zos_gather_facts.py +++ b/plugins/modules/zos_gather_facts.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2022 - 2024 +# Copyright (c) IBM Corporation 2022, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/plugins/modules/zos_job_output.py b/plugins/modules/zos_job_output.py index 6a6328e67..21e0af3e6 100644 --- a/plugins/modules/zos_job_output.py +++ b/plugins/modules/zos_job_output.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019, 2020, 2022, 2023 +# Copyright (c) IBM Corporation 2019, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/plugins/modules/zos_job_query.py b/plugins/modules/zos_job_query.py index 279a3955f..be2bb513f 100644 --- a/plugins/modules/zos_job_query.py +++ b/plugins/modules/zos_job_query.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019, 2020, 2023 +# Copyright (c) IBM Corporation 2019, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/plugins/modules/zos_lineinfile.py b/plugins/modules/zos_lineinfile.py index 43e85061b..0988ef2d4 100644 --- a/plugins/modules/zos_lineinfile.py +++ b/plugins/modules/zos_lineinfile.py @@ -229,15 +229,14 @@ src: /tmp/src/somefile regexp: '^(.*)User(\d+)m(.*)$' line: '\1APPUser\3' - backrefs: yes + backrefs: true - name: Add a line to a member while a task is in execution zos_lineinfile: src: SOME.PARTITIONED.DATA.SET(DATA) insertafter: EOF line: 'Should be a working test now' - force: True - + force: true """ RETURN = r""" diff --git a/plugins/modules/zos_mount.py b/plugins/modules/zos_mount.py index 8828d9005..7b4b04654 100644 --- a/plugins/modules/zos_mount.py +++ b/plugins/modules/zos_mount.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2020 - 2023 +# Copyright (c) IBM Corporation 2020, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -344,8 +344,8 @@ fs_type: zfs state: mounted persistent: - data_store: SYS1.PARMLIB(BPXPRMAA) - comment: For Tape2 project + data_store: SYS1.PARMLIB(BPXPRMAA) + comment: For Tape2 project - name: Mount a filesystem and record change in BPXPRMAA after backing up to BPXPRMAB. zos_mount: @@ -354,10 +354,10 @@ fs_type: zfs state: mounted persistent: - data_store: SYS1.PARMLIB(BPXPRMAA) - backup: Yes - backup_name: SYS1.PARMLIB(BPXPRMAB) - comment: For Tape2 project + data_store: SYS1.PARMLIB(BPXPRMAA) + backup: true + backup_name: SYS1.PARMLIB(BPXPRMAB) + comment: For Tape2 project - name: Mount a filesystem ignoring uid/gid values. zos_mount: @@ -365,7 +365,7 @@ path: /u/omvsadm/core fs_type: zfs state: mounted - allow_uid: no + allow_uid: false - name: Mount a filesystem asynchronously (don't wait for completion). zos_mount: @@ -400,7 +400,6 @@ state: mounted automove: AUTOMOVE automove_list: EXCLUDE,DEV4,DEV5,DEV6,DEV7 - """ RETURN = r""" diff --git a/plugins/modules/zos_mvs_raw.py b/plugins/modules/zos_mvs_raw.py index af24cd8e3..d05fef6db 100644 --- a/plugins/modules/zos_mvs_raw.py +++ b/plugins/modules/zos_mvs_raw.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2020, 2022, 2023 +# Copyright (c) IBM Corporation 2020, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -1299,7 +1299,7 @@ dd_name: sysprint data_set_name: mypgm.output.ds disposition: new - reuse: yes + reuse: true type: seq space_primary: 5 space_secondary: 1 @@ -1323,7 +1323,7 @@ dd_name: sysprint data_set_name: mypgm.output.ds disposition: new - reuse: yes + reuse: true type: seq space_primary: 5 space_secondary: 1 @@ -1368,7 +1368,7 @@ dd_name: sysprint data_set_name: mypgm.output.ds disposition: new - reuse: yes + reuse: true type: seq space_primary: 5 space_secondary: 1 @@ -1396,8 +1396,8 @@ dd_name: sysprint data_set_name: mypgm.output.ds disposition: new - replace: yes - backup: yes + replace: true + backup: true type: seq space_primary: 5 space_secondary: 1 @@ -1468,7 +1468,7 @@ - name: Take a set of data sets and write them to an archive. zos_mvs_raw: program_name: adrdssu - auth: yes + auth: true dds: - dd_data_set: dd_name: archive @@ -1484,7 +1484,7 @@ - name: Merge two sequential data sets and write them to new data set zos_mvs_raw: program_name: sort - auth: no + auth: false parm: "MSGPRT=CRITICAL,LIST" dds: - dd_data_set: @@ -1515,7 +1515,7 @@ files. zos_mvs_raw: pgm: idcams - auth: yes + auth: true dds: - dd_concat: dd_name: sysprint @@ -1532,57 +1532,56 @@ dd_name: sysin content: " LISTCAT ENTRIES('SYS1.*')" -- name: Drop the contents of input dataset into output dataset - using REPRO command. +- name: Drop the contents of input dataset into output dataset using REPRO command. zos_mvs_raw: pgm: idcams - auth: yes + auth: true dds: - - dd_data_set: - dd_name: INPUT - data_set_name: myhlq.ds1.input - - dd_data_set: - dd_name: OUTPUT - data_set_name: myhlq.ds1.output - - dd_input: - dd_name: sysin - content: | + - dd_data_set: + dd_name: INPUT + data_set_name: myhlq.ds1.input + - dd_data_set: + dd_name: OUTPUT + data_set_name: myhlq.ds1.output + - dd_input: + dd_name: sysin + content: | " REPRO - INFILE(INPUT) - OUTFILE(OUTPUT)" - - dd_output: - dd_name: sysprint - return_content: - type: text - - - name: Define a cluster using a literal block style indicator - with a 2 space indentation. - zos_mvs_raw: - program_name: idcams - auth: yes - dds: - - dd_output: - dd_name: sysprint - return_content: - type: text - - dd_input: - dd_name: sysin - content: |2 - DEFINE CLUSTER - - (NAME(ANSIBLE.TEST.VSAM) - - CYL(10 10) - - FREESPACE(20 20) - - INDEXED - - KEYS(32 0) - - NOERASE - - NONSPANNED - - NOREUSE - - SHAREOPTIONS(3 3) - - SPEED - - UNORDERED - - RECORDSIZE(4086 32600) - - VOLUMES(222222) - - UNIQUE) + - dd_output: + dd_name: sysprint + return_content: + type: text + +- name: Define a cluster using a literal block style indicator + with a 2 space indentation. + zos_mvs_raw: + program_name: idcams + auth: true + dds: + - dd_output: + dd_name: sysprint + return_content: + type: text + - dd_input: + dd_name: sysin + content: 2 + DEFINE CLUSTER - + (NAME(ANSIBLE.TEST.VSAM) - + CYL(10 10) - + FREESPACE(20 20) - + INDEXED - + KEYS(32 0) - + NOERASE - + NONSPANNED - + NOREUSE - + SHAREOPTIONS(3 3) - + SPEED - + UNORDERED - + RECORDSIZE(4086 32600) - + VOLUMES(222222) - + UNIQUE) """ from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.better_arg_parser import ( diff --git a/plugins/modules/zos_operator_action_query.py b/plugins/modules/zos_operator_action_query.py index ba6e4ee77..415d94f3e 100644 --- a/plugins/modules/zos_operator_action_query.py +++ b/plugins/modules/zos_operator_action_query.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019 - 2024 +# Copyright (c) IBM Corporation 2019, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -121,7 +121,7 @@ system: mv29 message_filter: filter: ^.*IMS.*$ - use_regex: yes + use_regex: true """ RETURN = r""" diff --git a/plugins/modules/zos_ping.rexx b/plugins/modules/zos_ping.rexx index beca54c3b..78e09f6b5 100644 --- a/plugins/modules/zos_ping.rexx +++ b/plugins/modules/zos_ping.rexx @@ -1,7 +1,7 @@ /* rexx __ANSIBLE_ENCODE_EBCDIC__ */ /* WANT_JSON */ -/* Copyright (c) IBM Corporation 2019, 2020, 2023 */ +/* Copyright (c) IBM Corporation 2019, 2023 */ /* Licensed under the Apache License, Version 2.0 (the "License"); */ /* you may not use this file except in compliance with the License. */ diff --git a/plugins/modules/zos_script.py b/plugins/modules/zos_script.py index e4f93ef21..580773219 100644 --- a/plugins/modules/zos_script.py +++ b/plugins/modules/zos_script.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2023 +# Copyright (c) IBM Corporation 2023, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/plugins/modules/zos_unarchive.py b/plugins/modules/zos_unarchive.py index 31d709a3a..cb587dc0e 100644 --- a/plugins/modules/zos_unarchive.py +++ b/plugins/modules/zos_unarchive.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2023 - 2024 +# Copyright (c) IBM Corporation 2023, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -356,8 +356,8 @@ format: name: xmit format_options: - use_adrdssu: True - list: True + use_adrdssu: true + list: true ''' RETURN = r''' diff --git a/plugins/modules/zos_volume_init.py b/plugins/modules/zos_volume_init.py index 0be4f2a8f..d0a2c55be 100644 --- a/plugins/modules/zos_volume_init.py +++ b/plugins/modules/zos_volume_init.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2022, 2023 +# Copyright (c) IBM Corporation 2022, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -151,14 +151,14 @@ zos_volume_init: address: "1234" volid: "DEMO01" - sms_managed: no + sms_managed: false - name: Initialize non-SMS managed target volume with all the default options and override the default high level qualifier (HLQ). zos_volume_init: address: 1234 volid: DEMO01 - sms_managed: no + sms_managed: false tmp_hlq: TESTUSR - name: Initialize a new SMS managed DASD volume with new volume serial 'e8d8' with 30 track VTOC, an index, as long as @@ -167,12 +167,12 @@ zos_volume_init: address: e8d8 vtoc_size: 30 - index: yes - sms_managed: yes + index: true + sms_managed: true volid: ine8d8 verify_volid: ine8d8 - verify_volume_empty: yes - verify_offline: no + verify_volume_empty: true + verify_offline: false - name: Initialize 3 new DASD volumes (0901, 0902, 0903) for use on a z/OS system as 'DEMO01', 'DEMO02', 'DEMO03' using Ansible loops. diff --git a/scripts/mounts.env b/scripts/mounts.env index 18eae5ce1..dde75ef71 100644 --- a/scripts/mounts.env +++ b/scripts/mounts.env @@ -1,5 +1,5 @@ # ============================================================================== -# Copyright (c) IBM Corporation 2023 +# Copyright (c) IBM Corporation 2023, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -60,7 +60,7 @@ python_mount_list_str="1:3.8.2:/allpython/3.8.2/usr/lpp/IBM/cyp/v3r8/pyz:/allpyt "5:3.11:/allpython/3.11/usr/lpp/IBM/cyp/v3r11/pyz:/allpython/3.11:IMSTESTU.PYZ.V3B02.ZFS "\ "6:3.11-ga:/allpython/3.11-ga/usr/lpp/IBM/cyp/v3r11/pyz:/allpython/3.11-ga:IMSTESTU.PYZ.V311GA.ZFS "\ "7:3.11-3:/allpython/3.11-3/usr/lpp/IBM/cyp/v3r11/pyz:/allpython/3.11-3:IMSTESTU.PYZ.V3B03.ZFS "\ -"8:3.12:/allpython/3.12/usr/lpp/IBM/cyp/v3r11/pyz:/allpython/3.12:IMSTESTU.PYZ.V3C0.ZFS " +"8:3.12:/allpython/3.12/usr/lpp/IBM/cyp/v3r12/pyz:/allpython/3.12:IMSTESTU.PYZ.V3C0.ZFS " # ------------------------------------------------------------------------------ # PYTHON PATH POINTS @@ -79,4 +79,4 @@ python_path_list_str="1:3.8.2:/allpython/3.8.2/usr/lpp/IBM/cyp/v3r8/pyz "\ "5:3.11:/allpython/3.11/usr/lpp/IBM/cyp/v3r11/pyz "\ "6:3.11:/allpython/3.11-ga/usr/lpp/IBM/cyp/v3r11/pyz "\ "7:3.11-3:/allpython/3.11-3/usr/lpp/IBM/cyp/v3r11/pyz "\ -"8:3.12:/allpython/3.12/usr/lpp/IBM/cyp/v3r12/pyz " \ No newline at end of file +"8:3.12:/allpython/3.12/usr/lpp/IBM/cyp/v3r12/pyz " diff --git a/scripts/requirements-2.12.env b/scripts/requirements-2.12.env index 630b617ad..229e4edcb 100644 --- a/scripts/requirements-2.12.env +++ b/scripts/requirements-2.12.env @@ -1,6 +1,6 @@ #!/bin/sh # ============================================================================== -# Copyright (c) IBM Corporation 2022, 2023 +# Copyright (c) IBM Corporation 2022, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -25,7 +25,7 @@ requirements=( "ansible-core:2.12.10" "pylint" "rstcheck" -"ansible-lint:6.22.2" +"ansible-lint:6.22.1" ) python=( diff --git a/scripts/requirements-2.13.env b/scripts/requirements-2.13.env index a649e0cf7..4720e9352 100644 --- a/scripts/requirements-2.13.env +++ b/scripts/requirements-2.13.env @@ -25,7 +25,7 @@ requirements=( "ansible-core:2.13.13" "pylint" "rstcheck" -"ansible-lint:6.22.2" +"ansible-lint:6.22.1" ) python=( diff --git a/scripts/requirements-2.14.env b/scripts/requirements-2.14.env index 9b4c12673..40a80dbf2 100644 --- a/scripts/requirements-2.14.env +++ b/scripts/requirements-2.14.env @@ -25,7 +25,7 @@ requirements=( "ansible-core:2.14.16" "pylint" "rstcheck" -"ansible-lint:6.22.2" +"ansible-lint:6.22.1" ) python=( diff --git a/scripts/requirements-2.15.env b/scripts/requirements-2.15.env index 7f0f42b1b..4ca546686 100644 --- a/scripts/requirements-2.15.env +++ b/scripts/requirements-2.15.env @@ -1,6 +1,6 @@ #!/bin/sh # ============================================================================== -# Copyright (c) IBM Corporation 2022, 2023 +# Copyright (c) IBM Corporation 2022, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -25,7 +25,7 @@ requirements=( "ansible-core:2.15.11" "pylint" "rstcheck" -"ansible-lint:6.22.2" +"ansible-lint:6.22.1" ) python=( diff --git a/scripts/requirements-2.16.env b/scripts/requirements-2.16.env index 1ac4c4fa4..050c27aca 100644 --- a/scripts/requirements-2.16.env +++ b/scripts/requirements-2.16.env @@ -25,7 +25,7 @@ requirements=( "ansible-core:2.16.3" "pylint" "rstcheck" -"ansible-lint:6.22.2" +"ansible-lint:6.22.1" ) python=( diff --git a/scripts/venv.sh b/scripts/venv.sh index 597aeee23..45c3d130e 100755 --- a/scripts/venv.sh +++ b/scripts/venv.sh @@ -1,7 +1,7 @@ #!/bin/sh # ============================================================================== -# Copyright (c) IBM Corporation 2022, 2023 +# Copyright (c) IBM Corporation 2022, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/tests/functional/modules/test_zos_apf_func.py b/tests/functional/modules/test_zos_apf_func.py index 9722b92fa..7c19ea31a 100644 --- a/tests/functional/modules/test_zos_apf_func.py +++ b/tests/functional/modules/test_zos_apf_func.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2020 - 2024 +# Copyright (c) IBM Corporation 2020, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -562,4 +562,4 @@ def test_persist_invalid_marker_len(ansible_zos_module, volumes_with_vvds): for result in results.contacted.values(): assert result.get("msg") == 'marker length may not exceed 72 characters' finally: - clean_test_env(hosts, test_info) \ No newline at end of file + clean_test_env(hosts, test_info) diff --git a/tests/functional/modules/test_zos_archive_func.py b/tests/functional/modules/test_zos_archive_func.py index f6b1140fa..e01994138 100644 --- a/tests/functional/modules/test_zos_archive_func.py +++ b/tests/functional/modules/test_zos_archive_func.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2023 +# Copyright (c) IBM Corporation 2023, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/tests/functional/modules/test_zos_copy_func.py b/tests/functional/modules/test_zos_copy_func.py index 13e6d367b..086b7d27e 100644 --- a/tests/functional/modules/test_zos_copy_func.py +++ b/tests/functional/modules/test_zos_copy_func.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2020 - 2024 +# Copyright (c) IBM Corporation 2020, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -230,7 +230,6 @@ //STDERR DD SYSOUT=* //""" - def populate_dir(dir_path): for i in range(5): with open(dir_path + "/" + "file" + str(i + 1), "w") as infile: diff --git a/tests/functional/modules/test_zos_data_set_func.py b/tests/functional/modules/test_zos_data_set_func.py index d01705597..c140a60cf 100644 --- a/tests/functional/modules/test_zos_data_set_func.py +++ b/tests/functional/modules/test_zos_data_set_func.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019, 2020, 2023 +# Copyright (c) IBM Corporation 2019, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -178,7 +178,6 @@ def test_data_set_catalog_and_uncatalog(ansible_zos_module, jcl, volumes_on_syst assert result.get("changed") is True # verify second uncatalog shows uncatalog already performed results = hosts.all.zos_data_set(name=dataset, state="uncataloged") - for result in results.contacted.values(): assert result.get("changed") is False # recatalog the data set diff --git a/tests/functional/modules/test_zos_encode_func.py b/tests/functional/modules/test_zos_encode_func.py index 4b74c8834..df01a6133 100644 --- a/tests/functional/modules/test_zos_encode_func.py +++ b/tests/functional/modules/test_zos_encode_func.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019 - 2024 +# Copyright (c) IBM Corporation 2019, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/tests/functional/modules/test_zos_fetch_func.py b/tests/functional/modules/test_zos_fetch_func.py index 4d72a6cc5..7fd44651e 100644 --- a/tests/functional/modules/test_zos_fetch_func.py +++ b/tests/functional/modules/test_zos_fetch_func.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2020 - 2024 +# Copyright (c) IBM Corporation 2020, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/tests/functional/modules/test_zos_find_func.py b/tests/functional/modules/test_zos_find_func.py index 42a8db23e..067a2f192 100644 --- a/tests/functional/modules/test_zos_find_func.py +++ b/tests/functional/modules/test_zos_find_func.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2020 - 2024 +# Copyright (c) IBM Corporation 2020, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/tests/functional/modules/test_zos_gather_facts_func.py b/tests/functional/modules/test_zos_gather_facts_func.py index f2861c596..0d28b8f25 100644 --- a/tests/functional/modules/test_zos_gather_facts_func.py +++ b/tests/functional/modules/test_zos_gather_facts_func.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2022 - 2024 +# Copyright (c) IBM Corporation 2022, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/tests/functional/modules/test_zos_job_output_func.py b/tests/functional/modules/test_zos_job_output_func.py index e92d377d4..96bc0b2bc 100644 --- a/tests/functional/modules/test_zos_job_output_func.py +++ b/tests/functional/modules/test_zos_job_output_func.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019, 2020, 2022, 2023 +# Copyright (c) IBM Corporation 2019, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/tests/functional/modules/test_zos_job_submit_func.py b/tests/functional/modules/test_zos_job_submit_func.py index c306b1450..d0e452ac2 100644 --- a/tests/functional/modules/test_zos_job_submit_func.py +++ b/tests/functional/modules/test_zos_job_submit_func.py @@ -807,7 +807,6 @@ def test_job_submit_full_input(ansible_zos_module): finally: hosts.all.file(path=TEMP_PATH, state="absent") - def test_negative_job_submit_local_jcl_no_dsn(ansible_zos_module): tmp_file = tempfile.NamedTemporaryFile(delete=True) with open(tmp_file.name, "w") as f: @@ -1003,14 +1002,12 @@ def test_inexistent_positive_gds(ansible_zos_module): def test_zoau_bugfix_invalid_utf8_chars(ansible_zos_module): try: hosts = ansible_zos_module - # Copy C source and compile it. hosts.all.file(path=TEMP_PATH, state="directory") hosts.all.shell( cmd="echo {0} > {1}/noprint.c".format(quote(C_SRC_INVALID_UTF8), TEMP_PATH) ) hosts.all.shell(cmd="xlc -o {0}/noprint {0}/noprint.c".format(TEMP_PATH)) - # Create local JCL and submit it. tmp_file = tempfile.NamedTemporaryFile(delete=True) with open(tmp_file.name, "w") as f: diff --git a/tests/functional/modules/test_zos_lineinfile_func.py b/tests/functional/modules/test_zos_lineinfile_func.py index cd1421f41..a9a29227d 100644 --- a/tests/functional/modules/test_zos_lineinfile_func.py +++ b/tests/functional/modules/test_zos_lineinfile_func.py @@ -675,7 +675,7 @@ def test_ds_line_replace_match_insertbefore_ignore(ansible_zos_module, dstype): finally: remove_ds_environment(ansible_zos_module, ds_name) -#GH Issue #1244 +#GH Issue #1244 / JIRA NAZARE-10439 #@pytest.mark.ds #@pytest.mark.parametrize("dstype", DS_TYPE) #def test_ds_line_replace_nomatch_insertafter_match(ansible_zos_module, dstype): @@ -698,7 +698,7 @@ def test_ds_line_replace_match_insertbefore_ignore(ansible_zos_module, dstype): # finally: # remove_ds_environment(ansible_zos_module, ds_name) -#GH Issue #1244 +#GH Issue #1244 / JIRA NAZARE-10439 #@pytest.mark.ds #@pytest.mark.parametrize("dstype", DS_TYPE) #def test_ds_line_replace_nomatch_insertbefore_match(ansible_zos_module, dstype): @@ -721,7 +721,7 @@ def test_ds_line_replace_match_insertbefore_ignore(ansible_zos_module, dstype): # finally: # remove_ds_environment(ansible_zos_module, ds_name) -#GH Issue #1244 +#GH Issue #1244 / JIRA NAZARE-10439 #@pytest.mark.ds #@pytest.mark.parametrize("dstype", DS_TYPE) #def test_ds_line_replace_nomatch_insertafter_nomatch(ansible_zos_module, dstype): @@ -744,7 +744,7 @@ def test_ds_line_replace_match_insertbefore_ignore(ansible_zos_module, dstype): # finally: # remove_ds_environment(ansible_zos_module, ds_name) -#GH Issue #1244 +#GH Issue #1244 / JIRA NAZARE-10439 #@pytest.mark.ds #@pytest.mark.parametrize("dstype", DS_TYPE) #def test_ds_line_replace_nomatch_insertbefore_nomatch(ansible_zos_module, dstype): diff --git a/tests/functional/modules/test_zos_mount_func.py b/tests/functional/modules/test_zos_mount_func.py index 39fdd26dd..1e593c3ff 100644 --- a/tests/functional/modules/test_zos_mount_func.py +++ b/tests/functional/modules/test_zos_mount_func.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2020 - 2024 +# Copyright (c) IBM Corporation 2020, 2024 # Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0) from __future__ import absolute_import, division, print_function diff --git a/tests/functional/modules/test_zos_mvs_raw_func.py b/tests/functional/modules/test_zos_mvs_raw_func.py index cbddd4419..49e55290d 100644 --- a/tests/functional/modules/test_zos_mvs_raw_func.py +++ b/tests/functional/modules/test_zos_mvs_raw_func.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2020, 2022 +# Copyright (c) IBM Corporation 2020, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/tests/functional/modules/test_zos_operator_action_query_func.py b/tests/functional/modules/test_zos_operator_action_query_func.py index 950e6900f..f8f521a28 100644 --- a/tests/functional/modules/test_zos_operator_action_query_func.py +++ b/tests/functional/modules/test_zos_operator_action_query_func.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019 - 2024 +# Copyright (c) IBM Corporation 2019, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/tests/functional/modules/test_zos_operator_func.py b/tests/functional/modules/test_zos_operator_func.py index 6891cffa8..d60d26ec2 100644 --- a/tests/functional/modules/test_zos_operator_func.py +++ b/tests/functional/modules/test_zos_operator_func.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019, 2020, 2023 +# Copyright (c) IBM Corporation 2019, 2023 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/tests/functional/modules/test_zos_tso_command_func.py b/tests/functional/modules/test_zos_tso_command_func.py index 6f76ceb3f..08eefe336 100644 --- a/tests/functional/modules/test_zos_tso_command_func.py +++ b/tests/functional/modules/test_zos_tso_command_func.py @@ -20,7 +20,6 @@ import ansible.utils from ibm_zos_core.tests.helpers.dataset import get_tmp_ds_name - def test_zos_tso_command_run_help(ansible_zos_module): hosts = ansible_zos_module results = hosts.all.zos_tso_command(commands=["help"]) diff --git a/tests/functional/modules/test_zos_unarchive_func.py b/tests/functional/modules/test_zos_unarchive_func.py index 790f5b3ef..37697da80 100644 --- a/tests/functional/modules/test_zos_unarchive_func.py +++ b/tests/functional/modules/test_zos_unarchive_func.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2023 - 2024 +# Copyright (c) IBM Corporation 2023, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -281,7 +281,6 @@ def test_uss_single_unarchive_with_mode(ansible_zos_module, format): finally: hosts.all.file(path=f"{USS_TEMP_DIR}", state="absent") - @pytest.mark.uss def test_uss_unarchive_copy_to_remote(ansible_zos_module): try: @@ -965,7 +964,6 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_force(ansible_zos_module, f hosts.all.shell(cmd=""" drm "{0}*" """.format(DATASET)) hosts.all.zos_data_set(name=MVS_DEST_ARCHIVE, state="absent") - @pytest.mark.ds @pytest.mark.parametrize( "format", [ diff --git a/tests/helpers/volumes.py b/tests/helpers/volumes.py index 952cbb1e7..bd261f9ed 100644 --- a/tests/helpers/volumes.py +++ b/tests/helpers/volumes.py @@ -158,4 +158,4 @@ def create_vvds_on_volume( ansible_zos_module, volume): for vls_res in vls_result.contacted.values(): if vls_res.get("rc") == 0: return True - return False \ No newline at end of file + return False From cac9810c7146136e3b885f32892a1eff27d04810 Mon Sep 17 00:00:00 2001 From: Demetri <dimatos@gmail.com> Date: Wed, 22 May 2024 09:55:05 -0700 Subject: [PATCH 391/495] Fix url for support matrix (#1517) Signed-off-by: ddimatos <dimatos@gmail.com> --- docs/source/release_notes.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/source/release_notes.rst b/docs/source/release_notes.rst index 111dc3d05..1fde47fab 100644 --- a/docs/source/release_notes.rst +++ b/docs/source/release_notes.rst @@ -660,7 +660,7 @@ controller and z/OS managed node dependencies. .. _FAQs: https://ibm.github.io/z_ansible_collections_doc/faqs/faqs.html .. _z/OS core support matrix: - https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/build/html/resources/releases_maintenance.html + https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/resources/releases_maintenance.html .. ............................................................................. .. Playbook Links From 811082d3a807476dbb7d8bd39ac78b6cfc3796c3 Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Fri, 31 May 2024 16:48:13 -0400 Subject: [PATCH 392/495] Add symbol support to zos_apf (#1493) * modified DatasetCreatedError message * Added gdg functions * Created unit test for validating gds relative name * Updated to fail when future gen * Update arg parser * Add escaping function for data set names * Add unit tests for name escaping * Remove calls to escape_data_set_name * renamed tests * Added MVSDataset class * Updated escaped symbols * Updated tests * Added utils * Add changelog * Uncommented test * Updated exception * Updated mvsdataset class * Updated class * initial branch save with escaping added, changelog fragment rough-in Also changed dataset to have a bool indicating if symbols can be included in get_tmp_ds_name * corrected changelog fragment to include PR number in link added import to zos_apf to get staticmethod loaded. * added output to a failing test * fixing pprint statement * added escape function to get_tmp_dataset * corrected staticmethod declaration * change in volume finder to add all volume lists to the base value * changing volume lister to handle potentially blank sub-lists * adding protection in get_volumes for a short line of input * added object definition to get_temp_ds_name * added detail into test that is failing (finally back to test) * removing potential import loop * pulled from dev, then removed unneeded arg value. * tweaking the get_volumes logic to handle partial lists. * putting dataset reference back into tests/helpers * re-commenting to see if that impacts loading of backup module * added detail to make sure (looks like some commands actually break with escaping) * sorting out the error details. Will need to table what can't shouldn't be escaped: something@something works, something \@something works, something\\@something fails * temporarily disabled escaping in main zos_apf call * fixed missing declaration issue * cleanup of comments and trace notes * changed fragment to minor_changes * corrected get_tmp_ds for symbols=false removed several print statements from test case. * removed extra line between import statments - unneeded change. * updated symbols generation for better distribution updated credits for both developers involved. * changed test of batch_add_del to not use symbols * remove first ds name (library) symbol option in test_batch_add_del * added prettyprint into failing assertion, re-activating symbols on first get_ds * included output for test print * removed escape of dsn in zos_apf/554 * removed print/pprint from testing re-enabled symbol usage in second test wave * removed a previously commented out line (removing escaping from batch call) --------- Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> Co-authored-by: Ivan Moreno <iamorenosoto@gmail.com> --- .../1380-enhancement-add-sybols-zos_apf.yml | 3 ++ plugins/modules/zos_apf.py | 5 ++- tests/functional/modules/test_zos_apf_func.py | 32 +++++++++---------- tests/helpers/dataset.py | 2 +- tests/helpers/volumes.py | 14 +++++--- 5 files changed, 32 insertions(+), 24 deletions(-) create mode 100644 changelogs/fragments/1380-enhancement-add-sybols-zos_apf.yml diff --git a/changelogs/fragments/1380-enhancement-add-sybols-zos_apf.yml b/changelogs/fragments/1380-enhancement-add-sybols-zos_apf.yml new file mode 100644 index 000000000..1cfcf96a7 --- /dev/null +++ b/changelogs/fragments/1380-enhancement-add-sybols-zos_apf.yml @@ -0,0 +1,3 @@ +minor_changes: + - zos_apf - Change input to auto-escape 'library' names containing symbols + (https://github.com/ansible-collections/ibm_zos_core/pull/1493). diff --git a/plugins/modules/zos_apf.py b/plugins/modules/zos_apf.py index c9cc8ba6b..36156cdd9 100644 --- a/plugins/modules/zos_apf.py +++ b/plugins/modules/zos_apf.py @@ -22,6 +22,8 @@ version_added: '1.3.0' author: - "Behnam (@balkajbaf)" + - "Rich Parker (@richp405)" + - "Fernando Flores (@fernandofloresg))" short_description: Add or remove libraries to Authorized Program Facility (APF) description: - Adds or removes libraries to Authorized Program Facility (APF). @@ -508,7 +510,8 @@ def main(): except ValueError as err: module.fail_json(msg="Parameter verification failed", stderr=str(err)) - library = parsed_args.get('library') + library = parsed_args.get("library") + state = parsed_args.get('state') force_dynamic = parsed_args.get('force_dynamic') volume = parsed_args.get('volume') diff --git a/tests/functional/modules/test_zos_apf_func.py b/tests/functional/modules/test_zos_apf_func.py index 7c19ea31a..74e74d516 100644 --- a/tests/functional/modules/test_zos_apf_func.py +++ b/tests/functional/modules/test_zos_apf_func.py @@ -14,8 +14,7 @@ from __future__ import absolute_import, division, print_function from ibm_zos_core.tests.helpers.dataset import get_tmp_ds_name from ibm_zos_core.tests.helpers.volumes import Volume_Handler -from shellescape import quote -from pprint import pprint +from shlex import quote __metaclass__ = type @@ -56,7 +55,7 @@ def test_add_del(ansible_zos_module, volumes_with_vvds): VolumeHandler = Volume_Handler(volumes_with_vvds) volume = VolumeHandler.get_available_vol() test_info = dict(library="", state="present", force_dynamic=True) - ds = get_tmp_ds_name(3,2) + ds = get_tmp_ds_name(3,2,True) hosts.all.shell(f"dtouch -tseq -V{volume} {ds} ") test_info['library'] = ds if test_info.get('volume') is not None: @@ -93,7 +92,7 @@ def test_add_del_with_tmp_hlq_option(ansible_zos_module, volumes_with_vvds): tmphlq = "TMPHLQ" test_info = dict(library="", state="present", force_dynamic=True, tmp_hlq="", persistent=dict(data_set_name="", backup=True)) test_info['tmp_hlq'] = tmphlq - ds = get_tmp_ds_name(3,2) + ds = get_tmp_ds_name(3,2,True) hosts.all.shell(cmd=f"dtouch -tseq -V{volume} {ds} ") test_info['library'] = ds if test_info.get('volume') is not None: @@ -129,7 +128,7 @@ def test_add_del_volume(ansible_zos_module, volumes_with_vvds): VolumeHandler = Volume_Handler(volumes_with_vvds) volume = VolumeHandler.get_available_vol() test_info = dict(library="", volume="", state="present", force_dynamic=True) - ds = get_tmp_ds_name(1,1) + ds = get_tmp_ds_name(1,1,True) hosts.all.shell(cmd=f"dtouch -tseq -V{volume} {ds} ") test_info['library'] = ds if test_info.get('volume') is not None: @@ -148,6 +147,7 @@ def test_add_del_volume(ansible_zos_module, volumes_with_vvds): hosts.all.shell(cmd=cmdStr) test_info['persistent']['data_set_name'] = prstds results = hosts.all.zos_apf(**test_info) + for result in results.contacted.values(): assert result.get("rc") == 0 test_info['state'] = 'absent' @@ -192,7 +192,7 @@ def test_add_del_volume_persist(ansible_zos_module, volumes_with_vvds): VolumeHandler = Volume_Handler(volumes_with_vvds) volume = VolumeHandler.get_available_vol() test_info = dict(library="", volume="", persistent=dict(data_set_name="", marker="/* {mark} BLOCK */"), state="present", force_dynamic=True) - ds = get_tmp_ds_name(1,1) + ds = get_tmp_ds_name(1,1,True) hosts.all.shell(cmd=f"dtouch -tseq -V{volume} {ds} ") test_info['library'] = ds if test_info.get('volume') is not None: @@ -251,7 +251,7 @@ def test_batch_add_del(ansible_zos_module, volumes_with_vvds): persistent=dict(data_set_name="", marker="/* {mark} BLOCK */"), state="present", force_dynamic=True ) for item in test_info['batch']: - ds = get_tmp_ds_name(1,1) + ds = get_tmp_ds_name(1,1,True) hosts.all.shell(cmd=f"dtouch -tseq -V{volume} {ds} ") item['library'] = ds cmdStr = "dls -l " + ds + " | awk '{print $5}' " @@ -259,12 +259,11 @@ def test_batch_add_del(ansible_zos_module, volumes_with_vvds): for result in results.contacted.values(): vol = result.get("stdout") item['volume'] = vol - prstds = get_tmp_ds_name(5,5) + prstds = get_tmp_ds_name(5,5,True) cmdStr = "dtouch -tseq {0}".format(prstds) hosts.all.shell(cmd=cmdStr) test_info['persistent']['data_set_name'] = prstds results = hosts.all.zos_apf(**test_info) - pprint(vars(results)) for result in results.contacted.values(): assert result.get("rc") == 0 add_exptd = add_batch_expected.format(test_info['batch'][0]['library'], test_info['batch'][0]['volume'], @@ -279,7 +278,6 @@ def test_batch_add_del(ansible_zos_module, volumes_with_vvds): assert actual == add_exptd test_info['state'] = 'absent' results = hosts.all.zos_apf(**test_info) - pprint(vars(results)) for result in results.contacted.values(): assert result.get("rc") == 0 del_exptd = del_expected.replace(" ", "") @@ -315,7 +313,7 @@ def test_operation_list_with_filter(ansible_zos_module, volumes_with_vvds): volume = VolumeHandler.get_available_vol() test_info = dict(library="", state="present", force_dynamic=True) test_info['state'] = 'present' - ds = get_tmp_ds_name(3,2) + ds = get_tmp_ds_name(3,2,True) hosts.all.shell(cmd=f"dtouch -tseq -V{volume} {ds} ") test_info['library'] = ds if test_info.get('volume') is not None: @@ -357,7 +355,7 @@ def test_add_already_present(ansible_zos_module, volumes_with_vvds): volume = VolumeHandler.get_available_vol() test_info = dict(library="", state="present", force_dynamic=True) test_info['state'] = 'present' - ds = get_tmp_ds_name(3,2) + ds = get_tmp_ds_name(3,2,True) hosts.all.shell(cmd=f"dtouch -tseq -V{volume} {ds} ") test_info['library'] = ds if test_info.get('volume') is not None: @@ -394,7 +392,7 @@ def test_del_not_present(ansible_zos_module, volumes_with_vvds): VolumeHandler = Volume_Handler(volumes_with_vvds) volume = VolumeHandler.get_available_vol() test_info = dict(library="", state="present", force_dynamic=True) - ds = get_tmp_ds_name(1,1) + ds = get_tmp_ds_name(1,1,True) hosts.all.shell(cmd=f"dtouch -tseq -V{volume} {ds} ") test_info['library'] = ds if test_info.get('volume') is not None: @@ -438,7 +436,7 @@ def test_add_with_wrong_volume(ansible_zos_module, volumes_with_vvds): volume = VolumeHandler.get_available_vol() test_info = dict(library="", volume="", state="present", force_dynamic=True) test_info['state'] = 'present' - ds = get_tmp_ds_name(3,2) + ds = get_tmp_ds_name(3,2,True) hosts.all.shell(cmd=f"dtouch -tseq -V{volume} {ds} ") test_info['library'] = ds if test_info.get('volume') is not None: @@ -472,7 +470,7 @@ def test_persist_invalid_ds_format(ansible_zos_module, volumes_with_vvds): volume = VolumeHandler.get_available_vol() test_info = dict(library="", persistent=dict(data_set_name="", marker="/* {mark} BLOCK */"), state="present", force_dynamic=True) test_info['state'] = 'present' - ds = get_tmp_ds_name(3,2) + ds = get_tmp_ds_name(3,2,True) hosts.all.shell(cmd=f"dtouch -tseq -V{volume} {ds} ") test_info['library'] = ds if test_info.get('volume') is not None: @@ -506,7 +504,7 @@ def test_persist_invalid_marker(ansible_zos_module, volumes_with_vvds): volume = VolumeHandler.get_available_vol() test_info = dict(library="", persistent=dict(data_set_name="", marker="/* {mark} BLOCK */"), state="present", force_dynamic=True) test_info['state'] = 'present' - ds = get_tmp_ds_name(3,2) + ds = get_tmp_ds_name(3,2,True) hosts.all.shell(cmd=f"dtouch -tseq -V{volume} {ds} ") test_info['library'] = ds if test_info.get('volume') is not None: @@ -539,7 +537,7 @@ def test_persist_invalid_marker_len(ansible_zos_module, volumes_with_vvds): volume = VolumeHandler.get_available_vol() test_info = dict(library="", persistent=dict(data_set_name="", marker="/* {mark} BLOCK */"), state="present", force_dynamic=True) test_info['state'] = 'present' - ds = get_tmp_ds_name(3,2) + ds = get_tmp_ds_name(3,2,True) hosts.all.shell(cmd=f"dtouch -tseq -V{volume} {ds} ") test_info['library'] = ds if test_info.get('volume') is not None: diff --git a/tests/helpers/dataset.py b/tests/helpers/dataset.py index 8e700415c..e25f88e5d 100644 --- a/tests/helpers/dataset.py +++ b/tests/helpers/dataset.py @@ -75,4 +75,4 @@ def get_random_qs(size=7): random_char = random_q[random.choice(range(0, size))] random_q = random_q.replace(random_char, random.choice(special_chars)) count += 1 - return random_q \ No newline at end of file + return random_q diff --git a/tests/helpers/volumes.py b/tests/helpers/volumes.py index bd261f9ed..740fd57dd 100644 --- a/tests/helpers/volumes.py +++ b/tests/helpers/volumes.py @@ -77,6 +77,7 @@ def get_volumes(ansible_zos_module, path): # is a instance of a class to manage the use. hosts = ansible_zos_module list_volumes = [] + all_volumes_list = [] storage_online = [] flag = False iteration = 5 @@ -88,16 +89,19 @@ def get_volumes(ansible_zos_module, path): time.sleep(1) if all_volumes is not None: for volume in all_volumes.contacted.values(): - all_volumes = volume.get('content') - flag = True if len(all_volumes) > 5 else False + temp = volume.get('content') + if temp is not None: + all_volumes_list += temp + flag = True if len(all_volumes_list) > 5 else False iteration -= 1 # Check if the volume is of storage and is active on prefer but also online as a correct option - for info in all_volumes: + for info in all_volumes_list: if "ACTIVATED" in info or "-D U," in info or "UNIT" in info: continue vol_w_info = info.split() - if vol_w_info[2] == 'O' and vol_w_info[4] == "STRG/RSDNT": - storage_online.append(vol_w_info[3]) + if len(vol_w_info)>3: + if vol_w_info[2] == 'O' and vol_w_info[4] == "STRG/RSDNT": + storage_online.append(vol_w_info[3]) # Insert a volumes for the class ls_Volumes to give flag of in_use and correct manage for vol in storage_online: list_volumes.append(vol) From 9740f9e863302ddfe1d96bd3fc91c6170ab815e0 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Fri, 31 May 2024 14:48:31 -0600 Subject: [PATCH 393/495] [Bugfix][zos_data_set] Added support for batch mode when creating gdg (#1515) * Added support for batch mode * changed style * Added changelog * Corrected pyflakes --- .../fragments/1515-gdg_batch_creation.yml | 4 +++ plugins/module_utils/data_set.py | 2 +- plugins/modules/zos_data_set.py | 30 +++++++++++++++++-- .../modules/test_zos_data_set_func.py | 23 ++++++++++++++ 4 files changed, 55 insertions(+), 4 deletions(-) create mode 100644 changelogs/fragments/1515-gdg_batch_creation.yml diff --git a/changelogs/fragments/1515-gdg_batch_creation.yml b/changelogs/fragments/1515-gdg_batch_creation.yml new file mode 100644 index 000000000..019705699 --- /dev/null +++ b/changelogs/fragments/1515-gdg_batch_creation.yml @@ -0,0 +1,4 @@ +trivial: + - zos_data_set - Batch mode when type=gdg failed asking for limit option. + Fix now accepts limit as part of batch option. + (https://github.com/ansible-collections/ibm_zos_core/pull/1515). \ No newline at end of file diff --git a/plugins/module_utils/data_set.py b/plugins/module_utils/data_set.py index bcfd057a3..210218a43 100644 --- a/plugins/module_utils/data_set.py +++ b/plugins/module_utils/data_set.py @@ -1032,7 +1032,7 @@ def create( create_exception.response.rc, create_exception.response.stdout_response + "\n" + create_exception.response.stderr_response ) - except exceptions.DatasetVerificationError as e: + except exceptions.DatasetVerificationError: # verification of a data set spanning multiple volumes is currently broken in ZOAU v.1.3.0 if volumes and len(volumes) > 1: if DataSet.data_set_cataloged(name, volumes): diff --git a/plugins/modules/zos_data_set.py b/plugins/modules/zos_data_set.py index d3ef4e8d1..97bc107fd 100644 --- a/plugins/modules/zos_data_set.py +++ b/plugins/modules/zos_data_set.py @@ -1454,7 +1454,7 @@ def parse_and_validate_args(params): type=dict( type=data_set_type, required=False, - dependencies=["state"], + dependencies=["state", "limit"], choices=DATA_SET_TYPES, ), space_type=dict( @@ -1526,6 +1526,30 @@ def parse_and_validate_args(params): aliases=["volume"], dependencies=["state"], ), + limit=dict( + type="int", + required=False + ), + empty=dict( + type="bool", + required=False + ), + purge=dict( + type="bool", + required=False + ), + scratch=dict( + type="bool", + required=False + ), + extended=dict( + type="bool", + required=False + ), + fifo=dict( + type="bool", + required=False + ), force=dict( type="bool", required=False, @@ -1598,7 +1622,7 @@ def parse_and_validate_args(params): default=False, ), # GDG options - limit=dict(type=limit_type, required=False), + limit=dict(type="int", required=False), empty=dict(type="bool", required=False), purge=dict(type="bool", required=False), scratch=dict(type="bool", required=False), @@ -1721,7 +1745,7 @@ def run_module(): default=False, ), # GDG options - limit=dict(type="int", required=False, no_log=False), + limit=dict(type="int", required=False), empty=dict(type="bool", required=False), purge=dict(type="bool", required=False), scratch=dict(type="bool", required=False), diff --git a/tests/functional/modules/test_zos_data_set_func.py b/tests/functional/modules/test_zos_data_set_func.py index c140a60cf..b8b4fb81a 100644 --- a/tests/functional/modules/test_zos_data_set_func.py +++ b/tests/functional/modules/test_zos_data_set_func.py @@ -1029,6 +1029,29 @@ def test_gdg_create_and_delete_force(ansible_zos_module): hosts.all.zos_data_set(name=data_set_name, state="absent", force=True, type="gdg") +def test_gdg_create_and_delete_force(ansible_zos_module): + try: + hosts = ansible_zos_module + data_set_name = get_tmp_ds_name(2,2, symbols=True) + data_set_list = [f"{data_set_name}A", f"{data_set_name}B", f"{data_set_name}C"] + results = hosts.all.zos_data_set( + batch=[ + {"name":data_set_list[0], "state":"present", "type":"gdg", "limit":3}, + {"name":data_set_list[1], "state":"present", "type":"gdg", "limit":3}, + {"name":data_set_list[2], "state":"present", "type":"gdg", "limit":3}, + ] + ) + for result in results.contacted.values(): + assert result.get("changed") is True + assert result.get("module_stderr") is None + results = hosts.all.shell(cmd=f"dls -tGDG ANSIBLE.*") + for result in results.contacted.values(): + for ds_name in data_set_list: + assert ds_name in result.get("stdout") + finally: + results = hosts.all.shell(cmd=f"drm ANSIBLE.*") + + def test_create_special_chars(ansible_zos_module): try: hosts = ansible_zos_module From 163b7e55a901a248fb015ecafe1f39716fd5d607 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Fri, 31 May 2024 14:48:51 -0600 Subject: [PATCH 394/495] [Enabler][1376]lineinfile_blockinfile_gdg/gds_and_special_character_support (#1516) * Add support * Add test case * Changes for gdgs * Add test case, and fix for gdg and special characters * Fix sanity * Add fragment * Correct conditional * Fix absent test * Adjust check * Fix bad test case * Fix sanity and test cases * Fix unusual names * Fix new response * Fix sanity * Move validation of name * Resolve comments * Remove variables * Add examples of use of gds * Update zos_lineinfile.py * Add comments * Update plugins/modules/zos_blockinfile.py Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> * Update plugins/modules/zos_blockinfile.py Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> * Last chance --------- Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> --- ..._gdgsgds_and_special_character_support.yml | 9 + plugins/module_utils/backup.py | 15 +- plugins/module_utils/data_set.py | 18 ++ plugins/modules/zos_blockinfile.py | 113 +++++++- plugins/modules/zos_lineinfile.py | 269 +++++++++++++++--- .../modules/test_zos_blockinfile_func.py | 80 ++++++ .../modules/test_zos_lineinfile_func.py | 99 ++++++- 7 files changed, 548 insertions(+), 55 deletions(-) create mode 100644 changelogs/fragments/1516-lineinfile_blockinfile_gdgsgds_and_special_character_support.yml diff --git a/changelogs/fragments/1516-lineinfile_blockinfile_gdgsgds_and_special_character_support.yml b/changelogs/fragments/1516-lineinfile_blockinfile_gdgsgds_and_special_character_support.yml new file mode 100644 index 000000000..6a32a484a --- /dev/null +++ b/changelogs/fragments/1516-lineinfile_blockinfile_gdgsgds_and_special_character_support.yml @@ -0,0 +1,9 @@ +minor_changes: + - zos_lineinfile - Added support for GDG and GDS relative name notation to use a data set. + And backup in new generations. Added support for data set names with special characters + like $, /#, /- and @. + (https://github.com/ansible-collections/ibm_zos_core/pull/1516). + - zos_blockinfile - Added support for GDG and GDS relative name notation to use a data set. + And backup in new generations. Added support for data set names with special characters + like $, /#, /- and @. + (https://github.com/ansible-collections/ibm_zos_core/pull/1516). \ No newline at end of file diff --git a/plugins/module_utils/backup.py b/plugins/module_utils/backup.py index 5b3d09614..f409323d0 100644 --- a/plugins/module_utils/backup.py +++ b/plugins/module_utils/backup.py @@ -95,10 +95,10 @@ def mvs_file_backup(dsn, bk_dsn=None, tmphlq=None): dsn = _validate_data_set_name(dsn).upper() if is_member(dsn): # added the check for a sub-mmember, just in this case - if not bk_dsn: - bk_dsn = extract_dsname(dsn) + "({0})".format(temp_member_name()) - elif "(" not in bk_dsn: + if not bk_dsn or "(" not in bk_dsn: bk_dsn = extract_dsname(dsn) + "({0})".format(temp_member_name()) + elif DataSet.is_gds_positive_relative_name(bk_dsn): + bk_dsn = datasets.create(bk_dsn) bk_dsn = _validate_data_set_name(bk_dsn).upper() try: @@ -128,7 +128,10 @@ def mvs_file_backup(dsn, bk_dsn=None, tmphlq=None): except exceptions.ZOAUException as copy_exception: cp_rc = copy_exception.response.rc else: - cp_rc = _copy_ds(dsn, bk_dsn) + if DataSet.is_gds_positive_relative_name(bk_dsn): + cp_rc = datasets.copy(dsn, bk_dsn) + else: + cp_rc = _copy_ds(dsn, bk_dsn) if cp_rc == 12: # The data set is probably a PDS or PDSE # Delete allocated backup that was created when attempting to use _copy_ds() @@ -242,8 +245,8 @@ def _copy_ds(ds, bk_ds): module = AnsibleModuleHelper(argument_spec={}) _allocate_model(bk_ds, ds) repro_cmd = """ REPRO - - INDATASET({0}) - - OUTDATASET({1})""".format( + INDATASET('{0}') - + OUTDATASET('{1}')""".format( ds, bk_ds ) rc, out, err = module.run_command( diff --git a/plugins/module_utils/data_set.py b/plugins/module_utils/data_set.py index 210218a43..7b294f6d4 100644 --- a/plugins/module_utils/data_set.py +++ b/plugins/module_utils/data_set.py @@ -1345,6 +1345,23 @@ def is_gds_relative_name(name): match = re.fullmatch(pattern, name) return bool(match) + @staticmethod + def is_gds_positive_relative_name(name): + """Determine if name is a gdg relative positive name + based on the GDS relative name syntax eg. 'USER.GDG(+1)'. + Parameters + ---------- + name : str + Data set name to determine if is a GDS relative name. + Returns + ------- + bool + Whether the name is a GDS positive relative name. + """ + pattern = r'(.+)\(([\\]?[+]\d+)\)' + match = re.fullmatch(pattern, name) + return bool(match) + @staticmethod def resolve_gds_absolute_name(relative_name): """Given a GDS relative name, returns its absolute name. @@ -1727,6 +1744,7 @@ def _gather_data_set_info(self): dict -- Dictionary containing data set attributes """ result = dict() + self.data_set = self.data_set.upper().replace("\\", '') listds_rc, listds_out, listds_err = mvs_cmd.ikjeft01( " LISTDS '{0}'".format(self.data_set), authorized=True ) diff --git a/plugins/modules/zos_blockinfile.py b/plugins/modules/zos_blockinfile.py index e10ef522e..8c1485152 100644 --- a/plugins/modules/zos_blockinfile.py +++ b/plugins/modules/zos_blockinfile.py @@ -38,6 +38,8 @@ - The location can be a UNIX System Services (USS) file, PS (sequential data set), member of a PDS or PDSE, PDS, PDSE. - The USS file must be an absolute pathname. + - Generation data set (GDS) relative name of generation already + created. C(e.g. SOME.CREATION(-1).) type: str aliases: [ path, destfile, name ] required: true @@ -110,6 +112,7 @@ - When set to C(true), the module creates a backup file or data set. - The backup file name will be returned on either success or failure of module execution such that data can be retrieved. + - Use generation data set (GDS) relative positive name. C(e.g. SOME.CREATION(+1)) required: false type: bool default: false @@ -279,6 +282,20 @@ marker_begin: "Begin Ansible Block Insertion 2" marker_end: "End Ansible Block Insertion 2" block: "{{ CONTENT }}" + +- name: Add a block to a gds + zos_blockinfile: + src: TEST.SOME.CREATION(0) + insertafter: EOF + block: "{{ CONTENT }}" + +- name: Add a block to dataset and backup in a new generation of gds + zos_blockinfile: + src: SOME.CREATION.TEST + insertbefore: BOF + backup: True + backup_name: CREATION.GDS(+1) + block: "{{ CONTENT }}" ''' RETURN = r""" @@ -456,6 +473,18 @@ def quotedString(string): def quotedString_double_quotes(string): + """Deletes the quote mark on strings. + + Parameters + ---------- + string : str + String to modify quote marks from. + + Returns + ------- + str + String scaping the quote marks. + """ # add escape if string was quoted if not isinstance(string, str): return string @@ -463,6 +492,25 @@ def quotedString_double_quotes(string): def check_double_quotes(marker, ins_bef, ins_aft, block): + """Verify the content of strings to determine if double + quotes are in the string. + + Parameters + ---------- + marker : str + String to verify quote marks from. + ins_bef : str + String to verify quote marks from. + ins_aft : str + String to verify quote marks from. + block : str + String to verify quote marks from. + + Returns + ------- + bool + If any string contain double quotes. + """ if marker: if '"' in marker: return True @@ -479,6 +527,42 @@ def check_double_quotes(marker, ins_bef, ins_aft, block): def execute_dmod(src, block, marker, force, encoding, state, module, ins_bef=None, ins_aft=None): + """Execute in terminal dmod command directly. + + Parameters + ---------- + src : str + The z/OS USS file or data set to modify. + block : str + The block to insert/replace into the src. + marker : str + The block will be inserted/updated with the markers. + force : bool + If not empty passes True option to dmod cmd. + encoding : str + Encoding of the src. + state : bool + Determine if will add or delete the block. + module : obj + Object to execute the command. + ins_bef : str + Insert the block before matching '*regex*' pattern or BOF. + choices: + - BOF + - '*regex*' + ins_aft : str + Insert the block after matching '*regex*' pattern or EOF. + choices: + - EOF + - '*regex*' + + Returns + ------- + int + RC of the execution of the command. + cmd + Command executed. + """ block = block.replace('"', '\\"') force = "-f" if force else "" encoding = "-c {0}".format(encoding) if encoding else "" @@ -505,6 +589,18 @@ def execute_dmod(src, block, marker, force, encoding, state, module, ins_bef=Non def clean_command(cmd): + """Deletes escaped characters from the str. + + Parameters + ---------- + cmd : str + Command to clean any escaped characters. + + Returns + ------- + str + Command without escaped characters. + """ cmd = cmd.replace('/c\\\\', '') cmd = cmd.replace('/a\\\\', '', ) cmd = cmd.replace('/i\\\\', '', ) @@ -667,18 +763,25 @@ def main(): marker = "{0}\\n{1}\\n{2}".format(marker_begin, marker_end, marker) block = transformBlock(block, ' ', indentation) # analysis the file type + if "/" not in src: + dataset = data_set.MVSDataSet( + name=src + ) + src = dataset.name + + if data_set.DataSet.is_gds_relative_name(src): + module.fail_json(msg="{0} does not exist".format(src)) + ds_utils = data_set.DataSetUtils(src) if not ds_utils.exists(): message = "{0} does NOT exist".format(str(src)) module.fail_json(msg=message) file_type = ds_utils.ds_type() - if file_type == 'USS': - file_type = 1 - else: + + if file_type != "USS": if file_type not in DS_TYPE: message = "{0} data set type is NOT supported".format(str(file_type)) module.fail_json(msg=message) - file_type = 0 return_content = None if backup: @@ -688,7 +791,7 @@ def main(): if isinstance(backup, bool): backup = None try: - if file_type: + if file_type == "USS": result['backup_name'] = Backup.uss_file_backup(src, backup_name=backup, compress=False) else: result['backup_name'] = Backup.mvs_file_backup(dsn=src, bk_dsn=backup, tmphlq=tmphlq) diff --git a/plugins/modules/zos_lineinfile.py b/plugins/modules/zos_lineinfile.py index 0988ef2d4..38fb5d116 100644 --- a/plugins/modules/zos_lineinfile.py +++ b/plugins/modules/zos_lineinfile.py @@ -36,6 +36,8 @@ - The location can be a UNIX System Services (USS) file, PS (sequential data set), member of a PDS or PDSE, PDS, PDSE. - The USS file must be an absolute pathname. + - Generation data set (GDS) relative name of generation already + created. C(e.g. SOME.CREATION(-1).) type: str aliases: [ path, destfile, name ] required: true @@ -127,6 +129,7 @@ if I(backup=true). - The backup file name will be return on either success or failure of module execution such that data can be retrieved. + - Use generation data set (GDS) relative positive name SOME.CREATION(+1) required: false type: bool default: false @@ -237,6 +240,20 @@ insertafter: EOF line: 'Should be a working test now' force: true + +- name: Add a line to a gds + zos_lineinfile: + src: SOME.CREATION(-2) + insertafter: EOF + line: 'Should be a working test now' + +- name: Add a line to dataset and backup in a new generation of gds + zos_lineinfile: + src: SOME.CREATION.TEST + insertafter: EOF + backup: True + backup_name: CREATION.GDS(+1) + line: 'Should be a working test now' """ RETURN = r""" @@ -377,6 +394,148 @@ def absent(src, line, regexp, encoding, force): return datasets.lineinfile(src, line, regex=regexp, encoding=encoding, state=False, debug=True, force=force) +def execute_dsed(src, state, encoding, module, line=False, first_match=False, force=False, backrefs=False, regex=None, ins_bef=None, ins_aft=None): + """Execute in terminal dsed command directly + + Parameters + ---------- + src : str + The z/OS USS file or data set to modify. + state : bool + Determine if will add or delete the line. + encoding : str + Encoding of the src. + module : obj + Object to execute the command. + line : str + The line to insert/replace into the src. + regex : str + The regular expression to look for in every line of the src. + If regexp matches, ins_aft/ins_bef will be ignored. + ins_aft : str + Insert the line after matching '*regex*' pattern or EOF. + choices: + - EOF + - '*regex*' + ins_bef : str + Insert the line before matching '*regex*' pattern or BOF. + choices: + - BOF + - '*regex*' + first_match : bool + Take the first matching regex pattern. + backrefs : bool + Back reference. + force : bool + force for modify a member part of a task in execution. + + Returns + ------- + int + RC of the execution of the command. + cmd + Command executed. + stdout + Stdout of the command execution. + """ + options = "" + force = " -f " if force else "" + backrefs = " -r " if backrefs else "" + encoding = " -c {0} ".format(encoding) + match = "1" if first_match else "$" + + if state: + if regex: + if ins_aft: + if ins_aft == "EOF" or ins_aft == "eof": + options += f' -s -e "/{regex}/c\\{line}/{match}" -e "$ a\\{line}" "{src}" ' + else: + options += f' -s -e "/{regex}/c\\{line}/{match}" -e "/{ins_aft}/a\\{line}/{match}" -e "$ a\\{line}" "{src}" ' + + elif ins_bef: + if ins_bef == "BOF" or ins_aft == "bof": + options += f' -s -e "/{regex}/c\\{line}/{match}" -e "1 i\\{line}" "{src}" ' + else: + options += f' -s -e "/{regex}/c\\{line}/{match}" -e "/{ins_bef}/i\\{line}/{match}" -e "$ a\\{line}" "{src}" ' + else: + options += f' "/{regex}/c\\{line}/{match}" "{src}" ' + else: + if ins_aft: + if ins_aft == "EOF" or ins_aft == "eof": + options += f' "$ a\\{line}" "{src}" ' + else: + options += f' -s -e "/{ins_aft}/a\\{line}/{match}" -e "$ a\\{line}" "{src}" ' + elif ins_bef: + if ins_bef == "BOF" or ins_aft == "bof": + options += f' "1 i\\{line}" "{src}" ' + else: + options += f' -s -e "/{ins_bef}/i\\{line}/{match}" -e "$ a\\{line}" "{src}" ' + else: + raise ValueError("Incorrect parameters required regex and/or ins_aft or ins_bef") + else: + if regex: + if line: + options += f'-s -e "/{regex}/d" -e "/{line}/d" "{src}" ' + else: + options += f'"/{line}/d" "{src}" ' + else: + options += f'"/{line}/d" "{src}" ' + + cmd = "dsed {0}{1}{2}{3}".format(force, backrefs, encoding, options) + + rc, stdout, stderr = module.run_command(cmd) + cmd = clean_command_output(cmd) + return rc, cmd, stdout + + +def clean_command_output(cmd): + """Deletes escaped characters from the str. + + Parameters + ---------- + cmd : str + Command to clean any escaped characters. + + Returns + ------- + str + Command without escaped character. + """ + cmd = cmd.replace('/c\\\\', '') + cmd = cmd.replace('/a\\\\', '', ) + cmd = cmd.replace('/i\\\\', '', ) + cmd = cmd.replace('$ a\\\\', '', ) + cmd = cmd.replace('1 i\\\\', '', ) + cmd = cmd.replace('/c\\', '') + cmd = cmd.replace('/a\\', '') + cmd = cmd.replace('/i\\', '') + cmd = cmd.replace('$ a\\', '') + cmd = cmd.replace('1 i\\', '') + cmd = cmd.replace('/d', '') + cmd = cmd.replace('\\\\d', '') + cmd = cmd.replace('\\n', '\n') + cmd = cmd.replace('\\"', '"') + return cmd + + +def check_special_characters(src): + """Verify if the string contains special characters + such as $ @ # -. + + Parameters + ---------- + string : str + Given string. + + Returns + ------- + bool + If the string match any special character. + """ + special_characters = ['$', '@', '#', '-'] + return any(character in special_characters for character in src) + + def quotedString(string): """Add escape if string was quoted. @@ -474,7 +633,6 @@ def main(): module.fail_json(msg="Parameter verification failed", stderr=str(err)) backup = parsed_args.get('backup') - # if backup_name is provided, update backup variable if parsed_args.get('backup_name') and backup: backup = parsed_args.get('backup_name') backrefs = parsed_args.get('backrefs') @@ -500,7 +658,22 @@ def main(): if regexp is None and line is None: module.fail_json(msg='one of line or regexp is required with state=absent') + is_gds = False + has_special_chars = False + dmod_exec = False + return_content = "" + # analysis the file type + if "/" not in src: + dataset = data_set.MVSDataSet( + name=src + ) + src = dataset.name + is_gds = dataset.is_gds_active + + if data_set.DataSet.is_gds_relative_name(src) and is_gds is False: + module.fail_json(msg="{0} does not exist".format(src)) + ds_utils = data_set.DataSetUtils(src) # Check if dest/src exists @@ -508,24 +681,21 @@ def main(): module.fail_json(msg="{0} does not exist".format(src)) file_type = ds_utils.ds_type() - if file_type == 'USS': - file_type = 1 - else: + if file_type != "USS": + has_special_chars = check_special_characters(src) if file_type not in DS_TYPE: message = "{0} data set type is NOT supported".format(str(file_type)) module.fail_json(msg=message) - file_type = 0 + + dmod_exec = has_special_chars or is_gds # make sure the default encoding is set if null was passed if not encoding: encoding = "IBM-1047" if backup: - # backup can be True(bool) or none-zero length string. string indicates that backup_name was provided. - # setting backup to None if backup_name wasn't provided. if backup=None, Backup module will use - # pre-defined naming scheme and return the created destination name. if isinstance(backup, bool): backup = None try: - if file_type: + if file_type == "USS": result['backup_name'] = Backup.uss_file_backup(src, backup_name=backup, compress=False) else: result['backup_name'] = Backup.mvs_file_backup(dsn=src, bk_dsn=backup, tmphlq=tmphlq) @@ -534,38 +704,57 @@ def main(): # state=present, insert/replace a line with matching regex pattern # state=absent, delete lines with matching regex pattern if parsed_args.get('state') == 'present': - return_content = present(src, quotedString(line), quotedString(regexp), quotedString(ins_aft), quotedString(ins_bef), encoding, firstmatch, - backrefs, force) + if dmod_exec: + rc, cmd, stodut = execute_dsed(src, state=True, encoding=encoding, module=module, line=line, first_match=firstmatch, + force=force, backrefs=backrefs, regex=regexp, ins_bef=ins_bef, ins_aft=ins_aft) + result['rc'] = rc + result['cmd'] = cmd + result['stodut'] = stodut + result['changed'] = True if rc == 0 else False + stderr = 'Failed to insert new entry' if rc != 0 else "" + else: + return_content = present(src, quotedString(line), quotedString(regexp), quotedString(ins_aft), quotedString(ins_bef), encoding, firstmatch, + backrefs, force) else: - return_content = absent(src, quotedString(line), quotedString(regexp), encoding, force) - stdout = return_content.stdout_response - stderr = return_content.stderr_response - rc = return_content.rc - stdout = stdout.replace('/c\\', '/c\\\\') - stdout = stdout.replace('/a\\', '/a\\\\') - stdout = stdout.replace('/i\\', '/i\\\\') - stdout = stdout.replace('$ a\\', '$ a\\\\') - stdout = stdout.replace('1 i\\', '1 i\\\\') - stdout = stdout.replace('/d', '\\\\d') - if line: - stdout = stdout.replace(line, quotedString(line)) - if regexp: - stdout = stdout.replace(regexp, quotedString(regexp)) - if ins_aft: - stdout = stdout.replace(ins_aft, quotedString(ins_aft)) - if ins_bef: - stdout = stdout.replace(ins_bef, quotedString(ins_bef)) - try: - ret = json.loads(stdout) - except Exception: - messageDict = dict(msg="dsed return content is NOT in json format", stdout=str(stdout), stderr=str(stderr), rc=rc) - if result.get('backup_name'): - messageDict['backup_name'] = result['backup_name'] - module.fail_json(**messageDict) - - result['cmd'] = ret['cmd'] - result['changed'] = ret['changed'] - result['found'] = ret['found'] + if dmod_exec: + rc, cmd, stodut = execute_dsed(src, state=False, encoding=encoding, module=module, line=line, first_match=firstmatch, force=force, + backrefs=backrefs, regex=regexp, ins_bef=ins_bef, ins_aft=ins_aft) + result['rc'] = rc + result['cmd'] = cmd + result['stodut'] = stodut + result['changed'] = True if rc == 0 else False + stderr = 'Failed to insert new entry' if rc != 0 else "" + else: + return_content = absent(src, quotedString(line), quotedString(regexp), encoding, force) + if not dmod_exec: + stdout = return_content.stdout_response + stderr = return_content.stderr_response + rc = return_content.rc + stdout = stdout.replace('/c\\', '/c\\\\') + stdout = stdout.replace('/a\\', '/a\\\\') + stdout = stdout.replace('/i\\', '/i\\\\') + stdout = stdout.replace('$ a\\', '$ a\\\\') + stdout = stdout.replace('1 i\\', '1 i\\\\') + stdout = stdout.replace('/d', '\\\\d') + if line: + stdout = stdout.replace(line, quotedString(line)) + if regexp: + stdout = stdout.replace(regexp, quotedString(regexp)) + if ins_aft: + stdout = stdout.replace(ins_aft, quotedString(ins_aft)) + if ins_bef: + stdout = stdout.replace(ins_bef, quotedString(ins_bef)) + try: + ret = json.loads(stdout) + except Exception: + messageDict = dict(msg="dsed return content is NOT in json format", stdout=str(stdout), stderr=str(stderr), rc=rc) + if result.get('backup_name'): + messageDict['backup_name'] = result['backup_name'] + module.fail_json(**messageDict) + + result['cmd'] = ret['cmd'] + result['changed'] = ret['changed'] + result['found'] = ret['found'] # Only return 'rc' if stderr is not empty to not fail the playbook run in a nomatch case # That information will be given with 'changed' and 'found' if len(stderr): diff --git a/tests/functional/modules/test_zos_blockinfile_func.py b/tests/functional/modules/test_zos_blockinfile_func.py index 635da733e..713a9873e 100644 --- a/tests/functional/modules/test_zos_blockinfile_func.py +++ b/tests/functional/modules/test_zos_blockinfile_func.py @@ -1278,6 +1278,86 @@ def test_ds_block_insertafter_regex_force(ansible_zos_module, dstype): hosts.all.shell(cmd='rm -r /tmp/disp_shr') hosts.all.zos_data_set(name=default_data_set_name, state="absent") + +@pytest.mark.ds +def test_gdd_ds_insert_block(ansible_zos_module): + hosts = ansible_zos_module + params = dict(insertafter="EOF", block="ZOAU_ROOT=/mvsutil-develop_dsed\nZOAU_HOME=\\$ZOAU_ROOT\nZOAU_DIR=\\$ZOAU_ROOT", state="present") + ds_name = get_tmp_ds_name(3, 2) + try: + # Set environment + hosts.all.shell(cmd="dtouch -tGDG -L3 {0}".format(ds_name)) + hosts.all.shell(cmd="""dtouch -tseq "{0}(+1)" """.format(ds_name)) + hosts.all.shell(cmd="""dtouch -tseq "{0}(+1)" """.format(ds_name)) + + params["src"] = ds_name + "(0)" + results = hosts.all.zos_blockinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["src"])) + for result in results.contacted.values(): + assert result.get("stdout") == "# BEGIN ANSIBLE MANAGED BLOCK\nZOAU_ROOT=/mvsutil-develop_dsed\nZOAU_HOME=$ZOAU_ROOT\nZOAU_DIR=$ZOAU_ROOT\n# END ANSIBLE MANAGED BLOCK" + + params["src"] = ds_name + "(-1)" + results = hosts.all.zos_blockinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["src"])) + for result in results.contacted.values(): + assert result.get("stdout") == "# BEGIN ANSIBLE MANAGED BLOCK\nZOAU_ROOT=/mvsutil-develop_dsed\nZOAU_HOME=$ZOAU_ROOT\nZOAU_DIR=$ZOAU_ROOT\n# END ANSIBLE MANAGED BLOCK" + + params_w_bck = dict(insertafter="eof", block="export ZOAU_ROOT\nexport ZOAU_HOME\nexport ZOAU_DIR", state="present", backup=True, backup_name=ds_name + "(+1)") + params_w_bck["src"] = ds_name + "(-1)" + results = hosts.all.zos_blockinfile(**params_w_bck) + for result in results.contacted.values(): + assert result.get("changed") == 1 + assert result.get("rc") == 0 + backup = ds_name + "(0)" + results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(backup)) + for result in results.contacted.values(): + assert result.get("stdout") == "# BEGIN ANSIBLE MANAGED BLOCK\nZOAU_ROOT=/mvsutil-develop_dsed\nZOAU_HOME=$ZOAU_ROOT\nZOAU_DIR=$ZOAU_ROOT\n# END ANSIBLE MANAGED BLOCK" + + params["src"] = ds_name + "(-3)" + results = hosts.all.zos_blockinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 0 + finally: + hosts.all.shell(cmd="""drm "ANSIBLE.*" """) + + +@pytest.mark.ds +def test_special_characters_ds_insert_block(ansible_zos_module): + hosts = ansible_zos_module + params = dict(insertafter="eof", block="ZOAU_ROOT=/mvsutil-develop_dsed\nZOAU_HOME=\\$ZOAU_ROOT\nZOAU_DIR=\\$ZOAU_ROOT", state="present") + ds_name = get_tmp_ds_name(5, 5, symbols=True) + backup = get_tmp_ds_name(6, 6, symbols=True) + try: + result = hosts.all.zos_data_set(name=ds_name, type="seq", state="present") + + params["src"] = ds_name + results = hosts.all.zos_blockinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + src = ds_name.replace('$', "\$") + results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(src)) + for result in results.contacted.values(): + assert result.get("stdout") == "# BEGIN ANSIBLE MANAGED BLOCK\nZOAU_ROOT=/mvsutil-develop_dsed\nZOAU_HOME=$ZOAU_ROOT\nZOAU_DIR=$ZOAU_ROOT\n# END ANSIBLE MANAGED BLOCK" + + params_w_bck = dict(insertafter="eof", block="export ZOAU_ROOT\nexport ZOAU_HOME\nexport ZOAU_DIR", state="present", backup=True, backup_name=backup) + params_w_bck["src"] = ds_name + results = hosts.all.zos_blockinfile(**params_w_bck) + for result in results.contacted.values(): + assert result.get("changed") == 1 + assert result.get("rc") == 0 + backup = backup.replace('$', "\$") + results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(backup)) + for result in results.contacted.values(): + assert result.get("stdout") == "# BEGIN ANSIBLE MANAGED BLOCK\nZOAU_ROOT=/mvsutil-develop_dsed\nZOAU_HOME=$ZOAU_ROOT\nZOAU_DIR=$ZOAU_ROOT\n# END ANSIBLE MANAGED BLOCK" + + finally: + hosts.all.shell(cmd="""drm "ANSIBLE.*" """) + + ######################### # Encoding tests ######################### diff --git a/tests/functional/modules/test_zos_lineinfile_func.py b/tests/functional/modules/test_zos_lineinfile_func.py index a9a29227d..700fefe1c 100644 --- a/tests/functional/modules/test_zos_lineinfile_func.py +++ b/tests/functional/modules/test_zos_lineinfile_func.py @@ -463,7 +463,7 @@ def test_uss_line_replace_nomatch_insertbefore_nomatch(ansible_zos_module): @pytest.mark.uss def test_uss_line_absent(ansible_zos_module): hosts = ansible_zos_module - params = dict(regexp="ZOAU_ROOT=", line="", state="absent") + params = dict(regexp="ZOAU_ROOT=", line="ZOAU_ROOT=/usr/lpp/zoautil/v100", state="absent") full_path = TEST_FOLDER_LINEINFILE + inspect.stack()[0][3] content = TEST_CONTENT try: @@ -471,6 +471,7 @@ def test_uss_line_absent(ansible_zos_module): params["path"] = full_path results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): + print(result) assert result.get("changed") == 1 results = hosts.all.shell(cmd="cat {0}".format(params["path"])) for result in results.contacted.values(): @@ -675,7 +676,97 @@ def test_ds_line_replace_match_insertbefore_ignore(ansible_zos_module, dstype): finally: remove_ds_environment(ansible_zos_module, ds_name) -#GH Issue #1244 / JIRA NAZARE-10439 + +@pytest.mark.ds +def test_gdd_ds_insert_line(ansible_zos_module): + hosts = ansible_zos_module + params = dict(insertafter="eof", line="ZOAU_ROOT=/mvsutil-develop_dsed", state="present") + ds_name = get_tmp_ds_name(3, 2) + try: + # Set environment + hosts.all.shell(cmd="dtouch -tGDG -L3 {0}".format(ds_name)) + hosts.all.shell(cmd="""dtouch -tseq "{0}(+1)" """.format(ds_name)) + hosts.all.shell(cmd="""dtouch -tseq "{0}(+1)" """.format(ds_name)) + + params["src"] = ds_name + "(0)" + results = hosts.all.zos_lineinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + cmd = result.get("cmd").split() + for cmd_p in cmd: + if ds_name in cmd_p: + dataset = cmd_p + results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(dataset)) + for result in results.contacted.values(): + assert result.get("stdout") == "ZOAU_ROOT=/mvsutil-develop_dsed" + + params["src"] = ds_name + "(-1)" + results = hosts.all.zos_lineinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + cmd = result.get("cmd").split() + for cmd_p in cmd: + if ds_name in cmd_p: + dataset = cmd_p + results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(dataset)) + for result in results.contacted.values(): + assert result.get("stdout") == "ZOAU_ROOT=/mvsutil-develop_dsed" + + params_w_bck = dict(insertafter="eof", line="export ZOAU_ROOT", state="present", backup=True, backup_name=ds_name + "(+1)") + params_w_bck["src"] = ds_name + "(-1)" + results = hosts.all.zos_lineinfile(**params_w_bck) + for result in results.contacted.values(): + assert result.get("changed") == 1 + assert result.get("rc") == 0 + backup = ds_name + "(0)" + results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(backup)) + for result in results.contacted.values(): + assert result.get("stdout") == "ZOAU_ROOT=/mvsutil-develop_dsed" + + params["src"] = ds_name + "(-3)" + results = hosts.all.zos_lineinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 0 + finally: + hosts.all.shell(cmd="""drm "{0}*" """.format(ds_name)) + + +@pytest.mark.ds +def test_special_characters_ds_insert_line(ansible_zos_module): + hosts = ansible_zos_module + params = dict(insertafter="eof", line="ZOAU_ROOT=/mvsutil-develop_dsed", state="present") + ds_name = get_tmp_ds_name(5, 5, symbols=True) + backup = get_tmp_ds_name(6, 6, symbols=True) + try: + # Set environment + result = hosts.all.zos_data_set(name=ds_name, type="seq", state="present") + + params["src"] = ds_name + results = hosts.all.zos_lineinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + src = ds_name.replace('$', "\$") + results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(src)) + for result in results.contacted.values(): + assert result.get("stdout") == "ZOAU_ROOT=/mvsutil-develop_dsed" + + params_w_bck = dict(insertafter="eof", line="export ZOAU_ROOT", state="present", backup=True, backup_name=backup) + params_w_bck["src"] = ds_name + results = hosts.all.zos_lineinfile(**params_w_bck) + for result in results.contacted.values(): + print(result) + assert result.get("changed") == 1 + assert result.get("rc") == 0 + backup = backup.replace('$', "\$") + results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(backup)) + for result in results.contacted.values(): + assert result.get("stdout") == "ZOAU_ROOT=/mvsutil-develop_dsed" + + finally: + hosts.all.shell(cmd="""drm "ANSIBLE.*" """.format(ds_name)) + + +#GH Issue #1244 #@pytest.mark.ds #@pytest.mark.parametrize("dstype", DS_TYPE) #def test_ds_line_replace_nomatch_insertafter_match(ansible_zos_module, dstype): @@ -773,7 +864,7 @@ def test_ds_line_replace_match_insertbefore_ignore(ansible_zos_module, dstype): def test_ds_line_absent(ansible_zos_module, dstype): hosts = ansible_zos_module ds_type = dstype - params = dict(regexp="ZOAU_ROOT=", line="", state="absent") + params = dict(regexp="ZOAU_ROOT=", line="ZOAU_ROOT=/usr/lpp/zoautil/v100", state="absent") ds_name = get_tmp_ds_name() temp_file = "/tmp/" + ds_name content = TEST_CONTENT @@ -973,7 +1064,7 @@ def test_ds_line_does_not_insert_repeated(ansible_zos_module, dstype): # Run lineinfle module with same params again, ensure duplicate entry is not made into file hosts.all.zos_lineinfile(**params) results = hosts.all.shell(cmd="""dgrep -c 'ZOAU_ROOT=/usr/lpp/zoautil/v10' "{0}" """.format(params["path"])) - response = params["path"] + " " + "1" + response = params["path"] + " " + "1" for result in results.contacted.values(): assert result.get("stdout") == response finally: From ab5678404b9762cfa6da52b229569e818f7874b7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Tue, 4 Jun 2024 14:17:00 -0600 Subject: [PATCH 395/495] [Enabler][1377]mvs_raw_support_gdg_gds_special_character (#1525) * Add first iteration mvs_raw * Add soultion * Add fragment * Add documentation * Update plugins/modules/zos_mvs_raw.py Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> * Update plugins/modules/zos_mvs_raw.py Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> * Fix raise * Update plugins/modules/zos_mvs_raw.py Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> --------- Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> --- ..._raw_support_gdg_gds_special_character.yml | 4 + plugins/modules/zos_mvs_raw.py | 76 ++++++++++++- .../modules/test_zos_mvs_raw_func.py | 107 ++++++++++++++++++ 3 files changed, 186 insertions(+), 1 deletion(-) create mode 100644 changelogs/fragments/1525-mvs_raw_support_gdg_gds_special_character.yml diff --git a/changelogs/fragments/1525-mvs_raw_support_gdg_gds_special_character.yml b/changelogs/fragments/1525-mvs_raw_support_gdg_gds_special_character.yml new file mode 100644 index 000000000..969347bdc --- /dev/null +++ b/changelogs/fragments/1525-mvs_raw_support_gdg_gds_special_character.yml @@ -0,0 +1,4 @@ +minor_changes: + - zos_mvs_raw - Added support for GDG and GDS relative name notation to use a data set. + Added support for data set names with special characters like $, /#, /- and @. + (https://github.com/ansible-collections/ibm_zos_core/pull/1525). \ No newline at end of file diff --git a/plugins/modules/zos_mvs_raw.py b/plugins/modules/zos_mvs_raw.py index d05fef6db..45f89e023 100644 --- a/plugins/modules/zos_mvs_raw.py +++ b/plugins/modules/zos_mvs_raw.py @@ -88,6 +88,8 @@ data_set_name: description: - The data set name. + - A data set name can be a GDS relative name. + - When using GDS relative name and it is a positive generation, disposition new must be used. type: str required: false type: @@ -705,6 +707,8 @@ data_set_name: description: - The data set name. + - A data set name can be a GDS relative name. + - When using GDS relative name and it is a positive generation, disposition new must be used. type: str required: false type: @@ -1582,6 +1586,37 @@ RECORDSIZE(4086 32600) - VOLUMES(222222) - UNIQUE) + +- name: List data sets matching pattern in catalog, + save output to a new generation of gdgs. + zos_mvs_raw: + program_name: idcams + auth: true + dds: + - dd_data_set: + dd_name: sysprint + data_set_name: TEST.CREATION(+1) + disposition: new + return_content: + type: text + - dd_input: + dd_name: sysin + content: " LISTCAT ENTRIES('SOME.DATASET.*')" + +- name: List data sets matching pattern in catalog, + save output to a gds already created. + zos_mvs_raw: + program_name: idcams + auth: true + dds: + - dd_data_set: + dd_name: sysprint + data_set_name: TEST.CREATION(-2) + return_content: + type: text + - dd_input: + dd_name: sysin + content: " LISTCAT ENTRIES('SOME.DATASET.*')" """ from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.better_arg_parser import ( @@ -1602,7 +1637,7 @@ RawInputDefinition, RawOutputDefinition, ) - +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import data_set from ansible.module_utils.basic import AnsibleModule from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.ansible_module import ( AnsibleModuleHelper, @@ -2545,6 +2580,9 @@ def get_dd_name_and_key(dd): key = "" if dd.get("dd_data_set"): dd_name = dd.get("dd_data_set").get("dd_name") + data_set_name = resolve_data_set_names(dd.get("dd_data_set").get("data_set_name"), + dd.get("dd_data_set").get("disposition")) + dd.get("dd_data_set")["data_set_name"] = data_set_name key = "dd_data_set" elif dd.get("dd_unix"): dd_name = dd.get("dd_unix").get("dd_name") @@ -2589,6 +2627,42 @@ def set_extra_attributes_in_dd(dd, tmphlq, key): return dd +def resolve_data_set_names(dataset, disposition): + """Resolve cases for data set names as relative gds or positive + that could be accepted if disposition is new. + Parameters + ---------- + dataset : str + Data set name to determine if is a GDS relative name or regular name. + disposition : str + Disposition of data set for it creation. + Returns + ------- + str + The absolute name of dataset or relative positive if disposition is new. + """ + if data_set.DataSet.is_gds_relative_name(dataset): + if data_set.DataSet.is_gds_positive_relative_name(dataset): + if disposition and disposition == "new": + return dataset + else: + raise ("To generate a new GDS as {0} disposition 'new' is required.".format(dataset)) + else: + data = data_set.MVSDataSet( + name=dataset + ) + src = data.name + if data.is_gds_active: + if disposition and disposition == "new": + raise ("GDS {0} already created, incorrect parameters for disposition and data_set_name".format(src)) + else: + return src + else: + raise ("{0} does not exist".format(src)) + else: + return dataset + + def build_data_definition(dd): """Build a DataDefinition object for a particular DD parameter. diff --git a/tests/functional/modules/test_zos_mvs_raw_func.py b/tests/functional/modules/test_zos_mvs_raw_func.py index 49e55290d..f1f901064 100644 --- a/tests/functional/modules/test_zos_mvs_raw_func.py +++ b/tests/functional/modules/test_zos_mvs_raw_func.py @@ -668,6 +668,113 @@ def test_replace_existing_data_set_make_backup(ansible_zos_module): hosts.all.zos_data_set(name=default_data_set, state="absent") +def test_data_set_name_gdgs(ansible_zos_module): + try: + hosts = ansible_zos_module + default_data_set = get_tmp_ds_name(3, 3) + hosts.all.shell(cmd="dtouch -tGDG -L4 {0}".format(default_data_set)) + hosts.all.shell(cmd="""dtouch -tseq "{0}(+1)" """.format(default_data_set)) + hosts.all.shell(cmd="""dtouch -tseq "{0}(+1)" """.format(default_data_set)) + results = hosts.all.zos_mvs_raw( + program_name="idcams", + auth=True, + dds=[ + dict( + dd_data_set=dict( + dd_name=SYSPRINT_DD, + data_set_name=default_data_set + "(0)", + return_content=dict(type="text"), + ), + ), + dict(dd_input=dict(dd_name=SYSIN_DD, content=IDCAMS_STDIN)), + ], + ) + for result in results.contacted.values(): + assert result.get("ret_code", {}).get("code", -1) == 0 + assert len(result.get("dd_names", [])) > 0 + # Generation minus 1 + results = hosts.all.zos_mvs_raw( + program_name="idcams", + auth=True, + dds=[ + dict( + dd_data_set=dict( + dd_name=SYSPRINT_DD, + data_set_name=default_data_set + "(-1)", + return_content=dict(type="text"), + ), + ), + dict(dd_input=dict(dd_name=SYSIN_DD, content=IDCAMS_STDIN)), + ], + ) + for result in results.contacted.values(): + assert result.get("ret_code", {}).get("code", -1) == 0 + assert len(result.get("dd_names", [])) > 0 + # Create a new one + results = hosts.all.zos_mvs_raw( + program_name="idcams", + auth=True, + dds=[ + dict( + dd_data_set=dict( + dd_name=SYSPRINT_DD, + data_set_name=default_data_set + "(+1)", + disposition="new", + return_content=dict(type="text"), + ), + ), + dict(dd_input=dict(dd_name=SYSIN_DD, content=IDCAMS_STDIN)), + ], + ) + for result in results.contacted.values(): + assert result.get("ret_code", {}).get("code", -1) == 0 + assert len(result.get("dd_names", [])) > 0 + # Negative case + results = hosts.all.zos_mvs_raw( + program_name="idcams", + auth=True, + dds=[ + dict( + dd_data_set=dict( + dd_name=SYSPRINT_DD, + data_set_name=default_data_set + "(-4)", + disposition="new", + return_content=dict(type="text"), + ), + ), + dict(dd_input=dict(dd_name=SYSIN_DD, content=IDCAMS_STDIN)), + ], + ) + for result in results.contacted.values(): + assert result.get("ret_code", {}).get("code", -1) == 8 + finally: + hosts.all.shell(cmd="""drm "ANSIBLE.*" """) + + +def test_data_set_name_special_characters(ansible_zos_module): + try: + hosts = ansible_zos_module + default_data_set = get_tmp_ds_name(5, 6, symbols=True) + hosts.all.zos_data_set(name=default_data_set, type="seq", state="present") + results = hosts.all.zos_mvs_raw( + program_name="idcams", + auth=True, + dds=[ + dict( + dd_data_set=dict( + dd_name=SYSPRINT_DD, + data_set_name=default_data_set, + return_content=dict(type="text"), + ), + ), + dict(dd_input=dict(dd_name=SYSIN_DD, content=IDCAMS_STDIN)), + ], + ) + for result in results.contacted.values(): + assert result.get("ret_code", {}).get("code", -1) == 0 + assert len(result.get("dd_names", [])) > 0 + finally: + hosts.all.shell(cmd="""drm "ANSIBLE.*" """) # ---------------------------------------------------------------------------- # # Input DD Tests # # ---------------------------------------------------------------------------- # From 2b1188ca2d65ac60416eea6bff8d714762c1efea Mon Sep 17 00:00:00 2001 From: Ivan Moreno <iamorenosoto@gmail.com> Date: Tue, 4 Jun 2024 13:27:38 -0700 Subject: [PATCH 396/495] [Enabler] [zos_fetch] Add gdg support to zos_fetch (#1519) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Add support for GDGs * Update module docs * Fix fetch when requesting GDSs * Add first version of GDG support tests * Fix file name for GDS fetch * Fix source existence check * Add negative GDS test * Add GDG fetching * Update zos_fetch's RST * Add changelog fragment * Fix pep8 issues * Update special chars test --------- Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> --- .../fragments/1519-zos_fetch-gdg-support.yml | 4 + docs/source/modules/zos_fetch.rst | 24 +- plugins/action/zos_fetch.py | 119 +++++---- plugins/module_utils/data_set.py | 11 +- plugins/modules/zos_fetch.py | 238 ++++++++++++++---- .../functional/modules/test_zos_fetch_func.py | 134 +++++++++- 6 files changed, 436 insertions(+), 94 deletions(-) create mode 100644 changelogs/fragments/1519-zos_fetch-gdg-support.yml diff --git a/changelogs/fragments/1519-zos_fetch-gdg-support.yml b/changelogs/fragments/1519-zos_fetch-gdg-support.yml new file mode 100644 index 000000000..dd9126ddf --- /dev/null +++ b/changelogs/fragments/1519-zos_fetch-gdg-support.yml @@ -0,0 +1,4 @@ +minor_changes: + - zos_fetch - add support for fetching generation data groups and + generation data sets. + (https://github.com/ansible-collections/ibm_zos_core/pull/1519) \ No newline at end of file diff --git a/docs/source/modules/zos_fetch.rst b/docs/source/modules/zos_fetch.rst index 7cdcabbd5..23d58c864 100644 --- a/docs/source/modules/zos_fetch.rst +++ b/docs/source/modules/zos_fetch.rst @@ -16,11 +16,13 @@ zos_fetch -- Fetch data from z/OS Synopsis -------- -- This module fetches a UNIX System Services (USS) file, PS (sequential data set), PDS, PDSE, member of a PDS or PDSE, or KSDS (VSAM data set) from a remote z/OS system. +- This module fetches a UNIX System Services (USS) file, PS (sequential data set), PDS, PDSE, member of a PDS or PDSE, generation data set (GDS), generation data group (GDG), or KSDS (VSAM data set) from a remote z/OS system. - When fetching a sequential data set, the destination file name will be the same as the data set name. - When fetching a PDS or PDSE, the destination will be a directory with the same name as the PDS or PDSE. - When fetching a PDS/PDSE member, destination will be a file. - Files that already exist at \ :literal:`dest`\ will be overwritten if they are different than \ :literal:`src`\ . +- When fetching a GDS, the relative name will be resolved to its absolute one. +- When fetching a generation data group, the destination will be a directory with the same name as the GDG. @@ -31,7 +33,7 @@ Parameters src - Name of a UNIX System Services (USS) file, PS (sequential data set), PDS, PDSE, member of a PDS, PDSE or KSDS (VSAM data set). + Name of a UNIX System Services (USS) file, PS (sequential data set), PDS, PDSE, member of a PDS, PDSE, GDS, GDG or KSDS (VSAM data set). USS file paths should be absolute paths. @@ -187,6 +189,24 @@ Examples to: ISO8859-1 flat: true + - name: Fetch the current generation data set from a GDG + zos_fetch: + src: USERHLQ.DATA.SET(0) + dest: /tmp/ + flat: true + + - name: Fetch a previous generation data set from a GDG + zos_fetch: + src: USERHLQ.DATA.SET(-3) + dest: /tmp/ + flat: true + + - name: Fetch a generation data group + zos_fetch: + src: USERHLQ.TEST.GDG + dest: /tmp/ + flat: true + diff --git a/plugins/action/zos_fetch.py b/plugins/action/zos_fetch.py index 56232f34f..192b9ce6f 100644 --- a/plugins/action/zos_fetch.py +++ b/plugins/action/zos_fetch.py @@ -26,9 +26,15 @@ from ansible.utils.display import Display from ansible import cli -from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import encode, validation +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import encode, validation, data_set -SUPPORTED_DS_TYPES = frozenset({"PS", "PO", "VSAM", "USS"}) +SUPPORTED_DS_TYPES = frozenset({ + "PS", "SEQ", "BASIC", + "PO", "PE", "PDS", "PDSE", + "VSAM", "KSDS", + "GDG", + "USS" +}) display = Display() @@ -37,10 +43,15 @@ def _update_result(result, src, dest, ds_type="USS", is_binary=False): """ Helper function to update output result with the provided values """ data_set_types = { "PS": "Sequential", + "SEQ": "Sequential", + "BASIC": "Sequential", "PO": "Partitioned", - "PDSE": "Partitioned Extended", + "PDS": "Partitioned", "PE": "Partitioned Extended", + "PDSE": "Partitioned Extended", "VSAM": "VSAM", + "KSDS": "VSAM", + "GDG": "Generation Data Group", "USS": "USS", } updated_result = dict((k, v) for k, v in result.items()) @@ -101,6 +112,7 @@ class ActionModule(ActionBase): def run(self, tmp=None, task_vars=None): result = super(ActionModule, self).run(tmp, task_vars) del tmp + # ********************************************************** # # Parameter initializations # # ********************************************************** # @@ -139,12 +151,60 @@ def run(self, tmp=None, task_vars=None): return result ds_type = None - fetch_member = "(" in src and src.endswith(")") + fetch_member = data_set.is_member(src) if fetch_member: member_name = src[src.find("(") + 1: src.find(")")] src = self._connection._shell.join_path(src) src = self._remote_expand_user(src) + # ********************************************************** # + # Execute module on remote host # + # ********************************************************** # + new_module_args = self._task.args.copy() + encoding_to = None + if encoding: + encoding_to = encoding.get("to", None) + if encoding is None or encoding_to is None: + new_module_args.update( + dict(encoding=dict(to=encode.Defaults.get_default_system_charset())) + ) + remote_path = None + + try: + fetch_res = self._execute_module( + module_name="ibm.ibm_zos_core.zos_fetch", + module_args=new_module_args, + task_vars=task_vars + ) + ds_type = fetch_res.get("ds_type") + src = fetch_res.get("file") + remote_path = fetch_res.get("remote_path") + + if fetch_res.get("msg"): + result["msg"] = fetch_res.get("msg") + result["stdout"] = fetch_res.get("stdout") or fetch_res.get( + "module_stdout" + ) + result["stderr"] = fetch_res.get("stderr") or fetch_res.get( + "module_stderr" + ) + result["stdout_lines"] = fetch_res.get("stdout_lines") + result["stderr_lines"] = fetch_res.get("stderr_lines") + result["rc"] = fetch_res.get("rc") + result["failed"] = True + return result + + elif fetch_res.get("note"): + result["note"] = fetch_res.get("note") + return result + + except Exception as err: + result["msg"] = "Failure during module execution" + result["stderr"] = str(err) + result["stderr_lines"] = str(err).splitlines() + result["failed"] = True + return result + # ********************************************************** # # Determine destination path: # # 1. If the 'flat' parameter is 'false', then hostname # @@ -216,45 +276,9 @@ def run(self, tmp=None, task_vars=None): local_checksum = _get_file_checksum(dest) # ********************************************************** # - # Execute module on remote host # + # Fetch remote data. # ********************************************************** # - new_module_args = self._task.args.copy() - encoding_to = None - if encoding: - encoding_to = encoding.get("to", None) - if encoding is None or encoding_to is None: - new_module_args.update( - dict(encoding=dict(to=encode.Defaults.get_default_system_charset())) - ) - remote_path = None try: - fetch_res = self._execute_module( - module_name="ibm.ibm_zos_core.zos_fetch", - module_args=new_module_args, - task_vars=task_vars - ) - ds_type = fetch_res.get("ds_type") - src = fetch_res.get("file") - remote_path = fetch_res.get("remote_path") - - if fetch_res.get("msg"): - result["msg"] = fetch_res.get("msg") - result["stdout"] = fetch_res.get("stdout") or fetch_res.get( - "module_stdout" - ) - result["stderr"] = fetch_res.get("stderr") or fetch_res.get( - "module_stderr" - ) - result["stdout_lines"] = fetch_res.get("stdout_lines") - result["stderr_lines"] = fetch_res.get("stderr_lines") - result["rc"] = fetch_res.get("rc") - result["failed"] = True - return result - - elif fetch_res.get("note"): - result["note"] = fetch_res.get("note") - return result - if ds_type in SUPPORTED_DS_TYPES: if ds_type == "PO" and os.path.isfile(dest) and not fetch_member: result[ @@ -262,6 +286,12 @@ def run(self, tmp=None, task_vars=None): ] = "Destination must be a directory to fetch a partitioned data set" result["failed"] = True return result + if ds_type == "GDG" and os.path.isfile(dest): + result[ + "msg" + ] = "Destination must be a directory to fetch a generation data group" + result["failed"] = True + return result fetch_content = self._transfer_remote_content( dest, @@ -272,7 +302,7 @@ def run(self, tmp=None, task_vars=None): if fetch_content.get("msg"): return fetch_content - if validate_checksum and ds_type != "PO" and not is_binary: + if validate_checksum and ds_type != "GDG" and ds_type != "PO" and not is_binary: new_checksum = _get_file_checksum(dest) result["changed"] = local_checksum != new_checksum result["checksum"] = new_checksum @@ -286,6 +316,7 @@ def run(self, tmp=None, task_vars=None): ) result["failed"] = True return result + except Exception as err: result["msg"] = "Failure during module execution" result["stderr"] = str(err) @@ -311,7 +342,7 @@ def _transfer_remote_content( result = dict() _sftp_action = 'get' - if src_type == "PO": + if src_type == "PO" or src_type == "GDG": _sftp_action += ' -r' # add '-r` to clone the source trees # To support multiple Ansible versions we must do some version detection and act accordingly @@ -404,6 +435,6 @@ def _remote_cleanup(self, remote_path, src_type, encoding): # do not remove the original file. if not (src_type == "USS" and not encoding): rm_cmd = "rm -r {0}".format(remote_path) - if src_type != "PO": + if src_type != "PO" and src_type != "GDG": rm_cmd = rm_cmd.replace(" -r", "") self._connection.exec_command(rm_cmd) diff --git a/plugins/module_utils/data_set.py b/plugins/module_utils/data_set.py index 7b294f6d4..13cdf9af2 100644 --- a/plugins/module_utils/data_set.py +++ b/plugins/module_utils/data_set.py @@ -540,8 +540,8 @@ def data_set_type(name, volume=None): volume (str) -- The volume the data set may reside on. Returns: - str -- The type of the data set (one of "PS", "PO", "DA", "KSDS", - "ESDS", "LDS" or "RRDS"). + str -- The type of the data set (one of "PS", "PO", "DA", "GDG", + "KSDS", "ESDS", "LDS" or "RRDS"). None -- If the data set does not exist or ZOAU is not able to determine the type. """ @@ -551,10 +551,15 @@ def data_set_type(name, volume=None): data_sets_found = datasets.list_datasets(name) # Using the organization property when it's a sequential or partitioned - # dataset. VSAMs are not found by datasets.list_datasets. + # dataset. VSAMs and GDGs are not found by datasets.list_datasets. if len(data_sets_found) > 0: return data_sets_found[0].organization + # Now trying to list GDGs through gdgs. + data_sets_found = gdgs.list_gdg_names(name) + if len(data_sets_found) > 0: + return "GDG" + # Next, trying to get the DATA information of a VSAM through # LISTCAT. output = DataSet._get_listcat_data(name) diff --git a/plugins/modules/zos_fetch.py b/plugins/modules/zos_fetch.py index 8b4d4809d..574c5923c 100644 --- a/plugins/modules/zos_fetch.py +++ b/plugins/modules/zos_fetch.py @@ -24,7 +24,8 @@ short_description: Fetch data from z/OS description: - This module fetches a UNIX System Services (USS) file, - PS (sequential data set), PDS, PDSE, member of a PDS or PDSE, or + PS (sequential data set), PDS, PDSE, member of a PDS or PDSE, + generation data set (GDS), generation data group (GDG), or KSDS (VSAM data set) from a remote z/OS system. - When fetching a sequential data set, the destination file name will be the same as the data set name. @@ -33,6 +34,9 @@ - When fetching a PDS/PDSE member, destination will be a file. - Files that already exist at C(dest) will be overwritten if they are different than C(src). + - When fetching a GDS, the relative name will be resolved to its absolute one. + - When fetching a generation data group, the destination will be a directory + with the same name as the GDG. author: - "Asif Mahmud (@asifmahmud)" - "Demetrios Dimatos (@ddimatos)" @@ -40,7 +44,7 @@ src: description: - Name of a UNIX System Services (USS) file, PS (sequential data set), PDS, - PDSE, member of a PDS, PDSE or KSDS (VSAM data set). + PDSE, member of a PDS, PDSE, GDS, GDG or KSDS (VSAM data set). - USS file paths should be absolute paths. required: true type: str @@ -202,6 +206,24 @@ from: IBM-037 to: ISO8859-1 flat: true + +- name: Fetch the current generation data set from a GDG + zos_fetch: + src: USERHLQ.DATA.SET(0) + dest: /tmp/ + flat: true + +- name: Fetch a previous generation data set from a GDG + zos_fetch: + src: USERHLQ.DATA.SET(-3) + dest: /tmp/ + flat: true + +- name: Fetch a generation data group + zos_fetch: + src: USERHLQ.TEST.GDG + dest: /tmp/ + flat: true """ RETURN = r""" @@ -291,7 +313,7 @@ try: - from zoautil_py import datasets, mvscmd, ztypes + from zoautil_py import datasets, mvscmd, ztypes, gdgs except Exception: datasets = ZOAUImportError(traceback.format_exc()) mvscmd = ZOAUImportError(traceback.format_exc()) @@ -565,7 +587,7 @@ def _fetch_vsam(self, src, is_binary, encoding=None): Unable to delete temporary dataset. """ temp_ds = self._copy_vsam_to_temp_data_set(src) - file_path = self._fetch_mvs_data(temp_ds, is_binary, encoding) + file_path = self._fetch_mvs_data(temp_ds, is_binary, encoding=encoding) rc = datasets.delete(temp_ds) if rc != 0: os.remove(file_path) @@ -575,7 +597,7 @@ def _fetch_vsam(self, src, is_binary, encoding=None): return file_path - def _fetch_pdse(self, src, is_binary, encoding=None): + def _fetch_pdse(self, src, is_binary, temp_dir=None, encoding=None): """Copy a partitioned data set to a USS directory. If the data set is not being fetched in binary mode, encoding for all members inside the data set will be converted. @@ -585,7 +607,9 @@ def _fetch_pdse(self, src, is_binary, encoding=None): src : str Source of the dataset. is_binary : bool - If is binary. + If it is binary. + temp_dir : str + Parent directory for the temp directory of the copy. encoding : str The file encoding. @@ -601,7 +625,7 @@ def _fetch_pdse(self, src, is_binary, encoding=None): fail_json Error converting encoding of the member. """ - dir_path = tempfile.mkdtemp() + dir_path = tempfile.mkdtemp(dir=temp_dir) cmd = "cp -B \"//'{0}'\" {1}" if not is_binary: cmd = cmd.replace(" -B", "") @@ -642,7 +666,55 @@ def _fetch_pdse(self, src, is_binary, encoding=None): ) return dir_path - def _fetch_mvs_data(self, src, is_binary, encoding=None): + def _fetch_gdg(self, src, is_binary, encoding=None): + """Copy a generation data group to a USS directory. If the data set + is not being fetched in binary mode, encoding for all data sets inside + the GDG will be converted. + + Parameters + ---------- + src : str + Source of the generation data group. + is_binary : bool + If it is binary. + encoding : str + The file encoding. + + Returns + ------- + str + Directory path containing the files of the converted generation data sets. + + Raises + ------ + fail_json + Error copying a GDS to USS. + fail_json + Error converting encoding of a GDS. + """ + dir_path = tempfile.mkdtemp() + + data_group = gdgs.GenerationDataGroupView(src) + for current_gds in data_group.generations(): + if current_gds.organization in data_set.DataSet.MVS_SEQ: + self._fetch_mvs_data( + current_gds.name, + is_binary, + temp_dir=dir_path, + file_override=current_gds.name, + encoding=encoding + ) + elif current_gds.organization in data_set.DataSet.MVS_PARTITIONED: + self._fetch_pdse( + current_gds.name, + is_binary, + temp_dir=dir_path, + encoding=encoding + ) + + return dir_path + + def _fetch_mvs_data(self, src, is_binary, temp_dir=None, file_override=None, encoding=None): """Copy a sequential data set or a partitioned data set member to a USS file. @@ -651,7 +723,12 @@ def _fetch_mvs_data(self, src, is_binary, encoding=None): src : str Source of the dataset. is_binary : bool - If is binary. + If it is binary. + temp_dir : str + Parent directory for the temp directory of the copy. + file_override : str + File name that will override the random one made by Python when + creating a temp file. encoding : str The file encoding. @@ -667,12 +744,21 @@ def _fetch_mvs_data(self, src, is_binary, encoding=None): fail_json Error converting encoding of the dataset. """ - fd, file_path = tempfile.mkstemp() - os.close(fd) + if file_override: + file_path = file_override + + if temp_dir: + file_path = os.path.join(temp_dir, file_path) + else: + fd, file_path = tempfile.mkstemp(dir=temp_dir) + os.close(fd) + cmd = "cp -B \"//'{0}'\" {1}" if not is_binary: cmd = cmd.replace(" -B", "") + rc, out, err = self._run_command(cmd.format(src, file_path)) + if rc != 0: os.remove(file_path) self._fail_json( @@ -741,8 +827,10 @@ def run_module(): ) src = module.params.get("src") + hlq = None if module.params.get("use_qualifier"): - module.params["src"] = datasets.get_hlq() + "." + src + hlq = datasets.get_hlq() + module.params["src"] = hlq + "." + src # ********************************************************** # # Verify paramater validity # @@ -802,57 +890,90 @@ def run_module(): # ********************************************************** # res_args = dict() - _fetch_member = "(" in src and src.endswith(")") - ds_name = src if not _fetch_member else src[: src.find("(")] + src_data_set = None + ds_type = None + try: - ds_utils = data_set.DataSetUtils(ds_name) - if not ds_utils.exists(): + # Checking the source actually exists on the system. + if "/" in src: # USS + src_exists = os.path.exists(b_src) + else: # MVS + src_data_set = data_set.MVSDataSet(src) + is_member = data_set.is_member(src_data_set.name) + + if is_member: + src_exists = data_set.DataSet.data_set_member_exists(src_data_set.name) + else: + src_exists = data_set.DataSet.data_set_exists( + src_data_set.name + ) + + if not src_exists: if fail_on_missing: - module.fail_json( - msg=( - "The source '{0}' does not exist or is " - "uncataloged".format(ds_name) + if is_member: + module.fail_json( + msg=( + "The data set member '{0}' was not found inside data " + "set '{1}'" + ).format( + data_set.extract_member_name(src_data_set.raw_name), + data_set.extract_dsname(src_data_set.raw_name) + ) ) + else: + module.fail_json( + msg=( + "The source '{0}' does not exist or is " + "uncataloged.".format(src) + ) + ) + else: + module.exit_json( + note=("Source '{0}' was not found. No data was fetched.".format(src)) ) - module.exit_json( - note=("Source '{0}' was not found. No data was fetched".format(ds_name)) - ) - ds_type = ds_utils.ds_type() + + if "/" in src: + ds_type = "USS" + else: + ds_type = data_set.DataSet.data_set_type(data_set.extract_dsname(src_data_set.name)) + if not ds_type: - module.fail_json(msg="Unable to determine data set type") + module.fail_json(msg="Unable to determine source type. No data was fetched.") except Exception as err: module.fail_json( - msg="Error while gathering data set information", stderr=str(err) + msg="Error while gathering source information", stderr=str(err) ) # ********************************************************** # # Fetch a sequential data set # # ********************************************************** # - if ds_type == "PS": - file_path = fetch_handler._fetch_mvs_data(src, is_binary, encoding) + if ds_type in data_set.DataSet.MVS_SEQ: + file_path = fetch_handler._fetch_mvs_data( + src_data_set.name, + is_binary, + encoding=encoding + ) res_args["remote_path"] = file_path # ********************************************************** # # Fetch a partitioned data set or one of its members # # ********************************************************** # - elif ds_type == "PO": - if _fetch_member: - member_name = src[src.find("(") + 1: src.find(")")] - if not ds_utils.member_exists(member_name): - module.fail_json( - msg=( - "The data set member '{0}' was not found inside data " - "set '{1}'" - ).format(member_name, ds_name) - ) - file_path = fetch_handler._fetch_mvs_data(src, is_binary, encoding) + elif ds_type in data_set.DataSet.MVS_PARTITIONED: + if is_member: + file_path = fetch_handler._fetch_mvs_data( + src_data_set.name, + is_binary, + encoding=encoding + ) res_args["remote_path"] = file_path else: res_args["remote_path"] = fetch_handler._fetch_pdse( - src, is_binary, encoding + src_data_set.name, + is_binary, + encoding=encoding ) # ********************************************************** # @@ -864,18 +985,47 @@ def run_module(): module.fail_json( msg="File '{0}' does not have appropriate read permission".format(src) ) - file_path = fetch_handler._fetch_uss_file(src, is_binary, encoding) + file_path = fetch_handler._fetch_uss_file( + src, + is_binary, + encoding=encoding + ) res_args["remote_path"] = file_path # ********************************************************** # # Fetch a VSAM data set # # ********************************************************** # - elif ds_type == "VSAM": - file_path = fetch_handler._fetch_vsam(src, is_binary, encoding) + elif ds_type in data_set.DataSet.MVS_VSAM: + file_path = fetch_handler._fetch_vsam( + src_data_set.name, + is_binary, + encoding=encoding + ) res_args["remote_path"] = file_path - res_args["file"] = ds_name + # ********************************************************** # + # Fetch a GDG # + # ********************************************************** # + + elif ds_type == "GDG": + res_args["remote_path"] = fetch_handler._fetch_gdg( + src_data_set.name, + is_binary, + encoding=encoding + ) + + if ds_type == "USS": + res_args["file"] = src + else: + res_args["file"] = src_data_set.name + + # Removing the HLQ since the user is probably not expecting it. The module + # hasn't returned it ever since it was originally written. Changes made to + # add GDG/GDS support started leaving the HLQ behind in the file name. + if hlq: + res_args["file"] = res_args["file"].replace(f"{hlq}.", "") + res_args["ds_type"] = ds_type module.exit_json(**res_args) diff --git a/tests/functional/modules/test_zos_fetch_func.py b/tests/functional/modules/test_zos_fetch_func.py index 7fd44651e..0a1a31c48 100644 --- a/tests/functional/modules/test_zos_fetch_func.py +++ b/tests/functional/modules/test_zos_fetch_func.py @@ -16,6 +16,8 @@ import os import shutil import stat +import re +import pytest from hashlib import sha256 from ansible.utils.hashing import checksum @@ -641,7 +643,7 @@ def test_fetch_use_data_set_qualifier(ansible_zos_module): finally: if os.path.exists(dest_path): os.remove(dest_path) - hosts.all.zos_data_set(src="OMVSADM." + src, state="absent") + hosts.all.zos_data_set(name="OMVSADM." + src, state="absent") def test_fetch_flat_create_dirs(ansible_zos_module, z_python_interpreter): @@ -664,3 +666,133 @@ def test_fetch_flat_create_dirs(ansible_zos_module, z_python_interpreter): finally: if os.path.exists(dest_path): shutil.rmtree("/tmp/" + remote_host) + + +def test_fetch_sequential_data_set_with_special_chars(ansible_zos_module): + hosts = ansible_zos_module + TEST_PS = get_tmp_ds_name(symbols=True) + + hosts.all.zos_data_set( + name=TEST_PS, + state="present", + type="seq", + space_type="m", + space_primary=5 + ) + hosts.all.shell(cmd=f"decho \"{TEST_DATA}\" \"{TEST_PS}\"") + params = dict(src=TEST_PS, dest="/tmp/", flat=True) + dest_path = f"/tmp/{TEST_PS}" + + try: + results = hosts.all.zos_fetch(**params) + for result in results.contacted.values(): + assert result.get("changed") is True + assert result.get("data_set_type") == "Sequential" + assert result.get("module_stderr") is None + assert result.get("dest") == dest_path + assert os.path.exists(dest_path) + finally: + hosts.all.zos_data_set(name=TEST_PS, state="absent") + if os.path.exists(dest_path): + os.remove(dest_path) + + +@pytest.mark.parametrize("generation", ["0", "-1"]) +def test_fetch_gds_from_gdg(ansible_zos_module, generation): + hosts = ansible_zos_module + TEST_GDG = get_tmp_ds_name() + TEST_GDS = f"{TEST_GDG}({generation})" + + hosts.all.zos_data_set(name=TEST_GDG, state="present", type="gdg", limit=3) + hosts.all.zos_data_set(name=f"{TEST_GDG}(+1)", state="present", type="seq") + hosts.all.zos_data_set(name=f"{TEST_GDG}(+1)", state="present", type="seq") + + hosts.all.shell(cmd=f"decho \"{TEST_DATA}\" \"{TEST_GDS}\"") + params = dict(src=TEST_GDS, dest="/tmp/", flat=True) + dest_path = "" + + try: + results = hosts.all.zos_fetch(**params) + for result in results.contacted.values(): + assert result.get("changed") is True + assert result.get("data_set_type") == "Sequential" + assert result.get("module_stderr") is None + + # Checking that we got a dest of the form: ANSIBLE.DATA.SET.G0001V01. + dest_path = result.get("dest", "") + dest_pattern = r"G[0-9]+V[0-9]+" + + assert TEST_GDG in dest_path + assert re.fullmatch(dest_pattern, dest_path.split(".")[-1]) + assert os.path.exists(dest_path) + finally: + hosts.all.zos_data_set(name=f"{TEST_GDG}(-1)", state="absent") + hosts.all.zos_data_set(name=f"{TEST_GDG}(0)", state="absent") + hosts.all.zos_data_set(name=TEST_GDG, state="absent") + + if dest_path != "" and os.path.exists(dest_path): + os.remove(dest_path) + + +def test_error_fetch_inexistent_gds(ansible_zos_module): + hosts = ansible_zos_module + TEST_GDG = get_tmp_ds_name() + TEST_GDS = f"{TEST_GDG}(+1)" + + hosts.all.zos_data_set(name=TEST_GDG, state="present", type="gdg", limit=3) + hosts.all.zos_data_set(name=f"{TEST_GDG}(+1)", state="present", type="seq") + + params = dict(src=TEST_GDS, dest="/tmp/", flat=True) + + try: + results = hosts.all.zos_fetch(**params) + for result in results.contacted.values(): + assert result.get("changed") is False + assert result.get("failed") is True + assert "does not exist" in result.get("msg", "") + + finally: + hosts.all.zos_data_set(name=f"{TEST_GDG}(0)", state="absent") + hosts.all.zos_data_set(name=TEST_GDG, state="absent") + + +def test_fetch_gdg(ansible_zos_module): + hosts = ansible_zos_module + TEST_GDG = get_tmp_ds_name() + + hosts.all.zos_data_set(name=TEST_GDG, state="present", type="gdg", limit=3) + hosts.all.zos_data_set(name=f"{TEST_GDG}(+1)", state="present", type="seq") + hosts.all.zos_data_set(name=f"{TEST_GDG}(+1)", state="present", type="seq") + + hosts.all.shell(cmd=f"decho \"{TEST_DATA}\" \"{TEST_GDG}(-1)\"") + hosts.all.shell(cmd=f"decho \"{TEST_DATA}\" \"{TEST_GDG}(0)\"") + + params = dict(src=TEST_GDG, dest="/tmp/", flat=True) + + try: + results = hosts.all.zos_fetch(**params) + for result in results.contacted.values(): + assert result.get("changed") is True + assert result.get("data_set_type") == "Generation Data Group" + assert result.get("module_stderr") is None + + # Checking that we got a dest of the form: ANSIBLE.DATA.SET.G0001V01. + dest_path = result.get("dest", "") + dest_pattern = r"G[0-9]+V[0-9]+" + + assert TEST_GDG in dest_path + assert os.path.exists(dest_path) + + # Checking that the contents of the dir match with what we would expect to get: + # Multiple generation data sets conforming to the pattern defined above. + for file_name in os.listdir(dest_path): + assert re.fullmatch(dest_pattern, file_name.split(".")[-1]) + assert os.path.exists(os.path.join(dest_path, file_name)) + + finally: + hosts.all.zos_data_set(name=f"{TEST_GDG}(-1)", state="absent") + hosts.all.zos_data_set(name=f"{TEST_GDG}(0)", state="absent") + hosts.all.zos_data_set(name=TEST_GDG, state="absent") + + if os.path.exists(dest_path): + shutil.rmtree(dest_path) From 45c556109cfa384a18873f9ef38674e58c1835df Mon Sep 17 00:00:00 2001 From: IsaacVRey <isaac.vega.rey@gmail.com> Date: Wed, 5 Jun 2024 11:08:35 -0600 Subject: [PATCH 397/495] [Enabler][test_zos_ping_func] Update test suites on functional/modules/test_zos_ping_func.py to be pylint correct (#1477) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Update test suites on functional/module-utils/test_zos_data_set_func.py to be pylint correct * Correct functional/modules/test_zos_data_set_func.py * Correct functional/modules/test_zos_data_set_func.py * Update test suites on functional/modules/test_zos_job_submit_func.py to be pylint correct * Remove accidental changes on functional/modules/test_zos_job_submit_func.py * Update test suites on functional/modules/test_zos_ping_func.py function to be pylint correct --------- Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> --- tests/functional/modules/test_zos_ping_func.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/functional/modules/test_zos_ping_func.py b/tests/functional/modules/test_zos_ping_func.py index 5be9f4508..8d194cd81 100644 --- a/tests/functional/modules/test_zos_ping_func.py +++ b/tests/functional/modules/test_zos_ping_func.py @@ -16,7 +16,7 @@ __metaclass__ = type -def test_job_submit_PDS(ansible_zos_module): +def test_job_submit_pds(ansible_zos_module): hosts = ansible_zos_module results = hosts.all.zos_ping() for result in results.contacted.values(): From f5fbcd4090f7dad6c1e9b428e26d9bf98f28fea9 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Wed, 5 Jun 2024 14:33:04 -0600 Subject: [PATCH 398/495] [Enabler] [zos_backup_restore] Add support for GDG and special characters (#1527) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Added tests for gds * Added changelog and docs * Fixed issue * fixed unit tests * Added changlog for zos_backup * Reverted repr --------- Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> --- changelogs/fragments/1527-zos_backup-gdg.yml | 5 ++ plugins/modules/zos_backup_restore.py | 60 ++++++++++---- .../modules/test_zos_backup_restore.py | 82 +++++++++++++++++++ 3 files changed, 133 insertions(+), 14 deletions(-) create mode 100644 changelogs/fragments/1527-zos_backup-gdg.yml diff --git a/changelogs/fragments/1527-zos_backup-gdg.yml b/changelogs/fragments/1527-zos_backup-gdg.yml new file mode 100644 index 000000000..9d84127b9 --- /dev/null +++ b/changelogs/fragments/1527-zos_backup-gdg.yml @@ -0,0 +1,5 @@ +minor_changes: + - zos_data_set - Added support for GDS relative name notation to include or exclude data sets when + operation is backup. Added support for data set names with special characters + like $, /#, and @. + (https://github.com/ansible-collections/ibm_zos_core/pull/1527). \ No newline at end of file diff --git a/plugins/modules/zos_backup_restore.py b/plugins/modules/zos_backup_restore.py index 0c814637e..c64ed0535 100644 --- a/plugins/modules/zos_backup_restore.py +++ b/plugins/modules/zos_backup_restore.py @@ -51,6 +51,7 @@ description: - When I(operation=backup), specifies a list of data sets or data set patterns to include in the backup. + - When I(operation=backup) GDS relative names are supported. - When I(operation=restore), specifies a list of data sets or data set patterns to include when restoring from a backup. - The single asterisk, C(*), is used in place of exactly one qualifier. @@ -68,6 +69,7 @@ description: - When I(operation=backup), specifies a list of data sets or data set patterns to exclude from the backup. + - When I(operation=backup) GDS relative names are supported. - When I(operation=restore), specifies a list of data sets or data set patterns to exclude when restoring from a backup. - The single asterisk, C(*), is used in place of exactly one qualifier. @@ -117,6 +119,7 @@ - There are no enforced conventions for backup names. However, using a common extension like C(.dzp) for UNIX files and C(.DZP) for data sets will improve readability. + - GDS relative names are supported when I(operation=restore). type: str required: True recover: @@ -217,6 +220,15 @@ exclude: user.private.* backup_name: MY.BACKUP.DZP +- name: Backup a list of GDDs to data set my.backup.dzp + zos_backup_restore: + operation: backup + data_sets: + include: + - user.gdg(-1) + - user.gdg(0) + backup_name: my.backup.dzp + - name: Backup all datasets matching the pattern USER.** to UNIX file /tmp/temp_backup.dzp, ignore recoverable errors. zos_backup_restore: operation: backup @@ -312,18 +324,18 @@ sms_management_class: DB2SMS10 """ -from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.better_arg_parser import ( - BetterArgParser, -) -from ansible.module_utils.basic import AnsibleModule +import traceback +from os import path +from re import IGNORECASE, match, search -from re import match, search, IGNORECASE +from ansible.module_utils.basic import AnsibleModule +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.better_arg_parser import \ + BetterArgParser +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.data_set import \ + DataSet +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import \ + ZOAUImportError -from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import ( - ZOAUImportError, -) -from os import path -import traceback try: from zoautil_py import datasets from zoautil_py import exceptions as zoau_exceptions @@ -386,8 +398,8 @@ def main(): if operation == "backup": backup( backup_name=backup_name, - include_data_sets=data_sets.get("include"), - exclude_data_sets=data_sets.get("exclude"), + include_data_sets=resolve_gds_name_if_any(data_sets.get("include")), + exclude_data_sets=resolve_gds_name_if_any(data_sets.get("exclude")), volume=volume, full_volume=full_volume, temp_volume=temp_volume, @@ -423,6 +435,26 @@ def main(): module.exit_json(**result) +def resolve_gds_name_if_any(data_set_list): + """Resolve all gds names in a list, if no gds relative name is found then + the original name will be kept. + Parameters + ---------- + data_set_list : list + List of data set names. + + Returns + ------- + list + List of data set names with resolved gds names. + """ + if isinstance(data_set_list, list): + for index, name in enumerate(data_set_list): + if DataSet.is_gds_relative_name(name): + data_set_list[index] = DataSet.resolve_gds_absolute_name(name) + return data_set_list + + def parse_and_validate_args(params): """Parse and validate arguments to be used by remainder of module. @@ -662,7 +694,7 @@ def data_set_pattern_type(contents, dependencies): ) for pattern in contents: if not match( - r"^(?:(?:[A-Za-z$#@\?\*]{1}[A-Za-z0-9$#@\-\?\*]{0,7})(?:[.]{1})){1,21}[A-Za-z$#@\*\?]{1}[A-Za-z0-9$#@\-\*\?]{0,7}$", + r"^(?:(?:[A-Za-z$#@\?\*]{1}[A-Za-z0-9$#@\-\?\*]{0,7})(?:[.]{1})){1,21}[A-Za-z$#@\*\?]{1}[A-Za-z0-9$#@\-\*\?]{0,7}(?:\(([-+]?[0-9]+)\)){0,1}$", str(pattern), IGNORECASE, ): @@ -832,7 +864,7 @@ def backup_name_type(contents, dependencies): if contents is None: return None if not match( - r"^(?:(?:[A-Za-z$#@\?\*]{1}[A-Za-z0-9$#@\-\?\*]{0,7})(?:[.]{1})){1,21}[A-Za-z$#@\*\?]{1}[A-Za-z0-9$#@\-\*\?]{0,7}$", + r"^(?:(?:[A-Za-z$#@\?\*]{1}[A-Za-z0-9$#@\-\?\*]{0,7})(?:[.]{1})){1,21}[A-Za-z$#@\*\?]{1}[A-Za-z0-9$#@\-\*\?]{0,7}(?:\(([-+]?[0-9]+)\)){0,1}$", str(contents), IGNORECASE, ): diff --git a/tests/functional/modules/test_zos_backup_restore.py b/tests/functional/modules/test_zos_backup_restore.py index ca7ef740a..c265f146a 100644 --- a/tests/functional/modules/test_zos_backup_restore.py +++ b/tests/functional/modules/test_zos_backup_restore.py @@ -713,3 +713,85 @@ def test_restore_of_data_set_when_volume_does_not_exist(ansible_zos_module): # finally: # delete_data_set_or_file(hosts, data_set_name) # delete_data_set_or_file(hosts, DATA_SET_BACKUP_LOCATION) + + +@pytest.mark.parametrize("dstype", ["seq", "pds", "pdse"]) +def test_backup_gds(ansible_zos_module, dstype): + try: + hosts = ansible_zos_module + # We need to replace hyphens because of NAZARE-10614: dzip fails archiving data set names with '-' + data_set_name = get_tmp_ds_name(symbols=True).replace("-", "") + backup_dest = get_tmp_ds_name(symbols=True).replace("-", "") + results = hosts.all.zos_data_set(name=data_set_name, state="present", type="gdg", limit=3) + for result in results.contacted.values(): + assert result.get("changed") is True + assert result.get("module_stderr") is None + results = hosts.all.zos_data_set(name=f"{data_set_name}(+1)", state="present", type=dstype) + for result in results.contacted.values(): + assert result.get("changed") is True + assert result.get("module_stderr") is None + results = hosts.all.zos_data_set(name=f"{data_set_name}(+1)", state="present", type=dstype) + for result in results.contacted.values(): + assert result.get("changed") is True + assert result.get("module_stderr") is None + results = hosts.all.zos_backup_restore( + operation="backup", + data_sets=dict(include=[f"{data_set_name}(-1)", f"{data_set_name}(0)"]), + backup_name=backup_dest, + ) + for result in results.contacted.values(): + assert result.get("changed") is True + assert result.get("module_stderr") is None + finally: + hosts.all.shell(cmd=f"drm ANSIBLE.* ") + + +@pytest.mark.parametrize("dstype", ["seq", "pds", "pdse"]) +def test_backup_into_gds(ansible_zos_module, dstype): + """This test will create a dataset and backup it into a new generation of + backup data sets. + """ + try: + hosts = ansible_zos_module + # We need to replace hyphens because of NAZARE-10614: dzip fails archiving data set names with '-' + data_set_name = get_tmp_ds_name(symbols=True).replace("-", "") + ds_name = get_tmp_ds_name(symbols=True).replace("-", "") + results = hosts.all.zos_data_set(name=data_set_name, state="present", type="gdg", limit=3) + for result in results.contacted.values(): + assert result.get("changed") is True + assert result.get("module_stderr") is None + results = hosts.all.zos_data_set(name=f"{data_set_name}(+1)", state="present", type=dstype) + for result in results.contacted.values(): + assert result.get("changed") is True + assert result.get("module_stderr") is None + results = hosts.all.zos_data_set(name=ds_name, state="present", type=dstype) + for result in results.contacted.values(): + assert result.get("changed") is True + assert result.get("module_stderr") is None + ds_to_write = f"{ds_name}(MEM)" if dstype in ['pds', 'pdse'] else ds_name + results = hosts.all.shell(cmd=f"decho 'test line' \"{ds_to_write}\"") + for result in results.contacted.values(): + assert result.get("changed") is True + assert result.get("module_stderr") is None + results = hosts.all.zos_backup_restore( + operation="backup", + data_sets=dict(include=[ds_name]), + backup_name=f"{data_set_name}.G0002V00", + ) + for result in results.contacted.values(): + assert result.get("changed") is True + assert result.get("module_stderr") is None + results = hosts.all.shell(cmd=f"drm \"{ds_name}\"") + for result in results.contacted.values(): + assert result.get("changed") is True + assert result.get("module_stderr") is None + results = hosts.all.zos_backup_restore( + operation="restore", + backup_name=f"{data_set_name}(0)", + ) + for result in results.contacted.values(): + assert result.get("changed") is True + assert result.get("module_stderr") is None + finally: + hosts.all.shell(cmd=f"drm ANSIBLE.* ") + From d1afc17b2f46a411d4d76f7c047a6e28796cc927 Mon Sep 17 00:00:00 2001 From: IsaacVRey <isaac.vega.rey@gmail.com> Date: Wed, 5 Jun 2024 14:33:42 -0600 Subject: [PATCH 399/495] [Enabler][test_zos_tso_command_func] Update test suites on functional/modules/test_zos_tso_command_func.py to be pylint correct (#1480) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Update test suites on functional/module-utils/test_zos_data_set_func.py to be pylint correct * Correct functional/modules/test_zos_data_set_func.py * Correct functional/modules/test_zos_data_set_func.py * Update test suites on functional/modules/test_zos_job_submit_func.py to be pylint correct * Remove accidental changes on functional/modules/test_zos_job_submit_func.py * Update test suites on functional/modules/test_zos_tso_command_func.py to be pylint correct * Update test_zos_tso_command_func.py --------- Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> --- .../modules/test_zos_tso_command_func.py | 36 +++++++------------ 1 file changed, 13 insertions(+), 23 deletions(-) diff --git a/tests/functional/modules/test_zos_tso_command_func.py b/tests/functional/modules/test_zos_tso_command_func.py index 08eefe336..e4665bb71 100644 --- a/tests/functional/modules/test_zos_tso_command_func.py +++ b/tests/functional/modules/test_zos_tso_command_func.py @@ -15,9 +15,6 @@ __metaclass__ = type -import ansible.constants -import ansible.errors -import ansible.utils from ibm_zos_core.tests.helpers.dataset import get_tmp_ds_name def test_zos_tso_command_run_help(ansible_zos_module): @@ -50,7 +47,8 @@ def test_zos_tso_command_allocate_listing_delete(ansible_zos_module): hosts = ansible_zos_module default_temp_dataset = get_tmp_ds_name() command_string = [ - "alloc da('{0}') catalog lrecl(133) blksize(13300) recfm(f b) dsorg(po) cylinders space(5,5) dir(5)".format(default_temp_dataset) + f"alloc da('{default_temp_dataset}') "+ + "catalog lrecl(133) blksize(13300) recfm(f b) dsorg(po) cylinders space(5,5) dir(5)" ] results_allocate = hosts.all.zos_tso_command(commands=command_string) # Validate the correct allocation of dataset @@ -59,34 +57,35 @@ def test_zos_tso_command_allocate_listing_delete(ansible_zos_module): assert item.get("rc") == 0 assert result.get("changed") is True # Validate listds of datasets and validate LISTDS using alias param 'command' of auth command - results = hosts.all.zos_tso_command(commands=["LISTDS '{0}'".format(default_temp_dataset)]) + results = hosts.all.zos_tso_command(commands=[f"LISTDS '{default_temp_dataset}'"]) for result in results.contacted.values(): for item in result.get("output"): assert item.get("rc") == 0 assert result.get("changed") is True # Validate LISTDS using alias param 'command' - results = hosts.all.zos_tso_command(command="LISTDS '{0}'".format(default_temp_dataset)) + results = hosts.all.zos_tso_command(command=f"LISTDS '{default_temp_dataset}'") for result in results.contacted.values(): for item in result.get("output"): assert item.get("rc") == 0 assert result.get("changed") is True # Validate LISTCAT command and an unauth command results = hosts.all.zos_tso_command( - commands=["LISTCAT ENT('{0}')".format(default_temp_dataset)] + commands=[f"LISTCAT ENT('{default_temp_dataset}')"] ) for result in results.contacted.values(): for item in result.get("output"): assert item.get("rc") == 0 assert result.get("changed") is True # Validate remove dataset - results = hosts.all.zos_tso_command(commands=["delete '{0}'".format(default_temp_dataset)]) + results = hosts.all.zos_tso_command(commands=[f"delete '{default_temp_dataset}'"]) for result in results.contacted.values(): for item in result.get("output"): assert item.get("rc") == 0 assert result.get("changed") is True - # Expect the tso_command to fail here because the previous command will have already deleted the data set + # Expect the tso_command to fail here because + # the previous command will have already deleted the data set # Validate data set was removed by previous call - results = hosts.all.zos_tso_command(commands=["delete '{0}'".format(default_temp_dataset)]) + results = hosts.all.zos_tso_command(commands=[f"delete '{default_temp_dataset}'"]) for result in results.contacted.values(): for item in result.get("output"): assert item.get("rc") == 8 @@ -134,20 +133,11 @@ def test_zos_tso_command_multiple_commands(ansible_zos_module): # The command that kicks off rc>0 which is allowed def test_zos_tso_command_maxrc(ansible_zos_module): hosts = ansible_zos_module - results = hosts.all.zos_tso_command(commands=["LISTDSD DATASET('HLQ.DATA.SET') ALL GENERIC"],max_rc=4) - for result in results.contacted.values(): - for item in result.get("output"): - assert item.get("rc") < 5 - assert result.get("changed") is True - - -# The positive test -# The command that kicks off rc>0 which is allowed -def test_zos_tso_command_maxrc(ansible_zos_module): - hosts = ansible_zos_module - results = hosts.all.zos_tso_command(commands=["LISTDSD DATASET('HLQ.DATA.SET') ALL GENERIC"],max_rc=4) + results = hosts.all.zos_tso_command( + commands=["LISTDSD DATASET('HLQ.DATA.SET') ALL GENERIC"], + max_rc=4 + ) for result in results.contacted.values(): for item in result.get("output"): - print( item ) assert item.get("rc") < 5 assert result.get("changed") is True From 4d9cc7a3d3dbb7e3de0ce4a56f457750bbe3dc86 Mon Sep 17 00:00:00 2001 From: IsaacVRey <isaac.vega.rey@gmail.com> Date: Wed, 5 Jun 2024 14:44:11 -0600 Subject: [PATCH 400/495] [Enabler][test_zos_volume_init_func] Update test suites on functional/modules/test_zos_volume_init_func.py to be pylint correct (#1479) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Update test suites on functional/module-utils/test_zos_data_set_func.py to be pylint correct * Correct functional/modules/test_zos_data_set_func.py * Correct functional/modules/test_zos_data_set_func.py * Update test suites on functional/modules/test_zos_job_submit_func.py to be pylint correct * Remove accidental changes on functional/modules/test_zos_job_submit_func.py * Update test suites on functional/modules/test_zos_volume_init_func.py to be pylint correct * Update test_zos_volume_init_func.py --------- Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> --- .../modules/test_zos_volume_init_func.py | 29 ++++++++++--------- 1 file changed, 16 insertions(+), 13 deletions(-) diff --git a/tests/functional/modules/test_zos_volume_init_func.py b/tests/functional/modules/test_zos_volume_init_func.py index 39952105c..47ad965b1 100644 --- a/tests/functional/modules/test_zos_volume_init_func.py +++ b/tests/functional/modules/test_zos_volume_init_func.py @@ -38,12 +38,12 @@ def test_guard_rail_and_setup(ansible_zos_module): hosts.all.zos_data_set(name="IMSTESTL.IMS01.SPOOL2", state="absent") hosts.all.zos_data_set(name="IMSTESTL.IMS01.SPOOL3", state="absent") - params = dict( - address=TEST_VOL_ADDR, - verify_offline=False, - volid=TEST_VOL_SER, - verify_volid='USER02' - ) + params = { + "address":TEST_VOL_ADDR, + "verify_offline":False, + "volid":TEST_VOL_SER, + "verify_volid":'USER02' + } # take volume offline hosts.all.zos_operator(cmd=f"vary {TEST_VOL_ADDR},offline") @@ -137,7 +137,9 @@ def test_volid_address_assigned_correctly(ansible_zos_module): # parameter is correctly assigned to the 'address' parameter. # Display command to print device status, volser and addr should correspond - display_cmd_output = list(hosts.all.zos_operator(cmd=f"D U,VOL={TEST_VOL_SER}").contacted.values())[0] + display_cmd_output = list( + hosts.all.zos_operator(cmd=f"D U,VOL={TEST_VOL_SER}").contacted.values() + )[0] # zos_operator output contains the command as well, only the last line of # the output is relevant for the needs of this test case. @@ -260,7 +262,8 @@ def test_good_param_values(ansible_zos_module, params): 'volid': TEST_VOL_SER, 'vtoc_size': -10 }, 12), - # note - "'vtoc_size': 0" gets treated as vtoc_size wasn't defined and invokes default behavior. + # note - "'vtoc_size': 0" gets treated as vtoc_size wasn't defined + # and invokes default behavior. # volid check - incorrect existing volid ({ 'address': TEST_VOL_ADDR, @@ -362,11 +365,11 @@ def test_no_existing_data_sets_check(ansible_zos_module): def test_minimal_params(ansible_zos_module): hosts = ansible_zos_module - params = dict( - address=TEST_VOL_ADDR, - verify_offline=False, - volid=TEST_VOL_SER - ) + params = { + "address":TEST_VOL_ADDR, + "verify_offline":False, + "volid":TEST_VOL_SER + } # take volume offline hosts.all.zos_operator(cmd=f"vary {TEST_VOL_ADDR},offline") From 1180a0ecaaf01d541cb443e81fe735e67283f4de Mon Sep 17 00:00:00 2001 From: IsaacVRey <isaac.vega.rey@gmail.com> Date: Wed, 5 Jun 2024 15:00:50 -0600 Subject: [PATCH 401/495] [Enabler][test_zos_apf_func] Update test suites on functional/module-utils/test_zos_apf_func.py to be pylint correct (#1445) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Update test suites on functional/modules/test_zos_apf_func.py to be pylint correct * Update test suites on functional/modules/test_zos_apf_func.py to be pylint correct * Update test_zos_apf_func.py --------- Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> --- tests/functional/modules/test_zos_apf_func.py | 464 +++++++++++------- 1 file changed, 283 insertions(+), 181 deletions(-) diff --git a/tests/functional/modules/test_zos_apf_func.py b/tests/functional/modules/test_zos_apf_func.py index 74e74d516..56ed39c30 100644 --- a/tests/functional/modules/test_zos_apf_func.py +++ b/tests/functional/modules/test_zos_apf_func.py @@ -12,25 +12,25 @@ # limitations under the License. from __future__ import absolute_import, division, print_function +import pytest from ibm_zos_core.tests.helpers.dataset import get_tmp_ds_name from ibm_zos_core.tests.helpers.volumes import Volume_Handler -from shlex import quote __metaclass__ = type -add_expected = """/*BEGINAPFLIST*/ +ADD_EXPECTED = """/*BEGINAPFLIST*/ /*BEGINBLOCK*/ APFADDDSNAME({0})VOLUME({1}) /*ENDBLOCK*/ /*ENDAPFLIST*/""" -add_sms_expected = """/*BEGINAPFLIST*/ -/*BEGINBLOCK*/ -APFADDDSNAME({0})SMS -/*ENDBLOCK*/ -/*ENDAPFLIST*/""" +# ADD_SMS_EXPECTED = """/*BEGINAPFLIST*/ +# /*BEGINBLOCK*/ +# APFADDDSNAME({0})SMS +# /*ENDBLOCK*/ +# /*ENDAPFLIST*/""" -add_batch_expected = """/*BEGINAPFLIST*/ +ADD_BATCH_EXPECTED = """/*BEGINAPFLIST*/ /*BEGINBLOCK*/ APFADDDSNAME({0})VOLUME({1}) APFADDDSNAME({2})VOLUME({3}) @@ -38,40 +38,44 @@ /*ENDBLOCK*/ /*ENDAPFLIST*/""" -del_expected = """/*BEGINAPFLIST*/ +DEL_EXPECTED = """/*BEGINAPFLIST*/ /*ENDAPFLIST*/""" def clean_test_env(hosts, test_info): - cmdStr = "drm {0}".format(test_info['library']) - hosts.all.shell(cmd=cmdStr) + cmd_str = f"drm {test_info['library']}" + hosts.all.shell(cmd=cmd_str) if test_info.get('persistent'): - cmdStr = "drm {0}".format(test_info['persistent']['data_set_name']) - hosts.all.shell(cmd=cmdStr) + cmd_str = f"drm {test_info['persistent']['data_set_name']}" + hosts.all.shell(cmd=cmd_str) def test_add_del(ansible_zos_module, volumes_with_vvds): try: hosts = ansible_zos_module - VolumeHandler = Volume_Handler(volumes_with_vvds) - volume = VolumeHandler.get_available_vol() - test_info = dict(library="", state="present", force_dynamic=True) - ds = get_tmp_ds_name(3,2,True) + volume_handler = Volume_Handler(volumes_with_vvds) + volume = volume_handler.get_available_vol() + test_info = { + "library":"", + "state":"present", + "force_dynamic":True + } + ds = get_tmp_ds_name(3,2) hosts.all.shell(f"dtouch -tseq -V{volume} {ds} ") test_info['library'] = ds if test_info.get('volume') is not None: - cmdStr = "dls -l " + ds + " | awk '{print $5}' " - results = hosts.all.shell(cmd=cmdStr) + cmd_str = "dls -l " + ds + " | awk '{print $5}' " + results = hosts.all.shell(cmd=cmd_str) for result in results.contacted.values(): vol = result.get("stdout") test_info['volume'] = vol if test_info.get('persistent'): - cmdStr = "mvstmp APFTEST.PRST" - results = hosts.all.shell(cmd=cmdStr) + cmd_str = "mvstmp APFTEST.PRST" + results = hosts.all.shell(cmd=cmd_str) for result in results.contacted.values(): prstds = result.get("stdout") prstds = prstds[:30] - cmdStr = "dtouch -tseq {0}".format(prstds) - hosts.all.shell(cmd=cmdStr) + cmd_str = f"dtouch -tseq {prstds}" + hosts.all.shell(cmd=cmd_str) test_info['persistent']['data_set_name'] = prstds results = hosts.all.zos_apf(**test_info) for result in results.contacted.values(): @@ -87,28 +91,37 @@ def test_add_del(ansible_zos_module, volumes_with_vvds): def test_add_del_with_tmp_hlq_option(ansible_zos_module, volumes_with_vvds): try: hosts = ansible_zos_module - VolumeHandler = Volume_Handler(volumes_with_vvds) - volume = VolumeHandler.get_available_vol() + volume_handler = Volume_Handler(volumes_with_vvds) + volume = volume_handler.get_available_vol() tmphlq = "TMPHLQ" - test_info = dict(library="", state="present", force_dynamic=True, tmp_hlq="", persistent=dict(data_set_name="", backup=True)) + test_info = { + "library":"", + "state":"present", + "force_dynamic":True, + "tmp_hlq":"", + "persistent":{ + "data_set_name":"", + "backup":True + } + } test_info['tmp_hlq'] = tmphlq ds = get_tmp_ds_name(3,2,True) hosts.all.shell(cmd=f"dtouch -tseq -V{volume} {ds} ") test_info['library'] = ds if test_info.get('volume') is not None: - cmdStr = "dls -l " + ds + " | awk '{print $5}' " - results = hosts.all.shell(cmd=cmdStr) + cmd_str = "dls -l " + ds + " | awk '{print $5}' " + results = hosts.all.shell(cmd=cmd_str) for result in results.contacted.values(): vol = result.get("stdout") test_info['volume'] = vol if test_info.get('persistent'): - cmdStr = "mvstmp APFTEST.PRST" - results = hosts.all.shell(cmd=cmdStr) + cmd_str = "mvstmp APFTEST.PRST" + results = hosts.all.shell(cmd=cmd_str) for result in results.contacted.values(): prstds = result.get("stdout") prstds = prstds[:30] - cmdStr = "dtouch -tseq {0}".format(prstds) - hosts.all.shell(cmd=cmdStr) + cmd_str = f"dtouch -tseq {prstds}" + hosts.all.shell(cmd=cmd_str) test_info['persistent']['data_set_name'] = prstds results = hosts.all.zos_apf(**test_info) for result in results.contacted.values(): @@ -125,26 +138,32 @@ def test_add_del_with_tmp_hlq_option(ansible_zos_module, volumes_with_vvds): def test_add_del_volume(ansible_zos_module, volumes_with_vvds): try: hosts = ansible_zos_module - VolumeHandler = Volume_Handler(volumes_with_vvds) - volume = VolumeHandler.get_available_vol() - test_info = dict(library="", volume="", state="present", force_dynamic=True) + volume_handler = Volume_Handler(volumes_with_vvds) + volume = volume_handler.get_available_vol() + test_info = { + "library":"", + "volume":"", + "state":"present", + "force_dynamic":True + } ds = get_tmp_ds_name(1,1,True) + hosts.all.shell(cmd=f"dtouch -tseq -V{volume} {ds} ") test_info['library'] = ds if test_info.get('volume') is not None: - cmdStr = "dls -l " + ds + " | awk '{print $5}' " - results = hosts.all.shell(cmd=cmdStr) + cmd_str = "dls -l " + ds + " | awk '{print $5}' " + results = hosts.all.shell(cmd=cmd_str) for result in results.contacted.values(): vol = result.get("stdout") test_info['volume'] = vol if test_info.get('persistent'): - cmdStr = "mvstmp APFTEST.PRST" - results = hosts.all.shell(cmd=cmdStr) + cmd_str = "mvstmp APFTEST.PRST" + results = hosts.all.shell(cmd=cmd_str) for result in results.contacted.values(): prstds = result.get("stdout") prstds = prstds[:30] - cmdStr = "dtouch -tseq {0}".format(prstds) - hosts.all.shell(cmd=cmdStr) + cmd_str = f"dtouch -tseq {prstds}" + hosts.all.shell(cmd=cmd_str) test_info['persistent']['data_set_name'] = prstds results = hosts.all.zos_apf(**test_info) @@ -158,65 +177,74 @@ def test_add_del_volume(ansible_zos_module, volumes_with_vvds): clean_test_env(hosts, test_info) -""" -This test case was removed 3 years ago in the following PR : https://github.com/ansible-collections/ibm_zos_core/pull/197 -def test_add_del_persist(ansible_zos_module): - hosts = ansible_zos_module - test_info = TEST_INFO['test_add_del_persist'] - set_test_env(hosts, test_info) - results = hosts.all.zos_apf(**test_info) - pprint(vars(results)) - for result in results.contacted.values(): - assert result.get("rc") == 0 - add_exptd = add_sms_expected.format(test_info['library']) - add_exptd = add_exptd.replace(" ", "") - cmdStr = "cat \"//'{0}'\" ".format(test_info['persistent']['data_set_name']) - actual = run_shell_cmd(hosts, cmdStr).replace(" ", "") - assert actual == add_exptd - test_info['state'] = 'absent' - results = hosts.all.zos_apf(**test_info) - pprint(vars(results)) - for result in results.contacted.values(): - assert result.get("rc") == 0 - del_exptd = del_expected.replace(" ", "") - cmdStr = "cat \"//'{0}'\" ".format(test_info['persistent']['data_set_name']) - actual = run_shell_cmd(hosts, cmdStr).replace(" ", "") - assert actual == del_exptd - clean_test_env(hosts, test_info) -""" + +#This test case was removed 3 years ago in the following PR : +#https://github.com/ansible-collections/ibm_zos_core/pull/197 +#def test_add_del_persist(ansible_zos_module): +# hosts = ansible_zos_module +# test_info = TEST_INFO['test_add_del_persist'] +# set_test_env(hosts, test_info) +# results = hosts.all.zos_apf(**test_info) +# pprint(vars(results)) +# for result in results.contacted.values(): +# assert result.get("rc") == 0 +# add_exptd = ADD_SMS_EXPECTED.format(test_info['library']) +# add_exptd = add_exptd.replace(" ", "") +# cmd_str = "cat \"//'{0}'\" ".format(test_info['persistent']['data_set_name']) +# actual = run_shell_cmd(hosts, cmd_str).replace(" ", "") +# assert actual == add_exptd +# test_info['state'] = 'absent' +# results = hosts.all.zos_apf(**test_info) +# pprint(vars(results)) +# for result in results.contacted.values(): +# assert result.get("rc") == 0 +# del_exptd = DEL_EXPECTED.replace(" ", "") +# cmd_str = "cat \"//'{0}'\" ".format(test_info['persistent']['data_set_name']) +# actual = run_shell_cmd(hosts, cmd_str).replace(" ", "") +# assert actual == del_exptd +# clean_test_env(hosts, test_info) + def test_add_del_volume_persist(ansible_zos_module, volumes_with_vvds): try: hosts = ansible_zos_module - VolumeHandler = Volume_Handler(volumes_with_vvds) - volume = VolumeHandler.get_available_vol() - test_info = dict(library="", volume="", persistent=dict(data_set_name="", marker="/* {mark} BLOCK */"), state="present", force_dynamic=True) + volume_handler = Volume_Handler(volumes_with_vvds) + volume = volume_handler.get_available_vol() + test_info = { + "library":"", + "volume":"", + "persistent":{ + "data_set_name":"", + "marker":"/* {mark} BLOCK */"}, + "state":"present", + "force_dynamic":True + } ds = get_tmp_ds_name(1,1,True) hosts.all.shell(cmd=f"dtouch -tseq -V{volume} {ds} ") test_info['library'] = ds if test_info.get('volume') is not None: - cmdStr = "dls -l " + ds + " | awk '{print $5}' " - results = hosts.all.shell(cmd=cmdStr) + cmd_str = "dls -l " + ds + " | awk '{print $5}' " + results = hosts.all.shell(cmd=cmd_str) for result in results.contacted.values(): vol = result.get("stdout") test_info['volume'] = vol if test_info.get('persistent'): - cmdStr = "mvstmp APFTEST.PRST" - results = hosts.all.shell(cmd=cmdStr) + cmd_str = "mvstmp APFTEST.PRST" + results = hosts.all.shell(cmd=cmd_str) for result in results.contacted.values(): prstds = result.get("stdout") prstds = prstds[:30] - cmdStr = "dtouch -tseq {0}".format(prstds) - hosts.all.shell(cmd=cmdStr) + cmd_str = f"dtouch -tseq {prstds}" + hosts.all.shell(cmd=cmd_str) test_info['persistent']['data_set_name'] = prstds results = hosts.all.zos_apf(**test_info) for result in results.contacted.values(): assert result.get("rc") == 0 - add_exptd = add_expected.format(test_info['library'], test_info['volume']) + add_exptd = ADD_EXPECTED.format(test_info['library'], test_info['volume']) add_exptd = add_exptd.replace(" ", "") - cmdStr = "cat \"//'{0}'\" ".format(test_info['persistent']['data_set_name']) - results = hosts.all.shell(cmd=cmdStr) + cmd_str = f"cat \"//'{test_info['persistent']['data_set_name']}'\" " + results = hosts.all.shell(cmd=cmd_str) for result in results.contacted.values(): actual = result.get("stdout") actual = actual.replace(" ", "") @@ -225,9 +253,9 @@ def test_add_del_volume_persist(ansible_zos_module, volumes_with_vvds): results = hosts.all.zos_apf(**test_info) for result in results.contacted.values(): assert result.get("rc") == 0 - del_exptd = del_expected.replace(" ", "") - cmdStr = "cat \"//'{0}'\" ".format(test_info['persistent']['data_set_name']) - results = hosts.all.shell(cmd=cmdStr) + del_exptd = DEL_EXPECTED.replace(" ", "") + cmd_str = f"cat \"//'{test_info['persistent']['data_set_name']}'\" " + results = hosts.all.shell(cmd=cmd_str) for result in results.contacted.values(): actual = result.get("stdout") actual = actual.replace(" ", "") @@ -235,43 +263,67 @@ def test_add_del_volume_persist(ansible_zos_module, volumes_with_vvds): finally: clean_test_env(hosts, test_info) -""" -keyword: ENABLE-FOR-1-3 -Test commented because there is a failure in ZOAU 1.2.x, that should be fixed in 1.3.x, so -whoever works in issue https://github.com/ansible-collections/ibm_zos_core/issues/726 -should uncomment this test as part of the validation process. -""" + +# keyword: ENABLE-FOR-1-3 +# Test commented because there is a failure in ZOAU 1.2.x, that should be fixed in 1.3.x, so +# whoever works in issue https://github.com/ansible-collections/ibm_zos_core/issues/726 +# should uncomment this test as part of the validation process. + def test_batch_add_del(ansible_zos_module, volumes_with_vvds): try: hosts = ansible_zos_module - VolumeHandler = Volume_Handler(volumes_with_vvds) - volume = VolumeHandler.get_available_vol() - test_info = dict( - batch=[dict(library="", volume=" "), dict(library="", volume=" "), dict(library="", volume=" ")], - persistent=dict(data_set_name="", marker="/* {mark} BLOCK */"), state="present", force_dynamic=True - ) + volume_handler = Volume_Handler(volumes_with_vvds) + volume = volume_handler.get_available_vol() + test_info = { + "batch":[ + { + "library":"", + "volume":" " + }, + { + "library":"", + "volume":" " + }, + { + "library":"", + "volume":" " + } + ], + "persistent":{ + "data_set_name":"", + "marker":"/* {mark} BLOCK */" + }, + "state":"present", + "force_dynamic":True + } for item in test_info['batch']: ds = get_tmp_ds_name(1,1,True) hosts.all.shell(cmd=f"dtouch -tseq -V{volume} {ds} ") item['library'] = ds - cmdStr = "dls -l " + ds + " | awk '{print $5}' " - results = hosts.all.shell(cmd=cmdStr) + cmd_str = "dls -l " + ds + " | awk '{print $5}' " + results = hosts.all.shell(cmd=cmd_str) for result in results.contacted.values(): vol = result.get("stdout") item['volume'] = vol prstds = get_tmp_ds_name(5,5,True) - cmdStr = "dtouch -tseq {0}".format(prstds) - hosts.all.shell(cmd=cmdStr) + cmd_str = f"dtouch -tseq {prstds}" + hosts.all.shell(cmd=cmd_str) + test_info['persistent']['data_set_name'] = prstds results = hosts.all.zos_apf(**test_info) for result in results.contacted.values(): assert result.get("rc") == 0 - add_exptd = add_batch_expected.format(test_info['batch'][0]['library'], test_info['batch'][0]['volume'], - test_info['batch'][1]['library'], test_info['batch'][1]['volume'], - test_info['batch'][2]['library'], test_info['batch'][2]['volume']) + add_exptd = ADD_BATCH_EXPECTED.format( + test_info['batch'][0]['library'], + test_info['batch'][0]['volume'], + test_info['batch'][1]['library'], + test_info['batch'][1]['volume'], + test_info['batch'][2]['library'], + test_info['batch'][2]['volume'] + ) add_exptd = add_exptd.replace(" ", "") - cmdStr = "cat \"//'{0}'\" ".format(test_info['persistent']['data_set_name']) - results = hosts.all.shell(cmd=cmdStr) + cmd_str = f"cat \"//'{test_info['persistent']['data_set_name']}'\" " + results = hosts.all.shell(cmd=cmd_str) for result in results.contacted.values(): actual = result.get("stdout") actual = actual.replace(" ", "") @@ -280,9 +332,9 @@ def test_batch_add_del(ansible_zos_module, volumes_with_vvds): results = hosts.all.zos_apf(**test_info) for result in results.contacted.values(): assert result.get("rc") == 0 - del_exptd = del_expected.replace(" ", "") - cmdStr = "cat \"//'{0}'\" ".format(test_info['persistent']['data_set_name']) - results = hosts.all.shell(cmd=cmdStr) + del_exptd = DEL_EXPECTED.replace(" ", "") + cmd_str = f"cat \"//'{test_info['persistent']['data_set_name']}'\" " + results = hosts.all.shell(cmd=cmd_str) for result in results.contacted.values(): actual = result.get("stdout") actual = actual.replace(" ", "") @@ -290,18 +342,19 @@ def test_batch_add_del(ansible_zos_module, volumes_with_vvds): finally: for item in test_info['batch']: clean_test_env(hosts, item) - hosts.all.shell(cmd="drm {0}".format(test_info['persistent']['data_set_name'])) + hosts.all.shell(cmd=f"drm {test_info['persistent']['data_set_name']}") def test_operation_list(ansible_zos_module): + import json hosts = ansible_zos_module - test_info = dict(operation="list") + test_info = { + "operation":"list" + } results = hosts.all.zos_apf(**test_info) for result in results.contacted.values(): - listJson = result.get("stdout") - print(listJson) - import json - data = json.loads(listJson) + list_json = result.get("stdout") + data = json.loads(list_json) assert data['format'] in ['DYNAMIC', 'STATIC'] del json @@ -309,35 +362,42 @@ def test_operation_list(ansible_zos_module): def test_operation_list_with_filter(ansible_zos_module, volumes_with_vvds): try: hosts = ansible_zos_module - VolumeHandler = Volume_Handler(volumes_with_vvds) - volume = VolumeHandler.get_available_vol() - test_info = dict(library="", state="present", force_dynamic=True) + volume_handler = Volume_Handler(volumes_with_vvds) + volume = volume_handler.get_available_vol() + test_info = { + "library":"", + "state":"present", + "force_dynamic":True + } test_info['state'] = 'present' ds = get_tmp_ds_name(3,2,True) hosts.all.shell(cmd=f"dtouch -tseq -V{volume} {ds} ") test_info['library'] = ds if test_info.get('volume') is not None: - cmdStr = "dls -l " + ds + " | awk '{print $5}' " - results = hosts.all.shell(cmd=cmdStr) + cmd_str = "dls -l " + ds + " | awk '{print $5}' " + results = hosts.all.shell(cmd=cmd_str) for result in results.contacted.values(): vol = result.get("stdout") test_info['volume'] = vol if test_info.get('persistent'): - cmdStr = "mvstmp APFTEST.PRST" - results = hosts.all.shell(cmd=cmdStr) + cmd_str = "mvstmp APFTEST.PRST" + results = hosts.all.shell(cmd=cmd_str) for result in results.contacted.values(): prstds = result.get("stdout") prstds = prstds[:30] - cmdStr = "dtouch -tseq {0}".format(prstds) - hosts.all.shell(cmd=cmdStr) + cmd_str = f"dtouch -tseq {prstds}" + hosts.all.shell(cmd=cmd_str) test_info['persistent']['data_set_name'] = prstds hosts.all.zos_apf(**test_info) - ti = dict(operation="list", library="") + ti = { + "operation":"list", + "library":"" + } ti['library'] = "ANSIBLE.*" results = hosts.all.zos_apf(**ti) for result in results.contacted.values(): - listFiltered = result.get("stdout") - assert test_info['library'] in listFiltered + list_filtered = result.get("stdout") + assert test_info['library'] in list_filtered test_info['state'] = 'absent' hosts.all.zos_apf(**test_info) finally: @@ -351,27 +411,31 @@ def test_operation_list_with_filter(ansible_zos_module, volumes_with_vvds): def test_add_already_present(ansible_zos_module, volumes_with_vvds): try: hosts = ansible_zos_module - VolumeHandler = Volume_Handler(volumes_with_vvds) - volume = VolumeHandler.get_available_vol() - test_info = dict(library="", state="present", force_dynamic=True) + volume_handler = Volume_Handler(volumes_with_vvds) + volume = volume_handler.get_available_vol() + test_info = { + "library":"", + "state":"present", + "force_dynamic":True + } test_info['state'] = 'present' ds = get_tmp_ds_name(3,2,True) hosts.all.shell(cmd=f"dtouch -tseq -V{volume} {ds} ") test_info['library'] = ds if test_info.get('volume') is not None: - cmdStr = "dls -l " + ds + " | awk '{print $5}' " - results = hosts.all.shell(cmd=cmdStr) + cmd_str = "dls -l " + ds + " | awk '{print $5}' " + results = hosts.all.shell(cmd=cmd_str) for result in results.contacted.values(): vol = result.get("stdout") test_info['volume'] = vol if test_info.get('persistent'): - cmdStr = "mvstmp APFTEST.PRST" - results = hosts.all.shell(cmd=cmdStr) + cmd_str = "mvstmp APFTEST.PRST" + results = hosts.all.shell(cmd=cmd_str) for result in results.contacted.values(): prstds = result.get("stdout") prstds = prstds[:30] - cmdStr = "dtouch -tseq {0}".format(prstds) - hosts.all.shell(cmd=cmdStr) + cmd_str = f"dtouch -tseq {prstds}" + hosts.all.shell(cmd=cmd_str) test_info['persistent']['data_set_name'] = prstds results = hosts.all.zos_apf(**test_info) for result in results.contacted.values(): @@ -389,26 +453,30 @@ def test_add_already_present(ansible_zos_module, volumes_with_vvds): def test_del_not_present(ansible_zos_module, volumes_with_vvds): try: hosts = ansible_zos_module - VolumeHandler = Volume_Handler(volumes_with_vvds) - volume = VolumeHandler.get_available_vol() - test_info = dict(library="", state="present", force_dynamic=True) + volume_handler = Volume_Handler(volumes_with_vvds) + volume = volume_handler.get_available_vol() + test_info = { + "library":"", + "state":"present", + "force_dynamic":True + } ds = get_tmp_ds_name(1,1,True) hosts.all.shell(cmd=f"dtouch -tseq -V{volume} {ds} ") test_info['library'] = ds if test_info.get('volume') is not None: - cmdStr = "dls -l " + ds + " | awk '{print $5}' " - results = hosts.all.shell(cmd=cmdStr) + cmd_str = "dls -l " + ds + " | awk '{print $5}' " + results = hosts.all.shell(cmd=cmd_str) for result in results.contacted.values(): vol = result.get("stdout") test_info['volume'] = vol if test_info.get('persistent'): - cmdStr = "mvstmp APFTEST.PRST" - results = hosts.all.shell(cmd=cmdStr) + cmd_str = "mvstmp APFTEST.PRST" + results = hosts.all.shell(cmd=cmd_str) for result in results.contacted.values(): prstds = result.get("stdout") prstds = prstds[:30] - cmdStr = "dtouch -tseq {0}".format(prstds) - hosts.all.shell(cmd=cmdStr) + cmd_str = f"dtouch -tseq {prstds}" + hosts.all.shell(cmd=cmd_str) test_info['persistent']['data_set_name'] = prstds test_info['state'] = 'absent' results = hosts.all.zos_apf(**test_info) @@ -421,7 +489,11 @@ def test_del_not_present(ansible_zos_module, volumes_with_vvds): def test_add_not_found(ansible_zos_module): hosts = ansible_zos_module - test_info = dict(library="", state="present", force_dynamic=True) + test_info = { + "library":"", + "state":"present", + "force_dynamic":True + } test_info['library'] = 'APFTEST.FOO.BAR' results = hosts.all.zos_apf(**test_info) for result in results.contacted.values(): @@ -432,27 +504,32 @@ def test_add_not_found(ansible_zos_module): def test_add_with_wrong_volume(ansible_zos_module, volumes_with_vvds): try: hosts = ansible_zos_module - VolumeHandler = Volume_Handler(volumes_with_vvds) - volume = VolumeHandler.get_available_vol() - test_info = dict(library="", volume="", state="present", force_dynamic=True) + volume_handler = Volume_Handler(volumes_with_vvds) + volume = volume_handler.get_available_vol() + test_info = { + "library":"", + "volume":"", + "state":"present", + "force_dynamic":True + } test_info['state'] = 'present' ds = get_tmp_ds_name(3,2,True) hosts.all.shell(cmd=f"dtouch -tseq -V{volume} {ds} ") test_info['library'] = ds if test_info.get('volume') is not None: - cmdStr = "dls -l " + ds + " | awk '{print $5}' " - results = hosts.all.shell(cmd=cmdStr) + cmd_str = "dls -l " + ds + " | awk '{print $5}' " + results = hosts.all.shell(cmd=cmd_str) for result in results.contacted.values(): vol = result.get("stdout") test_info['volume'] = vol if test_info.get('persistent'): - cmdStr = "mvstmp APFTEST.PRST" - results = hosts.all.shell(cmd=cmdStr) + cmd_str = "mvstmp APFTEST.PRST" + results = hosts.all.shell(cmd=cmd_str) for result in results.contacted.values(): prstds = result.get("stdout") prstds = prstds[:30] - cmdStr = "dtouch -tseq {0}".format(prstds) - hosts.all.shell(cmd=cmdStr) + cmd_str = f"dtouch -tseq {prstds}" + hosts.all.shell(cmd=cmd_str) test_info['persistent']['data_set_name'] = prstds test_info['volume'] = 'T12345' results = hosts.all.zos_apf(**test_info) @@ -466,30 +543,39 @@ def test_add_with_wrong_volume(ansible_zos_module, volumes_with_vvds): def test_persist_invalid_ds_format(ansible_zos_module, volumes_with_vvds): try: hosts = ansible_zos_module - VolumeHandler = Volume_Handler(volumes_with_vvds) - volume = VolumeHandler.get_available_vol() - test_info = dict(library="", persistent=dict(data_set_name="", marker="/* {mark} BLOCK */"), state="present", force_dynamic=True) + volume_handler = Volume_Handler(volumes_with_vvds) + volume = volume_handler.get_available_vol() + test_info = { + "library":"", + "persistent":{ + "data_set_name":"", + "marker":"/* {mark} BLOCK */" + }, + "state":"present", + "force_dynamic":True + } test_info['state'] = 'present' ds = get_tmp_ds_name(3,2,True) hosts.all.shell(cmd=f"dtouch -tseq -V{volume} {ds} ") test_info['library'] = ds if test_info.get('volume') is not None: - cmdStr = "dls -l " + ds + " | awk '{print $5}' " - results = hosts.all.shell(cmd=cmdStr) + cmd_str = "dls -l " + ds + " | awk '{print $5}' " + results = hosts.all.shell(cmd=cmd_str) for result in results.contacted.values(): vol = result.get("stdout") test_info['volume'] = vol if test_info.get('persistent'): - cmdStr = "mvstmp APFTEST.PRST" - results = hosts.all.shell(cmd=cmdStr) + cmd_str = "mvstmp APFTEST.PRST" + results = hosts.all.shell(cmd=cmd_str) for result in results.contacted.values(): prstds = result.get("stdout") prstds = prstds[:30] - cmdStr = "dtouch -tseq {0}".format(prstds) - hosts.all.shell(cmd=cmdStr) + cmd_str = f"dtouch -tseq {prstds}" + hosts.all.shell(cmd=cmd_str) test_info['persistent']['data_set_name'] = prstds - cmdStr = "decho \"some text to test persistent data_set format validattion.\" \"{0}\"".format(test_info['persistent']['data_set_name']) - hosts.all.shell(cmd=cmdStr) + ds_name = test_info['persistent']['data_set_name'] + cmd_str =f"decho \"some text to test persistent data_set format validation.\" \"{ds_name}\"" + hosts.all.shell(cmd=cmd_str) results = hosts.all.zos_apf(**test_info) for result in results.contacted.values(): assert result.get("rc") == 8 @@ -500,27 +586,35 @@ def test_persist_invalid_ds_format(ansible_zos_module, volumes_with_vvds): def test_persist_invalid_marker(ansible_zos_module, volumes_with_vvds): try: hosts = ansible_zos_module - VolumeHandler = Volume_Handler(volumes_with_vvds) - volume = VolumeHandler.get_available_vol() - test_info = dict(library="", persistent=dict(data_set_name="", marker="/* {mark} BLOCK */"), state="present", force_dynamic=True) + volume_handler = Volume_Handler(volumes_with_vvds) + volume = volume_handler.get_available_vol() + test_info = { + "library":"", + "persistent":{ + "data_set_name":"", + "marker":"/* {mark} BLOCK */" + }, + "state":"present", + "force_dynamic":True + } test_info['state'] = 'present' ds = get_tmp_ds_name(3,2,True) hosts.all.shell(cmd=f"dtouch -tseq -V{volume} {ds} ") test_info['library'] = ds if test_info.get('volume') is not None: - cmdStr = "dls -l " + ds + " | awk '{print $5}' " - results = hosts.all.shell(cmd=cmdStr) + cmd_str = "dls -l " + ds + " | awk '{print $5}' " + results = hosts.all.shell(cmd=cmd_str) for result in results.contacted.values(): vol = result.get("stdout") test_info['volume'] = vol if test_info.get('persistent'): - cmdStr = "mvstmp APFTEST.PRST" - results = hosts.all.shell(cmd=cmdStr) + cmd_str = "mvstmp APFTEST.PRST" + results = hosts.all.shell(cmd=cmd_str) for result in results.contacted.values(): prstds = result.get("stdout") prstds = prstds[:30] - cmdStr = "dtouch -tseq {0}".format(prstds) - hosts.all.shell(cmd=cmdStr) + cmd_str = f"dtouch -tseq {prstds}" + hosts.all.shell(cmd=cmd_str) test_info['persistent']['data_set_name'] = prstds test_info['persistent']['marker'] = "# Invalid marker format" results = hosts.all.zos_apf(**test_info) @@ -533,27 +627,35 @@ def test_persist_invalid_marker(ansible_zos_module, volumes_with_vvds): def test_persist_invalid_marker_len(ansible_zos_module, volumes_with_vvds): try: hosts = ansible_zos_module - VolumeHandler = Volume_Handler(volumes_with_vvds) - volume = VolumeHandler.get_available_vol() - test_info = dict(library="", persistent=dict(data_set_name="", marker="/* {mark} BLOCK */"), state="present", force_dynamic=True) + volume_handler = Volume_Handler(volumes_with_vvds) + volume = volume_handler.get_available_vol() + test_info = { + "library":"", + "persistent":{ + "data_set_name":"", + "marker":"/* {mark} BLOCK */" + }, + "state":"present", + "force_dynamic":True + } test_info['state'] = 'present' ds = get_tmp_ds_name(3,2,True) hosts.all.shell(cmd=f"dtouch -tseq -V{volume} {ds} ") test_info['library'] = ds if test_info.get('volume') is not None: - cmdStr = "dls -l " + ds + " | awk '{print $5}' " - results = hosts.all.shell(cmd=cmdStr) + cmd_str = "dls -l " + ds + " | awk '{print $5}' " + results = hosts.all.shell(cmd=cmd_str) for result in results.contacted.values(): vol = result.get("stdout") test_info['volume'] = vol if test_info.get('persistent'): - cmdStr = "mvstmp APFTEST.PRST" - results = hosts.all.shell(cmd=cmdStr) + cmd_str = "mvstmp APFTEST.PRST" + results = hosts.all.shell(cmd=cmd_str) for result in results.contacted.values(): prstds = result.get("stdout") prstds = prstds[:30] - cmdStr = "dtouch -tseq {0}".format(prstds) - hosts.all.shell(cmd=cmdStr) + cmd_str = f"dtouch -tseq {prstds}" + hosts.all.shell(cmd=cmd_str) test_info['persistent']['data_set_name'] = prstds test_info['persistent']['marker'] = "/* {mark} This is a awfully lo%70sng marker */" % ("o") results = hosts.all.zos_apf(**test_info) From 54d5ebaf455a8bde56493a6f86247fa3a98d6a6b Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Thu, 6 Jun 2024 09:15:38 -0400 Subject: [PATCH 402/495] Zoau132mount (#1534) * removed zoau 1.0.0-ga, replaced it with zoau 1.3.2.0 * corrected zoau 1.3.2 path to .0 not .1 --- scripts/mounts.env | 26 +++++++++++++------------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/scripts/mounts.env b/scripts/mounts.env index dde75ef71..dbc32bef7 100644 --- a/scripts/mounts.env +++ b/scripts/mounts.env @@ -28,19 +28,19 @@ # data_set - the z/OS data set containing the binaries to mount # space - must be a space before the closing quote # ------------------------------------------------------------------------------ -zoau_mount_list_str="1:1.0.0-ga:/zoau/v1.0.0-ga:IMSTESTU.ZOAU.V100.GA.ZFS "\ -"2:1.0.1-ga:/zoau/v1.0.1-ga:IMSTESTU.ZOAU.V101.GA.ZFS "\ -"3:1.0.3-ptf2:/zoau/v1.0.3-ptf2:IMSTESTU.ZOAU.V103.PTF2.ZFS "\ -"4:1.1.0-ga:/zoau/v1.1.0-ga:IMSTESTU.ZOAU.V110.GA.ZFS "\ -"5:1.1.1-ptf1:/zoau/v1.1.1-ptf1:IMSTESTU.ZOAU.V111.PTF1.ZFS "\ -"6:1.2.0:/zoau/v1.2.0:IMSTESTU.ZOAU.V120.ZFS "\ -"7:1.2.1:/zoau/v1.2.1:IMSTESTU.ZOAU.V121.ZFS "\ -"8:1.2.2:/zoau/v1.2.2:IMSTESTU.ZOAU.V122.ZFS "\ -"9:1.2.3:/zoau/v1.2.3:IMSTESTU.ZOAU.V123.ZFS "\ -"10:1.2.4:/zoau/v1.2.4:IMSTESTU.ZOAU.V124.ZFS "\ -"11:1.2.5:/zoau/v1.2.5:IMSTESTU.ZOAU.V102.GA.ZFS "\ -"12:1.3.0:/zoau/v1.3.0:IMSTESTU.ZOAU.V103.GA5.ZFS "\ -"13:1.3.1:/zoau/v1.3.1:IMSTESTU.ZOAU.V130.ZFS "\ +zoau_mount_list_str="1:1.0.1-ga:/zoau/v1.0.1-ga:IMSTESTU.ZOAU.V101.GA.ZFS "\ +"2:1.0.3-ptf2:/zoau/v1.0.3-ptf2:IMSTESTU.ZOAU.V103.PTF2.ZFS "\ +"3:1.1.0-ga:/zoau/v1.1.0-ga:IMSTESTU.ZOAU.V110.GA.ZFS "\ +"4:1.1.1-ptf1:/zoau/v1.1.1-ptf1:IMSTESTU.ZOAU.V111.PTF1.ZFS "\ +"5:1.2.0:/zoau/v1.2.0:IMSTESTU.ZOAU.V120.ZFS "\ +"6:1.2.1:/zoau/v1.2.1:IMSTESTU.ZOAU.V121.ZFS "\ +"7:1.2.2:/zoau/v1.2.2:IMSTESTU.ZOAU.V122.ZFS "\ +"8:1.2.3:/zoau/v1.2.3:IMSTESTU.ZOAU.V123.ZFS "\ +"9:1.2.4:/zoau/v1.2.4:IMSTESTU.ZOAU.V124.ZFS "\ +"10:1.2.5:/zoau/v1.2.5:IMSTESTU.ZOAU.V102.GA.ZFS "\ +"11:1.3.0:/zoau/v1.3.0:IMSTESTU.ZOAU.V103.GA5.ZFS "\ +"12:1.3.1:/zoau/v1.3.1:IMSTESTU.ZOAU.V130.ZFS "\ +"13:1.3.2:/zoau/v1.3.2.0:IMSTESTU.ZOAU.V100.GA.ZFS "\ "14:latest:/zoau/latest:IMSTESTU.ZOAU.LATEST.ZFS " # ------------------------------------------------------------------------------ From 0354fe313aefc1dffe41ba0010876cb95c834aab Mon Sep 17 00:00:00 2001 From: IsaacVRey <isaac.vega.rey@gmail.com> Date: Fri, 7 Jun 2024 13:44:11 -0600 Subject: [PATCH 403/495] [Enabler][test_zos_job_output_func] Update test suites on functional/modules/test_zos_job_output_func.py to be pylint correct (#1460) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Update test suites on functional/module-utils/test_zos_data_set_func.py to be pylint correct * Correct functional/modules/test_zos_data_set_func.py * Correct functional/modules/test_zos_data_set_func.py * Update test suites on functional/modules/test_zos_job_output_func.py to be pylint correct * Update test_zos_job_output_func.py --------- Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> --- tests/functional/modules/test_zos_job_output_func.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/tests/functional/modules/test_zos_job_output_func.py b/tests/functional/modules/test_zos_job_output_func.py index 96bc0b2bc..606e93aab 100644 --- a/tests/functional/modules/test_zos_job_output_func.py +++ b/tests/functional/modules/test_zos_job_output_func.py @@ -95,14 +95,13 @@ def test_zos_job_output_job_exists(ansible_zos_module): hosts = ansible_zos_module hosts.all.file(path=TEMP_PATH, state="directory") hosts.all.shell( - cmd="echo {0} > {1}/SAMPLE".format(quote(JCL_FILE_CONTENTS), TEMP_PATH) + cmd=f"echo {quote(JCL_FILE_CONTENTS)} > {TEMP_PATH}/SAMPLE" ) jobs = hosts.all.zos_job_submit( - src="{0}/SAMPLE".format(TEMP_PATH), location="uss", volume=None + src=f"{TEMP_PATH}/SAMPLE", location="uss", volume=None ) for job in jobs.contacted.values(): - print(job) assert job.get("jobs") is not None for job in jobs.contacted.values(): @@ -124,10 +123,10 @@ def test_zos_job_output_job_exists_with_filtered_ddname(ansible_zos_module): hosts = ansible_zos_module hosts.all.file(path=TEMP_PATH, state="directory") hosts.all.shell( - cmd="echo {0} > {1}/SAMPLE".format(quote(JCL_FILE_CONTENTS), TEMP_PATH) + cmd=f"echo {quote(JCL_FILE_CONTENTS)} > {TEMP_PATH}/SAMPLE" ) result = hosts.all.zos_job_submit( - src="{0}/SAMPLE".format(TEMP_PATH), location="uss", volume=None + src=f"{TEMP_PATH}/SAMPLE", location="uss", volume=None ) hosts.all.file(path=TEMP_PATH, state="absent") dd_name = "JESMSGLG" From fa9a69457cb19b0d1cd82bf1032acc8205d795b3 Mon Sep 17 00:00:00 2001 From: IsaacVRey <isaac.vega.rey@gmail.com> Date: Fri, 7 Jun 2024 13:44:38 -0600 Subject: [PATCH 404/495] [Enabler][test_zos_job_query_func] Update test suites on functional/modules/test_zos_job_query_func.py to be pylint correct (#1462) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Update test suites on functional/module-utils/test_zos_data_set_func.py to be pylint correct * Correct functional/modules/test_zos_data_set_func.py * Correct functional/modules/test_zos_data_set_func.py * Update test suites on functional/modules/test_zos_job_query_func.py to be pylint correct * Update test_zos_job_query_func.py --------- Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> --- .../modules/test_zos_job_query_func.py | 27 +++++++++---------- 1 file changed, 13 insertions(+), 14 deletions(-) diff --git a/tests/functional/modules/test_zos_job_query_func.py b/tests/functional/modules/test_zos_job_query_func.py index 11680ab57..525d61664 100644 --- a/tests/functional/modules/test_zos_job_query_func.py +++ b/tests/functional/modules/test_zos_job_query_func.py @@ -15,13 +15,12 @@ __metaclass__ = type +import tempfile import ansible.constants import ansible.errors import ansible.utils import pytest -from pprint import pprint from shellescape import quote -import tempfile from ibm_zos_core.tests.helpers.dataset import get_tmp_ds_name @@ -51,19 +50,19 @@ def test_zos_job_query_func(ansible_zos_module): def test_zos_job_id_query_multi_wildcards_func(ansible_zos_module): try: hosts = ansible_zos_module - JDATA_SET_NAME = get_tmp_ds_name() + jdata_set_name = get_tmp_ds_name() hosts.all.file(path=TEMP_PATH, state="directory") hosts.all.shell( - cmd="echo {0} > {1}/SAMPLE".format(quote(JCLQ_FILE_CONTENTS), TEMP_PATH) + cmd=f"echo {quote(JCLQ_FILE_CONTENTS)} > {TEMP_PATH}/SAMPLE" ) hosts.all.zos_data_set( - name=JDATA_SET_NAME, state="present", type="pds", replace=True + name=jdata_set_name, state="present", type="pds", replace=True ) hosts.all.shell( - cmd="cp {0}/SAMPLE \"//'{1}(SAMPLE)'\"".format(TEMP_PATH, JDATA_SET_NAME) + cmd=f"cp {TEMP_PATH}/SAMPLE \"//'{jdata_set_name}(SAMPLE)'\"" ) results = hosts.all.zos_job_submit( - src="{0}(SAMPLE)".format(JDATA_SET_NAME), location="data_set", wait_time_s=10 + src=f"{jdata_set_name}(SAMPLE)", location="data_set", wait_time_s=10 ) for result in results.contacted.values(): assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" @@ -77,26 +76,26 @@ def test_zos_job_id_query_multi_wildcards_func(ansible_zos_module): finally: hosts.all.file(path=TEMP_PATH, state="absent") - hosts.all.zos_data_set(name=JDATA_SET_NAME, state="absent") + hosts.all.zos_data_set(name=jdata_set_name, state="absent") # test to show multi wildcard in Job_name query won't crash the search def test_zos_job_name_query_multi_wildcards_func(ansible_zos_module): try: hosts = ansible_zos_module - NDATA_SET_NAME = get_tmp_ds_name() + ndata_set_name = get_tmp_ds_name() hosts.all.file(path=TEMP_PATH, state="directory") hosts.all.shell( - cmd="echo {0} > {1}/SAMPLE".format(quote(JCLQ_FILE_CONTENTS), TEMP_PATH) + cmd=f"echo {quote(JCLQ_FILE_CONTENTS)} > {TEMP_PATH}/SAMPLE" ) hosts.all.zos_data_set( - name=NDATA_SET_NAME, state="present", type="pds", replace=True + name=ndata_set_name, state="present", type="pds", replace=True ) hosts.all.shell( - cmd="cp {0}/SAMPLE \"//'{1}(SAMPLE)'\"".format(TEMP_PATH, NDATA_SET_NAME) + cmd=f"cp {TEMP_PATH}/SAMPLE \"//'{ndata_set_name}(SAMPLE)'\"" ) results = hosts.all.zos_job_submit( - src="{0}(SAMPLE)".format(NDATA_SET_NAME), location="data_set", wait_time_s=10 + src=f"{ndata_set_name}(SAMPLE)", location="data_set", wait_time_s=10 ) for result in results.contacted.values(): assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" @@ -109,7 +108,7 @@ def test_zos_job_name_query_multi_wildcards_func(ansible_zos_module): finally: hosts.all.file(path=TEMP_PATH, state="absent") - hosts.all.zos_data_set(name=NDATA_SET_NAME, state="absent") + hosts.all.zos_data_set(name=ndata_set_name, state="absent") def test_zos_job_id_query_short_ids_func(ansible_zos_module): From 8185ff65e2d33be5eecc4de9e77d4ab8c84aded9 Mon Sep 17 00:00:00 2001 From: IsaacVRey <isaac.vega.rey@gmail.com> Date: Fri, 7 Jun 2024 13:44:55 -0600 Subject: [PATCH 405/495] [Enabler][test_zos_fetch_func] Update test suites on functional/modules/test_zos_fetch_func.py to be pylint correct (#1458) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Update test suites on functional/module-utils/test_zos_data_set_func.py to be pylint correct * Correct functional/modules/test_zos_data_set_func.py * Correct functional/modules/test_zos_data_set_func.py * Update test suites on functional/modules/test_zos_fetch_func.py to be pylint correct * Update test_zos_fetch_func.py * Remove use of 'pass' --------- Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> Co-authored-by: Ivan Moreno <iamorenosoto@gmail.com> --- .../functional/modules/test_zos_fetch_func.py | 264 +++++++++++++----- 1 file changed, 190 insertions(+), 74 deletions(-) diff --git a/tests/functional/modules/test_zos_fetch_func.py b/tests/functional/modules/test_zos_fetch_func.py index 0a1a31c48..83d0c4947 100644 --- a/tests/functional/modules/test_zos_fetch_func.py +++ b/tests/functional/modules/test_zos_fetch_func.py @@ -23,7 +23,9 @@ from ansible.utils.hashing import checksum from shellescape import quote +# pylint: disable-next=import-error from ibm_zos_core.tests.helpers.volumes import Volume_Handler +# pylint: disable-next=import-error from ibm_zos_core.tests.helpers.dataset import get_tmp_ds_name __metaclass__ = type @@ -89,22 +91,22 @@ def extract_member_name(data_set): return member def create_and_populate_test_ps_vb(ansible_zos_module, name): - params=dict( - name=name, - type='seq', - record_format='vb', - record_length='3180', - block_size='3190' - ) + params={ + "name":name, + "type":'seq', + "record_format":'vb', + "record_length":'3180', + "block_size":'3190' + } ansible_zos_module.all.zos_data_set(**params) - ansible_zos_module.all.shell(cmd="decho \"{0}\" \"{1}\"".format(TEST_DATA, name)) + ansible_zos_module.all.shell(cmd=f"decho \"{TEST_DATA}\" \"{name}\"") def delete_test_ps_vb(ansible_zos_module, name): - params=dict( - name=name, - state='absent' - ) + params={ + "name":name, + "state":'absent' + } ansible_zos_module.all.zos_data_set(**params) @@ -119,11 +121,11 @@ def create_vsam_data_set(hosts, name, ds_type, key_length=None, key_offset=None) key_length (int, optional) -- Key length (only for KSDS data sets). key_offset (int, optional) -- Key offset (only for KSDS data sets). """ - params = dict( - name=name, - type=ds_type, - state="present" - ) + params = { + "name":name, + "type":ds_type, + "state":"present" + } if ds_type == "ksds": params["key_length"] = key_length params["key_offset"] = key_offset @@ -133,13 +135,16 @@ def create_vsam_data_set(hosts, name, ds_type, key_length=None, key_offset=None) def test_fetch_uss_file_not_present_on_local_machine(ansible_zos_module): hosts = ansible_zos_module - params = dict(src="/etc/profile", dest="/tmp/", flat=True) + params = { + "src":"/etc/profile", + "dest":"/tmp/", + "flat":True + } dest_path = "/tmp/profile" results = None try: results = hosts.all.zos_fetch(**params) - print(results.contacted.values()) for result in results.contacted.values(): @@ -153,9 +158,14 @@ def test_fetch_uss_file_not_present_on_local_machine(ansible_zos_module): def test_fetch_uss_file_replace_on_local_machine(ansible_zos_module): - open("/tmp/profile", "w").close() + with open("/tmp/profile", "w",encoding="utf-8") as file: + file.close() hosts = ansible_zos_module - params = dict(src="/etc/profile", dest="/tmp/", flat=True) + params = { + "src":"/etc/profile", + "dest":"/tmp/", + "flat":True + } dest_path = "/tmp/profile" local_checksum = checksum(dest_path, hash_func=sha256) @@ -172,7 +182,11 @@ def test_fetch_uss_file_replace_on_local_machine(ansible_zos_module): def test_fetch_uss_file_present_on_local_machine(ansible_zos_module): hosts = ansible_zos_module - params = dict(src="/etc/profile", dest="/tmp/", flat=True) + params = { + "src":"/etc/profile", + "dest":"/tmp/", + "flat":True + } dest_path = "/tmp/profile" hosts.all.zos_fetch(**params) local_checksum = checksum(dest_path, hash_func=sha256) @@ -190,9 +204,19 @@ def test_fetch_uss_file_present_on_local_machine(ansible_zos_module): def test_fetch_sequential_data_set_fixed_block(ansible_zos_module): hosts = ansible_zos_module TEST_PS = get_tmp_ds_name() - hosts.all.zos_data_set(name=TEST_PS, state="present", type="seq", space_type="m", space_primary=5) - hosts.all.shell(cmd="decho \"{0}\" \"{1}\"".format(TEST_DATA, TEST_PS)) - params = dict(src=TEST_PS, dest="/tmp/", flat=True) + hosts.all.zos_data_set( + name=TEST_PS, + state="present", + type="seq", + space_type="m", + space_primary=5 + ) + hosts.all.shell(cmd=f"decho \"{TEST_DATA}\" \"{TEST_PS}\"") + params = { + "src":TEST_PS, + "dest":"/tmp/", + "flat":True + } dest_path = "/tmp/" + TEST_PS try: results = hosts.all.zos_fetch(**params) @@ -212,7 +236,11 @@ def test_fetch_sequential_data_set_variable_block(ansible_zos_module): hosts = ansible_zos_module TEST_PS_VB = get_tmp_ds_name(3) create_and_populate_test_ps_vb(ansible_zos_module, TEST_PS_VB) - params = dict(src=TEST_PS_VB, dest="/tmp/", flat=True) + params = { + "src":TEST_PS_VB, + "dest":"/tmp/", + "flat":True + } dest_path = "/tmp/" + TEST_PS_VB try: results = hosts.all.zos_fetch(**params) @@ -234,8 +262,12 @@ def test_fetch_partitioned_data_set(ansible_zos_module): hosts.all.zos_data_set(name=TEST_PDS, state="present", type="pdse") TEST_PDS_MEMBER = TEST_PDS + "(MEM)" hosts.all.zos_data_set(name=TEST_PDS_MEMBER, type="member") - hosts.all.shell(cmd="decho \"{0}\" \"{1}\"".format(TEST_DATA, TEST_PDS_MEMBER)) - params = dict(src=TEST_PDS, dest="/tmp/", flat=True) + hosts.all.shell(cmd=f"decho \"{TEST_DATA}\" \"{TEST_PDS_MEMBER}\"") + params = { + "src":TEST_PDS, + "dest":"/tmp/", + "flat":True + } dest_path = "/tmp/" + TEST_PDS try: results = hosts.all.zos_fetch(**params) @@ -263,12 +295,12 @@ def test_fetch_vsam_data_set(ansible_zos_module, volumes_on_systems): # start by creating the vsam dataset (could use a helper instead? ) hosts.all.file(path=temp_jcl_path, state="directory") hosts.all.shell( - cmd="echo {0} > {1}/SAMPLE".format(quote(KSDS_CREATE_JCL.format(volume_1, test_vsam)), temp_jcl_path) + cmd=f"echo {quote(KSDS_CREATE_JCL.format(volume_1, test_vsam))} > {temp_jcl_path}/SAMPLE" ) hosts.all.zos_job_submit( - src="{0}/SAMPLE".format(temp_jcl_path), location="uss", wait_time_s=30 + src=f"{temp_jcl_path}/SAMPLE", location="uss", wait_time_s=30 ) - hosts.all.shell(cmd="echo \"{0}\c\" > {1}".format(TEST_DATA, USS_FILE)) + hosts.all.shell(cmd=f"echo \"{TEST_DATA}\c\" > {USS_FILE}") hosts.all.zos_encode( src=USS_FILE, dest=test_vsam, @@ -278,7 +310,12 @@ def test_fetch_vsam_data_set(ansible_zos_module, volumes_on_systems): }, ) - params = dict(src=test_vsam, dest="/tmp/", flat=True, is_binary=True) + params = { + "src":test_vsam, + "dest":"/tmp/", + "flat":True, + "is_binary":True + } results = hosts.all.zos_fetch(**params) for result in results.contacted.values(): assert result.get("changed") is True @@ -286,9 +323,8 @@ def test_fetch_vsam_data_set(ansible_zos_module, volumes_on_systems): assert result.get("module_stderr") is None assert result.get("dest") == dest_path assert os.path.exists(dest_path) - file = open(dest_path, 'r') + file = open(dest_path, 'r',encoding="utf-8") read_file = file.read() - print(read_file) assert read_file == TEST_DATA finally: @@ -303,7 +339,11 @@ def test_fetch_vsam_empty_data_set(ansible_zos_module): hosts = ansible_zos_module src_ds = "TEST.VSAM.DATA" create_vsam_data_set(hosts, src_ds, "ksds", key_length=12, key_offset=0) - params = dict(src=src_ds, dest="/tmp/", flat=True) + params = { + "src":src_ds, + "dest":"/tmp/", + "flat":True + } dest_path = "/tmp/" + src_ds try: results = hosts.all.zos_fetch(**params) @@ -325,10 +365,13 @@ def test_fetch_partitioned_data_set_member_in_binary_mode(ansible_zos_module): hosts.all.zos_data_set(name=TEST_PDS, state="present") TEST_PDS_MEMBER = TEST_PDS + "(MEM)" hosts.all.zos_data_set(name=TEST_PDS_MEMBER, type="member") - hosts.all.shell(cmd="decho \"{0}\" \"{1}\"".format(TEST_DATA, TEST_PDS_MEMBER)) - params = dict( - src=TEST_PDS_MEMBER, dest="/tmp/", flat=True, is_binary=True - ) + hosts.all.shell(cmd=f"decho \"{TEST_DATA}\" \"{TEST_PDS_MEMBER}\"") + params = { + "src":TEST_PDS_MEMBER, + "dest":"/tmp/", + "flat":True, + "is_binary":True + } dest_path = "/tmp/" + extract_member_name(TEST_PDS_MEMBER) try: results = hosts.all.zos_fetch(**params) @@ -349,9 +392,20 @@ def test_fetch_partitioned_data_set_member_in_binary_mode(ansible_zos_module): def test_fetch_sequential_data_set_in_binary_mode(ansible_zos_module): hosts = ansible_zos_module TEST_PS = get_tmp_ds_name() - hosts.all.zos_data_set(name=TEST_PS, state="present", type="seq", space_type="m", space_primary=5) - hosts.all.shell(cmd="decho \"{0}\" \"{1}\"".format(TEST_DATA, TEST_PS)) - params = dict(src=TEST_PS, dest="/tmp/", flat=True, is_binary=True) + hosts.all.zos_data_set( + name=TEST_PS, + state="present", + type="seq", + space_type="m", + space_primary=5 + ) + hosts.all.shell(cmd=f"decho \"{TEST_DATA}\" \"{TEST_PS}\"") + params = { + "src":TEST_PS, + "dest":"/tmp/", + "flat":True, + "is_binary":True + } dest_path = "/tmp/" + TEST_PS try: results = hosts.all.zos_fetch(**params) @@ -373,8 +427,13 @@ def test_fetch_partitioned_data_set_binary_mode(ansible_zos_module): hosts.all.zos_data_set(name=TEST_PDS, state="present", type="pdse") TEST_PDS_MEMBER = TEST_PDS + "(MEM)" hosts.all.zos_data_set(name=TEST_PDS_MEMBER, type="member") - hosts.all.shell(cmd="decho \"{0}\" \"{1}\"".format(TEST_DATA, TEST_PDS_MEMBER)) - params = dict(src=TEST_PDS, dest="/tmp/", flat=True, is_binary=True) + hosts.all.shell(cmd=f"decho \"{TEST_DATA}\" \"{TEST_PDS_MEMBER}\"") + params = { + "src":TEST_PDS, + "dest":"/tmp/", + "flat":True, + "is_binary":True + } dest_path = "/tmp/" + TEST_PDS try: results = hosts.all.zos_fetch(**params) @@ -394,7 +453,11 @@ def test_fetch_partitioned_data_set_binary_mode(ansible_zos_module): def test_fetch_sequential_data_set_empty(ansible_zos_module): hosts = ansible_zos_module src = get_tmp_ds_name() - params = dict(src=src, dest="/tmp/", flat=True) + params = { + "src":src, + "dest":"/tmp/", + "flat":True + } dest_path = "/tmp/" + src try: hosts.all.zos_data_set(name=src, type='seq', state='present') @@ -423,7 +486,11 @@ def test_fetch_partitioned_data_set_empty_fails(ansible_zos_module): record_format="fba", record_length=25, ) - params = dict(src=pds_name, dest="/tmp/", flat=True) + params = { + "src":pds_name, + "dest":"/tmp/", + "flat":True + } try: results = hosts.all.zos_fetch(**params) for result in results.contacted.values(): @@ -446,7 +513,11 @@ def test_fetch_partitioned_data_set_member_empty(ansible_zos_module): ) hosts.all.zos_data_set(name=pds_name, type="pds") hosts.all.zos_data_set(name=pds_name + "(MYDATA)", type="member", replace="yes") - params = dict(src=pds_name + "(MYDATA)", dest="/tmp/", flat=True) + params = { + "src":pds_name + "(MYDATA)", + "dest":"/tmp/", + "flat":True + } dest_path = "/tmp/MYDATA" try: results = hosts.all.zos_fetch(**params) @@ -464,12 +535,12 @@ def test_fetch_partitioned_data_set_member_empty(ansible_zos_module): def test_fetch_missing_uss_file_does_not_fail(ansible_zos_module): hosts = ansible_zos_module - params = dict( - src="/tmp/dummy_file_on_remote_host", - dest="/tmp/", - flat=True, - fail_on_missing=False, - ) + params = { + "src":"/tmp/dummy_file_on_remote_host", + "dest":"/tmp/", + "flat":True, + "fail_on_missing":False, + } try: results = hosts.all.zos_fetch(**params) for result in results.contacted.values(): @@ -482,7 +553,11 @@ def test_fetch_missing_uss_file_does_not_fail(ansible_zos_module): def test_fetch_missing_uss_file_fails(ansible_zos_module): hosts = ansible_zos_module - params = dict(src="/tmp/dummy_file_on_remote_host", dest="/tmp/", flat=True) + params = { + "src":"/tmp/dummy_file_on_remote_host", + "dest":"/tmp/", + "flat":True + } try: results = hosts.all.zos_fetch(**params) for result in results.contacted.values(): @@ -494,9 +569,12 @@ def test_fetch_missing_uss_file_fails(ansible_zos_module): def test_fetch_missing_mvs_data_set_does_not_fail(ansible_zos_module): hosts = ansible_zos_module src = get_tmp_ds_name() - params = dict( - src=src, dest="/tmp/", flat=True, fail_on_missing=False - ) + params = { + "src":src, + "dest":"/tmp/", + "flat":True, + "fail_on_missing":False + } try: results = hosts.all.zos_fetch(**params) for result in results.contacted.values(): @@ -511,7 +589,11 @@ def test_fetch_missing_mvs_data_set_does_not_fail(ansible_zos_module): def test_fetch_partitioned_data_set_member_missing_fails(ansible_zos_module): hosts = ansible_zos_module TEST_PDS = get_tmp_ds_name() - params = dict(src=TEST_PDS + "(DUMMY)", dest="/tmp/", flat=True) + params = { + "src":TEST_PDS + "(DUMMY)", + "dest":"/tmp/", + "flat":True + } try: results = hosts.all.zos_fetch(**params) for result in results.contacted.values(): @@ -524,7 +606,11 @@ def test_fetch_partitioned_data_set_member_missing_fails(ansible_zos_module): def test_fetch_mvs_data_set_missing_fails(ansible_zos_module): hosts = ansible_zos_module src = get_tmp_ds_name() - params = dict(src=src, dest="/tmp/", flat=True) + params = { + "src":src, + "dest":"/tmp/", + "flat":True + } try: results = hosts.all.zos_fetch(**params) for result in results.contacted.values(): @@ -537,16 +623,26 @@ def test_fetch_mvs_data_set_missing_fails(ansible_zos_module): def test_fetch_sequential_data_set_replace_on_local_machine(ansible_zos_module): hosts = ansible_zos_module TEST_PS = get_tmp_ds_name() - hosts.all.zos_data_set(name=TEST_PS, state="present", type="seq", space_type="m", space_primary=5) + hosts.all.zos_data_set( + name=TEST_PS, + state="present", + type="seq", + space_type="m", + space_primary=5 + ) ds_name = TEST_PS hosts.all.zos_data_set(name=TEST_PS, state="present") - hosts.all.shell(cmd="decho \"{0}\" \"{1}\"".format(TEST_DATA, TEST_PS)) + hosts.all.shell(cmd=f"decho \"{TEST_DATA}\" \"{TEST_PS}\"") dest_path = "/tmp/" + TEST_PS - with open(dest_path, "w") as infile: + with open(dest_path, "w", encoding="utf-8") as infile: infile.write(DUMMY_DATA) local_checksum = checksum(dest_path, hash_func=sha256) - params = dict(src=TEST_PS, dest="/tmp/", flat=True) + params = { + "src":TEST_PS, + "dest":"/tmp/", + "flat":True + } try: results = hosts.all.zos_fetch(**params) for result in results.contacted.values(): @@ -575,13 +671,17 @@ def test_fetch_partitioned_data_set_replace_on_local_machine(ansible_zos_module) ) hosts.all.zos_data_set(name=pds_name + "(MYDATA)", type="member", replace="yes") os.mkdir(dest_path) - with open(full_path, "w") as infile: + with open(full_path, "w", encoding="utf-8") as infile: infile.write(DUMMY_DATA) - with open(dest_path + "/NEWMEM", "w") as infile: + with open(dest_path + "/NEWMEM", "w", encoding="utf-8") as infile: infile.write(DUMMY_DATA) prev_timestamp = os.path.getmtime(dest_path) - params = dict(src=pds_name, dest="/tmp/", flat=True) + params = { + "src":pds_name, + "dest":"/tmp/", + "flat":True + } try: results = hosts.all.zos_fetch(**params) for result in results.contacted.values(): @@ -597,10 +697,14 @@ def test_fetch_partitioned_data_set_replace_on_local_machine(ansible_zos_module) def test_fetch_uss_file_insufficient_write_permission_fails(ansible_zos_module): hosts = ansible_zos_module dest_path = "/tmp/profile" - with open(dest_path, "w"): - pass + with open(dest_path, "w",encoding="utf-8") as dest_file: + dest_file.close() os.chmod(dest_path, stat.S_IREAD) - params = dict(src="/etc/profile", dest="/tmp/", flat=True) + params = { + "src":"/etc/profile", + "dest":"/tmp/", + "flat":True + } try: results = hosts.all.zos_fetch(**params) for result in results.contacted.values(): @@ -616,7 +720,11 @@ def test_fetch_pds_dir_insufficient_write_permission_fails(ansible_zos_module): dest_path = "/tmp/" + TEST_PDS os.mkdir(dest_path) os.chmod(dest_path, stat.S_IREAD) - params = dict(src=TEST_PDS, dest="/tmp/", flat=True) + params = { + "src":TEST_PDS, + "dest":"/tmp/", + "flat":True + } try: results = hosts.all.zos_fetch(**params) for result in results.contacted.values(): @@ -631,11 +739,15 @@ def test_fetch_use_data_set_qualifier(ansible_zos_module): src = get_tmp_ds_name()[:25] dest_path = "/tmp/"+ src hosts.all.zos_data_set(name="OMVSADM." + src, type="seq", state="present") - params = dict(src=src, dest="/tmp/", flat=True, use_qualifier=True) + params = { + "src":src, + "dest":"/tmp/", + "flat":True, + "use_qualifier":True + } try: results = hosts.all.zos_fetch(**params) for result in results.contacted.values(): - print(result) assert result.get("changed") is True assert result.get("data_set_type") == "Sequential" assert result.get("module_stderr") is None @@ -650,8 +762,12 @@ def test_fetch_flat_create_dirs(ansible_zos_module, z_python_interpreter): z_int = z_python_interpreter hosts = ansible_zos_module remote_host = z_int[1].get("inventory").strip(",") - dest_path = "/tmp/{0}/etc/ssh/ssh_config".format(remote_host) - params = dict(src="/etc/ssh/ssh_config", dest="/tmp/", flat=False) + dest_path = f"/tmp/{remote_host}/etc/ssh/ssh_config" + params = { + "src":"/etc/ssh/ssh_config", + "dest":"/tmp/", + "flat":False + } try: shutil.rmtree("/tmp/" + remote_host) except FileNotFoundError: From 9e9b32566c8d8be3b2f260378088395c6c820f14 Mon Sep 17 00:00:00 2001 From: IsaacVRey <isaac.vega.rey@gmail.com> Date: Fri, 7 Jun 2024 13:45:10 -0600 Subject: [PATCH 406/495] [Enabler][test_zos_encode_func] Update test suites on functional/modules/test_zos_encode_func.py to be pylint correct (#1457) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Update test suites on functional/module-utils/test_zos_data_set_func.py to be pylint correct * Correct functional/modules/test_zos_data_set_func.py * Update test suites on functional/modules/test_zos_encode_func.py to be pylint correct * Correct functional/modules/test_zos_data_set_func.py * Update test_zos_encode_func.py * Remove commented code * Fix bug --------- Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> Co-authored-by: Ivan Moreno <iamorenosoto@gmail.com> Co-authored-by: André Marcel Gutiérrez Benítez <amgutierrezbenitez@hotmail.com> --- .../modules/test_zos_encode_func.py | 170 ++++++++++-------- 1 file changed, 95 insertions(+), 75 deletions(-) diff --git a/tests/functional/modules/test_zos_encode_func.py b/tests/functional/modules/test_zos_encode_func.py index df01a6133..cfb340fa4 100644 --- a/tests/functional/modules/test_zos_encode_func.py +++ b/tests/functional/modules/test_zos_encode_func.py @@ -12,9 +12,9 @@ # limitations under the License. from __future__ import absolute_import, division, print_function -from shellescape import quote -from pprint import pprint from os import path +from shellescape import quote +# pylint: disable-next=import-error from ibm_zos_core.tests.helpers.dataset import get_tmp_ds_name __metaclass__ = type @@ -93,11 +93,11 @@ def create_vsam_data_set(hosts, name, ds_type, add_data=False, key_length=None, key_length (int, optional) -- Key length (only for KSDS data sets). key_offset (int, optional) -- Key offset (only for KSDS data sets). """ - params = dict( - name=name, - type=ds_type, - state="present" - ) + params = { + "name":name, + "type":ds_type, + "state":"present" + } if ds_type == "ksds": params["key_length"] = key_length params["key_offset"] = key_offset @@ -107,8 +107,12 @@ def create_vsam_data_set(hosts, name, ds_type, add_data=False, key_length=None, if add_data: record_src = "/tmp/zos_copy_vsam_record" - hosts.all.shell(cmd="echo {0} >> {1}".format(quote(VSAM_RECORDS), record_src)) - hosts.all.zos_encode(src=record_src, dest=name, encoding={"from": "ISO8859-1", "to": "IBM-1047"}) + hosts.all.shell(cmd=f"echo {quote(VSAM_RECORDS)} >> {record_src}") + hosts.all.zos_encode( + src=record_src, + dest=name, + encoding={"from": "ISO8859-1", "to": "IBM-1047"} + ) hosts.all.file(path=record_src, state="absent") def test_uss_encoding_conversion_with_invalid_encoding(ansible_zos_module): @@ -140,7 +144,6 @@ def test_uss_encoding_conversion_with_the_same_encoding(ansible_zos_module): "to": FROM_ENCODING, }, ) - pprint(vars(results)) for result in results.contacted.values(): assert result.get("msg") is not None assert result.get("backup_name") is None @@ -159,14 +162,13 @@ def test_uss_encoding_conversion_without_dest(ansible_zos_module): "to": TO_ENCODING, }, ) - pprint(vars(results)) for result in results.contacted.values(): assert result.get("src") == USS_FILE assert result.get("dest") == USS_FILE assert result.get("backup_name") is None assert result.get("changed") is True - tag_results = hosts.all.shell(cmd="ls -T {0}".format(USS_FILE)) + tag_results = hosts.all.shell(cmd=f"ls -T {USS_FILE}") for result in tag_results.contacted.values(): assert TO_ENCODING in result.get("stdout") finally: @@ -186,14 +188,13 @@ def test_uss_encoding_conversion_when_dest_not_exists_01(ansible_zos_module): "to": TO_ENCODING, }, ) - pprint(vars(results)) for result in results.contacted.values(): assert result.get("src") == USS_FILE assert result.get("dest") == USS_NONE_FILE assert result.get("backup_name") is None assert result.get("changed") is True - tag_results = hosts.all.shell(cmd="ls -T {0}".format(USS_NONE_FILE)) + tag_results = hosts.all.shell(cmd=f"ls -T {USS_NONE_FILE}") for result in tag_results.contacted.values(): assert TO_ENCODING in result.get("stdout") finally: @@ -216,7 +217,6 @@ def test_uss_encoding_conversion_when_dest_not_exists_02(ansible_zos_module): "to": TO_ENCODING, }, ) - pprint(vars(results)) for result in results.contacted.values(): assert result.get("src") == MVS_PS assert result.get("dest") == MVS_NONE_PS @@ -245,7 +245,7 @@ def test_uss_encoding_conversion_uss_file_to_uss_file(ansible_zos_module): assert result.get("backup_name") is None assert result.get("changed") is True - tag_results = hosts.all.shell(cmd="ls -T {0}".format(USS_DEST_FILE)) + tag_results = hosts.all.shell(cmd=f"ls -T {USS_DEST_FILE}") for result in tag_results.contacted.values(): assert FROM_ENCODING in result.get("stdout") finally: @@ -266,14 +266,13 @@ def test_uss_encoding_conversion_uss_file_to_uss_path(ansible_zos_module): "to": FROM_ENCODING, }, ) - pprint(vars(results)) for result in results.contacted.values(): assert result.get("src") == USS_FILE assert result.get("dest") == USS_DEST_PATH assert result.get("backup_name") is None assert result.get("changed") is True - tag_results = hosts.all.shell(cmd="ls -T {0}/{1}".format(USS_DEST_PATH, path.basename(USS_FILE))) + tag_results = hosts.all.shell(cmd=f"ls -T {USS_DEST_PATH}/{path.basename(USS_FILE)}") for result in tag_results.contacted.values(): assert FROM_ENCODING in result.get("stdout") finally: @@ -303,7 +302,7 @@ def test_uss_encoding_conversion_uss_path_to_uss_path(ansible_zos_module): assert result.get("backup_name") is not None assert result.get("changed") is True - tag_results = hosts.all.shell(cmd="ls -T {0}".format(USS_DEST_PATH)) + tag_results = hosts.all.shell(cmd=f"ls -T {USS_DEST_PATH}") for result in tag_results.contacted.values(): assert FROM_ENCODING in result.get("stdout") assert TO_ENCODING not in result.get("stdout") @@ -360,7 +359,7 @@ def test_uss_encoding_conversion_mvs_ps_to_uss_file(ansible_zos_module): assert result.get("backup_name") is not None assert result.get("changed") is True - tag_results = hosts.all.shell(cmd="ls -T {0}".format(USS_DEST_FILE)) + tag_results = hosts.all.shell(cmd=f"ls -T {USS_DEST_FILE}") for result in tag_results.contacted.values(): assert TO_ENCODING in result.get("stdout") finally: @@ -374,7 +373,12 @@ def test_uss_encoding_conversion_uss_file_to_mvs_pds(ansible_zos_module): hosts = ansible_zos_module MVS_PDS = get_tmp_ds_name() hosts.all.copy(content=TEST_DATA, dest=USS_FILE) - hosts.all.zos_data_set(name=MVS_PDS, state="present", type="pds", record_length=TEST_DATA_RECORD_LENGTH) + hosts.all.zos_data_set( + name=MVS_PDS, + state="present", + type="pds", + record_length=TEST_DATA_RECORD_LENGTH + ) results = hosts.all.zos_encode( src=USS_FILE, dest=MVS_PDS, @@ -399,7 +403,12 @@ def test_uss_encoding_conversion_uss_file_to_mvs_pds_member(ansible_zos_module): MVS_PDS = get_tmp_ds_name() MVS_PDS_MEMBER = MVS_PDS + '(MEM)' hosts.all.copy(content=TEST_DATA, dest=USS_FILE) - hosts.all.zos_data_set(name=MVS_PDS, state="present", type="pds", record_length=TEST_DATA_RECORD_LENGTH) + hosts.all.zos_data_set( + name=MVS_PDS, + state="present", + type="pds", + record_length=TEST_DATA_RECORD_LENGTH + ) results = hosts.all.zos_data_set( name=MVS_PDS_MEMBER, type="member", state="present" ) @@ -430,7 +439,12 @@ def test_uss_encoding_conversion_mvs_pds_member_to_uss_file(ansible_zos_module): hosts = ansible_zos_module MVS_PDS = get_tmp_ds_name() MVS_PDS_MEMBER = MVS_PDS + '(MEM)' - hosts.all.zos_data_set(name=MVS_PDS, state="present", type="pds", record_length=TEST_DATA_RECORD_LENGTH) + hosts.all.zos_data_set( + name=MVS_PDS, + state="present", + type="pds", + record_length=TEST_DATA_RECORD_LENGTH + ) hosts.all.zos_data_set( name=MVS_PDS_MEMBER, type="member", state="present" ) @@ -451,7 +465,7 @@ def test_uss_encoding_conversion_mvs_pds_member_to_uss_file(ansible_zos_module): assert result.get("backup_name") is not None assert result.get("changed") is True - tag_results = hosts.all.shell(cmd="ls -T {0}".format(USS_DEST_FILE)) + tag_results = hosts.all.shell(cmd=f"ls -T {USS_DEST_FILE}") for result in tag_results.contacted.values(): assert TO_ENCODING in result.get("stdout") finally: @@ -467,7 +481,12 @@ def test_uss_encoding_conversion_uss_path_to_mvs_pds(ansible_zos_module): hosts.all.file(path=USS_PATH, state="directory") hosts.all.copy(content=TEST_DATA, dest=USS_PATH + "/encode1") hosts.all.copy(content=TEST_DATA, dest=USS_PATH + "/encode2") - hosts.all.zos_data_set(name=MVS_PDS, state="present", type="pds", record_length=TEST_DATA_RECORD_LENGTH) + hosts.all.zos_data_set( + name=MVS_PDS, + state="present", + type="pds", + record_length=TEST_DATA_RECORD_LENGTH + ) results = hosts.all.zos_encode( src=USS_PATH, dest=MVS_PDS, @@ -497,7 +516,7 @@ def test_uss_encoding_conversion_uss_path_to_mvs_pds(ansible_zos_module): assert result.get("backup_name") is None assert result.get("changed") is True - tag_results = hosts.all.shell(cmd="ls -T {0}".format(USS_DEST_PATH)) + tag_results = hosts.all.shell(cmd=f"ls -T {USS_DEST_PATH}") for result in tag_results.contacted.values(): assert FROM_ENCODING in result.get("stdout") assert "untagged" not in result.get("stdout") @@ -513,7 +532,7 @@ def test_uss_encoding_conversion_mvs_ps_to_mvs_pds_member(ansible_zos_module): MVS_PDS_MEMBER = MVS_PDS + '(MEM)' MVS_PS = get_tmp_ds_name() hosts.all.zos_data_set(name=MVS_PS, state="present", type="seq") - hosts.all.shell(cmd="cp {0} \"//'{1}'\" ".format(quote(TEST_DATA), MVS_PS)) + hosts.all.shell(cmd=f"cp {quote(TEST_DATA)} \"//'{MVS_PS}'\" ") hosts.all.zos_data_set(name=MVS_PDS, state="present", type="pds") hosts.all.zos_data_set( name=MVS_PDS_MEMBER, type="member", state="present" @@ -527,7 +546,6 @@ def test_uss_encoding_conversion_mvs_ps_to_mvs_pds_member(ansible_zos_module): }, ) for result in results.contacted.values(): - print(result) assert result.get("src") == MVS_PS assert result.get("dest") == MVS_PDS_MEMBER assert result.get("backup_name") is None @@ -542,10 +560,10 @@ def test_uss_encoding_conversion_uss_file_to_mvs_vsam(ansible_zos_module): hosts.all.copy(content=TEST_DATA, dest=USS_FILE) hosts.all.file(path=TEMP_JCL_PATH, state="directory") hosts.all.shell( - cmd="echo {0} > {1}/SAMPLE".format(quote(KSDS_CREATE_JCL.format(MVS_VS)), TEMP_JCL_PATH) + cmd=f"echo {quote(KSDS_CREATE_JCL.format(MVS_VS))} > {TEMP_JCL_PATH}/SAMPLE" ) results = hosts.all.zos_job_submit( - src="{0}/SAMPLE".format(TEMP_JCL_PATH), location="uss", wait_time_s=30 + src=f"{TEMP_JCL_PATH}/SAMPLE", location="uss", wait_time_s=30 ) for result in results.contacted.values(): @@ -592,13 +610,8 @@ def test_uss_encoding_conversion_mvs_vsam_to_uss_file(ansible_zos_module): assert result.get("dest") == USS_DEST_FILE assert result.get("backup_name") is not None assert result.get("changed") is True - # How can we add a content validation without having to encode again ? - # cat_result = hosts.all.shell(cmd="iconv -f {0} -t {1} {2}".format(TO_ENCODING, FROM_ENCODING, USS_DEST_FILE)) - # print(cat_result.contacted.values()) - # for uss_file_result in cat_result.contacted.values(): - # assert TEST_DATA in uss_file_result.get("stdout") - tag_results = hosts.all.shell(cmd="ls -T {0}".format(USS_DEST_FILE)) + tag_results = hosts.all.shell(cmd=f"ls -T {USS_DEST_FILE}") for result in tag_results.contacted.values(): assert TO_ENCODING in result.get("stdout") finally: @@ -613,7 +626,12 @@ def test_uss_encoding_conversion_mvs_vsam_to_mvs_ps(ansible_zos_module): MVS_VS = get_tmp_ds_name() create_vsam_data_set(hosts, MVS_VS, "ksds", add_data=True, key_length=12, key_offset=0) hosts.all.zos_data_set(name=MVS_PS, state="absent") - hosts.all.zos_data_set(name=MVS_PS, state="present", type="seq", record_length=TEST_DATA_RECORD_LENGTH) + hosts.all.zos_data_set( + name=MVS_PS, + state="present", + type="seq", + record_length=TEST_DATA_RECORD_LENGTH + ) results = hosts.all.zos_encode( src=MVS_VS, dest=MVS_PS, @@ -637,7 +655,12 @@ def test_uss_encoding_conversion_mvs_vsam_to_mvs_pds_member(ansible_zos_module): MVS_PDS = get_tmp_ds_name() create_vsam_data_set(hosts, MVS_VS, "ksds", add_data=True, key_length=12, key_offset=0) MVS_PDS_MEMBER = MVS_PDS + '(MEM)' - hosts.all.zos_data_set(name=MVS_PDS, state="present", type="pds", record_length=TEST_DATA_RECORD_LENGTH) + hosts.all.zos_data_set( + name=MVS_PDS, + state="present", + type="pds", + record_length=TEST_DATA_RECORD_LENGTH + ) hosts.all.zos_data_set( name=MVS_PDS_MEMBER, type="member", state="present" ) @@ -651,7 +674,6 @@ def test_uss_encoding_conversion_mvs_vsam_to_mvs_pds_member(ansible_zos_module): ) hosts.all.zos_data_set(name=MVS_PDS, state="absent") for result in results.contacted.values(): - print(result) assert result.get("src") == MVS_VS assert result.get("dest") == MVS_PDS_MEMBER assert result.get("backup_name") is None @@ -668,10 +690,10 @@ def test_uss_encoding_conversion_mvs_ps_to_mvs_vsam(ansible_zos_module): hosts.all.zos_data_set(name=MVS_PS, state="present", type="seq") hosts.all.file(path=TEMP_JCL_PATH, state="directory") hosts.all.shell( - cmd="echo {0} > {1}/SAMPLE".format(quote(KSDS_CREATE_JCL.format(MVS_VS)), TEMP_JCL_PATH) + cmd=f"echo {quote(KSDS_CREATE_JCL.format(MVS_VS))} > {TEMP_JCL_PATH}/SAMPLE" ) results = hosts.all.zos_job_submit( - src="{0}/SAMPLE".format(TEMP_JCL_PATH), location="uss", wait_time_s=30 + src=f"{TEMP_JCL_PATH}/SAMPLE", location="uss", wait_time_s=30 ) for result in results.contacted.values(): assert result.get("jobs") is not None @@ -705,8 +727,8 @@ def test_pds_backup(ansible_zos_module): hosts.all.zos_data_set(name=BACKUP_DATA_SET, state="absent") hosts.all.zos_data_set(name=MVS_PDS, state="absent") hosts.all.zos_data_set(name=MVS_PDS, state="present", type="pds") - hosts.all.shell(cmd="echo '{0}' > {1}".format(TEST_FILE_TEXT, TEMP_JCL_PATH)) - hosts.all.shell(cmd="cp {0} \"//'{1}(SAMPLE)'\"".format(TEMP_JCL_PATH, MVS_PDS)) + hosts.all.shell(cmd=f"echo '{TEST_FILE_TEXT}' > {TEMP_JCL_PATH}") + hosts.all.shell(cmd=f"cp {TEMP_JCL_PATH} \"//'{MVS_PDS}(SAMPLE)'\"") hosts.all.zos_encode( src=MVS_PDS, encoding={ @@ -716,7 +738,7 @@ def test_pds_backup(ansible_zos_module): backup=True, backup_name=BACKUP_DATA_SET, ) - contents = hosts.all.shell(cmd="cat \"//'{0}(SAMPLE)'\"".format(BACKUP_DATA_SET)) + contents = hosts.all.shell(cmd=f"cat \"//'{BACKUP_DATA_SET}(SAMPLE)'\"") for content in contents.contacted.values(): # pprint(content) assert TEST_FILE_TEXT in content.get("stdout") @@ -734,8 +756,8 @@ def test_pds_backup_with_tmp_hlq_option(ansible_zos_module): hosts.all.zos_data_set(name=BACKUP_DATA_SET, state="absent") hosts.all.zos_data_set(name=MVS_PDS, state="absent") hosts.all.zos_data_set(name=MVS_PDS, state="present", type="pds") - hosts.all.shell(cmd="echo '{0}' > {1}".format(TEST_FILE_TEXT, TEMP_JCL_PATH)) - hosts.all.shell(cmd="cp {0} \"//'{1}(SAMPLE)'\"".format(TEMP_JCL_PATH, MVS_PDS)) + hosts.all.shell(cmd=f"echo '{TEST_FILE_TEXT}' > {TEMP_JCL_PATH}") + hosts.all.shell(cmd=f"cp {TEMP_JCL_PATH} \"//'{MVS_PDS}(SAMPLE)'\"") encode_res = hosts.all.zos_encode( src=MVS_PDS, encoding={ @@ -767,8 +789,8 @@ def test_ps_backup(ansible_zos_module): hosts.all.zos_data_set(name=BACKUP_DATA_SET, state="absent") hosts.all.zos_data_set(name=MVS_PS, state="absent") hosts.all.zos_data_set(name=MVS_PS, state="present", type="seq") - hosts.all.shell(cmd="echo '{0}' > {1}".format(TEST_FILE_TEXT, TEMP_JCL_PATH)) - hosts.all.shell(cmd="cp {0} \"//'{1}'\"".format(TEMP_JCL_PATH, MVS_PS)) + hosts.all.shell(cmd=f"echo '{TEST_FILE_TEXT}' > {TEMP_JCL_PATH}") + hosts.all.shell(cmd=f"cp {TEMP_JCL_PATH} \"//'{MVS_PS}'\"") hosts.all.zos_encode( src=MVS_PS, encoding={ @@ -778,7 +800,7 @@ def test_ps_backup(ansible_zos_module): backup=True, backup_name=BACKUP_DATA_SET, ) - contents = hosts.all.shell(cmd="cat \"//'{0}'\"".format(BACKUP_DATA_SET)) + contents = hosts.all.shell(cmd=f"cat \"//'{BACKUP_DATA_SET}'\"") for content in contents.contacted.values(): assert TEST_FILE_TEXT in content.get("stdout") finally: @@ -800,21 +822,19 @@ def test_vsam_backup(ansible_zos_module): ) hosts.all.file(path=TEMP_JCL_PATH, state="directory") hosts.all.shell( - cmd="echo {0} > {1}/SAMPLE".format(quote(KSDS_CREATE_JCL.format(MVS_VS)), TEMP_JCL_PATH) + cmd=f"echo {quote(KSDS_CREATE_JCL.format(MVS_VS))} > {TEMP_JCL_PATH}/SAMPLE" ) hosts.all.zos_job_submit( - src="{0}/SAMPLE".format(TEMP_JCL_PATH), location="uss", wait_time_s=30 + src=f"{TEMP_JCL_PATH}/SAMPLE", location="uss", wait_time_s=30 ) hosts.all.file(path=TEMP_JCL_PATH, state="absent") # submit JCL to populate KSDS hosts.all.file(path=TEMP_JCL_PATH, state="directory") hosts.all.shell( - cmd="echo {0} > {1}/SAMPLE".format( - quote(KSDS_REPRO_JCL.format(MVS_VS.upper())), TEMP_JCL_PATH - ) + cmd=f"echo {quote(KSDS_REPRO_JCL.format(MVS_VS.upper()))} > {TEMP_JCL_PATH}/SAMPLE" ) hosts.all.zos_job_submit( - src="{0}/SAMPLE".format(TEMP_JCL_PATH), location="uss", wait_time_s=30 + src=f"{TEMP_JCL_PATH}/SAMPLE", location="uss", wait_time_s=30 ) hosts.all.zos_encode( @@ -860,21 +880,21 @@ def test_uss_backup_entire_folder_to_default_backup_location(ansible_zos_module) # create and fill PDS hosts.all.zos_data_set(name=MVS_PDS, state="absent") hosts.all.zos_data_set(name=MVS_PDS, state="present", type="pds") - hosts.all.shell(cmd="echo '{0}' > {1}".format(TEST_FILE_TEXT, TEMP_JCL_PATH)) - hosts.all.shell(cmd="cp {0} \"//'{1}(SAMPLE)'\"".format(TEMP_JCL_PATH, MVS_PDS)) - hosts.all.shell(cmd="cp {0} \"//'{1}(SAMPLE2)'\"".format(TEMP_JCL_PATH, MVS_PDS)) - hosts.all.shell(cmd="cp {0} \"//'{1}(SAMPLE3)'\"".format(TEMP_JCL_PATH, MVS_PDS)) + hosts.all.shell(cmd=f"echo '{TEST_FILE_TEXT}' > {TEMP_JCL_PATH}") + hosts.all.shell(cmd=f"cp {TEMP_JCL_PATH} \"//'{MVS_PDS}(SAMPLE)'\"") + hosts.all.shell(cmd=f"cp {TEMP_JCL_PATH} \"//'{MVS_PDS}(SAMPLE2)'\"") + hosts.all.shell(cmd=f"cp {TEMP_JCL_PATH} \"//'{MVS_PDS}(SAMPLE3)'\"") # create and fill directory hosts.all.file(path=TEMP_JCL_PATH + "2", state="absent") hosts.all.file(path=TEMP_JCL_PATH + "2", state="directory") hosts.all.shell( - cmd="echo '{0}' > {1}".format(TEST_FILE_TEXT, TEMP_JCL_PATH + "2/file1") + cmd=f"echo '{TEST_FILE_TEXT}' > {TEMP_JCL_PATH}2/file1" ) hosts.all.shell( - cmd="echo '{0}' > {1}".format(TEST_FILE_TEXT, TEMP_JCL_PATH + "2/file2") + cmd=f"echo '{TEST_FILE_TEXT}' > {TEMP_JCL_PATH}2/file2" ) hosts.all.shell( - cmd="echo '{0}' > {1}".format(TEST_FILE_TEXT, TEMP_JCL_PATH + "2/file3") + cmd=f"echo '{TEST_FILE_TEXT}' > {TEMP_JCL_PATH}2/file3" ) results = hosts.all.zos_encode( src=MVS_PDS, @@ -889,15 +909,15 @@ def test_uss_backup_entire_folder_to_default_backup_location(ansible_zos_module) for result in results.contacted.values(): backup_name = result.get("backup_name") assert backup_name - contents = hosts.all.shell(cmd="cat {0}".format(backup_name + "file1")) + contents = hosts.all.shell(cmd=f"cat {backup_name}file1") content1 = "" for content in contents.contacted.values(): content1 = content.get("stdout") - contents = hosts.all.shell(cmd="cat {0}".format(backup_name + "file2")) + contents = hosts.all.shell(cmd=f"cat {backup_name}file2") content2 = "" for content in contents.contacted.values(): content2 = content.get("stdout") - contents = hosts.all.shell(cmd="cat {0}".format(backup_name + "file3")) + contents = hosts.all.shell(cmd=f"cat {backup_name}file3") content3 = "" for content in contents.contacted.values(): content3 = content.get("stdout") @@ -925,21 +945,21 @@ def test_uss_backup_entire_folder_to_default_backup_location_compressed( # create and fill PDS hosts.all.zos_data_set(name=MVS_PDS, state="absent") hosts.all.zos_data_set(name=MVS_PDS, state="present", type="pds") - hosts.all.shell(cmd="echo '{0}' > {1}".format(TEST_FILE_TEXT, TEMP_JCL_PATH)) - hosts.all.shell(cmd="cp {0} \"//'{1}(SAMPLE)'\"".format(TEMP_JCL_PATH, MVS_PDS)) - hosts.all.shell(cmd="cp {0} \"//'{1}(SAMPLE2)'\"".format(TEMP_JCL_PATH, MVS_PDS)) - hosts.all.shell(cmd="cp {0} \"//'{1}(SAMPLE3)'\"".format(TEMP_JCL_PATH, MVS_PDS)) + hosts.all.shell(cmd=f"echo '{TEST_FILE_TEXT}' > {TEMP_JCL_PATH}") + hosts.all.shell(cmd=f"cp {TEMP_JCL_PATH} \"//'{MVS_PDS}(SAMPLE)'\"") + hosts.all.shell(cmd=f"cp {TEMP_JCL_PATH} \"//'{MVS_PDS}(SAMPLE2)'\"") + hosts.all.shell(cmd=f"cp {TEMP_JCL_PATH} \"//'{MVS_PDS}(SAMPLE3)'\"") # create and fill directory hosts.all.file(path=TEMP_JCL_PATH + "2", state="absent") hosts.all.file(path=TEMP_JCL_PATH + "2", state="directory") hosts.all.shell( - cmd="echo '{0}' > {1}".format(TEST_FILE_TEXT, TEMP_JCL_PATH + "2/file1") + cmd=f"echo '{TEST_FILE_TEXT}' > {TEMP_JCL_PATH}2/file1" ) hosts.all.shell( - cmd="echo '{0}' > {1}".format(TEST_FILE_TEXT, TEMP_JCL_PATH + "2/file2") + cmd=f"echo '{TEST_FILE_TEXT}' > {TEMP_JCL_PATH}2/file2" ) hosts.all.shell( - cmd="echo '{0}' > {1}".format(TEST_FILE_TEXT, TEMP_JCL_PATH + "2/file3") + cmd=f"echo '{TEST_FILE_TEXT}' > {TEMP_JCL_PATH}2/file3" ) results = hosts.all.zos_encode( src=MVS_PDS, @@ -955,7 +975,7 @@ def test_uss_backup_entire_folder_to_default_backup_location_compressed( for result in results.contacted.values(): backup_name = result.get("backup_name") - results = hosts.all.shell(cmd="ls -la {0}".format(backup_name[:-4] + "*")) + results = hosts.all.shell(cmd=f"ls -la {backup_name[:-4]}*") for result in results.contacted.values(): assert backup_name in result.get("stdout") finally: @@ -972,7 +992,7 @@ def test_return_backup_name_on_module_success_and_failure(ansible_zos_module): hosts.all.zos_data_set(name=MVS_PS, state="absent") hosts.all.zos_data_set(name=BACKUP_DATA_SET, state="absent") hosts.all.zos_data_set(name=MVS_PS, state="present", type="seq") - hosts.all.shell(cmd="decho \"{0}\" \"{1}\"".format(TEST_FILE_TEXT, MVS_PS)) + hosts.all.shell(cmd=f"decho \"{TEST_FILE_TEXT}\" \"{MVS_PS}\"") enc_ds = hosts.all.zos_encode( src=MVS_PS, encoding={ @@ -1004,4 +1024,4 @@ def test_return_backup_name_on_module_success_and_failure(ansible_zos_module): finally: hosts.all.file(path=TEMP_JCL_PATH, state="absent") hosts.all.zos_data_set(name=MVS_PS, state="absent") - hosts.all.zos_data_set(name=BACKUP_DATA_SET, state="absent") \ No newline at end of file + hosts.all.zos_data_set(name=BACKUP_DATA_SET, state="absent") From 5dd094aa8e2259c946de0bcc0e88a1afb63b4134 Mon Sep 17 00:00:00 2001 From: IsaacVRey <isaac.vega.rey@gmail.com> Date: Fri, 7 Jun 2024 13:46:14 -0600 Subject: [PATCH 407/495] [Enabler][test_zos_mvs_raw_func] Update test suites on functional/modules/test_zos_mvs_raw_func.py to be pylint correct (#1472) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Update test suites on functional/module-utils/test_zos_data_set_func.py to be pylint correct * Correct functional/modules/test_zos_data_set_func.py * Correct functional/modules/test_zos_data_set_func.py * Update test suites on functional/modules/test_zos_job_submit_func.py to be pylint correct * Remove accidental changes on functional/modules/test_zos_job_submit_func.py * Update test suites on functional/modules/test_zos_mvs_raw_func.py to be pylint correct * Update test_zos_mvs_raw_func.py * Fix output --------- Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> Co-authored-by: André Marcel Gutiérrez Benítez <amgutierrezbenitez@hotmail.com> --- .../modules/test_zos_mvs_raw_func.py | 1671 +++++++++-------- 1 file changed, 903 insertions(+), 768 deletions(-) diff --git a/tests/functional/modules/test_zos_mvs_raw_func.py b/tests/functional/modules/test_zos_mvs_raw_func.py index f1f901064..a7a5667f5 100644 --- a/tests/functional/modules/test_zos_mvs_raw_func.py +++ b/tests/functional/modules/test_zos_mvs_raw_func.py @@ -16,7 +16,6 @@ __metaclass__ = type import pytest -from pprint import pprint from ibm_zos_core.tests.helpers.volumes import Volume_Handler from ibm_zos_core.tests.helpers.dataset import get_tmp_ds_name @@ -24,11 +23,11 @@ DATASET = "" EXISTING_DATA_SET = "user.private.proclib" DEFAULT_PATH = "/tmp/testdir" -DEFAULT_PATH_WITH_FILE = "{0}/testfile".format(DEFAULT_PATH) +DEFAULT_PATH_WITH_FILE = f"{DEFAULT_PATH}/testfile" DEFAULT_DD = "MYDD" SYSIN_DD = "SYSIN" SYSPRINT_DD = "SYSPRINT" -IDCAMS_STDIN = " LISTCAT ENTRIES('{0}')".format(EXISTING_DATA_SET.upper()) +IDCAMS_STDIN = f" LISTCAT ENTRIES('{EXISTING_DATA_SET.upper()}')" IDCAMS_INVALID_STDIN = " hello world #$!@%!#$!@``~~^$*%" @@ -41,10 +40,14 @@ def test_failing_name_format(ansible_zos_module): hosts = ansible_zos_module results = hosts.all.zos_mvs_raw( program_name="idcams", - dds=[dict(dd_data_set=dict(dd_name=DEFAULT_DD, data_set_name="!!^&.BAD.NAME"))], + dds=[{ + "dd_data_set":{ + "dd_name":DEFAULT_DD, + "data_set_name":"!!^&.BAD.NAME" + } + }], ) for result in results.contacted.values(): - pprint(result) assert "ValueError" in result.get("msg") @@ -57,20 +60,26 @@ def test_disposition_new(ansible_zos_module): program_name="idcams", auth=True, dds=[ - dict( - dd_data_set=dict( - dd_name=SYSPRINT_DD, - data_set_name=default_data_set, - disposition="new", - type="seq", - return_content=dict(type="text"), - ), - ), - dict(dd_input=dict(dd_name=SYSIN_DD, content=IDCAMS_STDIN)), + { + "dd_data_set":{ + "dd_name":SYSPRINT_DD, + "data_set_name":default_data_set, + "disposition":"new", + "type":"seq", + "return_content":{ + "type":"text" + }, + }, + }, + { + "dd_input":{ + "dd_name":SYSIN_DD, + "content":IDCAMS_STDIN + } + }, ], ) for result in results.contacted.values(): - pprint(result) assert result.get("ret_code", {}).get("code", -1) == 0 assert len(result.get("dd_names", [])) > 0 finally: @@ -92,19 +101,25 @@ def test_dispositions_for_existing_data_set(ansible_zos_module, disposition): program_name="idcams", auth=True, dds=[ - dict( - dd_data_set=dict( - dd_name=SYSPRINT_DD, - data_set_name=default_data_set, - disposition=disposition, - return_content=dict(type="text"), - ), - ), - dict(dd_input=dict(dd_name=SYSIN_DD, content=IDCAMS_STDIN)), + { + "dd_data_set":{ + "dd_name":SYSPRINT_DD, + "data_set_name":default_data_set, + "disposition":disposition, + "return_content":{ + "type":"text" + }, + }, + }, + { + "dd_input":{ + "dd_name":SYSIN_DD, + "content":IDCAMS_STDIN + } + }, ], ) for result in results.contacted.values(): - pprint(result) assert result.get("ret_code", {}).get("code", -1) == 0 assert len(result.get("dd_names", [])) > 0 finally: @@ -125,34 +140,39 @@ def test_list_cat_for_existing_data_set_with_tmp_hlq_option(ansible_zos_module, auth=True, tmp_hlq=tmphlq, dds=[ - dict( - dd_data_set=dict( - dd_name=SYSPRINT_DD, - data_set_name=default_data_set, - disposition="new", - return_content=dict(type="text"), - replace=True, - backup=True, - type="seq", - space_primary=5, - space_secondary=1, - space_type="m", - volumes=default_volume, - record_format="fb" - ), - ), - dict(dd_input=dict(dd_name=SYSIN_DD, content=IDCAMS_STDIN)), + { + "dd_data_set":{ + "dd_name":SYSPRINT_DD, + "data_set_name":default_data_set, + "disposition":"new", + "return_content":{ + "type":"text" + }, + "replace":True, + "backup":True, + "type":"seq", + "space_primary":5, + "space_secondary":1, + "space_type":"m", + "volumes":default_volume, + "record_format":"fb" + }, + }, + { + "dd_input":{ + "dd_name":SYSIN_DD, + "content":IDCAMS_STDIN + } + }, ], ) for result in results.contacted.values(): - pprint(result) assert result.get("ret_code", {}).get("code", -1) == 0 assert len(result.get("dd_names", [])) > 0 for backup in result.get("backups"): backup.get("backup_name")[:6] == tmphlq results = hosts.all.zos_data_set(name=default_data_set, state="absent") for result in results.contacted.values(): - pprint(result) assert result.get("changed", False) is True @@ -161,27 +181,33 @@ def test_new_disposition_for_data_set_members(ansible_zos_module): try: hosts = ansible_zos_module default_data_set = get_tmp_ds_name() - DEFAULT_DATA_SET_WITH_MEMBER = default_data_set + '(MEM)' + default_data_set_with_member = default_data_set + '(MEM)' hosts.all.zos_data_set(name=default_data_set, state="absent") results = hosts.all.zos_mvs_raw( program_name="idcams", auth=True, dds=[ - dict( - dd_data_set=dict( - dd_name=SYSPRINT_DD, - data_set_name=DEFAULT_DATA_SET_WITH_MEMBER, - disposition="new", - type="pds", - directory_blocks=15, - return_content=dict(type="text"), - ), - ), - dict(dd_input=dict(dd_name=SYSIN_DD, content=IDCAMS_STDIN)), + { + "dd_data_set":{ + "dd_name":SYSPRINT_DD, + "data_set_name":default_data_set_with_member, + "disposition":"new", + "type":"pds", + "directory_blocks":15, + "return_content":{ + "type":"text" + }, + }, + }, + { + "dd_input":{ + "dd_name":SYSIN_DD, + "content":IDCAMS_STDIN + } + }, ], ) for result in results.contacted.values(): - pprint(result) assert result.get("ret_code", {}).get("code", -1) == 8 finally: hosts.all.zos_data_set(name=default_data_set, state="absent") @@ -195,7 +221,7 @@ def test_dispositions_for_existing_data_set_members(ansible_zos_module, disposit try: hosts = ansible_zos_module default_data_set = get_tmp_ds_name() - DEFAULT_DATA_SET_WITH_MEMBER = default_data_set + '(MEM)' + default_data_set_with_member = default_data_set + '(MEM)' hosts.all.zos_data_set( name=default_data_set, type="pds", state="present", replace=True ) @@ -203,19 +229,25 @@ def test_dispositions_for_existing_data_set_members(ansible_zos_module, disposit program_name="idcams", auth=True, dds=[ - dict( - dd_data_set=dict( - dd_name=SYSPRINT_DD, - data_set_name=DEFAULT_DATA_SET_WITH_MEMBER, - disposition=disposition, - return_content=dict(type="text"), - ), - ), - dict(dd_input=dict(dd_name=SYSIN_DD, content=IDCAMS_STDIN)), + { + "dd_data_set":{ + "dd_name":SYSPRINT_DD, + "data_set_name":default_data_set_with_member, + "disposition":disposition, + "return_content":{ + "type":"text" + }, + }, + }, + { + "dd_input":{ + "dd_name":SYSIN_DD, + "content":IDCAMS_STDIN + } + }, ], ) for result in results.contacted.values(): - pprint(result) assert result.get("ret_code", {}).get("code", -1) == 0 assert len(result.get("dd_names", [])) > 0 finally: @@ -226,7 +258,12 @@ def test_dispositions_for_existing_data_set_members(ansible_zos_module, disposit "normal_disposition,changed", [("keep", True), ("delete", True), ("catalog", True), ("uncatalog", True)], ) -def test_normal_dispositions_data_set(ansible_zos_module, normal_disposition, changed, volumes_on_systems): +def test_normal_dispositions_data_set( + ansible_zos_module, + normal_disposition, + changed, + volumes_on_systems +): try: hosts = ansible_zos_module volumes = Volume_Handler(volumes_on_systems) @@ -243,21 +280,27 @@ def test_normal_dispositions_data_set(ansible_zos_module, normal_disposition, ch program_name="idcams", auth=True, dds=[ - dict( - dd_data_set=dict( - dd_name=SYSPRINT_DD, - data_set_name=default_data_set, - disposition="shr", - disposition_normal=normal_disposition, - volumes=[volume_1], - return_content=dict(type="text"), - ), - ), - dict(dd_input=dict(dd_name=SYSIN_DD, content=IDCAMS_STDIN)), + { + "dd_data_set":{ + "dd_name":SYSPRINT_DD, + "data_set_name":default_data_set, + "disposition":"shr", + "disposition_normal":normal_disposition, + "volumes":[volume_1], + "return_content":{ + "type":"text" + }, + }, + }, + { + "dd_input":{ + "dd_name":SYSIN_DD, + "content":IDCAMS_STDIN + } + }, ], ) for result in results.contacted.values(): - pprint(result) assert result.get("ret_code", {}).get("code", -1) == 0 assert len(result.get("dd_names", [])) > 0 finally: @@ -283,31 +326,36 @@ def test_space_types(ansible_zos_module, space_type, primary, secondary, expecte program_name="idcams", auth=True, dds=[ - dict( - dd_data_set=dict( - dd_name=SYSPRINT_DD, - data_set_name=default_data_set, - disposition="new", - type="seq", - space_primary=primary, - space_secondary=secondary, - space_type=space_type, - return_content=dict(type="text"), - ), - ), - dict(dd_input=dict(dd_name=SYSIN_DD, content=IDCAMS_STDIN)), + { + "dd_data_set":{ + "dd_name":SYSPRINT_DD, + "data_set_name":default_data_set, + "disposition":"new", + "type":"seq", + "space_primary":primary, + "space_secondary":secondary, + "space_type":space_type, + "return_content":{ + "type":"text" + }, + }, + }, + { + "dd_input":{ + "dd_name":SYSIN_DD, + "content":IDCAMS_STDIN + } + }, ], ) - results2 = hosts.all.command(cmd="dls -l -s {0}".format(default_data_set)) + results2 = hosts.all.command(cmd=f"dls -l -s {default_data_set}") for result in results.contacted.values(): - pprint(result) assert result.get("ret_code", {}).get("code", -1) == 0 assert len(result.get("dd_names", [])) > 0 for result in results2.contacted.values(): - pprint(result) assert str(expected) in result.get("stdout", "") finally: hosts.all.zos_data_set(name=default_data_set, state="absent") @@ -328,22 +376,26 @@ def test_data_set_types_non_vsam(ansible_zos_module, data_set_type, volumes_on_s program_name="idcams", auth=True, dds=[ - dict( - dd_data_set=dict( - dd_name=SYSPRINT_DD, - data_set_name=default_data_set, - disposition="new", - type=data_set_type, - volumes=[volume_1], - ), - ), - dict(dd_input=dict(dd_name=SYSIN_DD, content=IDCAMS_STDIN)), + { + "dd_data_set":{ + "dd_name":SYSPRINT_DD, + "data_set_name":default_data_set, + "disposition":"new", + "type":data_set_type, + "volumes":[volume_1], + }, + }, + { + "dd_input":{ + "dd_name":SYSIN_DD, + "content":IDCAMS_STDIN + } + }, ], ) - results = hosts.all.command(cmd="dls {0}".format(default_data_set)) + results = hosts.all.command(cmd=f"dls {default_data_set}") for result in results.contacted.values(): - pprint(result) assert "BGYSC1103E" not in result.get("stderr", "") finally: hosts.all.zos_data_set(name=default_data_set, state="absent") @@ -365,33 +417,38 @@ def test_data_set_types_vsam(ansible_zos_module, data_set_type, volumes_on_syste auth=True, dds=[ # * ksds requires additional parameters - dict( - dd_data_set=dict( - dd_name=SYSPRINT_DD, - data_set_name=default_data_set, - disposition="new", - type=data_set_type, - volumes=[volume_1], - ), - ) + { + "dd_data_set":{ + "dd_name":SYSPRINT_DD, + "data_set_name":default_data_set, + "disposition":"new", + "type":data_set_type, + "volumes":[volume_1], + }, + } if data_set_type != "ksds" - else dict( - dd_data_set=dict( - dd_name=SYSPRINT_DD, - data_set_name=default_data_set, - disposition="new", - type=data_set_type, - key_length=5, - key_offset=0, - volumes=[volume_1], - ), - ), - dict(dd_input=dict(dd_name=SYSIN_DD, content=IDCAMS_STDIN)), + else { + "dd_data_set":{ + "dd_name":SYSPRINT_DD, + "data_set_name":default_data_set, + "disposition":"new", + "type":data_set_type, + "key_length":5, + "key_offset":0, + "volumes":[volume_1], + }, + }, + { + "dd_input":{ + "dd_name":SYSIN_DD, + "content":IDCAMS_STDIN + } + }, ], ) # * we hope to see EDC5041I An error was detected at the system level when opening a file. # * because that means data set exists and is VSAM so we can't read it - results = hosts.all.command(cmd="head \"//'{0}'\"".format(default_data_set)) + results = hosts.all.command(cmd=f"head \"//'{default_data_set}'\"") for result in results.contacted.values(): assert "EDC5041I" or "EDC5049I" in result.get("stderr", "") finally: @@ -413,24 +470,28 @@ def test_record_formats(ansible_zos_module, record_format, volumes_on_systems): program_name="idcams", auth=True, dds=[ - dict( - dd_data_set=dict( - dd_name=SYSPRINT_DD, - data_set_name=default_data_set, - disposition="new", - record_format=record_format, - volumes=[volume_1], - ), - ), - dict(dd_input=dict(dd_name=SYSIN_DD, content=IDCAMS_STDIN)), + { + "dd_data_set":{ + "dd_name":SYSPRINT_DD, + "data_set_name":default_data_set, + "disposition":"new", + "record_format":record_format, + "volumes":[volume_1], + }, + }, + { + "dd_input":{ + "dd_name":SYSIN_DD, + "content":IDCAMS_STDIN + } + }, ], ) - results = hosts.all.command(cmd="dls -l {0}".format(default_data_set)) + results = hosts.all.command(cmd=f"dls -l {default_data_set}") for result in results.contacted.values(): - pprint(result) - assert str(" {0} ".format(record_format.upper())) in result.get("stdout", "") + assert str(f" {record_format.upper()} ") in result.get("stdout", "") finally: hosts.all.zos_data_set(name=default_data_set, state="absent") @@ -462,21 +523,27 @@ def test_return_content_type(ansible_zos_module, return_content_type, expected, program_name="idcams", auth=True, dds=[ - dict( - dd_data_set=dict( - dd_name=SYSPRINT_DD, - data_set_name=default_data_set, - disposition="shr", - volumes=[volume_1], - return_content=dict(type=return_content_type), - ), - ), - dict(dd_input=dict(dd_name=SYSIN_DD, content=IDCAMS_STDIN)), + { + "dd_data_set":{ + "dd_name":SYSPRINT_DD, + "data_set_name":default_data_set, + "disposition":"shr", + "volumes":[volume_1], + "return_content":{ + "type":return_content_type + }, + }, + }, + { + "dd_input":{ + "dd_name":SYSIN_DD, + "content":IDCAMS_STDIN + } + }, ], ) for result in results.contacted.values(): - pprint(result) assert result.get("ret_code", {}).get("code", -1) == 0 assert len(result.get("dd_names", [])) > 0 assert expected in "\n".join(result.get("dd_names")[0].get("content", [])) @@ -514,24 +581,28 @@ def test_return_text_content_encodings( program_name="idcams", auth=True, dds=[ - dict( - dd_data_set=dict( - dd_name=SYSPRINT_DD, - data_set_name=default_data_set, - disposition="shr", - volumes=[volume_1], - return_content=dict( - type="text", - src_encoding=src_encoding, - response_encoding=response_encoding, - ), - ), - ), - dict(dd_input=dict(dd_name=SYSIN_DD, content=IDCAMS_STDIN)), + { + "dd_data_set":{ + "dd_name":SYSPRINT_DD, + "data_set_name":default_data_set, + "disposition":"shr", + "volumes":[volume_1], + "return_content":{ + "type":"text", + "src_encoding":src_encoding, + "response_encoding":response_encoding, + }, + }, + }, + { + "dd_input":{ + "dd_name":SYSIN_DD, + "content":IDCAMS_STDIN + } + }, ], ) for result in results.contacted.values(): - pprint(result) assert result.get("ret_code", {}).get("code", -1) == 0 assert len(result.get("dd_names", [])) > 0 assert expected in "\n".join(result.get("dd_names")[0].get("content", [])) @@ -550,22 +621,28 @@ def test_reuse_existing_data_set(ansible_zos_module): program_name="IDCAMS", auth=True, dds=[ - dict( - dd_data_set=dict( - dd_name=SYSPRINT_DD, - data_set_name=default_data_set, - disposition="new", - type="seq", - reuse=True, - return_content=dict(type="text"), - ), - ), - dict(dd_input=dict(dd_name=SYSIN_DD, content=IDCAMS_STDIN)), + { + "dd_data_set":{ + "dd_name":SYSPRINT_DD, + "data_set_name":default_data_set, + "disposition":"new", + "type":"seq", + "reuse":True, + "return_content":{ + "type":"text" + }, + }, + }, + { + "dd_input":{ + "dd_name":SYSIN_DD, + "content":IDCAMS_STDIN + } + }, ], ) for result in results.contacted.values(): - pprint(result) assert result.get("ret_code", {}).get("code", 0) == 0 assert len(result.get("dd_names", [])) > 0 finally: @@ -583,22 +660,28 @@ def test_replace_existing_data_set(ansible_zos_module): program_name="IDCAMS", auth=True, dds=[ - dict( - dd_data_set=dict( - dd_name=SYSPRINT_DD, - data_set_name=default_data_set, - disposition="new", - type="seq", - replace=True, - return_content=dict(type="text"), - ), - ), - dict(dd_input=dict(dd_name=SYSIN_DD, content=IDCAMS_STDIN)), + { + "dd_data_set":{ + "dd_name":SYSPRINT_DD, + "data_set_name":default_data_set, + "disposition":"new", + "type":"seq", + "replace":True, + "return_content":{ + "type":"text" + }, + }, + }, + { + "dd_input":{ + "dd_name":SYSIN_DD, + "content":IDCAMS_STDIN + } + }, ], ) for result in results.contacted.values(): - pprint(result) assert result.get("ret_code", {}).get("code", 0) == 0 assert len(result.get("dd_names", [])) > 0 finally: @@ -614,39 +697,52 @@ def test_replace_existing_data_set_make_backup(ansible_zos_module): program_name="IDCAMS", auth=True, dds=[ - dict( - dd_data_set=dict( - dd_name=SYSPRINT_DD, - data_set_name=default_data_set, - disposition="new", - type="seq", - replace=True, - return_content=dict(type="text"), - ), - ), - dict(dd_input=dict(dd_name=SYSIN_DD, content=IDCAMS_STDIN)), + { + "dd_data_set":{ + "dd_name":SYSPRINT_DD, + "data_set_name":default_data_set, + "disposition":"new", + "type":"seq", + "replace":True, + "return_content":{ + "type":"text" + }, + }, + }, + { + "dd_input":{ + "dd_name":SYSIN_DD, + "content":IDCAMS_STDIN + } + }, ], ) results = hosts.all.zos_mvs_raw( program_name="IDCAMS", auth=True, dds=[ - dict( - dd_data_set=dict( - dd_name=SYSPRINT_DD, - data_set_name=default_data_set, - disposition="new", - type="seq", - replace=True, - backup=True, - return_content=dict(type="text"), - ), - ), - dict(dd_input=dict(dd_name=SYSIN_DD, content=IDCAMS_STDIN)), + { + "dd_data_set":{ + "dd_name":SYSPRINT_DD, + "data_set_name":default_data_set, + "disposition":"new", + "type":"seq", + "replace":True, + "backup":True, + "return_content":{ + "type":"text" + }, + }, + }, + { + "dd_input":{ + "dd_name":SYSIN_DD, + "content":IDCAMS_STDIN + } + }, ], ) for result in results.contacted.values(): - pprint(result) assert result.get("ret_code", {}).get("code", 0) == 0 assert len(result.get("dd_names", [])) > 0 assert len(result.get("backups", [])) > 0 @@ -662,7 +758,6 @@ def test_replace_existing_data_set_make_backup(ansible_zos_module): == default_data_set.lower() ) for result in results2.contacted.values(): - pprint(result) assert "IDCAMS" in result.get("stdout", "") finally: hosts.all.zos_data_set(name=default_data_set, state="absent") @@ -789,20 +884,26 @@ def test_input_empty(ansible_zos_module): program_name="idcams", auth=True, dds=[ - dict( - dd_data_set=dict( - dd_name=SYSPRINT_DD, - data_set_name=default_data_set, - disposition="new", - type="seq", - return_content=dict(type="text"), - ), - ), - dict(dd_input=dict(dd_name=SYSIN_DD, content="")), + { + "dd_data_set":{ + "dd_name":SYSPRINT_DD, + "data_set_name":default_data_set, + "disposition":"new", + "type":"seq", + "return_content":{ + "type":"text" + }, + }, + }, + { + "dd_input":{ + "dd_name":SYSIN_DD, + "content":"" + } + }, ], ) for result in results.contacted.values(): - pprint(result) assert result.get("ret_code", {}).get("code", -1) == 0 assert len(result.get("dd_names", [])) > 0 finally: @@ -816,25 +917,31 @@ def test_input_large(ansible_zos_module): hosts.all.zos_data_set(name=default_data_set, state="absent") contents = "" for i in range(50000): - contents += "this is line {0}\n".format(i) + contents += f"this is line {i}\n" results = hosts.all.zos_mvs_raw( program_name="idcams", auth=True, dds=[ - dict( - dd_data_set=dict( - dd_name=SYSPRINT_DD, - data_set_name=default_data_set, - disposition="new", - type="seq", - return_content=dict(type="text"), - ), - ), - dict(dd_input=dict(dd_name=SYSIN_DD, content=contents)), + { + "dd_data_set":{ + "dd_name":SYSPRINT_DD, + "data_set_name":default_data_set, + "disposition":"new", + "type":"seq", + "return_content":{ + "type":"text" + }, + }, + }, + { + "dd_input":{ + "dd_name":SYSIN_DD, + "content":contents + } + }, ], ) for result in results.contacted.values(): - # pprint(result) assert result.get("ret_code", {}).get("code", -1) == 12 assert len(result.get("dd_names", [])) > 0 assert len(result.get("dd_names", [{}])[0].get("content")) > 100000 @@ -854,20 +961,26 @@ def test_input_provided_as_list(ansible_zos_module): program_name="idcams", auth=True, dds=[ - dict( - dd_data_set=dict( - dd_name=SYSPRINT_DD, - data_set_name=default_data_set, - disposition="new", - type="seq", - return_content=dict(type="text"), - ), - ), - dict(dd_input=dict(dd_name=SYSIN_DD, content=contents)), + { + "dd_data_set":{ + "dd_name":SYSPRINT_DD, + "data_set_name":default_data_set, + "disposition":"new", + "type":"seq", + "return_content":{ + "type":"text" + }, + }, + }, + { + "dd_input":{ + "dd_name":SYSIN_DD, + "content":contents + } + }, ], ) for result in results.contacted.values(): - # pprint(result) assert result.get("ret_code", {}).get("code", -1) == 0 assert len(result.get("dd_names", [])) > 0 assert len(result.get("dd_names", [{}])[0].get("content")) > 100 @@ -894,25 +1007,26 @@ def test_input_return_content_types(ansible_zos_module, return_content_type, exp program_name="idcams", auth=True, dds=[ - dict( - dd_data_set=dict( - dd_name=SYSPRINT_DD, - data_set_name=default_data_set, - disposition="new", - type="seq", - ), - ), - dict( - dd_input=dict( - dd_name=SYSIN_DD, - content=IDCAMS_STDIN, - return_content=dict(type=return_content_type), - ) - ), + { + "dd_data_set":{ + "dd_name":SYSPRINT_DD, + "data_set_name":default_data_set, + "disposition":"new", + "type":"seq", + }, + }, + { + "dd_input":{ + "dd_name":SYSIN_DD, + "content":IDCAMS_STDIN, + "return_content":{ + "type":return_content_type + }, + } + }, ], ) for result in results.contacted.values(): - pprint(result) assert result.get("ret_code", {}).get("code", -1) == 0 assert len(result.get("dd_names", [])) > 0 assert expected in "\n".join(result.get("dd_names", [{}])[0].get("content")) @@ -946,29 +1060,28 @@ def test_input_return_text_content_encodings( program_name="idcams", auth=True, dds=[ - dict( - dd_data_set=dict( - dd_name=SYSPRINT_DD, - data_set_name=default_data_set, - disposition="new", - type="seq", - ), - ), - dict( - dd_input=dict( - dd_name=SYSIN_DD, - content=IDCAMS_STDIN, - return_content=dict( - type="text", - src_encoding=src_encoding, - response_encoding=response_encoding, - ), - ) - ), + { + "dd_data_set":{ + "dd_name":SYSPRINT_DD, + "data_set_name":default_data_set, + "disposition":"new", + "type":"seq", + }, + }, + { + "dd_input":{ + "dd_name":SYSIN_DD, + "content":IDCAMS_STDIN, + "return_content":{ + "type":"text", + "src_encoding":src_encoding, + "response_encoding":response_encoding, + }, + } + }, ], ) for result in results.contacted.values(): - pprint(result) assert result.get("ret_code", {}).get("code", -1) == 0 assert len(result.get("dd_names", [])) > 0 assert expected in "\n".join(result.get("dd_names", [{}])[0].get("content")) @@ -987,22 +1100,21 @@ def test_failing_path_name(ansible_zos_module): program_name="idcams", auth=True, dds=[ - dict( - dd_unix=dict( - dd_name=SYSPRINT_DD, - path="1dfa3f4rafwer/f2rfsd", - ), - ), - dict( - dd_input=dict( - dd_name=SYSIN_DD, - content=IDCAMS_STDIN, - ) - ), + { + "dd_unix":{ + "dd_name":SYSPRINT_DD, + "path":"1dfa3f4rafwer/f2rfsd", + }, + }, + { + "dd_input":{ + "dd_name":SYSIN_DD, + "content":IDCAMS_STDIN, + } + }, ], ) for result in results.contacted.values(): - pprint(result) assert result.get("ret_code", {}).get("code", -1) == 8 assert "ValueError" in result.get("msg", "") @@ -1016,26 +1128,24 @@ def test_create_new_file(ansible_zos_module): program_name="idcams", auth=True, dds=[ - dict( - dd_unix=dict( - dd_name=SYSPRINT_DD, - path=DEFAULT_PATH_WITH_FILE, - ), - ), - dict( - dd_input=dict( - dd_name=SYSIN_DD, - content=IDCAMS_STDIN, - ) - ), + { + "dd_unix":{ + "dd_name":SYSPRINT_DD, + "path":DEFAULT_PATH_WITH_FILE, + }, + }, + { + "dd_input":{ + "dd_name":SYSIN_DD, + "content":IDCAMS_STDIN, + } + }, ], ) - results2 = hosts.all.command(cmd="cat {0}".format(DEFAULT_PATH_WITH_FILE)) + results2 = hosts.all.command(cmd=f"cat {DEFAULT_PATH_WITH_FILE}") for result in results.contacted.values(): - pprint(result) assert result.get("ret_code", {}).get("code", -1) == 0 for result in results2.contacted.values(): - pprint(result) assert "IDCAMS SYSTEM" in result.get("stdout", "") finally: hosts.all.file(path=DEFAULT_PATH, state="absent") @@ -1050,26 +1160,24 @@ def test_write_to_existing_file(ansible_zos_module): program_name="idcams", auth=True, dds=[ - dict( - dd_unix=dict( - dd_name=SYSPRINT_DD, - path=DEFAULT_PATH_WITH_FILE, - ), - ), - dict( - dd_input=dict( - dd_name=SYSIN_DD, - content=IDCAMS_STDIN, - ) - ), + { + "dd_unix":{ + "dd_name":SYSPRINT_DD, + "path":DEFAULT_PATH_WITH_FILE, + }, + }, + { + "dd_input":{ + "dd_name":SYSIN_DD, + "content":IDCAMS_STDIN, + } + }, ], ) - results2 = hosts.all.command(cmd="cat {0}".format(DEFAULT_PATH_WITH_FILE)) + results2 = hosts.all.command(cmd=f"cat {DEFAULT_PATH_WITH_FILE}") for result in results.contacted.values(): - pprint(result) assert result.get("ret_code", {}).get("code", -1) == 0 for result in results2.contacted.values(): - pprint(result) assert "IDCAMS SYSTEM" in result.get("stdout", "") finally: hosts.all.file(path=DEFAULT_PATH, state="absent") @@ -1087,27 +1195,25 @@ def test_file_normal_disposition(ansible_zos_module, normal_disposition, expecte program_name="idcams", auth=True, dds=[ - dict( - dd_unix=dict( - dd_name=SYSPRINT_DD, - path=DEFAULT_PATH_WITH_FILE, - disposition_normal=normal_disposition, - ), - ), - dict( - dd_input=dict( - dd_name=SYSIN_DD, - content=IDCAMS_STDIN, - ) - ), + { + "dd_unix":{ + "dd_name":SYSPRINT_DD, + "path":DEFAULT_PATH_WITH_FILE, + "disposition_normal":normal_disposition, + }, + }, + { + "dd_input":{ + "dd_name":SYSIN_DD, + "content":IDCAMS_STDIN, + } + }, ], ) results2 = hosts.all.stat(path=DEFAULT_PATH_WITH_FILE) for result in results.contacted.values(): - pprint(result) assert result.get("ret_code", {}).get("code", -1) == 0 for result in results2.contacted.values(): - pprint(result) assert result.get("stat", {}).get("exists", not expected) is expected finally: hosts.all.file(path=DEFAULT_PATH, state="absent") @@ -1122,27 +1228,25 @@ def test_file_modes(ansible_zos_module, mode, expected): program_name="idcams", auth=True, dds=[ - dict( - dd_unix=dict( - dd_name=SYSPRINT_DD, - path=DEFAULT_PATH_WITH_FILE, - mode=mode, - ), - ), - dict( - dd_input=dict( - dd_name=SYSIN_DD, - content=IDCAMS_STDIN, - ) - ), + { + "dd_unix":{ + "dd_name":SYSPRINT_DD, + "path":DEFAULT_PATH_WITH_FILE, + "mode":mode, + }, + }, + { + "dd_input":{ + "dd_name":SYSIN_DD, + "content":IDCAMS_STDIN, + } + }, ], ) results2 = hosts.all.stat(path=DEFAULT_PATH_WITH_FILE) for result in results.contacted.values(): - pprint(result) assert result.get("ret_code", {}).get("code", -1) == 0 for result in results2.contacted.values(): - pprint(result) assert result.get("stat", {}).get("mode", "") == expected finally: hosts.all.file(path=DEFAULT_PATH, state="absent") @@ -1164,28 +1268,26 @@ def test_file_path_options(ansible_zos_module, access_group, status_group): program_name="idcams", auth=True, dds=[ - dict( - dd_unix=dict( - dd_name=SYSPRINT_DD, - path=DEFAULT_PATH_WITH_FILE, - access_group=access_group, - status_group=status_group, - ), - ), - dict( - dd_input=dict( - dd_name=SYSIN_DD, - content=IDCAMS_STDIN, - ) - ), + { + "dd_unix":{ + "dd_name":SYSPRINT_DD, + "path":DEFAULT_PATH_WITH_FILE, + "access_group":access_group, + "status_group":status_group, + }, + }, + { + "dd_input":{ + "dd_name":SYSIN_DD, + "content":IDCAMS_STDIN, + } + }, ], ) - results2 = hosts.all.command(cmd="cat {0}".format(DEFAULT_PATH_WITH_FILE)) + results2 = hosts.all.command(cmd=f"cat {DEFAULT_PATH_WITH_FILE}") for result in results.contacted.values(): - pprint(result) assert result.get("ret_code", {}).get("code", -1) == 0 for result in results2.contacted.values(): - pprint(result) assert "IDCAMS SYSTEM" in result.get("stdout", "") finally: hosts.all.file(path=DEFAULT_PATH, state="absent") @@ -1204,27 +1306,25 @@ def test_file_block_size(ansible_zos_module, block_size): program_name="idcams", auth=True, dds=[ - dict( - dd_unix=dict( - dd_name=SYSPRINT_DD, - path=DEFAULT_PATH_WITH_FILE, - block_size=block_size, - ), - ), - dict( - dd_input=dict( - dd_name=SYSIN_DD, - content=IDCAMS_STDIN, - ) - ), + { + "dd_unix":{ + "dd_name":SYSPRINT_DD, + "path":DEFAULT_PATH_WITH_FILE, + "block_size":block_size, + }, + }, + { + "dd_input":{ + "dd_name":SYSIN_DD, + "content":IDCAMS_STDIN, + } + }, ], ) - results2 = hosts.all.command(cmd="cat {0}".format(DEFAULT_PATH_WITH_FILE)) + results2 = hosts.all.command(cmd=f"cat {DEFAULT_PATH_WITH_FILE}") for result in results.contacted.values(): - pprint(result) assert result.get("ret_code", {}).get("code", -1) == 0 for result in results2.contacted.values(): - pprint(result) assert "IDCAMS SYSTEM" in result.get("stdout", "") finally: hosts.all.file(path=DEFAULT_PATH, state="absent") @@ -1243,27 +1343,25 @@ def test_file_record_length(ansible_zos_module, record_length): program_name="idcams", auth=True, dds=[ - dict( - dd_unix=dict( - dd_name=SYSPRINT_DD, - path=DEFAULT_PATH_WITH_FILE, - record_length=record_length, - ), - ), - dict( - dd_input=dict( - dd_name=SYSIN_DD, - content=IDCAMS_STDIN, - ) - ), + { + "dd_unix":{ + "dd_name":SYSPRINT_DD, + "path":DEFAULT_PATH_WITH_FILE, + "record_length":record_length, + }, + }, + { + "dd_input":{ + "dd_name":SYSIN_DD, + "content":IDCAMS_STDIN, + } + }, ], ) - results2 = hosts.all.command(cmd="cat {0}".format(DEFAULT_PATH_WITH_FILE)) + results2 = hosts.all.command(cmd=f"cat {DEFAULT_PATH_WITH_FILE}") for result in results.contacted.values(): - pprint(result) assert result.get("ret_code", {}).get("code", -1) == 0 for result in results2.contacted.values(): - pprint(result) assert "IDCAMS SYSTEM" in result.get("stdout", "") finally: hosts.all.file(path=DEFAULT_PATH, state="absent") @@ -1282,27 +1380,25 @@ def test_file_record_format(ansible_zos_module, record_format): program_name="idcams", auth=True, dds=[ - dict( - dd_unix=dict( - dd_name=SYSPRINT_DD, - path=DEFAULT_PATH_WITH_FILE, - record_format=record_format, - ), - ), - dict( - dd_input=dict( - dd_name=SYSIN_DD, - content=IDCAMS_STDIN, - ) - ), + { + "dd_unix":{ + "dd_name":SYSPRINT_DD, + "path":DEFAULT_PATH_WITH_FILE, + "record_format":record_format, + }, + }, + { + "dd_input":{ + "dd_name":SYSIN_DD, + "content":IDCAMS_STDIN, + } + }, ], ) - results2 = hosts.all.command(cmd="cat {0}".format(DEFAULT_PATH_WITH_FILE)) + results2 = hosts.all.command(cmd=f"cat {DEFAULT_PATH_WITH_FILE}") for result in results.contacted.values(): - pprint(result) assert result.get("ret_code", {}).get("code", -1) == 0 for result in results2.contacted.values(): - pprint(result) assert "IDCAMS SYSTEM" in result.get("stdout", "") finally: hosts.all.file(path=DEFAULT_PATH, state="absent") @@ -1327,23 +1423,24 @@ def test_file_return_content(ansible_zos_module, return_content_type, expected): program_name="idcams", auth=True, dds=[ - dict( - dd_unix=dict( - dd_name=SYSPRINT_DD, - path=DEFAULT_PATH_WITH_FILE, - return_content=dict(type=return_content_type), - ), - ), - dict( - dd_input=dict( - dd_name=SYSIN_DD, - content=IDCAMS_STDIN, - ) - ), + { + "dd_unix":{ + "dd_name":SYSPRINT_DD, + "path":DEFAULT_PATH_WITH_FILE, + "return_content":{ + "type":return_content_type + }, + }, + }, + { + "dd_input":{ + "dd_name":SYSIN_DD, + "content":IDCAMS_STDIN, + } + }, ], ) for result in results.contacted.values(): - pprint(result) assert result.get("ret_code", {}).get("code", -1) == 0 assert len(result.get("dd_names", [])) > 0 assert expected in "\n".join(result.get("dd_names")[0].get("content", [])) @@ -1373,27 +1470,26 @@ def test_file_return_text_content_encodings( program_name="idcams", auth=True, dds=[ - dict( - dd_unix=dict( - dd_name=SYSPRINT_DD, - path=DEFAULT_PATH_WITH_FILE, - return_content=dict( - type="text", - src_encoding=src_encoding, - response_encoding=response_encoding, - ), - ), - ), - dict( - dd_input=dict( - dd_name=SYSIN_DD, - content=IDCAMS_STDIN, - ) - ), + { + "dd_unix":{ + "dd_name":SYSPRINT_DD, + "path":DEFAULT_PATH_WITH_FILE, + "return_content":{ + "type":"text", + "src_encoding":src_encoding, + "response_encoding":response_encoding, + }, + }, + }, + { + "dd_input":{ + "dd_name":SYSIN_DD, + "content":IDCAMS_STDIN, + } + }, ], ) for result in results.contacted.values(): - pprint(result) assert result.get("ret_code", {}).get("code", -1) == 0 assert len(result.get("dd_names", [])) > 0 assert expected in "\n".join(result.get("dd_names")[0].get("content", [])) @@ -1415,21 +1511,20 @@ def test_dummy(ansible_zos_module): program_name="idcams", auth=True, dds=[ - dict( - dd_dummy=dict( - dd_name=SYSPRINT_DD, - ), - ), - dict( - dd_input=dict( - dd_name=SYSIN_DD, - content=IDCAMS_STDIN, - ) - ), + { + "dd_dummy":{ + "dd_name":SYSPRINT_DD, + }, + }, + { + "dd_input":{ + "dd_name":SYSIN_DD, + "content":IDCAMS_STDIN, + } + }, ], ) for result in results.contacted.values(): - pprint(result) assert result.get("ret_code", {}).get("code", -1) == 0 assert len(result.get("dd_names", [])) == 0 finally: @@ -1445,102 +1540,104 @@ def test_concatenation_with_data_set_dd_and_response(ansible_zos_module): try: hosts = ansible_zos_module default_data_set = get_tmp_ds_name() - DEFAULT_DATA_SET_2 = get_tmp_ds_name() + default_data_set_2 = get_tmp_ds_name() hosts.all.zos_data_set(name=default_data_set, state="absent") - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_2, state="absent") + hosts.all.zos_data_set(name=default_data_set_2, state="absent") results = hosts.all.zos_mvs_raw( program_name="idcams", auth=True, dds=[ - dict( - dd_concat=dict( - dd_name=SYSPRINT_DD, - dds=[ - dict( - dd_data_set=dict( - data_set_name=default_data_set, - disposition="new", - type="seq", - return_content=dict(type="text"), - ) - ), - dict( - dd_data_set=dict( - data_set_name=DEFAULT_DATA_SET_2, - disposition="new", - type="seq", - ) - ), + { + "dd_concat":{ + "dd_name":SYSPRINT_DD, + "dds":[ + { + "dd_data_set":{ + "data_set_name":default_data_set, + "disposition":"new", + "type":"seq", + "return_content":{ + "type":"text" + }, + } + }, + { + "dd_data_set":{ + "data_set_name":default_data_set_2, + "disposition":"new", + "type":"seq", + } + }, ], - ), - ), - dict( - dd_input=dict( - dd_name=SYSIN_DD, - content=IDCAMS_STDIN, - ) - ), + }, + }, + { + "dd_input":{ + "dd_name":SYSIN_DD, + "content":IDCAMS_STDIN, + } + }, ], ) for result in results.contacted.values(): - pprint(result) assert result.get("ret_code", {}).get("code", -1) == 0 assert len(result.get("dd_names", [])) > 0 assert "IDCAMS" in "\n".join(result.get("dd_names")[0].get("content", [])) finally: hosts.all.zos_data_set(name=default_data_set, state="absent") - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_2, state="absent") + hosts.all.zos_data_set(name=default_data_set_2, state="absent") def test_concatenation_with_data_set_dd_with_replace_and_backup(ansible_zos_module): try: hosts = ansible_zos_module default_data_set = get_tmp_ds_name() - DEFAULT_DATA_SET_2 = get_tmp_ds_name() + default_data_set_2 = get_tmp_ds_name() hosts.all.zos_data_set(name=default_data_set, state="present", type="seq") - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_2, state="present", type="seq") + hosts.all.zos_data_set(name=default_data_set_2, state="present", type="seq") results = hosts.all.zos_mvs_raw( program_name="idcams", auth=True, dds=[ - dict( - dd_concat=dict( - dd_name=SYSPRINT_DD, - dds=[ - dict( - dd_data_set=dict( - data_set_name=default_data_set, - disposition="new", - type="seq", - replace=True, - backup=True, - return_content=dict(type="text"), - ) - ), - dict( - dd_data_set=dict( - data_set_name=DEFAULT_DATA_SET_2, - disposition="new", - type="seq", - replace=True, - backup=True, - ) - ), + { + "dd_concat":{ + "dd_name":SYSPRINT_DD, + "dds":[ + { + "dd_data_set":{ + "data_set_name":default_data_set, + "disposition":"new", + "type":"seq", + "replace":True, + "backup":True, + "return_content":{ + "type":"text" + }, + } + }, + { + "dd_data_set":{ + "data_set_name":default_data_set_2, + "disposition":"new", + "type":"seq", + "replace":True, + "backup":True, + } + }, ], - ), - ), - dict( - dd_input=dict( - dd_name=SYSIN_DD, - content=IDCAMS_STDIN, - ) - ), + }, + }, + { + "dd_input":{ + "dd_name":SYSIN_DD, + "content":IDCAMS_STDIN, + } + }, ], ) for result in results.contacted.values(): - pprint(result) hosts.all.zos_data_set( name=result.get("backups")[0].get("backup_name"), state="absent" ) @@ -1553,121 +1650,122 @@ def test_concatenation_with_data_set_dd_with_replace_and_backup(ansible_zos_modu ) assert ( result.get("backups")[1].get("original_name").lower() - == DEFAULT_DATA_SET_2.lower() + == default_data_set_2.lower() ) assert result.get("ret_code", {}).get("code", -1) == 0 assert len(result.get("dd_names", [])) > 0 assert "IDCAMS" in "\n".join(result.get("dd_names")[0].get("content", [])) finally: hosts.all.zos_data_set(name=default_data_set, state="absent") - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_2, state="absent") + hosts.all.zos_data_set(name=default_data_set_2, state="absent") def test_concatenation_with_data_set_member(ansible_zos_module): try: hosts = ansible_zos_module default_data_set = get_tmp_ds_name() - DEFAULT_DATA_SET_2 = get_tmp_ds_name() - DEFAULT_DATA_SET_WITH_MEMBER = default_data_set + '(MEM)' + default_data_set_2 = get_tmp_ds_name() + default_data_set_with_member = default_data_set + '(MEM)' hosts.all.zos_data_set(name=default_data_set, state="present", type="pds") - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_2, state="absent") + hosts.all.zos_data_set(name=default_data_set_2, state="absent") results = hosts.all.zos_mvs_raw( program_name="idcams", auth=True, dds=[ - dict( - dd_concat=dict( - dd_name=SYSPRINT_DD, - dds=[ - dict( - dd_data_set=dict( - data_set_name=DEFAULT_DATA_SET_WITH_MEMBER, - return_content=dict(type="text"), - ) - ), - dict( - dd_data_set=dict( - data_set_name=DEFAULT_DATA_SET_2, - disposition="new", - type="seq", - ) - ), + { + "dd_concat":{ + "dd_name":SYSPRINT_DD, + "dds":[ + { + "dd_data_set":{ + "data_set_name":default_data_set_with_member, + "return_content":{ + "type":"text" + }, + } + }, + { + "dd_data_set":{ + "data_set_name":default_data_set_2, + "disposition":"new", + "type":"seq", + } + }, ], - ), - ), - dict( - dd_input=dict( - dd_name=SYSIN_DD, - content=IDCAMS_STDIN, - ) - ), + }, + }, + { + "dd_input":{ + "dd_name":SYSIN_DD, + "content":IDCAMS_STDIN, + } + }, ], ) results2 = hosts.all.shell( - cmd="cat \"//'{0}'\"".format(DEFAULT_DATA_SET_WITH_MEMBER) + cmd=f"cat \"//'{default_data_set_with_member}'\"" ) for result in results.contacted.values(): - pprint(result) assert result.get("ret_code", {}).get("code", -1) == 0 assert len(result.get("dd_names", [])) > 0 assert "IDCAMS" in "\n".join(result.get("dd_names")[0].get("content", [])) for result in results2.contacted.values(): - pprint(result) assert "IDCAMS" in result.get("stdout", "") finally: hosts.all.zos_data_set(name=default_data_set, state="absent") - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_2, state="absent") + hosts.all.zos_data_set(name=default_data_set_2, state="absent") def test_concatenation_with_unix_dd_and_response_datasets(ansible_zos_module): try: hosts = ansible_zos_module - DEFAULT_DATA_SET_2 = get_tmp_ds_name() + default_data_set_2 = get_tmp_ds_name() hosts.all.file(path=DEFAULT_PATH, state="directory") hosts.all.file(path=DEFAULT_PATH_WITH_FILE, state="absent") - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_2, state="absent") + hosts.all.zos_data_set(name=default_data_set_2, state="absent") results = hosts.all.zos_mvs_raw( program_name="idcams", auth=True, dds=[ - dict( - dd_concat=dict( - dd_name=SYSPRINT_DD, - dds=[ - dict( - dd_unix=dict( - path=DEFAULT_PATH_WITH_FILE, - return_content=dict(type="text"), - ) - ), - dict( - dd_data_set=dict( - data_set_name=DEFAULT_DATA_SET_2, - disposition="new", - type="seq", - ) - ), + { + "dd_concat":{ + "dd_name":SYSPRINT_DD, + "dds":[ + { + "dd_unix":{ + "path":DEFAULT_PATH_WITH_FILE, + "return_content":{ + "type":"text" + }, + } + }, + { + "dd_data_set":{ + "data_set_name":default_data_set_2, + "disposition":"new", + "type":"seq", + } + }, ], - ), - ), - dict( - dd_input=dict( - dd_name=SYSIN_DD, - content=IDCAMS_STDIN, - ) - ), + }, + }, + { + "dd_input":{ + "dd_name":SYSIN_DD, + "content":IDCAMS_STDIN, + } + }, ], ) for result in results.contacted.values(): - pprint(result) assert result.get("ret_code", {}).get("code", -1) == 0 assert len(result.get("dd_names", [])) > 0 assert "IDCAMS" in "\n".join(result.get("dd_names")[0].get("content", [])) finally: hosts.all.file(name=DEFAULT_PATH, state="absent") - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_2, state="absent") + hosts.all.zos_data_set(name=default_data_set_2, state="absent") def test_concatenation_with_unix_dd_and_response_uss(ansible_zos_module): @@ -1679,35 +1777,38 @@ def test_concatenation_with_unix_dd_and_response_uss(ansible_zos_module): program_name="idcams", auth=True, dds=[ - dict( - dd_concat=dict( - dd_name=SYSPRINT_DD, - dds=[ - dict( - dd_unix=dict( - path=DEFAULT_PATH_WITH_FILE, - return_content=dict(type="text"), - ) - ), - dict( - dd_input=dict( - content="Hello world!", - return_content=dict(type="text"), - ) - ), + { + "dd_concat":{ + "dd_name":SYSPRINT_DD, + "dds":[ + { + "dd_unix":{ + "path":DEFAULT_PATH_WITH_FILE, + "return_content":{ + "type":"text" + }, + } + }, + { + "dd_input":{ + "content":"Hello world!", + "return_content":{ + "type":"text" + }, + } + }, ], - ), - ), - dict( - dd_input=dict( - dd_name=SYSIN_DD, - content=IDCAMS_STDIN, - ) - ), + }, + }, + { + "dd_input":{ + "dd_name":SYSIN_DD, + "content":IDCAMS_STDIN, + } + }, ], ) for result in results.contacted.values(): - pprint(result) assert result.get("ret_code", {}).get("code", -1) == 0 assert len(result.get("dd_names", [])) > 1 assert "IDCAMS" in "\n".join(result.get("dd_names")[0].get("content", [])) @@ -1722,30 +1823,31 @@ def test_concatenation_fail_with_unsupported_dd_type(ansible_zos_module): program_name="idcams", auth=True, dds=[ - dict( - dd_concat=dict( - dd_name=SYSPRINT_DD, - dds=[ - dict( - dd_dummy=dict( - path=DEFAULT_PATH_WITH_FILE, - return_content=dict(type="text"), - ), - dd_concat=dict(), - ), + { + "dd_concat":{ + "dd_name":SYSPRINT_DD, + "dds":[ + { + "dd_dummy":{ + "path":DEFAULT_PATH_WITH_FILE, + "return_content":{ + "type":"text" + }, + }, + "dd_concat":{}, + }, ], - ), - ), - dict( - dd_input=dict( - dd_name=SYSIN_DD, - content=IDCAMS_STDIN, - ) - ), + }, + }, + { + "dd_input":{ + "dd_name":SYSIN_DD, + "content":IDCAMS_STDIN, + } + }, ], ) for result in results.contacted.values(): - pprint(result) assert result.get("ret_code", {}).get("code", -1) == -1 assert "Unsupported parameters" in result.get("msg", "") @@ -1755,114 +1857,132 @@ def test_concatenation_fail_with_unsupported_dd_type(ansible_zos_module): [ ( [ - dict( - dd_concat=dict( - dd_name=SYSPRINT_DD, - dds=[ - dict( - dd_unix=dict( - path=DEFAULT_PATH_WITH_FILE, - return_content=dict(type="text"), - ) - ), - dict( - dd_data_set=dict( - data_set_name="ANSIBLE.USER.PRIVATE.TEST", - disposition="shr", - return_content=dict(type="text"), - ) - ), - dict( - dd_input=dict( - content="Hello world!", - return_content=dict(type="text"), - ) - ), + { + "dd_concat":{ + "dd_name":SYSPRINT_DD, + "dds":[ + { + "dd_unix":{ + "path":DEFAULT_PATH_WITH_FILE, + "return_content":{ + "type":"text" + }, + } + }, + { + "dd_data_set":{ + "data_set_name":"ANSIBLE.USER.PRIVATE.TEST", + "disposition":"shr", + "return_content":{ + "type":"text" + }, + } + }, + { + "dd_input":{ + "content":"Hello world!", + "return_content":{ + "type":"text" + }, + } + }, ], - ), - ), - dict( - dd_input=dict( - dd_name=SYSIN_DD, - content=IDCAMS_STDIN, - ) - ), + }, + }, + { + "dd_input":{ + "dd_name":SYSIN_DD, + "content":IDCAMS_STDIN, + } + }, ], 2, "Hello world!", ), ( [ - dict( - dd_concat=dict( - dd_name=SYSPRINT_DD, - dds=[ - dict( - dd_data_set=dict( - data_set_name="ANSIBLE.USER.PRIVATE.TEST", - disposition="shr", - return_content=dict(type="text"), - ) - ), - dict( - dd_unix=dict( - path=DEFAULT_PATH_WITH_FILE, - return_content=dict(type="text"), - ) - ), - dict( - dd_input=dict( - content="Hello world!", - return_content=dict(type="text"), - ) - ), + { + "dd_concat":{ + "dd_name":SYSPRINT_DD, + "dds":[ + { + "dd_data_set":{ + "data_set_name":"ANSIBLE.USER.PRIVATE.TEST", + "disposition":"shr", + "return_content":{ + "type":"text" + }, + } + }, + { + "dd_unix":{ + "path":DEFAULT_PATH_WITH_FILE, + "return_content":{ + "type":"text" + }, + } + }, + { + "dd_input":{ + "content":"Hello world!", + "return_content":{ + "type":"text" + }, + } + }, ], - ), - ), - dict( - dd_input=dict( - dd_name=SYSIN_DD, - content=IDCAMS_STDIN, - ) - ), + }, + }, + { + "dd_input":{ + "dd_name":SYSIN_DD, + "content":IDCAMS_STDIN, + } + }, ], 2, "Hello world!", ), ( [ - dict( - dd_concat=dict( - dd_name=SYSPRINT_DD, - dds=[ - dict( - dd_input=dict( - content="Hello world!", - return_content=dict(type="text"), - ) - ), - dict( - dd_data_set=dict( - data_set_name="ANSIBLE.USER.PRIVATE.TEST", - disposition="shr", - return_content=dict(type="text"), - ) - ), - dict( - dd_unix=dict( - path=DEFAULT_PATH_WITH_FILE, - return_content=dict(type="text"), - ) - ), + { + "dd_concat":{ + "dd_name":SYSPRINT_DD, + "dds":[ + { + "dd_input":{ + "content":"Hello world!", + "return_content":{ + "type":"text" + }, + } + }, + { + "dd_data_set":{ + "data_set_name":"ANSIBLE.USER.PRIVATE.TEST", + "disposition":"shr", + "return_content":{ + "type":"text" + }, + } + }, + { + "dd_unix":{ + "path":DEFAULT_PATH_WITH_FILE, + "return_content":{ + "type":"text" + }, + } + }, ], - ), - ), - dict( - dd_input=dict( - dd_name=SYSIN_DD, - content=IDCAMS_STDIN, - ) - ), + }, + }, + { + "dd_input":{ + "dd_name":SYSIN_DD, + "content":IDCAMS_STDIN, + } + }, ], 0, "IDCAMS", @@ -1905,7 +2025,6 @@ def test_authorized_program_run_unauthorized(ansible_zos_module): dds=[], ) for result in results.contacted.values(): - pprint(result) assert result.get("ret_code", {}).get("code", -1) == 8 assert len(result.get("dd_names", [])) == 0 assert "BGYSC0236E" in result.get("msg", "") @@ -1924,7 +2043,6 @@ def test_unauthorized_program_run_authorized(ansible_zos_module): dds=[], ) for result in results.contacted.values(): - pprint(result) assert result.get("ret_code", {}).get("code", -1) == 8 assert len(result.get("dd_names", [])) == 0 assert "BGYSC0215E" in result.get("msg", "") @@ -1941,16 +2059,17 @@ def test_authorized_program_run_authorized(ansible_zos_module): program_name="idcams", auth=True, dds=[ - dict( - dd_output=dict( - dd_name=SYSPRINT_DD, - return_content=dict(type="text"), - ), - ), + { + "dd_output":{ + "dd_name":SYSPRINT_DD, + "return_content":{ + "type":"text" + }, + }, + }, ], ) for result in results.contacted.values(): - pprint(result) assert result.get("ret_code", {}).get("code", -1) == 16 assert len(result.get("dd_names", [])) == 1 assert "BGYSC0236E" not in result.get("msg", "") @@ -1969,7 +2088,6 @@ def test_unauthorized_program_run_unauthorized(ansible_zos_module): dds=[], ) for result in results.contacted.values(): - pprint(result) assert result.get("ret_code", {}).get("code", -1) == 0 assert len(result.get("dd_names", [])) == 0 assert "BGYSC0215E" not in result.get("msg", "") @@ -1984,7 +2102,6 @@ def test_missing_program_name(ansible_zos_module): dds=[], ) for result in results.contacted.values(): - pprint(result) assert result.get("ret_code", {}).get("code", -1) == -1 assert len(result.get("dd_names", [])) == 0 assert "missing required arguments" in result.get("msg", "") @@ -1999,7 +2116,6 @@ def test_with_parms(ansible_zos_module): dds=[], ) for result in results.contacted.values(): - pprint(result) assert result.get("ret_code", {}).get("code", -1) == 0 assert len(result.get("dd_names", [])) == 0 @@ -2010,12 +2126,21 @@ def test_with_multiple_of_same_dd_name(ansible_zos_module): pgm="idcams", auth=True, dds=[ - dict(dd_input=dict(dd_name=SYSIN_DD, content=IDCAMS_STDIN)), - dict(dd_input=dict(dd_name=SYSIN_DD, content=IDCAMS_STDIN)), + { + "dd_input":{ + "dd_name":SYSIN_DD, + "content":IDCAMS_STDIN + } + }, + { + "dd_input":{ + "dd_name":SYSIN_DD, + "content":IDCAMS_STDIN + } + }, ], ) for result in results.contacted.values(): - pprint(result) assert result.get("ret_code", {}).get("code", -1) == 8 assert len(result.get("dd_names", [])) == 0 assert "BGYSC0228E" in result.get("msg", "") @@ -2032,16 +2157,20 @@ def test_vio_as_output(ansible_zos_module): program_name="idcams", auth=True, dds=[ - dict( - dd_vio=dict( - dd_name=SYSPRINT_DD, - ), - ), - dict(dd_input=dict(dd_name=SYSIN_DD, content=IDCAMS_STDIN)), + { + "dd_vio":{ + "dd_name":SYSPRINT_DD, + }, + }, + { + "dd_input":{ + "dd_name":SYSIN_DD, + "content":IDCAMS_STDIN + } + }, ], ) for result in results.contacted.values(): - pprint(result) assert result.get("ret_code", {}).get("code", 0) == 0 assert len(result.get("dd_names", [])) == 0 @@ -2060,18 +2189,24 @@ def test_output_dd(ansible_zos_module): program_name="idcams", auth=True, dds=[ - dict( - dd_output=dict( - dd_name=SYSPRINT_DD, - return_content=dict(type="text"), - ), - ), - dict(dd_input=dict(dd_name=SYSIN_DD, content=IDCAMS_STDIN)), + { + "dd_output":{ + "dd_name":SYSPRINT_DD, + "return_content":{ + "type":"text" + }, + }, + }, + { + "dd_input":{ + "dd_name":SYSIN_DD, + "content":IDCAMS_STDIN + } + }, ], ) for result in results.contacted.values(): - pprint(result) assert result.get("ret_code", {}).get("code", -1) == 0 assert len(result.get("dd_names", [])) > 0 assert "IDCAMS" in "\n".join(result.get("dd_names")[0].get("content", [])) From 148a35e8eec1562cf837238d07cbbc11e8fe9e2b Mon Sep 17 00:00:00 2001 From: IsaacVRey <isaac.vega.rey@gmail.com> Date: Fri, 7 Jun 2024 13:47:15 -0600 Subject: [PATCH 408/495] [Enabler][test_zos_operator_func] Update test suites on functional/modules/test_zos_operator_func.py to be pylint correct (#1476) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Update test suites on functional/module-utils/test_zos_data_set_func.py to be pylint correct * Correct functional/modules/test_zos_data_set_func.py * Correct functional/modules/test_zos_data_set_func.py * Update test suites on functional/modules/test_zos_job_submit_func.py to be pylint correct * Remove accidental changes on functional/modules/test_zos_job_submit_func.py * Update test suites on functional/modules/test_zos_operator_func.py to be pylint correct * Take out unused imports on functional/modules/test_zos_operator_func.py function to be pylint correct * Update test_zos_operator_func.py --------- Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> --- tests/functional/modules/test_zos_operator_func.py | 13 +++---------- 1 file changed, 3 insertions(+), 10 deletions(-) diff --git a/tests/functional/modules/test_zos_operator_func.py b/tests/functional/modules/test_zos_operator_func.py index d60d26ec2..123aeb57a 100644 --- a/tests/functional/modules/test_zos_operator_func.py +++ b/tests/functional/modules/test_zos_operator_func.py @@ -15,16 +15,10 @@ __metaclass__ = type -import time - -import ansible.constants -import ansible.errors -import ansible.utils import pytest -from pprint import pprint from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import ( - zoau_version_checker + zoau_version_checker, ) @@ -64,7 +58,7 @@ def test_zos_operator_invalid_command_to_ensure_transparency(ansible_zos_module) assert result.get("changed") is True transparency = False if any('DUMP COMMAND' in str for str in result.get("content")): - transparency = True + transparency = True assert transparency @@ -140,11 +134,10 @@ def test_zos_operator_positive_verbose_blocking(ansible_zos_module): def test_response_come_back_complete(ansible_zos_module): hosts = ansible_zos_module results = hosts.all.zos_operator(cmd="\\$dspl") - res = dict() + res = {} res["stdout"] = [] for result in results.contacted.values(): stdout = result.get('content') # HASP646 Only appears in the last line that before did not appears last_line = len(stdout) assert "HASP646" in stdout[last_line - 1] - From e9bcae65f3e2b9f04cafa1d452544004574f4072 Mon Sep 17 00:00:00 2001 From: IsaacVRey <isaac.vega.rey@gmail.com> Date: Tue, 11 Jun 2024 20:26:07 -0600 Subject: [PATCH 409/495] [Enabler][test_arg_parser] Update test suites on functional/module-utils/test_arg_parser.py to be pylint correct (#1442) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Update test suites on functional/module-utils/test_arg_parser.py to be pylint correct * Update test suites on functional/module-utils/test_arg_parser.py to be pylint correct * Update test_arg_parser.py --------- Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> --- .../module_utils/test_arg_parser.py | 916 +++++++++++------- 1 file changed, 564 insertions(+), 352 deletions(-) diff --git a/tests/functional/module_utils/test_arg_parser.py b/tests/functional/module_utils/test_arg_parser.py index c27885b7a..0dd52eb0f 100644 --- a/tests/functional/module_utils/test_arg_parser.py +++ b/tests/functional/module_utils/test_arg_parser.py @@ -15,81 +15,103 @@ __metaclass__ = type -from ibm_zos_core.plugins.module_utils.better_arg_parser import BetterArgParser -import pytest import re +import pytest +from ibm_zos_core.plugins.module_utils.better_arg_parser import BetterArgParser -arg_defs = dict( - batch=dict( - arg_type="list", - elements="dict", - options=dict( - name=dict( - required=True, - ), - state=dict( - arg_type="str", - default="present", - ), - type=dict( - arg_type="str", - required=False, - ), - size=dict(arg_type="str", required=False), - format=dict( - arg_type="str", - required=False, - ), - data_class=dict( - arg_type="str", - required=False, - ), - record_length=dict( - arg_type="int", - ), - replace=dict( - arg_type="bool", - default=False, - ), - ), - ), - name=dict(arg_type="str"), - state=dict( - arg_type="str", - default="present", +arg_defs = { + "batch":{ + "arg_type":"list", + "elements":"dict", + "options":{ + "name":{ + "required":True, + }, + "state":{ + "arg_type":"str", + "default":"present", + }, + "type":{ + "arg_type":"str", + "required":False, + }, + "size":{ + "arg_type":"str", + "required":False + }, + "format":{ + "arg_type":"str", + "required":False, + }, + "data_class":{ + "arg_type":"str", + "required":False, + }, + "record_length":{ + "arg_type":"int", + }, + "replace":{ + "arg_type":"bool", + "default":False, + }, + }, + }, + "name":{ + "arg_type":"str" + }, + "state":{ + "arg_type":"str", + "default":"present", # choices=['present','absent'] - ), - type=dict( - arg_type="str", - required=False, - ), - size=dict(arg_type="str", required=False), - format=dict( - arg_type="str", - required=False, - ), - data_class=dict( - arg_type="str", - required=False, - aliases=["dataclas"], - dependencies=["record_length", "state"], - ), - record_length=dict( - arg_type="int", aliases=["length", "lrecl"], dependencies=["replace", "size"] - ), - replace=dict(arg_type="bool", default=False, dependencies=[]), -) + }, + "type":{ + "arg_type":"str", + "required":False, + }, + "size":{ + "arg_type":"str", + "required":False + }, + "format":{ + "arg_type":"str", + "required":False, + }, + "data_class":{ + "arg_type":"str", + "required":False, + "aliases":["dataclas"], + "dependencies":["record_length", "state"], + }, + "record_length":{ + "arg_type":"int", "aliases":["length", "lrecl"], "dependencies":["replace", "size"] + }, + "replace":{ + "arg_type":"bool", + "default":False, + "dependencies":[] + }, +} def test_default_top_level(): - arg_defs = dict(state=dict(arg_type="str", default="present")) + arg_defs = { + "state":{ + "arg_type":"str", + "default":"present" + } + } parser = BetterArgParser(arg_defs) result = parser.parse_args({}) assert result.get("state") == "present" def test_required_top_level_no_default(): - arg_defs = dict(name=dict(arg_type="str", required=True)) + arg_defs = { + "name":{ + "arg_type":"str", + "required":True + } + } parser = BetterArgParser(arg_defs) with pytest.raises(ValueError): parser.parse_args({}) @@ -97,26 +119,50 @@ def test_required_top_level_no_default(): def test_required_top_level_with_default(): default_name = "samplename" - arg_defs = dict(name=dict(arg_type="str", required=True, default=default_name)) + arg_defs = { + "name":{ + "arg_type":"str", + "required":True, + "default":default_name + } + } parser = BetterArgParser(arg_defs) result = parser.parse_args({}) assert result.get("name") == default_name -arg_defs_1 = dict( - name=dict( - arg_type="str", required=True, default="samplename", dependencies=["date"] - ), - date=dict(arg_type="str", default="may 1, 2020", dependencies=["name"]), -) - -arg_defs_2 = dict( - name=dict( - arg_type="str", required=True, default="samplename", dependencies=["time"] - ), - date=dict(arg_type="str", default="may 1, 2020", dependencies=["name"]), - time=dict(arg_type="int", default="3945297", dependencies=["date"]), -) +arg_defs_1 = { + "name":{ + "arg_type":"str", + "required":True, + "default":"samplename", + "dependencies":["date"] + }, + "date":{ + "arg_type":"str", + "default":"may 1, 2020", + "dependencies":["name"] + }, +} + +arg_defs_2 = { + "name":{ + "arg_type":"str", + "required":True, + "default":"samplename", + "dependencies":["time"] + }, + "date":{ + "arg_type":"str", + "default":"may 1, 2020", + "dependencies":["name"] + }, + "time":{ + "arg_type":"int", + "default":"3945297", + "dependencies":["date"] + }, +} @pytest.mark.parametrize("arg_defs", [arg_defs_1, arg_defs_2]) @@ -127,16 +173,34 @@ def test_cyclic_dependency_catching(arg_defs): def test_unknown_arg_ignore(): default_name = "samplename" - arg_defs = dict(name=dict(arg_type="str", required=True, default=default_name)) + arg_defs = { + "name":{ + "arg_type":"str", + "required":True, + "default":default_name + } + } parser = BetterArgParser(arg_defs) result = parser.parse_args({"otherarg": "testing"}) assert "otherarg" not in result.keys() -provided_args_1 = {"name": "somename", "dt": "jan 3 2017", "sometime": "97887"} -provided_args_2 = {"date": "jan 3 2017", "sometime": "97887"} -provided_args_3 = {"bestdate": "jan 3 2017", "datetime": "97887"} -provided_args_4 = {"datetime": "97887"} +provided_args_1 = { + "name": "somename", + "dt": "jan 3 2017", + "sometime": "97887" +} +provided_args_2 = { + "date": "jan 3 2017", + "sometime": "97887" +} +provided_args_3 = { + "bestdate": "jan 3 2017", + "datetime": "97887" +} +provided_args_4 = { + "datetime": "97887" +} @pytest.mark.parametrize( @@ -144,15 +208,23 @@ def test_unknown_arg_ignore(): [provided_args_1, provided_args_2, provided_args_3, provided_args_4], ) def test_alias_resolution(provided_args): - arg_defs = dict( - name=dict( - arg_type="str", - required=True, - default="samplename", - ), - date=dict(arg_type="str", default="may 1, 2020", aliases=["bestdate", "dt"]), - time=dict(arg_type="int", default="3945297", aliases=["sometime", "datetime"]), - ) + arg_defs = { + "name":{ + "arg_type":"str", + "required":True, + "default":"samplename", + }, + "date":{ + "arg_type":"str", + "default":"may 1, 2020", + "aliases":["bestdate", "dt"] + }, + "time":{ + "arg_type":"int", + "default":"3945297", + "aliases":["sometime", "datetime"] + }, + } parser = BetterArgParser(arg_defs) result = parser.parse_args(provided_args) result_simple = [str(x) for x in result.values()] @@ -165,12 +237,12 @@ def test_alias_resolution(provided_args): @pytest.mark.parametrize("arg_val", ["asdfadfa234", "#@#$@fasdfa"]) def test_str_type_validation_success(arg_val): - arg_defs = dict( - name=dict( - arg_type="str", - required=True, - ) - ) + arg_defs = { + "name":{ + "arg_type":"str", + "required":True, + } + } parser = BetterArgParser(arg_defs) result = parser.parse_args({"name": arg_val}) assert result.get("name") == arg_val @@ -182,12 +254,12 @@ def dummyfunc(): @pytest.mark.parametrize("arg_val", [dummyfunc, 32143, True]) def test_str_type_validation_failure(arg_val): - arg_defs = dict( - name=dict( - arg_type="str", - required=True, - ) - ) + arg_defs = { + "name":{ + "arg_type":"str", + "required":True, + } + } parser = BetterArgParser(arg_defs) with pytest.raises(ValueError): parser.parse_args({"name": arg_val}) @@ -195,12 +267,12 @@ def test_str_type_validation_failure(arg_val): @pytest.mark.parametrize("arg_val", [231, "3124", 0]) def test_int_type_validation_success(arg_val): - arg_defs = dict( - somenum=dict( - arg_type="int", - required=True, - ) - ) + arg_defs = { + "somenum":{ + "arg_type":"int", + "required":True, + } + } parser = BetterArgParser(arg_defs) result = parser.parse_args({"somenum": arg_val}) assert result.get("somenum") == int(arg_val) @@ -208,12 +280,12 @@ def test_int_type_validation_success(arg_val): @pytest.mark.parametrize("arg_val", [dummyfunc, "3341h132j1231x", True]) def test_int_type_validation_failure(arg_val): - arg_defs = dict( - somenum=dict( - arg_type="int", - required=True, - ) - ) + arg_defs = { + "somenum":{ + "arg_type":"int", + "required":True, + } + } parser = BetterArgParser(arg_defs) with pytest.raises(ValueError): parser.parse_args({"somenum": arg_val}) @@ -221,12 +293,12 @@ def test_int_type_validation_failure(arg_val): @pytest.mark.parametrize("arg_val", [True, False]) def test_bool_type_validation_success(arg_val): - arg_defs = dict( - somebool=dict( - arg_type="bool", - required=True, - ) - ) + arg_defs = { + "somebool":{ + "arg_type":"bool", + "required":True, + } + } parser = BetterArgParser(arg_defs) result = parser.parse_args({"somebool": arg_val}) assert result.get("somebool") == arg_val @@ -236,12 +308,12 @@ def test_bool_type_validation_success(arg_val): "arg_val", [dummyfunc, "3341h132j1231x", 0, 1, "True", "false"] ) def test_bool_type_validation_failure(arg_val): - arg_defs = dict( - somebool=dict( - arg_type="bool", - required=True, - ) - ) + arg_defs = { + "somebool":{ + "arg_type":"bool", + "required":True, + } + } parser = BetterArgParser(arg_defs) with pytest.raises(ValueError): parser.parse_args({"somebool": arg_val}) @@ -268,12 +340,12 @@ def always_returns_same_val(value, dependencies): ], ) def test_basic_user_provided_type_func(arg_type, expected): - arg_defs = dict( - someval=dict( - arg_type=arg_type, - required=True, - ) - ) + arg_defs = { + "someval":{ + "arg_type":arg_type, + "required":True, + } + } parser = BetterArgParser(arg_defs) result = parser.parse_args({"someval": expected}) assert result.get("someval") == expected @@ -290,60 +362,68 @@ def is_dependency_uppercase(value, dependencies): def test_user_provided_type_func_with_dependencies(): - arg_defs = dict( - uppername=dict( - arg_type=make_uppercase, - required=True, - ), - verifier=dict( - arg_type=is_dependency_uppercase, required=True, dependencies=["uppername"] - ), - ) + arg_defs = { + "uppername":{ + "arg_type":make_uppercase, + "required":True, + }, + "verifier":{ + "arg_type":is_dependency_uppercase, + "required":True, + "dependencies":["uppername"] + }, + } parser = BetterArgParser(arg_defs) result = parser.parse_args({"uppername": "dadafasdf", "verifier": "doesntmatter"}) assert result.get("verifier") def test_user_provided_type_func_with_dependencies_make_fail(): - arg_defs = dict( - uppername=dict( - arg_type=make_uppercase, required=True, dependencies=["verifier"] - ), - verifier=dict( - arg_type=is_dependency_uppercase, - required=True, - ), - ) + arg_defs = { + "uppername":{ + "arg_type":make_uppercase, + "required":True, + "dependencies":["verifier"] + }, + "verifier":{ + "arg_type":is_dependency_uppercase, + "required":True, + }, + } parser = BetterArgParser(arg_defs) with pytest.raises(TypeError): parser.parse_args({"uppername": "dadafasdf", "verifier": "doesntmatter"}) def test_dependent_required(): - arg_defs = dict( - uppername=dict( - arg_type="str", - required=True, - ), - verifier=dict( - arg_type="str", required=is_dependency_uppercase, dependencies=["uppername"] - ), - ) + arg_defs = { + "uppername":{ + "arg_type":"str", + "required":True, + }, + "verifier":{ + "arg_type":"str", + "required":is_dependency_uppercase, + "dependencies":["uppername"] + }, + } parser = BetterArgParser(arg_defs) result = parser.parse_args({"uppername": "dadafasdf"}) assert result.get("verifier") is None def test_dependent_required_fail(): - arg_defs = dict( - uppername=dict( - arg_type="str", - required=True, - ), - verifier=dict( - arg_type="str", required=is_dependency_uppercase, dependencies=["uppername"] - ), - ) + arg_defs = { + "uppername":{ + "arg_type":"str", + "required":True, + }, + "verifier":{ + "arg_type":"str", + "required":is_dependency_uppercase, + "dependencies":["uppername"] + }, + } parser = BetterArgParser(arg_defs) with pytest.raises(ValueError): result = parser.parse_args({"uppername": "DAFASDFA"}) @@ -353,54 +433,56 @@ def test_dependent_required_fail(): ("arg_val", "expected"), [("asdfafad", False), ("DFDAFFDSA", True)] ) def test_dependent_default(arg_val, expected): - arg_defs = dict( - uppername=dict( - arg_type="str", - required=True, - ), - verifier=dict( - arg_type="bool", default=is_dependency_uppercase, dependencies=["uppername"] - ), - ) + arg_defs = { + "uppername":{ + "arg_type":"str", + "required":True, + }, + "verifier":{ + "arg_type":"bool", + "default":is_dependency_uppercase, + "dependencies":["uppername"] + }, + } parser = BetterArgParser(arg_defs) result = parser.parse_args({"uppername": arg_val}) assert result.get("verifier") == expected def test_list_of_strings_success(): - arg_defs = dict( - names=dict( - arg_type="list", - elements="str", - required=True, - ) - ) + arg_defs = { + "names":{ + "arg_type":"list", + "elements":"str", + "required":True, + } + } parser = BetterArgParser(arg_defs) result = parser.parse_args({"names": ["name1", "name2", "name3", "name4"]}) assert len(result.get("names")) == 4 def test_list_of_strings_failure(): - arg_defs = dict( - names=dict( - arg_type="list", - elements="str", - required=True, - ) - ) + arg_defs = { + "names":{ + "arg_type":"list", + "elements":"str", + "required":True, + } + } parser = BetterArgParser(arg_defs) with pytest.raises(ValueError): parser.parse_args({"names": [1, True, "name3", "name4"]}) def test_list_of_strings_function_for_arg_type_success(): - arg_defs = dict( - names=dict( - arg_type="list", - elements=make_uppercase, - required=True, - ) - ) + arg_defs = { + "names":{ + "arg_type":"list", + "elements":make_uppercase, + "required":True, + } + } parser = BetterArgParser(arg_defs) result = parser.parse_args({"names": ["name1", "name2", "name3", "name4"]}) assert len(result.get("names")) == 4 @@ -409,16 +491,23 @@ def test_list_of_strings_function_for_arg_type_success(): def test_list_of_dicts_success(): - arg_defs = dict( - people=dict( - arg_type="list", - elements="dict", - options=dict( - name=dict(arg_type="str", required=True, default="testname"), - age=dict(arg_type="int", required=False), - ), - ) - ) + arg_defs = { + "people":{ + "arg_type":"list", + "elements":"dict", + "options":{ + "name":{ + "arg_type":"str", + "required":True, + "default":"testname" + }, + "age":{ + "arg_type":"int", + "required":False + }, + }, + } + } parser = BetterArgParser(arg_defs) result = parser.parse_args( { @@ -437,16 +526,23 @@ def to_string(value, dependencies): def test_list_of_dicts_nested_function_arg_type(): - arg_defs = dict( - people=dict( - arg_type="list", - elements="dict", - options=dict( - name=dict(arg_type="str", required=True, default="testname"), - age=dict(arg_type=to_string, required=False), - ), - ) - ) + arg_defs = { + "people":{ + "arg_type":"list", + "elements":"dict", + "options":{ + "name":{ + "arg_type":"str", + "required":True, + "default":"testname" + }, + "age":{ + "arg_type":to_string, + "required":False + }, + }, + } + } parser = BetterArgParser(arg_defs) result = parser.parse_args( { @@ -465,21 +561,33 @@ def test_list_of_dicts_nested_function_arg_type(): def test_dict_of_dict(): - arg_defs = dict( - person=dict( - arg_type="dict", - options=dict( - name=dict(arg_type="str", required=True, default="testname"), - age=dict(arg_type="int", required=False), - address=dict( - arg_type="dict", - options=dict( - street=dict(arg_type="str"), number=dict(arg_type="int") - ), - ), - ), - ) - ) + arg_defs = { + "person":{ + "arg_type":"dict", + "options":{ + "name":{ + "arg_type":"str", + "required":True, + "default":"testname" + }, + "age":{ + "arg_type":"int", + "required":False + }, + "address":{ + "arg_type":"dict", + "options":{ + "street":{ + "arg_type":"str" + }, + "number":{ + "arg_type":"int" + } + }, + }, + }, + } + } parser = BetterArgParser(arg_defs) result = parser.parse_args( { @@ -494,21 +602,33 @@ def test_dict_of_dict(): def test_dict_of_dict_fail_on_nested_arg_type(): - arg_defs = dict( - person=dict( - arg_type="dict", - options=dict( - name=dict(arg_type="str", required=True, default="testname"), - age=dict(arg_type="int", required=False), - address=dict( - arg_type="dict", - options=dict( - street=dict(arg_type="str"), number=dict(arg_type="str") - ), - ), - ), - ) - ) + arg_defs = { + "person":{ + "arg_type":"dict", + "options":{ + "name":{ + "arg_type":"str", + "required":True, + "default":"testname" + }, + "age":{ + "arg_type":"int", + "required":False + }, + "address":{ + "arg_type":"dict", + "options":{ + "street":{ + "arg_type":"str" + }, + "number":{ + "arg_type":"str" + } + }, + }, + }, + } + } parser = BetterArgParser(arg_defs) with pytest.raises(ValueError): parser.parse_args( @@ -523,97 +643,163 @@ def test_dict_of_dict_fail_on_nested_arg_type(): def test_cyclic_dependency_in_nested_dict(): - arg_defs = dict( - person=dict( - arg_type="dict", - options=dict( - name=dict( - arg_type="str", - required=True, - default="testname", - dependencies=["age"], - ), - age=dict(arg_type="int", required=False, dependencies=["name"]), - address=dict( - arg_type="dict", - options=dict( - street=dict(arg_type="str"), number=dict(arg_type="str") - ), - ), - ), - ) - ) + arg_defs = { + "person":{ + "arg_type":"dict", + "options":{ + "name":{ + "arg_type":"str", + "required":True, + "default":"testname", + "dependencies":["age"], + }, + "age":{ + "arg_type":"int", + "required":False, + "dependencies":["name"] + }, + "address":{ + "arg_type":"dict", + "options":{ + "street":{ + "arg_type":"str" + }, + "number":{ + "arg_type":"str" + } + }, + }, + }, + } + } with pytest.raises(RuntimeError): BetterArgParser(arg_defs) def test_invalid_dependency_independent_does_not_exist(): - arg_defs = dict( - person=dict(arg_type="str", dependencies=["nonexistent"]), - animal=dict(arg_type="str", dependencies=["person"]), - ) + arg_defs = { + "person":{ + "arg_type":"str", + "dependencies":["nonexistent"] + }, + "animal":{ + "arg_type":"str", + "dependencies":["person"] + }, + } with pytest.raises(ValueError): BetterArgParser(arg_defs) def test_choices_success(): - arg_defs = dict(person=dict(arg_type="str", choices=["blake", "ping", "crystal"])) + arg_defs = { + "person":{ + "arg_type":"str", + "choices":["blake", "ping", "crystal"] + } + } parser = BetterArgParser(arg_defs) result = parser.parse_args({"person": "blake"}) assert result.get("person") == "blake" def test_choices_fail(): - arg_defs = dict(person=dict(arg_type="str", choices=["blake", "ping", "crystal"])) + arg_defs = { + "person":{ + "arg_type":"str", + "choices":["blake", "ping", "crystal"] + } + } parser = BetterArgParser(arg_defs) with pytest.raises(ValueError): parser.parse_args({"person": "bob"}) -arg_defs = dict( - name=dict( - arg_type="str", required=True, default="samplename", dependencies=["time"] - ), - date=dict(arg_type="str", default="may 1, 2020", dependencies=["name"]), - time=dict(arg_type="int", default="3945297", dependencies=["date"]), -) +arg_defs = { + "name":{ + "arg_type":"str", + "required":True, + "default":"samplename", + "dependencies":["time"] + }, + "date":{ + "arg_type":"str", + "default":"may 1, 2020", + "dependencies":["name"] + }, + "time":{ + "arg_type":"int", + "default":"3945297", + "dependencies":["date"] + }, +} def test_second_level_defaults(): - arg_defs = dict( - name=dict(arg_type="str", required=True, default="samplename"), - date=dict( - arg_type="dict", - options=dict( - month=dict(arg_type="str", default="hello"), - day=dict(arg_type="int", default=1), - ), - ), - ) + arg_defs = { + "name":{ + "arg_type":"str", + "required":True, + "default":"samplename" + }, + "date":{ + "arg_type":"dict", + "options":{ + "month":{ + "arg_type":"str", + "default":"hello" + }, + "day":{ + "arg_type":"int", + "default":1 + }, + }, + }, + } parser = BetterArgParser(arg_defs) result = parser.parse_args({"name": "blake", "date": {"month": "may"}}) assert result.get("date").get("day") is not None def test_mutually_exclusive_parameters_two_values_set_top_level(): - arg_defs = dict( - name=dict(arg_type="str", required=True, default="samplename"), - date=dict(arg_type="str", default="may 1, 2020"), - time=dict(arg_type="int", default="3945297"), - mutually_exclusive=[["date", "time"]], - ) + arg_defs = { + "name":{ + "arg_type":"str", + "required":True, + "default":"samplename" + }, + "date":{ + "arg_type":"str", + "default":"may 1, 2020" + }, + "time":{ + "arg_type":"int", + "default":"3945297" + }, + "mutually_exclusive":[["date", "time"]], + } parser = BetterArgParser(arg_defs) with pytest.raises(ValueError): parser.parse_args({"date": "tuesday", "time": 5000}) def test_mutually_exclusive_parameters_two_values_set_top_level_defaults(): - arg_defs = dict( - name=dict(arg_type="str", required=True, default="samplename"), - date=dict(arg_type="str", default="may 1, 2020"), - time=dict(arg_type="int", default="3945297"), - mutually_exclusive=[["date", "time"]], - ) + arg_defs = { + "name":{ + "arg_type":"str", + "required":True, + "default":"samplename" + }, + "date":{ + "arg_type":"str", + "default":"may 1, 2020" + }, + "time":{ + "arg_type":"int", + "default":"3945297" + }, + "mutually_exclusive":[["date", "time"]], + } parser = BetterArgParser(arg_defs) with pytest.raises(ValueError): parser.parse_args({}) @@ -625,16 +811,21 @@ def special_names_get_uppercase(value, dependencies, kwargs): return value.upper() return value - arg_defs = dict( - name=dict( - arg_type=special_names_get_uppercase, - required=True, - default="samplename", - special_names=["blake", "demetri", "ping", "crystal", "asif", "luke"], - ), - date=dict(arg_type="str", default="may 1, 2020"), - time=dict(arg_type="int"), - ) + arg_defs = { + "name":{ + "arg_type":special_names_get_uppercase, + "required":True, + "default":"samplename", + "special_names":["blake", "demetri", "ping", "crystal", "asif", "luke"], + }, + "date":{ + "arg_type":"str", + "default":"may 1, 2020" + }, + "time":{ + "arg_type":"int" + }, + } parser = BetterArgParser(arg_defs) result = parser.parse_args({"name": "blake"}) assert result.get("name") == "BLAKE" @@ -648,21 +839,26 @@ def special_user(value, dependencies, kwargs): return True return False - arg_defs = dict( - name=dict( - arg_type="str", - required=True, - default="samplename", - ), - date=dict(arg_type="str", default="may 1, 2020"), - time=dict(arg_type="int"), - age=dict( - arg_type="int", - required=special_user, - dependencies=["name"], - special_names=["blake", "demetri", "ping", "crystal", "asif", "luke"], - ), - ) + arg_defs = { + "name":{ + "arg_type":"str", + "required":True, + "default":"samplename", + }, + "date":{ + "arg_type":"str", + "default":"may 1, 2020" + }, + "time":{ + "arg_type":"int" + }, + "age":{ + "arg_type":"int", + "required":special_user, + "dependencies":["name"], + "special_names":["blake", "demetri", "ping", "crystal", "asif", "luke"], + }, + } parser = BetterArgParser(arg_defs) with pytest.raises(ValueError): parser.parse_args({"name": "blake"}) @@ -676,15 +872,15 @@ def special_names_get_uppercase(value, dependencies, kwargs): return value.upper() return value - arg_defs = dict( - person=dict( - arg_type="dict", - options=dict( - name=dict( - arg_type=special_names_get_uppercase, - required=True, - default="testname", - special_names=[ + arg_defs = { + "person":{ + "arg_type":"dict", + "options":{ + "name":{ + "arg_type":special_names_get_uppercase, + "required":True, + "default":"testname", + "special_names":[ "blake", "demetri", "ping", @@ -692,17 +888,25 @@ def special_names_get_uppercase(value, dependencies, kwargs): "asif", "luke", ], - ), - age=dict(arg_type="int", required=False), - address=dict( - arg_type="dict", - options=dict( - street=dict(arg_type="str"), number=dict(arg_type="int") - ), - ), - ), - ) - ) + }, + "age":{ + "arg_type":"int", + "required":False + }, + "address":{ + "arg_type":"dict", + "options":{ + "street":{ + "arg_type":"str" + }, + "number":{ + "arg_type":"int" + } + }, + }, + }, + } + } parser = BetterArgParser(arg_defs) result = parser.parse_args( { @@ -756,7 +960,11 @@ def special_names_get_uppercase(value, dependencies, kwargs): ], ) def test_data_set_type_no_invalid(arg_type, name): - arg_defs = dict(dsname=dict(arg_type=arg_type)) + arg_defs = { + "dsname":{ + "arg_type":arg_type + } + } parser = BetterArgParser(arg_defs) result = parser.parse_args({"dsname": name}) assert result.get("dsname") == name @@ -795,7 +1003,11 @@ def test_data_set_type_no_invalid(arg_type, name): ], ) def test_data_set_type_invalid(arg_type, name): - arg_defs = dict(dsname=dict(arg_type=arg_type)) + arg_defs = { + "dsname":{ + "arg_type":arg_type + } + } parser = BetterArgParser(arg_defs) with pytest.raises(ValueError): parser.parse_args({"dsname": name}) From 4f6d5faaea6914ea0e406e6e5e16efd28416ceb5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Thu, 13 Jun 2024 10:26:39 -0600 Subject: [PATCH 410/495] [Bugfix][1377]output_mvs_raw_gds_positive_was_false_positive (#1541) * Fix quick issue * Fix output of mvs_raw +1 gds * Fix false positive * Add fragment * Fix sanity * Adder disposition new again * Add coments * Fix disposition * Fix sanity * Fix sanity --- ...vs_raw_gds_positive_was_false_positive.yml | 3 ++ plugins/modules/zos_mvs_raw.py | 36 ++++++++++++++----- 2 files changed, 31 insertions(+), 8 deletions(-) create mode 100644 changelogs/fragments/1541-output_mvs_raw_gds_positive_was_false_positive.yml diff --git a/changelogs/fragments/1541-output_mvs_raw_gds_positive_was_false_positive.yml b/changelogs/fragments/1541-output_mvs_raw_gds_positive_was_false_positive.yml new file mode 100644 index 000000000..3ed2efe0a --- /dev/null +++ b/changelogs/fragments/1541-output_mvs_raw_gds_positive_was_false_positive.yml @@ -0,0 +1,3 @@ +minor_changes: + - zos_mvs_raw - Added support for GDG and GDS relative positive name notation to use a data set. + (https://github.com/ansible-collections/ibm_zos_core/pull/1541). \ No newline at end of file diff --git a/plugins/modules/zos_mvs_raw.py b/plugins/modules/zos_mvs_raw.py index 45f89e023..b06dd2ce0 100644 --- a/plugins/modules/zos_mvs_raw.py +++ b/plugins/modules/zos_mvs_raw.py @@ -1637,12 +1637,14 @@ RawInputDefinition, RawOutputDefinition, ) +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import ZOAUImportError from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import data_set from ansible.module_utils.basic import AnsibleModule from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.ansible_module import ( AnsibleModuleHelper, ) import re +import traceback from ansible.module_utils.six import PY3 if PY3: @@ -1650,6 +1652,10 @@ else: from pipes import quote +try: + from zoautil_py import datasets +except Exception: + datasets = ZOAUImportError(traceback.format_exc()) ENCODING_ENVIRONMENT_VARS = {"_BPXK_AUTOCVT": "OFF"} @@ -2580,9 +2586,11 @@ def get_dd_name_and_key(dd): key = "" if dd.get("dd_data_set"): dd_name = dd.get("dd_data_set").get("dd_name") - data_set_name = resolve_data_set_names(dd.get("dd_data_set").get("data_set_name"), - dd.get("dd_data_set").get("disposition")) + data_set_name, disposition = resolve_data_set_names(dd.get("dd_data_set").get("data_set_name"), + dd.get("dd_data_set").get("disposition"), + dd.get("dd_data_set").get("type")) dd.get("dd_data_set")["data_set_name"] = data_set_name + dd.get("dd_data_set")["disposition"] = disposition key = "dd_data_set" elif dd.get("dd_unix"): dd_name = dd.get("dd_unix").get("dd_name") @@ -2627,7 +2635,7 @@ def set_extra_attributes_in_dd(dd, tmphlq, key): return dd -def resolve_data_set_names(dataset, disposition): +def resolve_data_set_names(dataset, disposition, type): """Resolve cases for data set names as relative gds or positive that could be accepted if disposition is new. Parameters @@ -2636,15 +2644,27 @@ def resolve_data_set_names(dataset, disposition): Data set name to determine if is a GDS relative name or regular name. disposition : str Disposition of data set for it creation. + type : str + Type of dataset Returns ------- str The absolute name of dataset or relative positive if disposition is new. + str + The disposition base on the system """ + if disposition: + disp = disposition + else: + disp = "shr" + if data_set.DataSet.is_gds_relative_name(dataset): if data_set.DataSet.is_gds_positive_relative_name(dataset): - if disposition and disposition == "new": - return dataset + if disp == "new": + if type: + return str(datasets.create(dataset, type).name), "shr" + else: + return str(datasets.create(dataset, "seq").name), "shr" else: raise ("To generate a new GDS as {0} disposition 'new' is required.".format(dataset)) else: @@ -2653,14 +2673,14 @@ def resolve_data_set_names(dataset, disposition): ) src = data.name if data.is_gds_active: - if disposition and disposition == "new": + if disposition and disp == "new": raise ("GDS {0} already created, incorrect parameters for disposition and data_set_name".format(src)) else: - return src + return src, disposition else: raise ("{0} does not exist".format(src)) else: - return dataset + return dataset, disp def build_data_definition(dd): From 1caf6248893ddf811b2a5e6eab6ba43d8d6322ce Mon Sep 17 00:00:00 2001 From: IsaacVRey <isaac.vega.rey@gmail.com> Date: Thu, 13 Jun 2024 10:32:41 -0600 Subject: [PATCH 411/495] [Enabler][test_zos_blockinfile_func] Update test suites on functional/module-utils/test_zos_blockinfile_func.py to be pylint correct (#1449) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Update test suites on functional/module-utils/test_zos_backup restore.py to be pylint correct * Revert a mistake commit * Update test suites on functional/moduls/test_zos_blockinfile_func.py to be pylint correct * Update test suites on functional/module-utils/test_zos_blockinfile_func.py to be pylint correct * Reverse changes * Fix typos --------- Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> Co-authored-by: André Marcel Gutiérrez Benítez <amgutierrezbenitez@hotmail.com> --- .../modules/test_zos_blockinfile_func.py | 453 +++++++++++++----- 1 file changed, 331 insertions(+), 122 deletions(-) diff --git a/tests/functional/modules/test_zos_blockinfile_func.py b/tests/functional/modules/test_zos_blockinfile_func.py index 713a9873e..0bc40866d 100644 --- a/tests/functional/modules/test_zos_blockinfile_func.py +++ b/tests/functional/modules/test_zos_blockinfile_func.py @@ -12,13 +12,12 @@ # limitations under the License. from __future__ import absolute_import, division, print_function -from ibm_zos_core.tests.helpers.dataset import get_tmp_ds_name -from shellescape import quote import time import re -import pytest import inspect -import os +from shellescape import quote +import pytest +from ibm_zos_core.tests.helpers.dataset import get_tmp_ds_name __metaclass__ = type @@ -31,7 +30,7 @@ { char dsname[ strlen(argv[1]) + 4]; sprintf(dsname, \\\"//'%s'\\\", argv[1]); - FILE* member; + file* member; member = fopen(dsname, \\\"rb,type=record\\\"); sleep(300); fclose(member); @@ -421,8 +420,10 @@ # END ANSIBLE MANAGED BLOCK""" """ -Note: zos_encode module uses USS cp command for copying from USS file to MVS data set which only supports IBM-1047 charset. -I had to develop and use a new tool for converting and copying to data set in order to set up environment for tests to publish results on Jira. +Note: zos_encode module uses USS cp command for copying +from USS file to MVS data set which only supports IBM-1047 charset. +I had to develop and use a new tool for converting and copying +to data set in order to set up environment for tests to publish results on Jira. Until the issue be addressed I disable related tests. """ ENCODING = ['IBM-1047', 'ISO8859-1', 'UTF-8'] @@ -436,34 +437,34 @@ USS_BACKUP_FILE = "/tmp/backup.tmp" BACKUP_OPTIONS = [None, "BLOCKIF.TEST.BACKUP", "BLOCKIF.TEST.BACKUP(BACKUP)"] -def set_uss_environment(ansible_zos_module, CONTENT, FILE): +def set_uss_environment(ansible_zos_module, content, file): hosts = ansible_zos_module - hosts.all.shell(cmd="mkdir -p {0}".format(TEST_FOLDER_BLOCKINFILE)) - hosts.all.file(path=FILE, state="touch") - hosts.all.shell(cmd="echo \"{0}\" > {1}".format(CONTENT, FILE)) + hosts.all.shell(cmd=f"mkdir -p {TEST_FOLDER_BLOCKINFILE}") + hosts.all.file(path=file, state="touch") + hosts.all.shell(cmd=f"echo \"{content}\" > {file}") def remove_uss_environment(ansible_zos_module): hosts = ansible_zos_module hosts.all.shell(cmd="rm -rf" + TEST_FOLDER_BLOCKINFILE) -def set_ds_environment(ansible_zos_module, TEMP_FILE, DS_NAME, DS_TYPE, CONTENT): +def set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content): hosts = ansible_zos_module - hosts.all.shell(cmd="echo \"{0}\" > {1}".format(CONTENT, TEMP_FILE)) - hosts.all.zos_data_set(name=DS_NAME, type=DS_TYPE) - if DS_TYPE in ["pds", "pdse"]: - DS_FULL_NAME = DS_NAME + "(MEM)" - hosts.all.zos_data_set(name=DS_FULL_NAME, state="present", type="member") - cmdStr = "cp -CM {0} \"//'{1}'\"".format(quote(TEMP_FILE), DS_FULL_NAME) + hosts.all.shell(cmd=f"echo \"{content}\" > {temp_file}") + hosts.all.zos_data_set(name=ds_name, type=ds_type) + if ds_type in ["pds", "pdse"]: + ds_full_name = ds_name + "(MEM)" + hosts.all.zos_data_set(name=ds_full_name, state="present", type="member") + cmd_str = f"cp -CM {quote(temp_file)} \"//'{ds_full_name}'\"" else: - DS_FULL_NAME = DS_NAME - cmdStr = "cp {0} \"//'{1}'\" ".format(quote(TEMP_FILE), DS_FULL_NAME) - hosts.all.shell(cmd=cmdStr) - hosts.all.shell(cmd="rm -rf " + TEMP_FILE) - return DS_FULL_NAME + ds_full_name = ds_name + cmd_str = f"cp {quote(temp_file)} \"//'{ds_full_name}'\" " + hosts.all.shell(cmd=cmd_str) + hosts.all.shell(cmd="rm -rf " + temp_file) + return ds_full_name -def remove_ds_environment(ansible_zos_module, DS_NAME): +def remove_ds_environment(ansible_zos_module, ds_name): hosts = ansible_zos_module - hosts.all.zos_data_set(name=DS_NAME, state="absent") + hosts.all.zos_data_set(name=ds_name, state="absent") ######################### # USS test cases @@ -473,7 +474,11 @@ def remove_ds_environment(ansible_zos_module, DS_NAME): @pytest.mark.uss def test_uss_block_insertafter_regex_defaultmarker(ansible_zos_module): hosts = ansible_zos_module - params = dict(insertafter="ZOAU_ROOT=", block="ZOAU_ROOT=/mvsutil-develop_dsed\nZOAU_HOME=\\$ZOAU_ROOT\nZOAU_DIR=\\$ZOAU_ROOT", state="present") + params = { + "insertafter":"ZOAU_ROOT=", + "block":"ZOAU_ROOT=/mvsutil-develop_dsed\nZOAU_HOME=\\$ZOAU_ROOT\nZOAU_DIR=\\$ZOAU_ROOT", + "state":"present" + } full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] content = TEST_CONTENT try: @@ -481,7 +486,6 @@ def test_uss_block_insertafter_regex_defaultmarker(ansible_zos_module): params["path"] = full_path results = hosts.all.zos_blockinfile(**params) for result in results.contacted.values(): - print(result) assert result.get("changed") == 1 results = hosts.all.shell(cmd="cat {0}".format(params["path"])) for result in results.contacted.values(): @@ -493,7 +497,11 @@ def test_uss_block_insertafter_regex_defaultmarker(ansible_zos_module): @pytest.mark.uss def test_uss_block_insertbefore_regex_defaultmarker(ansible_zos_module): hosts = ansible_zos_module - params = dict(insertbefore="ZOAU_ROOT=", block="unset ZOAU_ROOT\nunset ZOAU_HOME\nunset ZOAU_DIR", state="present") + params = { + "insertbefore":"ZOAU_ROOT=", + "block":"unset ZOAU_ROOT\nunset ZOAU_HOME\nunset ZOAU_DIR", + "state":"present" + } full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] content = TEST_CONTENT try: @@ -512,7 +520,11 @@ def test_uss_block_insertbefore_regex_defaultmarker(ansible_zos_module): @pytest.mark.uss def test_uss_block_insertafter_eof_defaultmarker(ansible_zos_module): hosts = ansible_zos_module - params = dict(insertafter="EOF", block="export ZOAU_ROOT\nexport ZOAU_HOME\nexport ZOAU_DIR", state="present") + params = { + "insertafter":"EOF", + "block":"export ZOAU_ROOT\nexport ZOAU_HOME\nexport ZOAU_DIR", + "state":"present" + } full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] content = TEST_CONTENT try: @@ -531,7 +543,11 @@ def test_uss_block_insertafter_eof_defaultmarker(ansible_zos_module): @pytest.mark.uss def test_uss_block_insertbefore_bof_defaultmarker(ansible_zos_module): hosts = ansible_zos_module - params = dict(insertbefore="BOF", block="# this is file is for setting env vars", state="present") + params = { + "insertbefore":"BOF", + "block":"# this is file is for setting env vars", + "state":"present" + } full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] content = TEST_CONTENT try: @@ -551,7 +567,11 @@ def test_uss_block_insertbefore_bof_defaultmarker(ansible_zos_module): def test_uss_block_insertafter_regex_custommarker(ansible_zos_module): hosts = ansible_zos_module # Set special parameters for the test as marker - params = dict(insertafter="ZOAU_ROOT=", block="ZOAU_ROOT=/mvsutil-develop_dsed\nZOAU_HOME=\\$ZOAU_ROOT\nZOAU_DIR=\\$ZOAU_ROOT", state="present") + params = { + "insertafter":"ZOAU_ROOT=", + "block":"ZOAU_ROOT=/mvsutil-develop_dsed\nZOAU_HOME=\\$ZOAU_ROOT\nZOAU_DIR=\\$ZOAU_ROOT", + "state":"present" + } params["marker"] = '# {mark} IBM MANAGED BLOCK' params["marker_begin"] = 'OPEN' params["marker_end"] = 'CLOSE' @@ -575,7 +595,11 @@ def test_uss_block_insertafter_regex_custommarker(ansible_zos_module): def test_uss_block_insertbefore_regex_custommarker(ansible_zos_module): hosts = ansible_zos_module # Set special parameters for the test as marker - params = dict(insertbefore="ZOAU_ROOT=", block="unset ZOAU_ROOT\nunset ZOAU_HOME\nunset ZOAU_DIR", state="present") + params = { + "insertbefore":"ZOAU_ROOT=", + "block":"unset ZOAU_ROOT\nunset ZOAU_HOME\nunset ZOAU_DIR", + "state":"present" + } params["marker"] = '# {mark} IBM MANAGED BLOCK' params["marker_begin"] = 'OPEN' params["marker_end"] = 'CLOSE' @@ -598,7 +622,11 @@ def test_uss_block_insertbefore_regex_custommarker(ansible_zos_module): def test_uss_block_insertafter_eof_custommarker(ansible_zos_module): hosts = ansible_zos_module # Set special parameters for the test as marker - params = dict(insertafter="EOF", block="export ZOAU_ROOT\nexport ZOAU_HOME\nexport ZOAU_DIR", state="present") + params = { + "insertafter":"EOF", + "block":"export ZOAU_ROOT\nexport ZOAU_HOME\nexport ZOAU_DIR", + "state":"present" + } params["marker"] = '# {mark} IBM MANAGED BLOCK' params["marker_begin"] = 'OPEN' params["marker_end"] = 'CLOSE' @@ -621,7 +649,11 @@ def test_uss_block_insertafter_eof_custommarker(ansible_zos_module): def test_uss_block_insertbefore_bof_custommarker(ansible_zos_module): hosts = ansible_zos_module # Set special parameters for the test as marker - params = dict(insertbefore="BOF", block="# this is file is for setting env vars", state="present") + params = { + "insertbefore":"BOF", + "block":"# this is file is for setting env vars", + "state":"present" + } params["marker"] = '# {mark} IBM MANAGED BLOCK' params["marker_begin"] = 'OPEN' params["marker_end"] = 'CLOSE' @@ -643,7 +675,10 @@ def test_uss_block_insertbefore_bof_custommarker(ansible_zos_module): @pytest.mark.uss def test_uss_block_absent_defaultmarker(ansible_zos_module): hosts = ansible_zos_module - params = dict(block="", state="absent") + params = { + "block":"", + "state":"absent" + } full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] content = TEST_CONTENT_DEFAULTMARKER try: @@ -662,7 +697,10 @@ def test_uss_block_absent_defaultmarker(ansible_zos_module): @pytest.mark.uss def test_uss_block_absent_custommarker(ansible_zos_module): hosts = ansible_zos_module - params = dict(block="", state="absent") + params = { + "block":"", + "state":"absent" + } params["marker"] = '# {mark} IBM MANAGED BLOCK' params["marker_begin"] = 'OPEN' params["marker_end"] = 'CLOSE' @@ -684,7 +722,11 @@ def test_uss_block_absent_custommarker(ansible_zos_module): @pytest.mark.uss def test_uss_block_replace_insertafter_regex_defaultmarker(ansible_zos_module): hosts = ansible_zos_module - params = dict(insertafter="PYTHON_HOME=", block="ZOAU_ROOT=/mvsutil-develop_dsed\nZOAU_HOME=\\$ZOAU_ROOT\nZOAU_DIR=\\$ZOAU_ROOT", state="present") + params = { + "insertafter":"PYTHON_HOME=", + "block":"ZOAU_ROOT=/mvsutil-develop_dsed\nZOAU_HOME=\\$ZOAU_ROOT\nZOAU_DIR=\\$ZOAU_ROOT", + "state":"present" + } full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] content = TEST_CONTENT_DEFAULTMARKER try: @@ -703,7 +745,11 @@ def test_uss_block_replace_insertafter_regex_defaultmarker(ansible_zos_module): @pytest.mark.uss def test_uss_block_replace_insertbefore_regex_defaultmarker(ansible_zos_module): hosts = ansible_zos_module - params = dict(insertbefore="PYTHON_HOME=", block="unset ZOAU_ROOT\nunset ZOAU_HOME\nunset ZOAU_DIR", state="present") + params = { + "insertbefore":"PYTHON_HOME=", + "block":"unset ZOAU_ROOT\nunset ZOAU_HOME\nunset ZOAU_DIR", + "state":"present" + } full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] content = TEST_CONTENT_DEFAULTMARKER try: @@ -722,7 +768,11 @@ def test_uss_block_replace_insertbefore_regex_defaultmarker(ansible_zos_module): @pytest.mark.uss def test_uss_block_replace_insertafter_eof_defaultmarker(ansible_zos_module): hosts = ansible_zos_module - params = dict(insertafter="EOF", block="export ZOAU_ROOT\nexport ZOAU_HOME\nexport ZOAU_DIR", state="present") + params = { + "insertafter":"EOF", + "block":"export ZOAU_ROOT\nexport ZOAU_HOME\nexport ZOAU_DIR", + "state":"present" + } full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] content = TEST_CONTENT_DEFAULTMARKER try: @@ -741,7 +791,11 @@ def test_uss_block_replace_insertafter_eof_defaultmarker(ansible_zos_module): @pytest.mark.uss def test_uss_block_replace_insertbefore_bof_defaultmarker(ansible_zos_module): hosts = ansible_zos_module - params = dict(insertbefore="BOF", block="# this is file is for setting env vars", state="present") + params = { + "insertbefore":"BOF", + "block":"# this is file is for setting env vars", + "state":"present" + } full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] content = TEST_CONTENT_DEFAULTMARKER try: @@ -760,7 +814,11 @@ def test_uss_block_replace_insertbefore_bof_defaultmarker(ansible_zos_module): @pytest.mark.uss def test_uss_block_replace_insertafter_regex_custommarker(ansible_zos_module): hosts = ansible_zos_module - params = dict(insertafter="PYTHON_HOME=", block="ZOAU_ROOT=/mvsutil-develop_dsed\nZOAU_HOME=\\$ZOAU_ROOT\nZOAU_DIR=\\$ZOAU_ROOT", state="present") + params = { + "insertafter":"PYTHON_HOME=", + "block":"ZOAU_ROOT=/mvsutil-develop_dsed\nZOAU_HOME=\\$ZOAU_ROOT\nZOAU_DIR=\\$ZOAU_ROOT", + "state":"present" + } params["marker"] = '# {mark} IBM MANAGED BLOCK' params["marker_begin"] = 'OPEN' params["marker_end"] = 'CLOSE' @@ -782,7 +840,11 @@ def test_uss_block_replace_insertafter_regex_custommarker(ansible_zos_module): @pytest.mark.uss def test_uss_block_replace_insertbefore_regex_custommarker(ansible_zos_module): hosts = ansible_zos_module - params = dict(insertbefore="PYTHON_HOME=", block="unset ZOAU_ROOT\nunset ZOAU_HOME\nunset ZOAU_DIR", state="present") + params = { + "insertbefore":"PYTHON_HOME=", + "block":"unset ZOAU_ROOT\nunset ZOAU_HOME\nunset ZOAU_DIR", + "state":"present" + } params["marker"] = '# {mark} IBM MANAGED BLOCK' params["marker_begin"] = 'OPEN' params["marker_end"] = 'CLOSE' @@ -804,7 +866,11 @@ def test_uss_block_replace_insertbefore_regex_custommarker(ansible_zos_module): @pytest.mark.uss def test_uss_block_replace_insertafter_eof_custommarker(ansible_zos_module): hosts = ansible_zos_module - params = dict(insertafter="EOF", block="export ZOAU_ROOT\nexport ZOAU_HOME\nexport ZOAU_DIR", state="present") + params = { + "insertafter":"EOF", + "block":"export ZOAU_ROOT\nexport ZOAU_HOME\nexport ZOAU_DIR", + "state":"present" + } params["marker"] = '# {mark} IBM MANAGED BLOCK' params["marker_begin"] = 'OPEN' params["marker_end"] = 'CLOSE' @@ -826,7 +892,11 @@ def test_uss_block_replace_insertafter_eof_custommarker(ansible_zos_module): @pytest.mark.uss def test_uss_block_replace_insertbefore_bof_custommarker(ansible_zos_module): hosts = ansible_zos_module - params = dict(insertbefore="BOF", block="# this is file is for setting env vars", state="present") + params = { + "insertbefore":"BOF", + "block":"# this is file is for setting env vars", + "state":"present" + } params["marker"] = '# {mark} IBM MANAGED BLOCK' params["marker_begin"] = 'OPEN' params["marker_end"] = 'CLOSE' @@ -848,7 +918,12 @@ def test_uss_block_replace_insertbefore_bof_custommarker(ansible_zos_module): @pytest.mark.uss def test_uss_block_insert_with_indentation_level_specified(ansible_zos_module): hosts = ansible_zos_module - params = dict(insertafter="EOF", block="export ZOAU_ROOT\nexport ZOAU_HOME\nexport ZOAU_DIR", state="present", indentation=16) + params = { + "insertafter":"EOF", + "block":"export ZOAU_ROOT\nexport ZOAU_HOME\nexport ZOAU_DIR", + "state":"present", + "indentation":16 + } full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] content = TEST_CONTENT try: @@ -888,7 +963,12 @@ def test_uss_block_insert_with_doublequotes(ansible_zos_module): @pytest.mark.uss def test_uss_block_insertafter_eof_with_backup(ansible_zos_module): hosts = ansible_zos_module - params = dict(insertafter="EOF", block="export ZOAU_ROOT\nexport ZOAU_HOME\nexport ZOAU_DIR", state="present", backup=True) + params = { + "insertafter":"EOF", + "block":"export ZOAU_ROOT\nexport ZOAU_HOME\nexport ZOAU_DIR", + "state":"present", + "backup":True + } full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] content = TEST_CONTENT try: @@ -910,7 +990,13 @@ def test_uss_block_insertafter_eof_with_backup(ansible_zos_module): @pytest.mark.uss def test_uss_block_insertafter_eof_with_backup_name(ansible_zos_module): hosts = ansible_zos_module - params = dict(insertafter="EOF", block="export ZOAU_ROOT\nexport ZOAU_HOME\nexport ZOAU_DIR", state="present", backup=True, backup_name=USS_BACKUP_FILE) + params = { + "insertafter":"EOF", + "block":"export ZOAU_ROOT\nexport ZOAU_HOME\nexport ZOAU_DIR", + "state":"present", + "backup":True, + "backup_name":USS_BACKUP_FILE + } full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] content = TEST_CONTENT try: @@ -920,8 +1006,8 @@ def test_uss_block_insertafter_eof_with_backup_name(ansible_zos_module): for result in results.contacted.values(): assert result.get("changed") == 1 assert result.get("backup_name") == USS_BACKUP_FILE - cmdStr = "cat {0}".format(USS_BACKUP_FILE) - results = ansible_zos_module.all.shell(cmd=cmdStr) + cmd_str = f"cat {USS_BACKUP_FILE}" + results = ansible_zos_module.all.shell(cmd=cmd_str) for result in results.contacted.values(): assert result.get("stdout") == TEST_CONTENT results = hosts.all.shell(cmd="cat {0}".format(params["path"])) @@ -942,7 +1028,11 @@ def test_uss_block_insertafter_eof_with_backup_name(ansible_zos_module): def test_ds_block_insertafter_regex(ansible_zos_module, dstype): hosts = ansible_zos_module ds_type = dstype - params = dict(insertafter="ZOAU_ROOT=", block="ZOAU_ROOT=/mvsutil-develop_dsed\nZOAU_HOME=\\$ZOAU_ROOT\nZOAU_DIR=\\$ZOAU_ROOT", state="present") + params = { + "insertafter":"ZOAU_ROOT=", + "block":"ZOAU_ROOT=/mvsutil-develop_dsed\nZOAU_HOME=\\$ZOAU_ROOT\nZOAU_DIR=\\$ZOAU_ROOT", + "state":"present" + } ds_name = get_tmp_ds_name() temp_file = "/tmp/" + ds_name content = TEST_CONTENT @@ -964,7 +1054,11 @@ def test_ds_block_insertafter_regex(ansible_zos_module, dstype): def test_ds_block_insertbefore_regex(ansible_zos_module, dstype): hosts = ansible_zos_module ds_type = dstype - params = dict(insertbefore="ZOAU_ROOT=", block="unset ZOAU_ROOT\nunset ZOAU_HOME\nunset ZOAU_DIR", state="present") + params = { + "insertbefore":"ZOAU_ROOT=", + "block":"unset ZOAU_ROOT\nunset ZOAU_HOME\nunset ZOAU_DIR", + "state":"present" + } ds_name = get_tmp_ds_name() temp_file = "/tmp/" + ds_name content = TEST_CONTENT @@ -986,7 +1080,11 @@ def test_ds_block_insertbefore_regex(ansible_zos_module, dstype): def test_ds_block_insertafter_eof(ansible_zos_module, dstype): hosts = ansible_zos_module ds_type = dstype - params = dict(insertafter="EOF", block="export ZOAU_ROOT\nexport ZOAU_HOME\nexport ZOAU_DIR", state="present") + params = { + "insertafter":"EOF", + "block":"export ZOAU_ROOT\nexport ZOAU_HOME\nexport ZOAU_DIR", + "state":"present" + } ds_name = get_tmp_ds_name() temp_file = "/tmp/" + ds_name content = TEST_CONTENT @@ -1008,7 +1106,11 @@ def test_ds_block_insertafter_eof(ansible_zos_module, dstype): def test_ds_block_insertbefore_bof(ansible_zos_module, dstype): hosts = ansible_zos_module ds_type = dstype - params = dict(insertbefore="BOF", block="# this is file is for setting env vars", state="present") + params = { + "insertbefore":"BOF", + "block":"# this is file is for setting env vars", + "state":"present" + } ds_name = get_tmp_ds_name() temp_file = "/tmp/" + ds_name content = TEST_CONTENT @@ -1030,7 +1132,11 @@ def test_ds_block_insertbefore_bof(ansible_zos_module, dstype): def test_ds_block_replace_insertafter_regex(ansible_zos_module, dstype): hosts = ansible_zos_module ds_type = dstype - params = dict(insertafter="PYTHON_HOME=", block="ZOAU_ROOT=/mvsutil-develop_dsed\nZOAU_HOME=\\$ZOAU_ROOT\nZOAU_DIR=\\$ZOAU_ROOT", state="present") + params = { + "insertafter":"PYTHON_HOME=", + "block":"ZOAU_ROOT=/mvsutil-develop_dsed\nZOAU_HOME=\\$ZOAU_ROOT\nZOAU_DIR=\\$ZOAU_ROOT", + "state":"present" + } ds_name = get_tmp_ds_name() temp_file = "/tmp/" + ds_name content = TEST_CONTENT_DEFAULTMARKER @@ -1052,7 +1158,11 @@ def test_ds_block_replace_insertafter_regex(ansible_zos_module, dstype): def test_ds_block_replace_insertbefore_regex(ansible_zos_module, dstype): hosts = ansible_zos_module ds_type = dstype - params = dict(insertbefore="PYTHON_HOME=", block="unset ZOAU_ROOT\nunset ZOAU_HOME\nunset ZOAU_DIR", state="present") + params = { + "insertbefore":"PYTHON_HOME=", + "block":"unset ZOAU_ROOT\nunset ZOAU_HOME\nunset ZOAU_DIR", + "state":"present" + } ds_name = get_tmp_ds_name() temp_file = "/tmp/" + ds_name content = TEST_CONTENT_DEFAULTMARKER @@ -1074,7 +1184,11 @@ def test_ds_block_replace_insertbefore_regex(ansible_zos_module, dstype): def test_ds_block_replace_insertafter_eof(ansible_zos_module, dstype): hosts = ansible_zos_module ds_type = dstype - params = dict(insertafter="EOF", block="export ZOAU_ROOT\nexport ZOAU_HOME\nexport ZOAU_DIR", state="present") + params = { + "insertafter":"EOF", + "block":"export ZOAU_ROOT\nexport ZOAU_HOME\nexport ZOAU_DIR", + "state":"present" + } ds_name = get_tmp_ds_name() temp_file = "/tmp/" + ds_name content = TEST_CONTENT_DEFAULTMARKER @@ -1096,7 +1210,11 @@ def test_ds_block_replace_insertafter_eof(ansible_zos_module, dstype): def test_ds_block_replace_insertbefore_bof(ansible_zos_module, dstype): hosts = ansible_zos_module ds_type = dstype - params = dict(insertbefore="BOF", block="# this is file is for setting env vars", state="present") + params = { + "insertbefore":"BOF", + "block":"# this is file is for setting env vars", + "state":"present" + } ds_name = get_tmp_ds_name() temp_file = "/tmp/" + ds_name content = TEST_CONTENT_DEFAULTMARKER @@ -1118,7 +1236,10 @@ def test_ds_block_replace_insertbefore_bof(ansible_zos_module, dstype): def test_ds_block_absent(ansible_zos_module, dstype): hosts = ansible_zos_module ds_type = dstype - params = dict(block="", state="absent") + params = { + "block":"", + "state":"absent" + } ds_name = get_tmp_ds_name() temp_file = "/tmp/" + ds_name content = TEST_CONTENT_DEFAULTMARKER @@ -1140,20 +1261,27 @@ def test_ds_tmp_hlq_option(ansible_zos_module): # This TMPHLQ only works with sequential datasets hosts = ansible_zos_module ds_type = "seq" - params=dict(insertafter="EOF", block="export ZOAU_ROOT\n", state="present", backup=True, tmp_hlq="TMPHLQ") - kwargs = dict(backup_name=r"TMPHLQ\..") + params={ + "insertafter":"EOF", + "block":"export ZOAU_ROOT\n", + "state":"present", + "backup":True, + "tmp_hlq":"TMPHLQ" + } + kwargs = { + "backup_name":r"TMPHLQ\.." + } content = TEST_CONTENT try: ds_full_name = get_tmp_ds_name() temp_file = "/tmp/" + ds_full_name hosts.all.zos_data_set(name=ds_full_name, type=ds_type, replace=True) - hosts.all.shell(cmd="echo \"{0}\" > {1}".format(content, temp_file)) - cmdStr = "cp {0} \"//'{1}'\" ".format(quote(temp_file), ds_full_name) - hosts.all.shell(cmd=cmdStr) + hosts.all.shell(cmd=f"echo \"{content}\" > {temp_file}") + cmd_str = f"cp {quote(temp_file)} \"//'{ds_full_name}'\" " + hosts.all.shell(cmd=cmd_str) hosts.all.shell(cmd="rm -rf " + "/tmp/zos_lineinfile/") - results = hosts.all.shell(cmd="cat \"//'{0}'\" | wc -l ".format(ds_full_name)) + results = hosts.all.shell(cmd=f"cat \"//'{ds_full_name}'\" | wc -l ") for result in results.contacted.values(): - print(result) assert int(result.get("stdout")) != 0 params["path"] = ds_full_name results = hosts.all.zos_blockinfile(**params) @@ -1169,7 +1297,12 @@ def test_ds_tmp_hlq_option(ansible_zos_module): def test_ds_block_insert_with_indentation_level_specified(ansible_zos_module, dstype): hosts = ansible_zos_module ds_type = dstype - params = dict(insertafter="EOF", block="export ZOAU_ROOT\nexport ZOAU_HOME\nexport ZOAU_DIR", state="present", indentation=16) + params = { + "insertafter":"EOF", + "block":"export ZOAU_ROOT\nexport ZOAU_HOME\nexport ZOAU_DIR", + "state":"present", + "indentation":16 + } ds_name = get_tmp_ds_name() temp_file = "/tmp/" + ds_name content = TEST_CONTENT @@ -1193,7 +1326,11 @@ def test_ds_block_insertafter_eof_with_backup(ansible_zos_module, dstype, backup hosts = ansible_zos_module ds_type = dstype backup_ds_name = "" - params = dict(block="export ZOAU_ROOT\nexport ZOAU_HOME\nexport ZOAU_DIR", state="present", backup=True) + params = { + "block":"export ZOAU_ROOT\nexport ZOAU_HOME\nexport ZOAU_DIR", + "state":"present", + "backup":True + } if backup_name: params["backup_name"] = backup_name ds_name = get_tmp_ds_name() @@ -1225,56 +1362,70 @@ def test_ds_block_insertafter_regex_force(ansible_zos_module, dstype): hosts = ansible_zos_module ds_type = dstype default_data_set_name = get_tmp_ds_name() - params = dict(path="",insertafter="ZOAU_ROOT=", block="ZOAU_ROOT=/mvsutil-develop_dsed\nZOAU_HOME=\\$ZOAU_ROOT\nZOAU_DIR=\\$ZOAU_ROOT", state="present", force=True) - MEMBER_1, MEMBER_2 = "MEM1", "MEM2" - TEMP_FILE = "/tmp/{0}".format(MEMBER_2) + params = { + "path":"", + "insertafter":"ZOAU_ROOT=", + "block":"ZOAU_ROOT=/mvsutil-develop_dsed\nZOAU_HOME=\\$ZOAU_ROOT\nZOAU_DIR=\\$ZOAU_ROOT", + "state":"present", + "force":True + } + member_1, member_2 = "MEM1", "MEM2" + temp_file = f"/tmp/{member_2}" content = TEST_CONTENT if ds_type == "seq": - params["path"] = default_data_set_name+".{0}".format(MEMBER_2) + params["path"] = f"{default_data_set_name}.{member_2}" else: - params["path"] = default_data_set_name+"({0})".format(MEMBER_2) + params["path"] = f"{default_data_set_name}({member_2})" try: # set up: - hosts.all.zos_data_set(name=default_data_set_name, state="present", type=ds_type, replace=True) - hosts.all.shell(cmd="echo \"{0}\" > {1}".format(content, TEMP_FILE)) + hosts.all.zos_data_set( + name=default_data_set_name, + state="present", + type=ds_type, + replace=True + ) + hosts.all.shell(cmd=f"echo \"{content}\" > {temp_file}") hosts.all.zos_data_set( batch=[ - { "name": default_data_set_name + "({0})".format(MEMBER_1), - "type": "member", "state": "present", "replace": True, }, + { + "name": f"{default_data_set_name}({member_1})", + "type": "member", + "state": "present", "replace": True, }, { "name": params["path"], "type": "member", "state": "present", "replace": True, }, ] ) # write memeber to verify cases if ds_type in ["pds", "pdse"]: - cmdStr = "cp -CM {0} \"//'{1}'\"".format(quote(TEMP_FILE), params["path"]) + cmd_str = "cp -CM {0} \"//'{1}'\"".format(quote(temp_file), params["path"]) else: - cmdStr = "cp {0} \"//'{1}'\" ".format(quote(TEMP_FILE), params["path"]) - hosts.all.shell(cmd=cmdStr) + cmd_str = "cp {0} \"//'{1}'\" ".format(quote(temp_file), params["path"]) + hosts.all.shell(cmd=cmd_str) results = hosts.all.shell(cmd="cat \"//'{0}'\" | wc -l ".format(params["path"])) for result in results.contacted.values(): assert int(result.get("stdout")) != 0 # copy/compile c program and copy jcl to hold data set lock for n seconds in background(&) hosts.all.file(path="/tmp/disp_shr/", state="directory") - hosts.all.shell(cmd="echo \"{0}\" > {1}".format(c_pgm, '/tmp/disp_shr/pdse-lock.c')) - hosts.all.shell(cmd="echo \"{0}\" > {1}".format( - call_c_jcl.format(default_data_set_name, MEMBER_1), - '/tmp/disp_shr/call_c_pgm.jcl')) + hosts.all.shell(cmd=f"echo \"{c_pgm}\" > /tmp/disp_shr/pdse-lock.c") + hosts.all.shell( + cmd=f"echo \"{call_c_jcl.format(default_data_set_name, member_1)}\""+ + " > /tmp/disp_shr/call_c_pgm.jcl" + ) hosts.all.shell(cmd="xlc -o pdse-lock pdse-lock.c", chdir="/tmp/disp_shr/") hosts.all.shell(cmd="submit call_c_pgm.jcl", chdir="/tmp/disp_shr/") time.sleep(5) # call lineinfile to see results results = hosts.all.zos_blockinfile(**params) for result in results.contacted.values(): - assert result.get("changed") == True - results = hosts.all.shell(cmd=r"""cat "//'{0}'" """.format(params["path"])) + assert result.get("changed") is True + results = hosts.all.shell(cmd=f"""cat "//'{params["path"]}'" """) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_INSERTAFTER_REGEX finally: - hosts.all.shell(cmd="rm -rf " + TEMP_FILE) + hosts.all.shell(cmd="rm -rf " + temp_file) ps_list_res = hosts.all.shell(cmd="ps -e | grep -i 'pdse-lock'") pid = list(ps_list_res.contacted.values())[0].get('stdout').strip().split(' ')[0] - hosts.all.shell(cmd="kill 9 {0}".format(pid.strip())) + hosts.all.shell(cmd=f"kill 9 {pid.strip()}") hosts.all.shell(cmd='rm -r /tmp/disp_shr') hosts.all.zos_data_set(name=default_data_set_name, state="absent") @@ -1366,15 +1517,24 @@ def test_special_characters_ds_insert_block(ansible_zos_module): def test_uss_encoding(ansible_zos_module, encoding): hosts = ansible_zos_module insert_data = "Insert this string" - params = dict(insertafter="SIMPLE", block=insert_data, state="present") + params = { + "insertafter":"SIMPLE", + "block":insert_data, + "state":"present" + } params["encoding"] = encoding full_path = TEST_FOLDER_BLOCKINFILE + encoding content = "SIMPLE LINE TO VERIFY" try: - hosts.all.shell(cmd="mkdir -p {0}".format(TEST_FOLDER_BLOCKINFILE)) + hosts.all.shell(cmd=f"mkdir -p {TEST_FOLDER_BLOCKINFILE}") hosts.all.file(path=full_path, state="touch") - hosts.all.shell(cmd="echo \"{0}\" > {1}".format(content, full_path)) - hosts.all.zos_encode(src=full_path, dest=full_path, from_encoding="IBM-1047", to_encoding=params["encoding"]) + hosts.all.shell(cmd=f"echo \"{content}\" > {full_path}") + hosts.all.zos_encode( + src=full_path, + dest=full_path, + from_encoding="IBM-1047", + to_encoding=params["encoding"] + ) params["path"] = full_path results = hosts.all.zos_blockinfile(**params) for result in results.contacted.values(): @@ -1393,29 +1553,43 @@ def test_ds_encoding(ansible_zos_module, encoding, dstype): hosts = ansible_zos_module ds_type = dstype insert_data = "Insert this string" - params = dict(insertafter="SIMPLE", block=insert_data, state="present") + params = { + "insertafter":"SIMPLE", + "block":insert_data, + "state":"present" + } params["encoding"] = encoding ds_name = get_tmp_ds_name() temp_file = "/tmp/" + ds_name content = "SIMPLE LINE TO VERIFY" try: - hosts.all.shell(cmd="echo \"{0}\" > {1}".format(content, temp_file)) - hosts.all.zos_encode(src=temp_file, dest=temp_file, from_encoding="IBM-1047", to_encoding=params["encoding"]) + hosts.all.shell(cmd=f"echo \"{content}\" > {temp_file}") + hosts.all.zos_encode( + src=temp_file, + dest=temp_file, + from_encoding="IBM-1047", + to_encoding=params["encoding"] + ) hosts.all.zos_data_set(name=ds_name, type=ds_type) if ds_type in ["pds", "pdse"]: ds_full_name = ds_name + "(MEM)" hosts.all.zos_data_set(name=ds_full_name, state="present", type="member") - cmdStr = "cp -CM {0} \"//'{1}'\"".format(quote(temp_file), ds_full_name) + cmd_str = f"cp -CM {quote(temp_file)} \"//'{ds_full_name}'\"" else: ds_full_name = ds_name - cmdStr = "cp {0} \"//'{1}'\" ".format(quote(temp_file), ds_full_name) - hosts.all.shell(cmd=cmdStr) + cmd_str = f"cp {quote(temp_file)} \"//'{ds_full_name}'\" " + hosts.all.shell(cmd=cmd_str) hosts.all.shell(cmd="rm -rf " + temp_file) params["path"] = ds_full_name results = hosts.all.zos_blockinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - hosts.all.zos_encode(src=ds_full_name, dest=ds_full_name, from_encoding=params["encoding"], to_encoding="IBM-1047") + hosts.all.zos_encode( + src=ds_full_name, + dest=ds_full_name, + from_encoding=params["encoding"], + to_encoding="IBM-1047" + ) results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_ENCODING @@ -1431,7 +1605,11 @@ def test_ds_encoding(ansible_zos_module, encoding, dstype): @pytest.mark.ds def test_not_exist_ds_block_insertafter_regex(ansible_zos_module): hosts = ansible_zos_module - params = dict(insertafter="ZOAU_ROOT=", block="ZOAU_ROOT=/mvsutil-develop_dsed\nZOAU_HOME=\\$ZOAU_ROOT\nZOAU_DIR=\\$ZOAU_ROOT", state="present") + params = { + "insertafter":"ZOAU_ROOT=", + "block":"ZOAU_ROOT=/mvsutil-develop_dsed\nZOAU_HOME=\\$ZOAU_ROOT\nZOAU_DIR=\\$ZOAU_ROOT", + "state":"present" + } params["path"] = "BIFTEST.NOTEXIST.SEQ" results = hosts.all.zos_blockinfile(**params) for result in results.contacted.values(): @@ -1442,7 +1620,11 @@ def test_not_exist_ds_block_insertafter_regex(ansible_zos_module): def test_ds_block_insertafter_nomatch_eof_insert(ansible_zos_module): hosts = ansible_zos_module ds_type = 'seq' - params=dict(insertafter="EOF", block="export ZOAU_ROOT\nexport ZOAU_HOME\nexport ZOAU_DIR", state="present") + params={ + "insertafter":"EOF", + "block":"export ZOAU_ROOT\nexport ZOAU_HOME\nexport ZOAU_DIR", + "state":"present" + } params["insertafter"] = 'SOME_NON_EXISTING_PATTERN' ds_name = get_tmp_ds_name() temp_file = "/tmp/" + ds_name @@ -1463,7 +1645,11 @@ def test_ds_block_insertafter_nomatch_eof_insert(ansible_zos_module): @pytest.mark.ds def test_ds_block_insertafter_regex_wrongmarker(ansible_zos_module): hosts = ansible_zos_module - params = dict(insertafter="ZOAU_ROOT=", block="ZOAU_ROOT=/mvsutil-develop_dsed\nZOAU_HOME=\\$ZOAU_ROOT\nZOAU_DIR=\\$ZOAU_ROOT", state="present") + params = { + "insertafter":"ZOAU_ROOT=", + "block":"ZOAU_ROOT=/mvsutil-develop_dsed\nZOAU_HOME=\\$ZOAU_ROOT\nZOAU_DIR=\\$ZOAU_ROOT", + "state":"present" + } params["path"] = "BIFTEST.NOTEXIST.SEQ" params["marker"] = '# MANAGED BLOCK' results = hosts.all.zos_blockinfile(**params) @@ -1476,9 +1662,12 @@ def test_ds_block_insertafter_regex_wrongmarker(ansible_zos_module): def test_ds_not_supported(ansible_zos_module, dstype): hosts = ansible_zos_module ds_type = dstype - params = dict(insertafter="ZOAU_ROOT=", block="ZOAU_ROOT=/mvsutil-develop_dsed\nZOAU_HOME=\\$ZOAU_ROOT\nZOAU_DIR=\\$ZOAU_ROOT", state="present") + params = { + "insertafter":"ZOAU_ROOT=", + "block":"ZOAU_ROOT=/mvsutil-develop_dsed\nZOAU_HOME=\\$ZOAU_ROOT\nZOAU_DIR=\\$ZOAU_ROOT", + "state":"present" + } ds_name = get_tmp_ds_name() - temp_file = "/tmp/" + ds_name try: ds_name = ds_name.upper() + "." + ds_type results = hosts.all.zos_data_set(name=ds_name, type=ds_type, replace='yes') @@ -1500,45 +1689,65 @@ def test_ds_block_insertafter_regex_fail(ansible_zos_module, dstype): hosts = ansible_zos_module ds_type = dstype default_data_set_name = get_tmp_ds_name() - params = dict(path="", insertafter="ZOAU_ROOT=", block="ZOAU_ROOT=/mvsutil-develop_dsed\nZOAU_HOME=\\$ZOAU_ROOT\nZOAU_DIR=\\$ZOAU_ROOT", state="present", force=False) - MEMBER_1, MEMBER_2 = "MEM1", "MEM2" - TEMP_FILE = "/tmp/{0}".format(MEMBER_2) - params["path"] = default_data_set_name+"({0})".format(MEMBER_2) + params = { + "path":"", + "insertafter":"ZOAU_ROOT=", + "block":"ZOAU_ROOT=/mvsutil-develop_dsed\nZOAU_HOME=\\$ZOAU_ROOT\nZOAU_DIR=\\$ZOAU_ROOT", + "state":"present", + "force":False + } + member_1, member_2 = "MEM1", "MEM2" + temp_file = f"/tmp/{member_2}" + params["path"] = f"{default_data_set_name}({member_2})" content = TEST_CONTENT try: # set up: - hosts.all.zos_data_set(name=default_data_set_name, state="present", type=ds_type, replace=True) - hosts.all.shell(cmd="echo \"{0}\" > {1}".format(content, TEMP_FILE)) + hosts.all.zos_data_set( + name=default_data_set_name, + state="present", + type=ds_type, + replace=True + ) + hosts.all.shell(cmd=f"echo \"{content}\" > {temp_file}") hosts.all.zos_data_set( batch=[ - { "name": default_data_set_name + "({0})".format(MEMBER_1), - "type": "member", "state": "present", "replace": True, }, - { "name": params["path"], "type": "member", - "state": "present", "replace": True, }, + { + "name": f"{default_data_set_name}({member_1})", + "type": "member", + "state": "present", + "replace": True, + }, + { + "name": params["path"], + "type": "member", + "state": "present", + "replace": True, + }, ] ) - cmdStr = "cp -CM {0} \"//'{1}'\"".format(quote(TEMP_FILE), params["path"]) - hosts.all.shell(cmd=cmdStr) + cmd_str = "cp -CM {0} \"//'{1}'\"".format(quote(temp_file) ,params["path"]) + hosts.all.shell(cmd=cmd_str) results = hosts.all.shell(cmd="cat \"//'{0}'\" | wc -l ".format(params["path"])) for result in results.contacted.values(): assert int(result.get("stdout")) != 0 # copy/compile c program and copy jcl to hold data set lock for n seconds in background(&) hosts.all.file(path="/tmp/disp_shr/", state="directory") - hosts.all.shell(cmd="echo \"{0}\" > {1}".format(c_pgm, '/tmp/disp_shr/pdse-lock.c')) - hosts.all.shell(cmd="echo \"{0}\" > {1}".format( - call_c_jcl.format(default_data_set_name, MEMBER_1), - '/tmp/disp_shr/call_c_pgm.jcl')) + hosts.all.shell(cmd=f"echo \"{c_pgm}\" > /tmp/disp_shr/pdse-lock.c") + hosts.all.shell( + cmd=f"echo \"{call_c_jcl.format(default_data_set_name, member_1)}\""+ + " > /tmp/disp_shr/call_c_pgm.jcl" + ) hosts.all.shell(cmd="xlc -o pdse-lock pdse-lock.c", chdir="/tmp/disp_shr/") hosts.all.shell(cmd="submit call_c_pgm.jcl", chdir="/tmp/disp_shr/") time.sleep(5) # call lineinfile to see results results = hosts.all.zos_blockinfile(**params) for result in results.contacted.values(): - assert result.get("changed") == False - assert result.get("failed") == True + assert result.get("changed") is False + assert result.get("failed") is True finally: ps_list_res = hosts.all.shell(cmd="ps -e | grep -i 'pdse-lock'") pid = list(ps_list_res.contacted.values())[0].get('stdout').strip().split(' ')[0] - hosts.all.shell(cmd="kill 9 {0}".format(pid.strip())) + hosts.all.shell(cmd=f"kill 9 {pid.strip()}") hosts.all.shell(cmd='rm -r /tmp/disp_shr') hosts.all.zos_data_set(name=default_data_set_name, state="absent") From c6654c449365018fcdf5fc56ced681131402ee3a Mon Sep 17 00:00:00 2001 From: IsaacVRey <isaac.vega.rey@gmail.com> Date: Thu, 13 Jun 2024 10:33:56 -0600 Subject: [PATCH 412/495] [Enabler][test_zos_dataset_func] Update test suites on functional/module-utils/test_zos_dataset_func.py to be pylint correct (#1456) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Update test suites on functional/module-utils/test_zos_data_set_func.py to be pylint correct * Fix typos --------- Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> Co-authored-by: André Marcel Gutiérrez Benítez <amgutierrezbenitez@hotmail.com> --- .../modules/test_zos_data_set_func.py | 198 ++++++++++-------- 1 file changed, 106 insertions(+), 92 deletions(-) diff --git a/tests/functional/modules/test_zos_data_set_func.py b/tests/functional/modules/test_zos_data_set_func.py index b8b4fb81a..16e3c0051 100644 --- a/tests/functional/modules/test_zos_data_set_func.py +++ b/tests/functional/modules/test_zos_data_set_func.py @@ -15,13 +15,14 @@ __metaclass__ = type -import pytest import time -import subprocess -from shlex import quote +from pipes import quote from pprint import pprint +import pytest +# pylint: disable-next=import-error from ibm_zos_core.tests.helpers.volumes import Volume_Handler +# pylint: disable-next=import-error from ibm_zos_core.tests.helpers.dataset import get_tmp_ds_name # TODO: determine if data set names need to be more generic for testcases @@ -135,7 +136,7 @@ def retrieve_data_set_names(results): for result in results.contacted.values(): if len(result.get("names", [])) > 0: for name in result.get("names"): - data_set_names.append(name) + data_set_names.append(name) return data_set_names def print_results(results): @@ -166,7 +167,7 @@ def test_data_set_catalog_and_uncatalog(ansible_zos_module, jcl, volumes_on_syst # verify data set creation was successful for result in results.contacted.values(): - if(result.get("jobs")[0].get("ret_code") is None): + if result.get("jobs")[0].get("ret_code") is None: submitted_job_id = result.get("jobs")[0].get("job_id") assert submitted_job_id is not None results = hosts.all.zos_job_output(job_id=submitted_job_id) @@ -196,7 +197,8 @@ def test_data_set_catalog_and_uncatalog(ansible_zos_module, jcl, volumes_on_syst finally: # clean up hosts.all.file(path=TEMP_PATH, state="absent") - # Added volumes to force a catalog in case they were somehow uncataloged to avoid an duplicate on volume error + # Added volumes to force a catalog in case they were + # somehow uncataloged to avoid an duplicate on volume error hosts.all.zos_data_set(name=dataset, state="absent", volumes=volume_1) @@ -336,9 +338,13 @@ def test_data_set_absent_when_uncataloged(ansible_zos_module, jcl, volumes_on_sy @pytest.mark.parametrize( "jcl", [PDS_CREATE_JCL, KSDS_CREATE_JCL, RRDS_CREATE_JCL, ESDS_CREATE_JCL, LDS_CREATE_JCL], - ids=['PDS_CREATE_JCL', 'KSDS_CREATE_JCL', 'RRDS_CREATE_JCL', 'ESDS_CREATE_JCL', 'LDS_CREATE_JCL'] + ids=['PDS_CREATE_JCL', 'KSDS_CREATE_JCL', 'RRDS_CREATE_JCL', 'ESDS_CREATE_JCL', 'LDS_CREATE_JCL'] ) -def test_data_set_absent_when_uncataloged_and_same_name_cataloged_is_present(ansible_zos_module, jcl, volumes_on_systems): +def test_data_set_absent_when_uncataloged_and_same_name_cataloged_is_present( + ansible_zos_module, + jcl, + volumes_on_systems +): volumes = Volume_Handler(volumes_on_systems) volume_1 = volumes.get_available_vol() volume_2 = volumes.get_available_vol() @@ -527,13 +533,18 @@ def test_batch_data_set_and_member_creation(ansible_zos_module): //""" def test_data_member_force_delete(ansible_zos_module): - MEMBER_1, MEMBER_2, MEMBER_3, MEMBER_4 = "MEM1", "MEM2", "MEM3", "MEM4" + member_1, member_2, member_3, member_4 = "MEM1", "MEM2", "MEM3", "MEM4" try: hosts = ansible_zos_module - DEFAULT_DATA_SET_NAME = get_tmp_ds_name(2, 2) + default_data_set_name = get_tmp_ds_name(2, 2) # set up: # create pdse - results = hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="present", type="pdse", replace=True) + results = hosts.all.zos_data_set( + name=default_data_set_name, + state="present", + type="pdse", + replace=True + ) for result in results.contacted.values(): assert result.get("changed") is True @@ -541,25 +552,25 @@ def test_data_member_force_delete(ansible_zos_module): results = hosts.all.zos_data_set( batch=[ { - "name": DEFAULT_DATA_SET_NAME + "({0})".format(MEMBER_1), + "name": f"{default_data_set_name}({member_1})", "type": "member", "state": "present", "replace": True, }, { - "name": DEFAULT_DATA_SET_NAME + "({0})".format(MEMBER_2), + "name": f"{default_data_set_name}({member_2})", "type": "member", "state": "present", "replace": True, }, { - "name": DEFAULT_DATA_SET_NAME + "({0})".format(MEMBER_3), + "name": f"{default_data_set_name}({member_3})", "type": "member", "state": "present", "replace": True, }, { - "name": DEFAULT_DATA_SET_NAME + "({0})".format(MEMBER_4), + "name": f"{default_data_set_name}({member_4})", "type": "member", "state": "present", "replace": True, @@ -573,7 +584,7 @@ def test_data_member_force_delete(ansible_zos_module): # copy/compile c program and copy jcl to hold data set lock for n seconds in background(&) hosts.all.zos_copy(content=c_pgm, dest='/tmp/disp_shr/pdse-lock.c', force=True) hosts.all.zos_copy( - content=call_c_jcl.format(DEFAULT_DATA_SET_NAME, MEMBER_1), + content=call_c_jcl.format(default_data_set_name, member_1), dest='/tmp/disp_shr/call_c_pgm.jcl', force=True ) @@ -585,9 +596,9 @@ def test_data_member_force_delete(ansible_zos_module): # pause to ensure c code acquires lock time.sleep(5) - # non-force attempt to delete MEMBER_2 - should fail since pdse in in use. + # non-force attempt to delete member_2 - should fail since pdse in in use. results = hosts.all.zos_data_set( - name="{0}({1})".format(DEFAULT_DATA_SET_NAME, MEMBER_2), + name=f"{default_data_set_name}({member_2})", state="absent", type="member" ) @@ -595,19 +606,22 @@ def test_data_member_force_delete(ansible_zos_module): assert result.get("failed") is True assert "DatasetMemberDeleteError" in result.get("msg") - # attempt to delete MEMBER_3 with force option. + # attempt to delete member_3 with force option. results = hosts.all.zos_data_set( - name="{0}({1})".format(DEFAULT_DATA_SET_NAME, MEMBER_3), state="absent", type="member", force=True + name=f"{default_data_set_name}({member_3})", + state="absent", + type="member", + force=True ) for result in results.contacted.values(): assert result.get("changed") is True assert result.get("module_stderr") is None - # attempt to delete MEMBER_4 with force option in batch mode. + # attempt to delete member_4 with force option in batch mode. results = hosts.all.zos_data_set( batch=[ { - "name": "{0}({1})".format(DEFAULT_DATA_SET_NAME, MEMBER_4), + "name": f"{default_data_set_name}({member_4})", "state": "absent", "type": "member", "force": True @@ -619,12 +633,12 @@ def test_data_member_force_delete(ansible_zos_module): assert result.get("module_stderr") is None # confirm member deleted with mls -- mem1 and mem2 should be present but no mem3 and no mem4 - results = hosts.all.command(cmd="mls {0}".format(DEFAULT_DATA_SET_NAME)) + results = hosts.all.command(cmd=f"mls {default_data_set_name}") for result in results.contacted.values(): - assert MEMBER_1 in result.get("stdout") - assert MEMBER_2 in result.get("stdout") - assert MEMBER_3 not in result.get("stdout") - assert MEMBER_4 not in result.get("stdout") + assert member_1 in result.get("stdout") + assert member_2 in result.get("stdout") + assert member_3 not in result.get("stdout") + assert member_4 not in result.get("stdout") finally: # extract pid @@ -632,20 +646,20 @@ def test_data_member_force_delete(ansible_zos_module): # kill process - release lock - this also seems to end the job pid = list(ps_list_res.contacted.values())[0].get('stdout').strip().split(' ')[0] - hosts.all.shell(cmd="kill 9 {0}".format(pid.strip())) + hosts.all.shell(cmd=f"kill 9 {pid.strip()}") # clean up c code/object/executable files, jcl hosts.all.shell(cmd='rm -r /tmp/disp_shr') # remove pdse - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") + hosts.all.zos_data_set(name=default_data_set_name, state="absent") def test_repeated_operations(ansible_zos_module): try: hosts = ansible_zos_module - DEFAULT_DATA_SET_NAME = get_tmp_ds_name(2, 2) - DEFAULT_DATA_SET_NAME_WITH_MEMBER = DEFAULT_DATA_SET_NAME + "(MEM)" + default_data_set_name = get_tmp_ds_name(2, 2) + default_data_set_name_with_member = default_data_set_name + "(MEM)" results = hosts.all.zos_data_set( - name=DEFAULT_DATA_SET_NAME, + name=default_data_set_name, type="pds", space_primary=5, space_type="cyl", @@ -658,7 +672,7 @@ def test_repeated_operations(ansible_zos_module): assert result.get("module_stderr") is None results = hosts.all.zos_data_set( - name=DEFAULT_DATA_SET_NAME, + name=default_data_set_name, type="pds", replace=True, ) @@ -668,7 +682,7 @@ def test_repeated_operations(ansible_zos_module): assert result.get("module_stderr") is None results = hosts.all.zos_data_set( - name=DEFAULT_DATA_SET_NAME_WITH_MEMBER, type="member", replace=True + name=default_data_set_name_with_member, type="member", replace=True ) for result in results.contacted.values(): @@ -676,7 +690,7 @@ def test_repeated_operations(ansible_zos_module): assert result.get("module_stderr") is None results = hosts.all.zos_data_set( - name=DEFAULT_DATA_SET_NAME_WITH_MEMBER, type="member" + name=default_data_set_name_with_member, type="member" ) for result in results.contacted.values(): @@ -684,7 +698,7 @@ def test_repeated_operations(ansible_zos_module): assert result.get("module_stderr") is None results = hosts.all.zos_data_set( - name=DEFAULT_DATA_SET_NAME_WITH_MEMBER, type="member", state="absent" + name=default_data_set_name_with_member, type="member", state="absent" ) for result in results.contacted.values(): @@ -692,14 +706,14 @@ def test_repeated_operations(ansible_zos_module): assert result.get("module_stderr") is None results = hosts.all.zos_data_set( - name=DEFAULT_DATA_SET_NAME_WITH_MEMBER, type="member", state="absent" + name=default_data_set_name_with_member, type="member", state="absent" ) for result in results.contacted.values(): assert result.get("changed") is False assert result.get("module_stderr") is None finally: - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") + hosts.all.zos_data_set(name=default_data_set_name, state="absent") def test_multi_volume_creation_uncatalog_and_catalog_nonvsam(ansible_zos_module, volumes_on_systems): @@ -708,10 +722,10 @@ def test_multi_volume_creation_uncatalog_and_catalog_nonvsam(ansible_zos_module, volume_2 = volumes.get_available_vol() try: hosts = ansible_zos_module - DEFAULT_DATA_SET_NAME = get_tmp_ds_name(2, 2) - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") + default_data_set_name = get_tmp_ds_name(2, 2) + hosts.all.zos_data_set(name=default_data_set_name, state="absent") results = hosts.all.zos_data_set( - name=DEFAULT_DATA_SET_NAME, + name=default_data_set_name, type="seq", space_primary=5, space_type="cyl", @@ -722,13 +736,13 @@ def test_multi_volume_creation_uncatalog_and_catalog_nonvsam(ansible_zos_module, assert result.get("changed") is True assert result.get("module_stderr") is None - results = hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="uncataloged") + results = hosts.all.zos_data_set(name=default_data_set_name, state="uncataloged") for result in results.contacted.values(): assert result.get("changed") is True assert result.get("module_stderr") is None results = hosts.all.zos_data_set( - name=DEFAULT_DATA_SET_NAME, + name=default_data_set_name, state="cataloged", volumes=[volume_1, volume_2], ) @@ -736,7 +750,7 @@ def test_multi_volume_creation_uncatalog_and_catalog_nonvsam(ansible_zos_module, assert result.get("changed") is True assert result.get("module_stderr") is None finally: - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") + hosts.all.zos_data_set(name=default_data_set_name, state="absent") def test_multi_volume_creation_uncatalog_and_catalog_vsam(ansible_zos_module, volumes_on_systems): @@ -745,10 +759,10 @@ def test_multi_volume_creation_uncatalog_and_catalog_vsam(ansible_zos_module, vo volume_2 = volumes.get_available_vol() try: hosts = ansible_zos_module - DEFAULT_DATA_SET_NAME = get_tmp_ds_name(2, 2) - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") + default_data_set_name = get_tmp_ds_name(2, 2) + hosts.all.zos_data_set(name=default_data_set_name, state="absent") results = hosts.all.zos_data_set( - name=DEFAULT_DATA_SET_NAME, + name=default_data_set_name, type="ksds", key_length=5, key_offset=0, @@ -760,13 +774,13 @@ def test_multi_volume_creation_uncatalog_and_catalog_vsam(ansible_zos_module, vo assert result.get("changed") is True assert result.get("module_stderr") is None - results = hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="uncataloged") + results = hosts.all.zos_data_set(name=default_data_set_name, state="uncataloged") for result in results.contacted.values(): assert result.get("changed") is True assert result.get("module_stderr") is None results = hosts.all.zos_data_set( - name=DEFAULT_DATA_SET_NAME, + name=default_data_set_name, state="cataloged", volumes=[volume_1, volume_2], ) @@ -774,7 +788,7 @@ def test_multi_volume_creation_uncatalog_and_catalog_vsam(ansible_zos_module, vo assert result.get("changed") is True assert result.get("module_stderr") is None finally: - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") + hosts.all.zos_data_set(name=default_data_set_name, state="absent") def test_data_set_temp_data_set_name(ansible_zos_module): @@ -802,30 +816,30 @@ def test_data_set_temp_data_set_name(ansible_zos_module): def test_data_set_temp_data_set_name_batch(ansible_zos_module): try: hosts = ansible_zos_module - DEFAULT_DATA_SET_NAME = get_tmp_ds_name() - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") + default_data_set_name = get_tmp_ds_name() + hosts.all.zos_data_set(name=default_data_set_name, state="absent") results = hosts.all.zos_data_set( batch=[ - dict( - state="present", - ), - dict( - state="present", - ), - dict( - state="present", - ), - dict( - name=DEFAULT_DATA_SET_NAME, - state="present" - ), + { + "state":"present", + }, + { + "state":"present", + }, + { + "state":"present", + }, + { + "name":default_data_set_name, + "state":"present" + }, ] ) - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") + hosts.all.zos_data_set(name=default_data_set_name, state="absent") data_set_names = retrieve_data_set_names(results) assert len(data_set_names) == 4 for name in data_set_names: - if name != DEFAULT_DATA_SET_NAME: + if name != default_data_set_name: results2 = hosts.all.zos_data_set(name=name, state="absent") for result in results2.contacted.values(): assert result.get("changed") is True @@ -834,7 +848,7 @@ def test_data_set_temp_data_set_name_batch(ansible_zos_module): assert result.get("changed") is True assert result.get("module_stderr") is None finally: - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") + hosts.all.zos_data_set(name=default_data_set_name, state="absent") if isinstance(data_set_names, list): for name in data_set_names: results2 = hosts.all.zos_data_set(name=name, state="absent") @@ -847,9 +861,9 @@ def test_data_set_temp_data_set_name_batch(ansible_zos_module): def test_filesystem_create_and_mount(ansible_zos_module, filesystem): fulltest = True hosts = ansible_zos_module - DEFAULT_DATA_SET_NAME = get_tmp_ds_name(1, 1) + default_data_set_name = get_tmp_ds_name(1, 1) try: - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") + hosts.all.zos_data_set(name=default_data_set_name, state="absent") if filesystem == "hfs": result0 = hosts.all.shell(cmd="zinfo -t sys") @@ -863,19 +877,19 @@ def test_filesystem_create_and_mount(ansible_zos_module, filesystem): if fulltest: hosts = ansible_zos_module - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") - results = hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, type=filesystem) + hosts.all.zos_data_set(name=default_data_set_name, state="absent") + results = hosts.all.zos_data_set(name=default_data_set_name, type=filesystem) temp_dir_name = make_tempfile(hosts, directory=True) results2 = hosts.all.command( cmd="usr/sbin/mount -t {0} -f {1} {2}".format( - filesystem, DEFAULT_DATA_SET_NAME, temp_dir_name + filesystem, default_data_set_name, temp_dir_name ) ) - results3 = hosts.all.shell(cmd="cd {0} ; df .".format(temp_dir_name)) + results3 = hosts.all.shell(cmd=f"cd {temp_dir_name} ; df .") # clean up - results4 = hosts.all.command(cmd="usr/sbin/unmount {0}".format(temp_dir_name)) - results5 = hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") + results4 = hosts.all.command(cmd=f"usr/sbin/unmount {temp_dir_name}") + results5 = hosts.all.zos_data_set(name=default_data_set_name, state="absent") results6 = hosts.all.file(path=temp_dir_name, state="absent") for result in results.contacted.values(): @@ -887,7 +901,7 @@ def test_filesystem_create_and_mount(ansible_zos_module, filesystem): for result in results3.contacted.values(): assert result.get("changed") is True assert result.get("stderr") == "" - assert DEFAULT_DATA_SET_NAME.upper() in result.get("stdout", "") + assert default_data_set_name.upper() in result.get("stdout", "") for result in results4.contacted.values(): assert result.get("changed") is True assert result.get("stderr") == "" @@ -898,15 +912,15 @@ def test_filesystem_create_and_mount(ansible_zos_module, filesystem): assert result.get("changed") is True assert result.get("module_stderr") is None finally: - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") + hosts.all.zos_data_set(name=default_data_set_name, state="absent") def test_data_set_creation_zero_values(ansible_zos_module): try: hosts = ansible_zos_module - DEFAULT_DATA_SET_NAME = get_tmp_ds_name(2, 2) + default_data_set_name = get_tmp_ds_name(2, 2) results = hosts.all.zos_data_set( - name=DEFAULT_DATA_SET_NAME, + name=default_data_set_name, state="present", type="ksds", replace=True, @@ -919,14 +933,14 @@ def test_data_set_creation_zero_values(ansible_zos_module): assert result.get("changed") is True assert result.get("module_stderr") is None finally: - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") + hosts.all.zos_data_set(name=default_data_set_name, state="absent") def test_data_set_creation_with_tmp_hlq(ansible_zos_module): try: tmphlq = "ANSIBLE" hosts = ansible_zos_module - DEFAULT_DATA_SET_NAME = get_tmp_ds_name(2, 2) + default_data_set_name = get_tmp_ds_name(2, 2) results = hosts.all.zos_data_set(state="present", tmp_hlq=tmphlq) dsname = None for result in results.contacted.values(): @@ -936,7 +950,7 @@ def test_data_set_creation_with_tmp_hlq(ansible_zos_module): assert dsname[:7] == tmphlq finally: if dsname: - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") + hosts.all.zos_data_set(name=default_data_set_name, state="absent") @pytest.mark.parametrize( "formats", @@ -947,10 +961,10 @@ def test_data_set_f_formats(ansible_zos_module, formats, volumes_on_systems): volume_1 = volumes.get_available_vol() try: hosts = ansible_zos_module - DEFAULT_DATA_SET_NAME = get_tmp_ds_name(2, 2) - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") + default_data_set_name = get_tmp_ds_name(2, 2) + hosts.all.zos_data_set(name=default_data_set_name, state="absent") results = hosts.all.zos_data_set( - name=DEFAULT_DATA_SET_NAME, + name=default_data_set_name, state="present", format=formats, space_primary="5", @@ -961,7 +975,7 @@ def test_data_set_f_formats(ansible_zos_module, formats, volumes_on_systems): assert result.get("changed") is True assert result.get("module_stderr") is None finally: - hosts.all.zos_data_set(name=DEFAULT_DATA_SET_NAME, state="absent") + hosts.all.zos_data_set(name=default_data_set_name, state="absent") """ GDG base tests: @@ -1012,7 +1026,7 @@ def test_gdg_create_and_delete_force(ansible_zos_module): for result in results.contacted.values(): assert result.get("changed") is True assert result.get("module_stderr") is None - results = hosts.all.zos_data_set(name=f"{data_set_name}(+1)", state="present", type="seq") + results = hosts.all.zos_data_set(name="{0}(+1)".format(data_set_name), state="present", type="seq") for result in results.contacted.values(): assert result.get("changed") is True assert result.get("module_stderr") is None @@ -1033,7 +1047,7 @@ def test_gdg_create_and_delete_force(ansible_zos_module): try: hosts = ansible_zos_module data_set_name = get_tmp_ds_name(2,2, symbols=True) - data_set_list = [f"{data_set_name}A", f"{data_set_name}B", f"{data_set_name}C"] + data_set_list = ["{0}A".format(data_set_name), "{0}B".format(data_set_name), "{0}C".format(data_set_name)] results = hosts.all.zos_data_set( batch=[ {"name":data_set_list[0], "state":"present", "type":"gdg", "limit":3}, @@ -1044,7 +1058,7 @@ def test_gdg_create_and_delete_force(ansible_zos_module): for result in results.contacted.values(): assert result.get("changed") is True assert result.get("module_stderr") is None - results = hosts.all.shell(cmd=f"dls -tGDG ANSIBLE.*") + results = hosts.all.shell(cmd=f"dls -tGDG 'ANSIBLE.*'") for result in results.contacted.values(): for ds_name in data_set_list: assert ds_name in result.get("stdout") @@ -1060,7 +1074,7 @@ def test_create_special_chars(ansible_zos_module): for result in results.contacted.values(): assert result.get("changed") is True assert result.get("module_stderr") is None - results = hosts.all.shell(cmd=f"dls ANSIBLE.*") + results = hosts.all.shell(cmd=f"dls 'ANSIBLE.*'") for result in results.contacted.values(): assert data_set_name in result.get("stdout") results = hosts.all.zos_data_set(name=data_set_name, state="absent",) @@ -1080,7 +1094,7 @@ def test_create_member_special_chars(ansible_zos_module): for result in results.contacted.values(): assert result.get("changed") is True assert result.get("module_stderr") is None - results = hosts.all.shell(cmd=f"dls ANSIBLE.*") + results = hosts.all.shell(cmd=f"dls 'ANSIBLE.*'") for result in results.contacted.values(): assert data_set_name in result.get("stdout") results = hosts.all.zos_data_set(name=data_set_name, state="absent",) From 5c0a943a7485aae05c361fbd8a7b3a446bda486c Mon Sep 17 00:00:00 2001 From: IsaacVRey <isaac.vega.rey@gmail.com> Date: Thu, 13 Jun 2024 10:34:41 -0600 Subject: [PATCH 413/495] [Enabler][test_zos_script_func] Update test suites on functional/modules/test_zos_lineinfile_func.py to be pylint correct (#1478) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Update test suites on functional/module-utils/test_zos_data_set_func.py to be pylint correct * Correct functional/modules/test_zos_data_set_func.py * Correct functional/modules/test_zos_data_set_func.py * Update test suites on functional/modules/test_zos_job_submit_func.py to be pylint correct * Remove accidental changes on functional/modules/test_zos_job_submit_func.py * Update test suites on functional/modules/test_zos_lineinfile_func.py to be pylint correct * Update test suites on functional/modules/test_zos_lineinfile_func.py on test_ds_line_does_not_insert_repeated function to be pylint correct * Update test suites on functional/modules/test_zos_script_func.py to be pylint correct * Update test_zos_script_func.py --------- Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> --- .../modules/test_zos_lineinfile_func.py | 453 ++++++++++++------ .../modules/test_zos_script_func.py | 98 ++-- 2 files changed, 347 insertions(+), 204 deletions(-) diff --git a/tests/functional/modules/test_zos_lineinfile_func.py b/tests/functional/modules/test_zos_lineinfile_func.py index 700fefe1c..92b970040 100644 --- a/tests/functional/modules/test_zos_lineinfile_func.py +++ b/tests/functional/modules/test_zos_lineinfile_func.py @@ -12,11 +12,11 @@ # limitations under the License. from __future__ import absolute_import, division, print_function -from shellescape import quote import time import re -import pytest import inspect +import pytest +from shellescape import quote from ibm_zos_core.tests.helpers.dataset import get_tmp_ds_name @@ -31,7 +31,7 @@ { char dsname[ strlen(argv[1]) + 4]; sprintf(dsname, \\\"//'%s'\\\", argv[1]); - FILE* member; + file* member; member = fopen(dsname, \\\"rb,type=record\\\"); sleep(300); fclose(member); @@ -210,37 +210,37 @@ EXPECTED_ENCODING="""SIMPLE LINE TO VERIFY Insert this string""" -def set_uss_environment(ansible_zos_module, CONTENT, FILE): +def set_uss_environment(ansible_zos_module, content, file): hosts = ansible_zos_module - hosts.all.shell(cmd="mkdir -p {0}".format(TEST_FOLDER_LINEINFILE)) - hosts.all.file(path=FILE, state="touch") - hosts.all.shell(cmd="echo \"{0}\" > {1}".format(CONTENT, FILE)) + hosts.all.shell(cmd=f"mkdir -p {TEST_FOLDER_LINEINFILE}") + hosts.all.file(path=file, state="touch") + hosts.all.shell(cmd=f"echo \"{content}\" > {file}") def remove_uss_environment(ansible_zos_module): hosts = ansible_zos_module - hosts.all.shell(cmd="rm -rf " + TEST_FOLDER_LINEINFILE) + hosts.all.shell(cmd=f"rm -rf {TEST_FOLDER_LINEINFILE}") -def set_ds_environment(ansible_zos_module, TEMP_FILE, DS_NAME, DS_TYPE, CONTENT): +def set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content): hosts = ansible_zos_module - hosts.all.shell(cmd="echo \"{0}\" > {1}".format(CONTENT, TEMP_FILE)) - hosts.all.zos_data_set(name=DS_NAME, type=DS_TYPE) - if DS_TYPE in ["pds", "pdse"]: - DS_FULL_NAME = DS_NAME + "(MEM)" - hosts.all.zos_data_set(name=DS_FULL_NAME, state="present", type="member") - cmdStr = "cp -CM {0} \"//'{1}'\"".format(quote(TEMP_FILE), DS_FULL_NAME) + hosts.all.shell(cmd=f"echo \"{content}\" > {temp_file}") + hosts.all.zos_data_set(name=ds_name, type=ds_type) + if ds_type in ["pds", "pdse"]: + ds_full_name = ds_name + "(MEM)" + hosts.all.zos_data_set(name=ds_full_name, state="present", type="member") + cmd_str = f"cp -CM {quote(temp_file)} \"//'{ds_full_name}'\"" else: - DS_FULL_NAME = DS_NAME - cmdStr = "cp {0} \"//'{1}'\" ".format(quote(TEMP_FILE), DS_FULL_NAME) - hosts.all.shell(cmd=cmdStr) - hosts.all.shell(cmd="rm -rf " + TEMP_FILE) - return DS_FULL_NAME + ds_full_name = ds_name + cmd_str = f"cp {quote(temp_file)} \"//'{ds_full_name}'\" " + hosts.all.shell(cmd=cmd_str) + hosts.all.shell(cmd="rm -rf " + temp_file) + return ds_full_name -def remove_ds_environment(ansible_zos_module, DS_NAME): +def remove_ds_environment(ansible_zos_module, ds_name): hosts = ansible_zos_module - hosts.all.zos_data_set(name=DS_NAME, state="absent") + hosts.all.zos_data_set(name=ds_name, state="absent") # supported data set types -DS_TYPE = ['seq', 'pds', 'pdse'] +ds_type = ['seq', 'pds', 'pdse'] # not supported data set types NS_DS_TYPE = ['esds', 'rrds', 'lds'] # The encoding will be only use on a few test @@ -254,7 +254,11 @@ def remove_ds_environment(ansible_zos_module, DS_NAME): @pytest.mark.uss def test_uss_line_replace(ansible_zos_module): hosts = ansible_zos_module - params = dict(regexp="ZOAU_ROOT=", line="ZOAU_ROOT=/mvsutil-develop_dsed", state="present") + params = { + "regexp":"ZOAU_ROOT=", + "line":"ZOAU_ROOT=/mvsutil-develop_dsed", + "state":"present" + } full_path = TEST_FOLDER_LINEINFILE + inspect.stack()[0][3] content = TEST_CONTENT try: @@ -263,7 +267,7 @@ def test_uss_line_replace(ansible_zos_module): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - results = hosts.all.shell(cmd="cat {0}".format(params["path"])) + results = hosts.all.shell(cmd=f"cat {params["path"]}") for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_REPLACE finally: @@ -273,7 +277,11 @@ def test_uss_line_replace(ansible_zos_module): @pytest.mark.uss def test_uss_line_insertafter_regex(ansible_zos_module): hosts = ansible_zos_module - params = dict(insertafter="ZOAU_ROOT=", line="ZOAU_ROOT=/mvsutil-develop_dsed", state="present") + params = { + "insertafter":"ZOAU_ROOT=", + "line":"ZOAU_ROOT=/mvsutil-develop_dsed", + "state":"present" + } full_path = TEST_FOLDER_LINEINFILE + inspect.stack()[0][3] content = TEST_CONTENT try: @@ -282,7 +290,7 @@ def test_uss_line_insertafter_regex(ansible_zos_module): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - results = hosts.all.shell(cmd="cat {0}".format(params["path"])) + results = hosts.all.shell(cmd=f"cat {params["path"]}") for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_INSERTAFTER_REGEX finally: @@ -292,7 +300,11 @@ def test_uss_line_insertafter_regex(ansible_zos_module): @pytest.mark.uss def test_uss_line_insertbefore_regex(ansible_zos_module): hosts = ansible_zos_module - params = dict(insertbefore="ZOAU_ROOT=", line="unset ZOAU_ROOT", state="present") + params = { + "insertbefore":"ZOAU_ROOT=", + "line":"unset ZOAU_ROOT", + "state":"present" + } full_path = TEST_FOLDER_LINEINFILE + inspect.stack()[0][3] content = TEST_CONTENT try: @@ -301,7 +313,7 @@ def test_uss_line_insertbefore_regex(ansible_zos_module): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - results = hosts.all.shell(cmd="cat {0}".format(params["path"])) + results = hosts.all.shell(cmd=f"cat {params["path"]}") for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_INSERTBEFORE_REGEX finally: @@ -311,7 +323,11 @@ def test_uss_line_insertbefore_regex(ansible_zos_module): @pytest.mark.uss def test_uss_line_insertafter_eof(ansible_zos_module): hosts = ansible_zos_module - params = dict(insertafter="EOF", line="export 'ZOAU_ROOT'", state="present") + params = { + "insertafter":"EOF", + "line":"export 'ZOAU_ROOT'", + "state":"present" + } full_path = TEST_FOLDER_LINEINFILE + inspect.stack()[0][3] content = TEST_CONTENT try: @@ -320,7 +336,7 @@ def test_uss_line_insertafter_eof(ansible_zos_module): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - results = hosts.all.shell(cmd="cat {0}".format(params["path"])) + results = hosts.all.shell(cmd=f"cat {params["path"]}") for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_INSERTAFTER_EOF finally: @@ -330,7 +346,11 @@ def test_uss_line_insertafter_eof(ansible_zos_module): @pytest.mark.uss def test_uss_line_insertbefore_bof(ansible_zos_module): hosts = ansible_zos_module - params = dict(insertbefore="BOF", line="# this is file is for setting env vars", state="present") + params = { + "insertbefore":"BOF", + "line":"# this is file is for setting env vars", + "state":"present" + } full_path = TEST_FOLDER_LINEINFILE + inspect.stack()[0][3] content = TEST_CONTENT try: @@ -339,7 +359,7 @@ def test_uss_line_insertbefore_bof(ansible_zos_module): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - results = hosts.all.shell(cmd="cat {0}".format(params["path"])) + results = hosts.all.shell(cmd=f"cat {params["path"]}") for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_INSERTBEFORE_BOF finally: @@ -349,7 +369,12 @@ def test_uss_line_insertbefore_bof(ansible_zos_module): @pytest.mark.uss def test_uss_line_replace_match_insertafter_ignore(ansible_zos_module): hosts = ansible_zos_module - params = dict(regexp="ZOAU_ROOT=", insertafter="PATH=", line="ZOAU_ROOT=/mvsutil-develop_dsed", state="present") + params = { + "regexp":"ZOAU_ROOT=", + "insertafter":"PATH=", + "line":"ZOAU_ROOT=/mvsutil-develop_dsed", + "state":"present" + } full_path = TEST_FOLDER_LINEINFILE + inspect.stack()[0][3] content = TEST_CONTENT try: @@ -358,7 +383,7 @@ def test_uss_line_replace_match_insertafter_ignore(ansible_zos_module): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - results = hosts.all.shell(cmd="cat {0}".format(params["path"])) + results = hosts.all.shell(cmd=f"cat {params["path"]}") for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_REPLACE_INSERTAFTER_IGNORE finally: @@ -368,7 +393,12 @@ def test_uss_line_replace_match_insertafter_ignore(ansible_zos_module): @pytest.mark.uss def test_uss_line_replace_match_insertbefore_ignore(ansible_zos_module): hosts = ansible_zos_module - params = dict(regexp="ZOAU_ROOT=", insertbefore="PATH=", line="unset ZOAU_ROOT", state="present") + params = { + "regexp":"ZOAU_ROOT=", + "insertbefore":"PATH=", + "line":"unset ZOAU_ROOT", + "state":"present" + } full_path = TEST_FOLDER_LINEINFILE + inspect.stack()[0][3] content = TEST_CONTENT try: @@ -377,7 +407,7 @@ def test_uss_line_replace_match_insertbefore_ignore(ansible_zos_module): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - results = hosts.all.shell(cmd="cat {0}".format(params["path"])) + results = hosts.all.shell(cmd=f"cat {params["path"]}") for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_REPLACE_INSERTBEFORE_IGNORE finally: @@ -387,7 +417,12 @@ def test_uss_line_replace_match_insertbefore_ignore(ansible_zos_module): @pytest.mark.uss def test_uss_line_replace_nomatch_insertafter_match(ansible_zos_module): hosts = ansible_zos_module - params = dict(regexp="abcxyz", insertafter="ZOAU_ROOT=", line="ZOAU_ROOT=/mvsutil-develop_dsed", state="present") + params = { + "regexp":"abcxyz", + "insertafter":"ZOAU_ROOT=", + "line":"ZOAU_ROOT=/mvsutil-develop_dsed", + "state":"present" + } full_path = TEST_FOLDER_LINEINFILE + inspect.stack()[0][3] content = TEST_CONTENT try: @@ -396,7 +431,7 @@ def test_uss_line_replace_nomatch_insertafter_match(ansible_zos_module): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - results = hosts.all.shell(cmd="cat {0}".format(params["path"])) + results = hosts.all.shell(cmd=f"cat {params["path"]}") for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_REPLACE_NOMATCH_INSERTAFTER finally: @@ -406,7 +441,12 @@ def test_uss_line_replace_nomatch_insertafter_match(ansible_zos_module): @pytest.mark.uss def test_uss_line_replace_nomatch_insertbefore_match(ansible_zos_module): hosts = ansible_zos_module - params = dict(regexp="abcxyz", insertbefore="ZOAU_ROOT=", line="unset ZOAU_ROOT", state="present") + params = { + "regexp":"abcxyz", + "insertbefore":"ZOAU_ROOT=", + "line":"unset ZOAU_ROOT", + "state":"present" + } full_path = TEST_FOLDER_LINEINFILE + inspect.stack()[0][3] content = TEST_CONTENT try: @@ -415,7 +455,7 @@ def test_uss_line_replace_nomatch_insertbefore_match(ansible_zos_module): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - results = hosts.all.shell(cmd="cat {0}".format(params["path"])) + results = hosts.all.shell(cmd=f"cat {params["path"]}") for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_REPLACE_NOMATCH_INSERTBEFORE finally: @@ -425,7 +465,12 @@ def test_uss_line_replace_nomatch_insertbefore_match(ansible_zos_module): @pytest.mark.uss def test_uss_line_replace_nomatch_insertafter_nomatch(ansible_zos_module): hosts = ansible_zos_module - params = dict(regexp="abcxyz", insertafter="xyzijk", line="ZOAU_ROOT=/mvsutil-develop_dsed", state="present") + params = { + "regexp":"abcxyz", + "insertafter":"xyzijk", + "line":"ZOAU_ROOT=/mvsutil-develop_dsed", + "state":"present" + } full_path = TEST_FOLDER_LINEINFILE + inspect.stack()[0][3] content = TEST_CONTENT try: @@ -434,7 +479,7 @@ def test_uss_line_replace_nomatch_insertafter_nomatch(ansible_zos_module): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - results = hosts.all.shell(cmd="cat {0}".format(params["path"])) + results = hosts.all.shell(cmd=f"cat {params["path"]}") for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_REPLACE_NOMATCH_INSERTAFTER_NOMATCH finally: @@ -444,7 +489,12 @@ def test_uss_line_replace_nomatch_insertafter_nomatch(ansible_zos_module): @pytest.mark.uss def test_uss_line_replace_nomatch_insertbefore_nomatch(ansible_zos_module): hosts = ansible_zos_module - params = dict(regexp="abcxyz", insertbefore="xyzijk", line="unset ZOAU_ROOT", state="present") + params = { + "regexp":"abcxyz", + "insertbefore":"xyzijk", + "line":"unset ZOAU_ROOT", + "state":"present" + } full_path = TEST_FOLDER_LINEINFILE + inspect.stack()[0][3] content = TEST_CONTENT try: @@ -453,7 +503,7 @@ def test_uss_line_replace_nomatch_insertbefore_nomatch(ansible_zos_module): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - results = hosts.all.shell(cmd="cat {0}".format(params["path"])) + results = hosts.all.shell(cmd=f"cat {params["path"]}") for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_REPLACE_NOMATCH_INSERTBEFORE_NOMATCH finally: @@ -463,7 +513,11 @@ def test_uss_line_replace_nomatch_insertbefore_nomatch(ansible_zos_module): @pytest.mark.uss def test_uss_line_absent(ansible_zos_module): hosts = ansible_zos_module - params = dict(regexp="ZOAU_ROOT=", line="ZOAU_ROOT=/usr/lpp/zoautil/v100", state="absent") + params = { + "regexp":"ZOAU_ROOT=", + "line":"ZOAU_ROOT=/usr/lpp/zoautil/v100", + "state":"absent" + } full_path = TEST_FOLDER_LINEINFILE + inspect.stack()[0][3] content = TEST_CONTENT try: @@ -473,7 +527,7 @@ def test_uss_line_absent(ansible_zos_module): for result in results.contacted.values(): print(result) assert result.get("changed") == 1 - results = hosts.all.shell(cmd="cat {0}".format(params["path"])) + results = hosts.all.shell(cmd=f"cat {params["path"]}") for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_ABSENT finally: @@ -483,7 +537,12 @@ def test_uss_line_absent(ansible_zos_module): @pytest.mark.uss def test_uss_line_replace_quoted_escaped(ansible_zos_module): hosts = ansible_zos_module - params = dict(path="", regexp="ZOAU_ROOT=", line='ZOAU_ROOT=\"/mvsutil-develop_dsed\"', state="present") + params = { + "path":"", + "regexp":"ZOAU_ROOT=", + "line":'ZOAU_ROOT=\"/mvsutil-develop_dsed\"', + "state":"present" + } full_path = TEST_FOLDER_LINEINFILE + inspect.stack()[0][3] content = TEST_CONTENT try: @@ -492,7 +551,7 @@ def test_uss_line_replace_quoted_escaped(ansible_zos_module): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - results = hosts.all.shell(cmd="cat {0}".format(params["path"])) + results = hosts.all.shell(cmd=f"cat {params["path"]}") for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_QUOTED finally: @@ -502,7 +561,12 @@ def test_uss_line_replace_quoted_escaped(ansible_zos_module): @pytest.mark.uss def test_uss_line_replace_quoted_not_escaped(ansible_zos_module): hosts = ansible_zos_module - params = dict(path="", regexp="ZOAU_ROOT=", line='ZOAU_ROOT="/mvsutil-develop_dsed"', state="present") + params = { + "path":"", + "regexp":"ZOAU_ROOT=", + "line":'ZOAU_ROOT="/mvsutil-develop_dsed"', + "state":"present" + } full_path = TEST_FOLDER_LINEINFILE + inspect.stack()[0][3] content = TEST_CONTENT try: @@ -511,7 +575,7 @@ def test_uss_line_replace_quoted_not_escaped(ansible_zos_module): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - results = hosts.all.shell(cmd="cat {0}".format(params["path"])) + results = hosts.all.shell(cmd=f"cat {params["path"]}") for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_QUOTED finally: @@ -520,7 +584,11 @@ def test_uss_line_replace_quoted_not_escaped(ansible_zos_module): @pytest.mark.uss def test_uss_line_does_not_insert_repeated(ansible_zos_module): hosts = ansible_zos_module - params = dict(path="", line='ZOAU_ROOT=/usr/lpp/zoautil/v100', state="present") + params = { + "path":"", + "line":'ZOAU_ROOT=/usr/lpp/zoautil/v100', + "state":"present" + } full_path = TEST_FOLDER_LINEINFILE + inspect.stack()[0][3] content = TEST_CONTENT try: @@ -529,12 +597,12 @@ def test_uss_line_does_not_insert_repeated(ansible_zos_module): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - results = hosts.all.shell(cmd="cat {0}".format(params["path"])) + results = hosts.all.shell(cmd=f"cat {params["path"]}") for result in results.contacted.values(): assert result.get("stdout") == TEST_CONTENT # Run lineinfle module with same params again, ensure duplicate entry is not made into file hosts.all.zos_lineinfile(**params) - results = hosts.all.shell(cmd="""grep -c 'ZOAU_ROOT=/usr/lpp/zoautil/v10' {0} """.format(params["path"])) + results = hosts.all.shell(cmd=f"grep -c 'ZOAU_ROOT=/usr/lpp/zoautil/v10' {params["path"]} ") for result in results.contacted.values(): assert result.get("stdout") == '1' finally: @@ -544,14 +612,20 @@ def test_uss_line_does_not_insert_repeated(ansible_zos_module): # Dataset test cases ######################### -# Now force is parameter to change witch function to call in the helper and alter the declaration by add the force or a test name required. -# without change the original description or the other option is that at the end of the test get back to original one. +# Now force is parameter to change witch function +# to call in the helper and alter the declaration by add the force or a test name required. +# without change the original description or the other option +# is that at the end of the test get back to original one. @pytest.mark.ds -@pytest.mark.parametrize("dstype", DS_TYPE) +@pytest.mark.parametrize("dstype", ds_type) def test_ds_line_insertafter_regex(ansible_zos_module, dstype): hosts = ansible_zos_module ds_type = dstype - params = dict(insertafter="ZOAU_ROOT=", line="ZOAU_ROOT=/mvsutil-develop_dsed", state="present") + params = { + "insertafter":"ZOAU_ROOT=", + "line":"ZOAU_ROOT=/mvsutil-develop_dsed", + "state":"present" + } ds_name = get_tmp_ds_name() temp_file = "/tmp/" + ds_name content = TEST_CONTENT @@ -561,7 +635,7 @@ def test_ds_line_insertafter_regex(ansible_zos_module, dstype): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) + results = hosts.all.shell(cmd=f"cat \"//'{params["path"]}'\" ") for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_INSERTAFTER_REGEX finally: @@ -569,11 +643,15 @@ def test_ds_line_insertafter_regex(ansible_zos_module, dstype): @pytest.mark.ds -@pytest.mark.parametrize("dstype", DS_TYPE) +@pytest.mark.parametrize("dstype", ds_type) def test_ds_line_insertbefore_regex(ansible_zos_module, dstype): hosts = ansible_zos_module ds_type = dstype - params = dict(insertbefore="ZOAU_ROOT=", line="unset ZOAU_ROOT", state="present") + params = { + "insertbefore":"ZOAU_ROOT=", + "line":"unset ZOAU_ROOT", + "state":"present" + } ds_name = get_tmp_ds_name() temp_file = "/tmp/" + ds_name content = TEST_CONTENT @@ -583,7 +661,7 @@ def test_ds_line_insertbefore_regex(ansible_zos_module, dstype): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) + results = hosts.all.shell(cmd=f"cat \"//'{params["path"]}'\" ") for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_INSERTBEFORE_REGEX finally: @@ -591,11 +669,15 @@ def test_ds_line_insertbefore_regex(ansible_zos_module, dstype): @pytest.mark.ds -@pytest.mark.parametrize("dstype", DS_TYPE) +@pytest.mark.parametrize("dstype", ds_type) def test_ds_line_insertafter_eof(ansible_zos_module, dstype): hosts = ansible_zos_module ds_type = dstype - params = dict(insertafter="EOF", line="export 'ZOAU_ROOT'", state="present") + params = { + "insertafter":"EOF", + "line":"export 'ZOAU_ROOT'", + "state":"present" + } ds_name = get_tmp_ds_name() temp_file = "/tmp/" + ds_name content = TEST_CONTENT @@ -605,18 +687,22 @@ def test_ds_line_insertafter_eof(ansible_zos_module, dstype): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) + results = hosts.all.shell(cmd=f"cat \"//'{params["path"]}'\" ") for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_INSERTAFTER_EOF finally: remove_ds_environment(ansible_zos_module, ds_name) @pytest.mark.ds -@pytest.mark.parametrize("dstype", DS_TYPE) +@pytest.mark.parametrize("dstype", ds_type) def test_ds_line_insertbefore_bof(ansible_zos_module, dstype): hosts = ansible_zos_module ds_type = dstype - params = dict(insertbefore="BOF", line="# this is file is for setting env vars", state="present") + params = { + "insertbefore":"BOF", + "line":"# this is file is for setting env vars", + "state":"present" + } ds_name = get_tmp_ds_name() temp_file = "/tmp/" + ds_name content = TEST_CONTENT @@ -626,7 +712,7 @@ def test_ds_line_insertbefore_bof(ansible_zos_module, dstype): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) + results = hosts.all.shell(cmd=f"cat \"//'{params["path"]}'\" ") for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_INSERTBEFORE_BOF finally: @@ -634,11 +720,16 @@ def test_ds_line_insertbefore_bof(ansible_zos_module, dstype): @pytest.mark.ds -@pytest.mark.parametrize("dstype", DS_TYPE) +@pytest.mark.parametrize("dstype", ds_type) def test_ds_line_replace_match_insertafter_ignore(ansible_zos_module, dstype): hosts = ansible_zos_module ds_type = dstype - params = dict(regexp="ZOAU_ROOT=", insertafter="PATH=", line="ZOAU_ROOT=/mvsutil-develop_dsed", state="present") + params = { + "regexp":"ZOAU_ROOT=", + "insertafter":"PATH=", + "line":"ZOAU_ROOT=/mvsutil-develop_dsed", + "state":"present" + } ds_name = get_tmp_ds_name() temp_file = "/tmp/" + ds_name content = TEST_CONTENT @@ -648,7 +739,7 @@ def test_ds_line_replace_match_insertafter_ignore(ansible_zos_module, dstype): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) + results = hosts.all.shell(cmd=f"cat \"//'{params["path"]}'\" ") for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_REPLACE_INSERTAFTER_IGNORE finally: @@ -656,11 +747,16 @@ def test_ds_line_replace_match_insertafter_ignore(ansible_zos_module, dstype): @pytest.mark.ds -@pytest.mark.parametrize("dstype", DS_TYPE) +@pytest.mark.parametrize("dstype", ds_type) def test_ds_line_replace_match_insertbefore_ignore(ansible_zos_module, dstype): hosts = ansible_zos_module ds_type = dstype - params = dict(regexp="ZOAU_ROOT=", insertbefore="PATH=", line="unset ZOAU_ROOT", state="present") + params = { + "regexp":"ZOAU_ROOT=", + "insertbefore":"PATH=", + "line":"unset ZOAU_ROOT", + "state":"present" + } ds_name = get_tmp_ds_name() temp_file = "/tmp/" + ds_name content = TEST_CONTENT @@ -670,7 +766,7 @@ def test_ds_line_replace_match_insertbefore_ignore(ansible_zos_module, dstype): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) + results = hosts.all.shell(cmd=f"cat \"//'{params["path"]}'\" ") for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_REPLACE_INSERTBEFORE_IGNORE finally: @@ -768,11 +864,13 @@ def test_special_characters_ds_insert_line(ansible_zos_module): #GH Issue #1244 #@pytest.mark.ds -#@pytest.mark.parametrize("dstype", DS_TYPE) +#@pytest.mark.parametrize("dstype", ds_type) #def test_ds_line_replace_nomatch_insertafter_match(ansible_zos_module, dstype): # hosts = ansible_zos_module # ds_type = dstype -# params = dict(regexp="abcxyz", insertafter="ZOAU_ROOT=", line="ZOAU_ROOT=/mvsutil-develop_dsed", state="present") +# params = dict( +# regexp="abcxyz", insertafter="ZOAU_ROOT=", line="ZOAU_ROOT=/mvsutil-develop_dsed", +# state="present") # ds_name = get_tmp_ds_name() # temp_file = "/tmp/" + ds_name # content = TEST_CONTENT @@ -791,11 +889,12 @@ def test_special_characters_ds_insert_line(ansible_zos_module): #GH Issue #1244 / JIRA NAZARE-10439 #@pytest.mark.ds -#@pytest.mark.parametrize("dstype", DS_TYPE) +#@pytest.mark.parametrize("dstype", ds_type) #def test_ds_line_replace_nomatch_insertbefore_match(ansible_zos_module, dstype): # hosts = ansible_zos_module # ds_type = dstype -# params = dict(regexp="abcxyz", insertbefore="ZOAU_ROOT=", line="unset ZOAU_ROOT", state="present") +# params = dict(regexp="abcxyz", insertbefore="ZOAU_ROOT=", +# line="unset ZOAU_ROOT", state="present") # ds_name = get_tmp_ds_name() # temp_file = "/tmp/" + ds_name # content = TEST_CONTENT @@ -814,11 +913,12 @@ def test_special_characters_ds_insert_line(ansible_zos_module): #GH Issue #1244 / JIRA NAZARE-10439 #@pytest.mark.ds -#@pytest.mark.parametrize("dstype", DS_TYPE) +#@pytest.mark.parametrize("dstype", ds_type) #def test_ds_line_replace_nomatch_insertafter_nomatch(ansible_zos_module, dstype): # hosts = ansible_zos_module # ds_type = dstype -# params = dict(regexp="abcxyz", insertafter="xyzijk", line="ZOAU_ROOT=/mvsutil-develop_dsed", state="present") +# params = dict(regexp="abcxyz", insertafter="xyzijk", +# line="ZOAU_ROOT=/mvsutil-develop_dsed", state="present") # ds_name = get_tmp_ds_name() # temp_file = "/tmp/" + ds_name # content = TEST_CONTENT @@ -837,7 +937,7 @@ def test_special_characters_ds_insert_line(ansible_zos_module): #GH Issue #1244 / JIRA NAZARE-10439 #@pytest.mark.ds -#@pytest.mark.parametrize("dstype", DS_TYPE) +#@pytest.mark.parametrize("dstype", ds_type) #def test_ds_line_replace_nomatch_insertbefore_nomatch(ansible_zos_module, dstype): # hosts = ansible_zos_module # ds_type = dstype @@ -860,11 +960,15 @@ def test_special_characters_ds_insert_line(ansible_zos_module): @pytest.mark.ds -@pytest.mark.parametrize("dstype", DS_TYPE) +@pytest.mark.parametrize("dstype", ds_type) def test_ds_line_absent(ansible_zos_module, dstype): hosts = ansible_zos_module ds_type = dstype - params = dict(regexp="ZOAU_ROOT=", line="ZOAU_ROOT=/usr/lpp/zoautil/v100", state="absent") + params = { + "regexp":"ZOAU_ROOT=", + "line":"ZOAU_ROOT=/usr/lpp/zoautil/v100", + "state":"absent" + } ds_name = get_tmp_ds_name() temp_file = "/tmp/" + ds_name content = TEST_CONTENT @@ -874,7 +978,7 @@ def test_ds_line_absent(ansible_zos_module, dstype): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) + results = hosts.all.shell(cmd=f"cat \"//'{params["path"]}'\" ") for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_ABSENT finally: @@ -886,18 +990,26 @@ def test_ds_tmp_hlq_option(ansible_zos_module): # This TMPHLQ only works with sequential datasets hosts = ansible_zos_module ds_type = "seq" - kwargs = dict(backup_name=r"TMPHLQ\..") - params = dict(insertafter="EOF", line="export ZOAU_ROOT", state="present", backup=True, tmp_hlq="TMPHLQ") + kwargs = { + "backup_name":r"TMPHLQ\.." + } + params = { + "insertafter":"EOF", + "line":"export ZOAU_ROOT", + "state":"present", + "backup":True, + "tmp_hlq":"TMPHLQ" + } content = TEST_CONTENT try: ds_full_name = get_tmp_ds_name() temp_file = "/tmp/" + ds_full_name hosts.all.zos_data_set(name=ds_full_name, type=ds_type, replace=True) - hosts.all.shell(cmd="echo \"{0}\" > {1}".format(content, temp_file)) - cmdStr = "cp {0} \"//'{1}'\" ".format(quote(temp_file), ds_full_name) - hosts.all.shell(cmd=cmdStr) + hosts.all.shell(cmd=f"echo \"{content}\" > {temp_file}") + cmd_str = f"cp {quote(temp_file)} \"//'{ds_full_name}'\" " + hosts.all.shell(cmd=cmd_str) hosts.all.shell(cmd="rm -rf " + "/tmp/zos_lineinfile/") - results = hosts.all.shell(cmd="cat \"//'{0}'\" | wc -l ".format(ds_full_name)) + results = hosts.all.shell(cmd=f"cat \"//'{ds_full_name}'\" | wc -l ") for result in results.contacted.values(): assert int(result.get("stdout")) != 0 params["path"] = ds_full_name @@ -915,7 +1027,12 @@ def test_ds_tmp_hlq_option(ansible_zos_module): def test_ds_not_supported(ansible_zos_module, dstype): hosts = ansible_zos_module ds_type = dstype - params = dict(path="", regexp="ZOAU_ROOT=", line="ZOAU_ROOT=/mvsutil-develop_dsed", state="present") + params = { + "path":"", + "regexp":"ZOAU_ROOT=", + "line":"ZOAU_ROOT=/mvsutil-develop_dsed", + "state":"present" + } try: ds_name = get_tmp_ds_name() + "." + ds_type results = hosts.all.zos_data_set(name=ds_name, type=ds_type, replace='yes') @@ -931,26 +1048,37 @@ def test_ds_not_supported(ansible_zos_module, dstype): @pytest.mark.ds -@pytest.mark.parametrize("dstype", DS_TYPE) +@pytest.mark.parametrize("dstype", ds_type) def test_ds_line_force(ansible_zos_module, dstype): hosts = ansible_zos_module ds_type = dstype default_data_set_name = get_tmp_ds_name() - params = dict(path="", regexp="ZOAU_ROOT=", line="ZOAU_ROOT=/mvsutil-develop_dsed", state="present", force="True") - MEMBER_1, MEMBER_2 = "MEM1", "MEM2" - TEMP_FILE = "/tmp/{0}".format(MEMBER_2) + params = { + "path":"", + "regexp":"ZOAU_ROOT=", + "line":"ZOAU_ROOT=/mvsutil-develop_dsed", + "state":"present", + "force":"True" + } + member_1, member_2 = "MEM1", "MEM2" + temp_file = f"/tmp/{member_2}" content = TEST_CONTENT if ds_type == "seq": - params["path"] = default_data_set_name+".{0}".format(MEMBER_2) + params["path"] = f"{default_data_set_name}.{member_2}" else: - params["path"] = default_data_set_name+"({0})".format(MEMBER_2) + params["path"] = f"{default_data_set_name}({member_2})" try: # set up: - hosts.all.zos_data_set(name=default_data_set_name, state="present", type=ds_type, replace=True) - hosts.all.shell(cmd="echo \"{0}\" > {1}".format(content, TEMP_FILE)) + hosts.all.zos_data_set( + name=default_data_set_name, + state="present", + type=ds_type, + replace=True + ) + hosts.all.shell(cmd=f"echo \"{content}\" > {temp_file}") hosts.all.zos_data_set( batch=[ - { "name": default_data_set_name + "({0})".format(MEMBER_1), + { "name": f"{default_data_set_name}({member_1})", "type": "member", "state": "present", "replace": True, }, { "name": params["path"], "type": "member", "state": "present", "replace": True, }, @@ -958,35 +1086,33 @@ def test_ds_line_force(ansible_zos_module, dstype): ) # write memeber to verify cases if ds_type in ["pds", "pdse"]: - cmdStr = "cp -CM {0} \"//'{1}'\"".format(quote(TEMP_FILE), params["path"]) + cmd_str = f"cp -CM {quote(temp_file)} \"//'{params["path"]}'\"" else: - cmdStr = "cp {0} \"//'{1}'\" ".format(quote(TEMP_FILE), params["path"]) - hosts.all.shell(cmd=cmdStr) - results = hosts.all.shell(cmd="cat \"//'{0}'\" | wc -l ".format(params["path"])) + cmd_str = f"cp {quote(temp_file)} \"//'{params["path"]}'\" " + hosts.all.shell(cmd=cmd_str) + results = hosts.all.shell(cmd=f"cat \"//'{params["path"]}'\" | wc -l ") for result in results.contacted.values(): assert int(result.get("stdout")) != 0 # copy/compile c program and copy jcl to hold data set lock for n seconds in background(&) - hosts.all.shell(cmd="echo \"{0}\" > {1}".format(c_pgm, '/tmp/disp_shr/pdse-lock.c')) - hosts.all.shell(cmd="echo \"{0}\" > {1}".format( - call_c_jcl.format( - default_data_set_name, - MEMBER_1), - '/tmp/disp_shr/call_c_pgm.jcl')) + hosts.all.shell(cmd=f"echo \"{c_pgm}\" > /tmp/disp_shr/pdse-lock.c") + hosts.all.shell(cmd=f"echo \"{call_c_jcl.format( + default_data_set_name,member_1 + )}\" > /tmp/disp_shr/call_c_pgm.jcl") hosts.all.shell(cmd="xlc -o pdse-lock pdse-lock.c", chdir="/tmp/disp_shr/") hosts.all.shell(cmd="submit call_c_pgm.jcl", chdir="/tmp/disp_shr/") time.sleep(5) # call lineinfile to see results results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): - assert result.get("changed") == True + assert result.get("changed") is True results = hosts.all.shell(cmd=r"""cat "//'{0}'" """.format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_REPLACE finally: - hosts.all.shell(cmd="rm -rf " + TEMP_FILE) + hosts.all.shell(cmd="rm -rf " + temp_file) ps_list_res = hosts.all.shell(cmd="ps -e | grep -i 'pdse-lock'") pid = list(ps_list_res.contacted.values())[0].get('stdout').strip().split(' ')[0] - hosts.all.shell(cmd="kill 9 {0}".format(pid.strip())) + hosts.all.shell(cmd=f"kill 9 {pid.strip()}") hosts.all.shell(cmd='rm -r /tmp/disp_shr') hosts.all.zos_data_set(name=default_data_set_name, state="absent") @@ -997,58 +1123,71 @@ def test_ds_line_force_fail(ansible_zos_module, dstype): hosts = ansible_zos_module ds_type = dstype default_data_set_name = get_tmp_ds_name() - params = dict(path="", regexp="ZOAU_ROOT=", line="ZOAU_ROOT=/mvsutil-develop_dsed", state="present", force="False") - MEMBER_1, MEMBER_2 = "MEM1", "MEM2" - TEMP_FILE = "/tmp/{0}".format(MEMBER_2) - params["path"] = default_data_set_name + "({0})".format(MEMBER_2) + params = { + "path":"", + "regexp":"ZOAU_ROOT=", + "line":"ZOAU_ROOT=/mvsutil-develop_dsed", + "state":"present", + "force":"False" + } + member_1, member_2 = "MEM1", "MEM2" + temp_file = f"/tmp/{member_2}" + params["path"] = f"{default_data_set_name}({member_2})" content = TEST_CONTENT try: # set up: - hosts.all.zos_data_set(name=default_data_set_name, state="present", type=ds_type, replace=True) - hosts.all.shell(cmd="echo \"{0}\" > {1}".format(content, TEMP_FILE)) + hosts.all.zos_data_set( + name=default_data_set_name, + state="present", + type=ds_type, + replace=True + ) + hosts.all.shell(cmd=f"echo \"{content}\" > {temp_file}") hosts.all.zos_data_set( batch=[ - { "name": default_data_set_name + "({0})".format(MEMBER_1), + { "name": f"{default_data_set_name}({member_1})", "type": "member", "state": "present", "replace": True, }, { "name": params["path"], "type": "member", "state": "present", "replace": True, }, ] ) - cmdStr = "cp -CM {0} \"//'{1}'\"".format(quote(TEMP_FILE), params["path"]) - hosts.all.shell(cmd=cmdStr) - results = hosts.all.shell(cmd="cat \"//'{0}'\" | wc -l ".format(params["path"])) + cmd_str = f"cp -CM {quote(temp_file)} \"//'{params["path"]}'\"" + hosts.all.shell(cmd=cmd_str) + results = hosts.all.shell(cmd=f"cat \"//'{params["path"]}'\" | wc -l ") for result in results.contacted.values(): assert int(result.get("stdout")) != 0 # copy/compile c program and copy jcl to hold data set lock for n seconds in background(&) hosts.all.file(path="/tmp/disp_shr", state='directory') - hosts.all.shell(cmd="echo \"{0}\" > {1}".format(c_pgm, '/tmp/disp_shr/pdse-lock.c')) - hosts.all.shell(cmd="echo \"{0}\" > {1}".format( - call_c_jcl.format( + hosts.all.shell(cmd=f"echo \"{c_pgm}\" > /tmp/disp_shr/pdse-lock.c") + hosts.all.shell(cmd=f"echo \"{call_c_jcl.format( default_data_set_name, - MEMBER_1), - '/tmp/disp_shr/call_c_pgm.jcl')) + member_1)}\" > /tmp/disp_shr/call_c_pgm.jcl" + ) hosts.all.shell(cmd="xlc -o pdse-lock pdse-lock.c", chdir="/tmp/disp_shr/") hosts.all.shell(cmd="submit call_c_pgm.jcl", chdir="/tmp/disp_shr/") time.sleep(5) # call lineinfile to see results results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): - assert result.get("changed") == False - assert result.get("failed") == True + assert result.get("changed") is False + assert result.get("failed") is True finally: ps_list_res = hosts.all.shell(cmd="ps -e | grep -i 'pdse-lock'") pid = list(ps_list_res.contacted.values())[0].get('stdout').strip().split(' ')[0] - hosts.all.shell(cmd="kill 9 {0}".format(pid.strip())) + hosts.all.shell(cmd=f"kill 9 {pid.strip()}") hosts.all.shell(cmd='rm -r /tmp/disp_shr') hosts.all.zos_data_set(name=default_data_set_name, state="absent") @pytest.mark.ds -@pytest.mark.parametrize("dstype", DS_TYPE) +@pytest.mark.parametrize("dstype", ds_type) def test_ds_line_does_not_insert_repeated(ansible_zos_module, dstype): hosts = ansible_zos_module ds_type = dstype - params = dict(line='ZOAU_ROOT=/usr/lpp/zoautil/v100', state="present") + params = { + "line":'ZOAU_ROOT=/usr/lpp/zoautil/v100', + "state":"present" + } ds_name = get_tmp_ds_name() temp_file = "/tmp/" + ds_name content = TEST_CONTENT @@ -1058,12 +1197,14 @@ def test_ds_line_does_not_insert_repeated(ansible_zos_module, dstype): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) + results = hosts.all.shell(cmd=f"cat \"//'{params["path"]}'\" ") for result in results.contacted.values(): assert result.get("stdout") == TEST_CONTENT # Run lineinfle module with same params again, ensure duplicate entry is not made into file hosts.all.zos_lineinfile(**params) - results = hosts.all.shell(cmd="""dgrep -c 'ZOAU_ROOT=/usr/lpp/zoautil/v10' "{0}" """.format(params["path"])) + results = hosts.all.shell( + cmd=f"dgrep -c 'ZOAU_ROOT=/usr/lpp/zoautil/v10' '{params["path"]}' " + ) response = params["path"] + " " + "1" for result in results.contacted.values(): assert result.get("stdout") == response @@ -1079,14 +1220,22 @@ def test_ds_line_does_not_insert_repeated(ansible_zos_module, dstype): def test_uss_encoding(ansible_zos_module, encoding): hosts = ansible_zos_module insert_data = "Insert this string" - params = dict(insertafter="SIMPLE", line=insert_data, state="present", encoding={"from":"IBM-1047", "to":encoding}) + params = { + "insertafter":"SIMPLE", + "line":insert_data, + "state":"present", + "encoding":{ + "from":"IBM-1047", + "to":encoding + } + } params["encoding"] = encoding full_path = TEST_FOLDER_LINEINFILE + inspect.stack()[0][3] content = "SIMPLE LINE TO VERIFY" try: - hosts.all.shell(cmd="mkdir -p {0}".format(TEST_FOLDER_LINEINFILE)) + hosts.all.shell(cmd=f"mkdir -p {TEST_FOLDER_LINEINFILE}") hosts.all.file(path=full_path, state="touch") - hosts.all.shell(cmd="echo \"{0}\" > {1}".format(content, full_path)) + hosts.all.shell(cmd=f"echo \"{content}\" > {full_path}") params["path"] = full_path results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): @@ -1099,38 +1248,48 @@ def test_uss_encoding(ansible_zos_module, encoding): @pytest.mark.ds -@pytest.mark.parametrize("dstype", DS_TYPE) +@pytest.mark.parametrize("dstype", ds_type) @pytest.mark.parametrize("encoding", ["IBM-1047"]) def test_ds_encoding(ansible_zos_module, encoding, dstype): hosts = ansible_zos_module ds_type = dstype insert_data = "Insert this string" - params = dict(insertafter="SIMPLE", line=insert_data, state="present", encoding={"from":"IBM-1047", "to":encoding}) + params = { + "insertafter":"SIMPLE", + "line":insert_data, + "state":"present", + "encoding":{ + "from":"IBM-1047", + "to":encoding + } + } params["encoding"] = encoding ds_name = get_tmp_ds_name() temp_file = "/tmp/" + ds_name content = "SIMPLE LINE TO VERIFY" try: - hosts.all.shell(cmd="echo \"{0}\" > {1}".format(content, temp_file)) + hosts.all.shell(cmd=f"echo \"{content}\" > {temp_file}") hosts.all.shell(cmd=f"iconv -f IBM-1047 -t {params['encoding']} temp_file > temp_file ") hosts.all.zos_data_set(name=ds_name, type=ds_type) if ds_type in ["pds", "pdse"]: ds_full_name = ds_name + "(MEM)" hosts.all.zos_data_set(name=ds_full_name, state="present", type="member") - cmdStr = "cp -CM {0} \"//'{1}'\"".format(quote(temp_file), ds_full_name) + cmd_str = f"cp -CM {quote(temp_file)} \"//'{ds_full_name}'\"" else: ds_full_name = ds_name - cmdStr = "cp {0} \"//'{1}'\" ".format(quote(temp_file), ds_full_name) - hosts.all.shell(cmd=cmdStr) + cmd_str = f"cp {quote(temp_file)} \"//'{ds_full_name}'\" " + hosts.all.shell(cmd=cmd_str) hosts.all.shell(cmd="rm -rf " + temp_file) params["path"] = ds_full_name results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - hosts.all.shell(cmd=f"iconv -f {encoding} -t IBM-1047 \"{ds_full_name}\" > \"{ds_full_name}\" ") - results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(ds_full_name)) + hosts.all.shell( + cmd=f"iconv -f {encoding} -t IBM-1047 \"{ds_full_name}\" > \"{ds_full_name}\" " + ) + results = hosts.all.shell(cmd=f"cat \"//'{ds_full_name}'\" ") for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_ENCODING finally: - remove_ds_environment(ansible_zos_module, ds_name) \ No newline at end of file + remove_ds_environment(ansible_zos_module, ds_name) diff --git a/tests/functional/modules/test_zos_script_func.py b/tests/functional/modules/test_zos_script_func.py index 8bc310fe5..ee213bdf0 100644 --- a/tests/functional/modules/test_zos_script_func.py +++ b/tests/functional/modules/test_zos_script_func.py @@ -15,12 +15,14 @@ from __future__ import absolute_import, division, print_function +import os +import tempfile import pytest __metaclass__ = type # Using || to concatenate strings without extra spaces. -rexx_script_args = """/* REXX */ +REXX_SCRIPT_ARGS = """/* REXX */ parse arg A ',' B say 'args are ' || A || ',' || B return 0 @@ -28,7 +30,7 @@ """ # For validating that chdir gets honored by the module. -rexx_script_chdir = """/* REXX */ +REXX_SCRIPT_CHDIR = """/* REXX */ address syscall 'getcwd cwd' say cwd return 0 @@ -37,7 +39,7 @@ # For testing a default template. Note that the Jinja variable is static # and it's always called playbook_msg. -rexx_script_template_default = """/* REXX */ +REXX_SCRIPT_TEMPLATE_DEFAULT = """/* REXX */ say '{{ playbook_msg }}' return 0 @@ -45,7 +47,7 @@ # For testing templates with custom markers. Here the markers are static # too (always '((', '))', '&$' and '$&'). -rexx_script_template_custom = """/* REXX */ +REXX_SCRIPT_TEMPLATE_CUSTOM = """/* REXX */ &$ This is a comment that should create problems if not substituted $& say '(( playbook_msg ))' return 0 @@ -56,38 +58,38 @@ def create_script_content(msg, script_type): """Returns a string containing either a valid REXX script or a valid Python script. The script will print the given message.""" + + if not script_type in ['rexx','python']: + raise ValueError(f'Type {script_type} is not valid.') if script_type == 'rexx': # Without the comment in the first line, the interpreter will not be # able to run the script. # Without the last blank line, the REXX interpreter will throw # an error. - return """/* REXX */ -say '{0}' + content = f"""/* REXX */ +say '{msg}' return 0 -""".format(msg) - elif script_type == 'python': - return """msg = "{0}" -print(msg) -""".format(msg) +""" else: - raise Exception('Type {0} is not valid.'.format(script_type)) + content = f"""msg = "{msg}" +print(msg) +""" + return content def create_python_script_stderr(msg, rc): """Returns a Python script that will write out to STDERR and return a given RC. The RC can be 0, but for testing it would be better if it was something else.""" - return """import sys -print('{0}', file=sys.stderr) -exit({1}) -""".format(msg, rc) + return f"""import sys +print('{msg}', file=sys.stderr) +exit({rc}) +""" def create_local_file(content, suffix): """Creates a tempfile that has the given content.""" - import os - import tempfile fd, file_path = tempfile.mkstemp( prefix='zos_script', @@ -95,15 +97,13 @@ def create_local_file(content, suffix): ) os.close(fd) - with open(file_path, 'w') as f: + with open(file_path, 'w', encoding="utf-8") as f: f.write(content) return file_path def test_rexx_script_without_args(ansible_zos_module): - import os - hosts = ansible_zos_module try: @@ -127,8 +127,6 @@ def test_rexx_script_without_args(ansible_zos_module): def test_rexx_remote_script(ansible_zos_module): - import os - hosts = ansible_zos_module try: @@ -176,16 +174,14 @@ def test_rexx_remote_script(ansible_zos_module): def test_rexx_script_with_args(ansible_zos_module): - import os - hosts = ansible_zos_module try: - rexx_script = rexx_script_args + rexx_script = REXX_SCRIPT_ARGS script_path = create_local_file(rexx_script, 'rexx') args = '1,2' - cmd = "{0} '{1}'".format(script_path, args) + cmd = f"{script_path} '{args}'" zos_script_result = hosts.all.zos_script( cmd=cmd @@ -195,7 +191,7 @@ def test_rexx_script_with_args(ansible_zos_module): assert result.get('changed') is True assert result.get('failed', False) is False assert result.get('rc') == 0 - assert result.get('stdout', '').strip() == 'args are {0}'.format(args) + assert result.get('stdout', '').strip() == f'args are {args}' assert result.get('stderr', '') == '' finally: if os.path.exists(script_path): @@ -203,12 +199,10 @@ def test_rexx_script_with_args(ansible_zos_module): def test_rexx_script_chdir(ansible_zos_module): - import os - hosts = ansible_zos_module try: - rexx_script = rexx_script_chdir + rexx_script = REXX_SCRIPT_CHDIR script_path = create_local_file(rexx_script, 'rexx') tmp_remote_dir = '/zos_script_tests' @@ -238,8 +232,6 @@ def test_rexx_script_chdir(ansible_zos_module): def test_python_script(ansible_zos_module): - import os - hosts = ansible_zos_module try: @@ -265,8 +257,6 @@ def test_python_script(ansible_zos_module): def test_rexx_script_creates_option(ansible_zos_module): - import os - hosts = ansible_zos_module try: @@ -299,8 +289,6 @@ def test_rexx_script_creates_option(ansible_zos_module): def test_rexx_script_removes_option(ansible_zos_module): - import os - hosts = ansible_zos_module try: @@ -326,18 +314,17 @@ def test_rexx_script_removes_option(ansible_zos_module): def test_script_template_with_default_markers(ansible_zos_module): - import os - hosts = ansible_zos_module try: - rexx_script = rexx_script_template_default + rexx_script = REXX_SCRIPT_TEMPLATE_DEFAULT script_path = create_local_file(rexx_script, 'rexx') # Updating the vars available to the tasks. - template_vars = dict( - playbook_msg='Success' - ) + template_vars = { + "playbook_msg":'Success' + } + # pylint: disable-next=protected-access for host in hosts['options']['inventory_manager']._inventory.hosts.values(): host.vars.update(template_vars) @@ -358,30 +345,29 @@ def test_script_template_with_default_markers(ansible_zos_module): def test_script_template_with_custom_markers(ansible_zos_module): - import os - hosts = ansible_zos_module try: - rexx_script = rexx_script_template_custom + rexx_script = REXX_SCRIPT_TEMPLATE_CUSTOM script_path = create_local_file(rexx_script, 'rexx') # Updating the vars available to the tasks. - template_vars = dict( - playbook_msg='Success' - ) + template_vars = { + "playbook_msg":'Success' + } + # pylint: disable-next=protected-access for host in hosts['options']['inventory_manager']._inventory.hosts.values(): host.vars.update(template_vars) zos_script_result = hosts.all.zos_script( cmd=script_path, use_template=True, - template_parameters=dict( - variable_start_string='((', - variable_end_string='))', - comment_start_string='&$', - comment_end_string='$&', - ) + template_parameters={ + "variable_start_string":'((', + "variable_end_string":'))', + "comment_start_string":'&$', + "comment_end_string":'$&', + } ) for result in zos_script_result.contacted.values(): @@ -396,8 +382,6 @@ def test_script_template_with_custom_markers(ansible_zos_module): def test_python_script_with_stderr(ansible_zos_module): - import os - hosts = ansible_zos_module try: From cbad2f43902da102c93eaa4a441048deae8e4a92 Mon Sep 17 00:00:00 2001 From: IsaacVRey <isaac.vega.rey@gmail.com> Date: Thu, 13 Jun 2024 10:35:45 -0600 Subject: [PATCH 414/495] [Enabler][test_zos_job_submit_func] Update test suites on functional/modules/test_zos_job_submit_func.py to be pylint correct (#1464) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Update test suites on functional/module-utils/test_zos_data_set_func.py to be pylint correct * Correct functional/modules/test_zos_data_set_func.py * Correct functional/modules/test_zos_data_set_func.py * Update test suites on functional/modules/test_zos_job_submit_func.py to be pylint correct * Update test_zos_job_submit_func.py * Add to test cases --------- Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> Co-authored-by: André Marcel Gutiérrez Benítez <amgutierrezbenitez@hotmail.com> --- .../modules/test_zos_job_submit_func.py | 227 +++++++++++------- 1 file changed, 137 insertions(+), 90 deletions(-) diff --git a/tests/functional/modules/test_zos_job_submit_func.py b/tests/functional/modules/test_zos_job_submit_func.py index d0e452ac2..927dcfaad 100644 --- a/tests/functional/modules/test_zos_job_submit_func.py +++ b/tests/functional/modules/test_zos_job_submit_func.py @@ -15,11 +15,11 @@ __metaclass__ = type -from shellescape import quote import tempfile -import pytest import re import os +from shellescape import quote +import pytest from ibm_zos_core.tests.helpers.volumes import Volume_Handler from ibm_zos_core.tests.helpers.dataset import get_tmp_ds_name @@ -403,11 +403,15 @@ @pytest.mark.parametrize( "location", [ - dict(default_location=True), - dict(default_location=False), + { + "default_location":True + }, + { + "default_location":False + }, ] ) -def test_job_submit_PDS(ansible_zos_module, location): +def test_job_submit_pds(ansible_zos_module, location): """ Test zos_job_submit with a PDS(MEMBER), also test the default value for 'location', ensure it works with and without the @@ -444,20 +448,23 @@ def test_job_submit_PDS(ansible_zos_module, location): assert result.get("jobs")[0].get("ret_code").get("code") == 0 assert result.get("changed") is True finally: - hosts.all.file(path=TEMP_PATH, state="absent") - hosts.all.zos_data_set(name=data_set_name, state="absent") + hosts.all.file(path=TEMP_PATH, state="absent") + hosts.all.zos_data_set(name=data_set_name, state="absent") -def test_job_submit_PDS_special_characters(ansible_zos_module): +def test_job_submit_pds_special_characters(ansible_zos_module): try: hosts = ansible_zos_module hosts.all.file(path=TEMP_PATH, state="directory") hosts.all.shell( cmd="echo {0} > {1}/SAMPLE".format(quote(JCL_FILE_CONTENTS), TEMP_PATH) ) - hosts.all.zos_data_set( + results = hosts.all.zos_data_set( name=DATA_SET_NAME_SPECIAL_CHARS, state="present", type="pds", replace=True ) + hosts.all.shell( + cmd="echo {0} > {1}/SAMPLE".format(quote(JCL_FILE_CONTENTS), TEMP_PATH) + ) hosts.all.shell( cmd="cp {0}/SAMPLE \"//'{1}(SAMPLE)'\"".format( TEMP_PATH, DATA_SET_NAME_SPECIAL_CHARS.replace('$', '\$') @@ -476,7 +483,7 @@ def test_job_submit_PDS_special_characters(ansible_zos_module): hosts.all.zos_data_set(name=DATA_SET_NAME_SPECIAL_CHARS, state="absent") -def test_job_submit_USS(ansible_zos_module): +def test_job_submit_uss(ansible_zos_module): try: hosts = ansible_zos_module hosts.all.file(path=TEMP_PATH, state="directory") @@ -484,7 +491,7 @@ def test_job_submit_USS(ansible_zos_module): cmd="echo {0} > {1}/SAMPLE".format(quote(JCL_FILE_CONTENTS), TEMP_PATH) ) results = hosts.all.zos_job_submit( - src="{0}/SAMPLE".format(TEMP_PATH), location="uss", volume=None + src=f"{TEMP_PATH}/SAMPLE", location="uss", volume=None ) for result in results.contacted.values(): assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" @@ -494,23 +501,22 @@ def test_job_submit_USS(ansible_zos_module): hosts.all.file(path=TEMP_PATH, state="absent") -def test_job_submit_LOCAL(ansible_zos_module): +def test_job_submit_local(ansible_zos_module): tmp_file = tempfile.NamedTemporaryFile(delete=True) - with open(tmp_file.name, "w") as f: + with open(tmp_file.name, "w",encoding="utf-8") as f: f.write(JCL_FILE_CONTENTS) hosts = ansible_zos_module results = hosts.all.zos_job_submit(src=tmp_file.name, location="local", wait_time_s=10) for result in results.contacted.values(): - print(result) assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" assert result.get("jobs")[0].get("ret_code").get("code") == 0 assert result.get("changed") is True -def test_job_submit_LOCAL_extraR(ansible_zos_module): +def test_job_submit_local_extra_r(ansible_zos_module): tmp_file = tempfile.NamedTemporaryFile(delete=True) - with open(tmp_file.name, "w") as f: + with open(tmp_file.name, "w",encoding="utf-8") as f: f.write(JCL_FILE_CONTENTS_BACKSLASH_R) hosts = ansible_zos_module results = hosts.all.zos_job_submit(src=tmp_file.name, location="local", wait_time_s=10) @@ -521,9 +527,9 @@ def test_job_submit_LOCAL_extraR(ansible_zos_module): assert result.get("changed") is True -def test_job_submit_LOCAL_BADJCL(ansible_zos_module): +def test_job_submit_local_badjcl(ansible_zos_module): tmp_file = tempfile.NamedTemporaryFile(delete=True) - with open(tmp_file.name, "w") as f: + with open(tmp_file.name, "w",encoding="utf-8") as f: f.write(JCL_FILE_CONTENTS_BAD) hosts = ansible_zos_module results = hosts.all.zos_job_submit(src=tmp_file.name, location="local", wait_time_s=10) @@ -534,7 +540,7 @@ def test_job_submit_LOCAL_BADJCL(ansible_zos_module): assert re.search(r'completion code', repr(result.get("msg"))) -def test_job_submit_PDS_volume(ansible_zos_module, volumes_on_systems): +def test_job_submit_pds_volume(ansible_zos_module, volumes_on_systems): try: hosts = ansible_zos_module data_set_name = get_tmp_ds_name() @@ -558,7 +564,11 @@ def test_job_submit_PDS_volume(ansible_zos_module, volumes_on_systems): name=data_set_name, state="uncataloged", type="pds" ) - results = hosts.all.zos_job_submit(src=data_set_name+"(SAMPLE)", location="data_set", volume=volume_1) + results = hosts.all.zos_job_submit( + src=data_set_name+"(SAMPLE)", + location="data_set", + volume=volume_1 + ) for result in results.contacted.values(): assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" assert result.get("jobs")[0].get("ret_code").get("code") == 0 @@ -568,7 +578,7 @@ def test_job_submit_PDS_volume(ansible_zos_module, volumes_on_systems): hosts.all.zos_data_set(name=data_set_name, state="absent") -def test_job_submit_PDS_5_SEC_JOB_WAIT_15(ansible_zos_module): +def test_job_submit_pds_5_sec_job_wait_15(ansible_zos_module): try: hosts = ansible_zos_module data_set_name = get_tmp_ds_name() @@ -576,7 +586,7 @@ def test_job_submit_PDS_5_SEC_JOB_WAIT_15(ansible_zos_module): wait_time_s = 15 hosts.all.shell( - cmd="echo {0} > {1}/BPXSLEEP".format(quote(JCL_FILE_CONTENTS_05_SEC), TEMP_PATH) + cmd=f"echo {quote(JCL_FILE_CONTENTS_05_SEC)} > {TEMP_PATH}/BPXSLEEP" ) hosts.all.zos_data_set( @@ -584,7 +594,7 @@ def test_job_submit_PDS_5_SEC_JOB_WAIT_15(ansible_zos_module): ) hosts.all.shell( - cmd="cp {0}/BPXSLEEP \"//'{1}(BPXSLEEP)'\"".format(TEMP_PATH, data_set_name) + cmd=f"cp {TEMP_PATH}/BPXSLEEP \"//'{data_set_name}(BPXSLEEP)'\"" ) hosts = ansible_zos_module @@ -601,7 +611,7 @@ def test_job_submit_PDS_5_SEC_JOB_WAIT_15(ansible_zos_module): hosts.all.zos_data_set(name=data_set_name, state="absent") -def test_job_submit_PDS_30_SEC_JOB_WAIT_60(ansible_zos_module): +def test_job_submit_pds_30_sec_job_wait_60(ansible_zos_module): try: hosts = ansible_zos_module data_set_name = get_tmp_ds_name() @@ -609,7 +619,7 @@ def test_job_submit_PDS_30_SEC_JOB_WAIT_60(ansible_zos_module): wait_time_s = 60 hosts.all.shell( - cmd="echo {0} > {1}/BPXSLEEP".format(quote(JCL_FILE_CONTENTS_30_SEC), TEMP_PATH) + cmd=f"echo {quote(JCL_FILE_CONTENTS_30_SEC)} > {TEMP_PATH}/BPXSLEEP" ) hosts.all.zos_data_set( @@ -617,7 +627,7 @@ def test_job_submit_PDS_30_SEC_JOB_WAIT_60(ansible_zos_module): ) hosts.all.shell( - cmd="cp {0}/BPXSLEEP \"//'{1}(BPXSLEEP)'\"".format(TEMP_PATH, data_set_name) + cmd=f"cp {TEMP_PATH}/BPXSLEEP \"//'{data_set_name}(BPXSLEEP)'\"" ) hosts = ansible_zos_module @@ -633,7 +643,7 @@ def test_job_submit_PDS_30_SEC_JOB_WAIT_60(ansible_zos_module): hosts.all.file(path=TEMP_PATH, state="absent") hosts.all.zos_data_set(name=data_set_name, state="absent") -def test_job_submit_PDS_30_SEC_JOB_WAIT_10_negative(ansible_zos_module): +def test_job_submit_pds_30_sec_job_wait_10_negative(ansible_zos_module): """This submits a 30 second job and only waits 10 seconds""" try: hosts = ansible_zos_module @@ -642,7 +652,7 @@ def test_job_submit_PDS_30_SEC_JOB_WAIT_10_negative(ansible_zos_module): wait_time_s = 10 hosts.all.shell( - cmd="echo {0} > {1}/BPXSLEEP".format(quote(JCL_FILE_CONTENTS_30_SEC), TEMP_PATH) + cmd=f"echo {quote(JCL_FILE_CONTENTS_30_SEC)} > {TEMP_PATH}/BPXSLEEP" ) hosts.all.zos_data_set( @@ -650,7 +660,7 @@ def test_job_submit_PDS_30_SEC_JOB_WAIT_10_negative(ansible_zos_module): ) hosts.all.shell( - cmd="cp {0}/BPXSLEEP \"//'{1}(BPXSLEEP)'\"".format(TEMP_PATH, data_set_name) + cmd=f"cp {TEMP_PATH}/BPXSLEEP \"//'{data_set_name}(BPXSLEEP)'\"" ) hosts = ansible_zos_module @@ -669,20 +679,32 @@ def test_job_submit_PDS_30_SEC_JOB_WAIT_10_negative(ansible_zos_module): @pytest.mark.parametrize("args", [ - dict(max_rc=None, wait_time_s=10), - dict(max_rc=4, wait_time_s=10), - dict(max_rc=12, wait_time_s=20) + { + "max_rc":None, + "wait_time_s":10 + }, + { + "max_rc":4, + "wait_time_s":10 + }, + { + "max_rc":12, + "wait_time_s":20 + } ]) def test_job_submit_max_rc(ansible_zos_module, args): """This""" try: hosts = ansible_zos_module tmp_file = tempfile.NamedTemporaryFile(delete=True) - with open(tmp_file.name, "w") as f: + with open(tmp_file.name, "w",encoding="utf-8") as f: f.write(JCL_FILE_CONTENTS_RC_8) results = hosts.all.zos_job_submit( - src=tmp_file.name, location="local", max_rc=args["max_rc"], wait_time_s=args["wait_time_s"] + src=tmp_file.name, + location="local", + max_rc=args["max_rc"], + wait_time_s=args["wait_time_s"] ) for result in results.contacted.values(): @@ -706,10 +728,15 @@ def test_job_submit_max_rc(ansible_zos_module, args): elif args["max_rc"] == 4: assert result.get("msg") is not None assert result.get('changed') is False - # Expecting "The job return code, 'ret_code[code]' 8 for the submitted job is greater - # than the value set for option 'max_rc' 4. Increase the value for 'max_rc' otherwise + # Expecting "The job return code, + # 'ret_code[code]' 8 for the submitted job is greater + # than the value set for option 'max_rc' 4. + # Increase the value for 'max_rc' otherwise # this job submission has failed. - assert re.search(r'the submitted job is greater than the value set for option', repr(result.get("msg"))) + assert re.search( + r'the submitted job is greater than the value set for option', + repr(result.get("msg")) + ) elif args["max_rc"] == 12: # Will not fail and as the max_rc is set to 12 and the rc is 8 is a change true @@ -724,47 +751,56 @@ def test_job_submit_max_rc(ansible_zos_module, args): @pytest.mark.template @pytest.mark.parametrize("args", [ - dict( - template="Default", - options=dict( - keep_trailing_newline=False - ) - ), - dict( - template="Custom", - options=dict( - keep_trailing_newline=False, - variable_start_string="((", - variable_end_string="))", - comment_start_string="(#", - comment_end_string="#)" - ) - ), - dict( - template="Loop", - options=dict( - keep_trailing_newline=False - ) - ) + { + "template":"Default", + "options":{ + "keep_trailing_newline":False + } + }, + { + "template":"Custom", + "options":{ + "keep_trailing_newline":False, + "variable_start_string":"((", + "variable_end_string":"))", + "comment_start_string":"(#", + "comment_end_string":"#)" + } + }, + { + "template":"Loop", + "options":{ + "keep_trailing_newline":False + } + } ]) def test_job_submit_jinja_template(ansible_zos_module, args): try: hosts = ansible_zos_module tmp_file = tempfile.NamedTemporaryFile(delete=False) - with open(tmp_file.name, "w") as f: + with open(tmp_file.name, "w",encoding="utf-8") as f: f.write(JCL_TEMPLATES[args["template"]]) - template_vars = dict( - pgm_name="HELLO", - input_dataset="DUMMY", - message="Hello, world", - steps=[ - dict(step_name="IN", dd="DUMMY"), - dict(step_name="PRINT", dd="SYSOUT=*"), - dict(step_name="UT1", dd="*") + template_vars = { + "pgm_name":"HELLO", + "input_dataset":"DUMMY", + "message":"Hello, world", + "steps":[ + { + "step_name":"IN", + "dd":"DUMMY" + }, + { + "step_name":"PRINT", + "dd":"SYSOUT=*" + }, + { + "step_name":"UT1", + "dd":"*" + } ] - ) + } for host in hosts["options"]["inventory_manager"]._inventory.hosts.values(): host.vars.update(template_vars) @@ -789,10 +825,10 @@ def test_job_submit_full_input(ansible_zos_module): hosts = ansible_zos_module hosts.all.file(path=TEMP_PATH, state="directory") hosts.all.shell( - cmd="echo {0} > {1}/SAMPLE".format(quote(JCL_FULL_INPUT), TEMP_PATH) + cmd=f"echo {quote(JCL_FULL_INPUT)} > {TEMP_PATH}/SAMPLE" ) results = hosts.all.zos_job_submit( - src="{0}/SAMPLE".format(TEMP_PATH), + src=f"{TEMP_PATH}/SAMPLE", location="uss", volume=None, # This job used to set wait=True, but since it has been deprecated @@ -800,7 +836,6 @@ def test_job_submit_full_input(ansible_zos_module): wait_time_s=30 ) for result in results.contacted.values(): - print(result) assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" assert result.get("jobs")[0].get("ret_code").get("code") == 0 assert result.get("changed") is True @@ -809,11 +844,10 @@ def test_job_submit_full_input(ansible_zos_module): def test_negative_job_submit_local_jcl_no_dsn(ansible_zos_module): tmp_file = tempfile.NamedTemporaryFile(delete=True) - with open(tmp_file.name, "w") as f: + with open(tmp_file.name, "w",encoding="utf-8") as f: f.write(JCL_FILE_CONTENTS_NO_DSN) hosts = ansible_zos_module results = hosts.all.zos_job_submit(src=tmp_file.name, wait_time_s=20, location="local") - import pprint for result in results.contacted.values(): assert result.get("changed") is False assert re.search(r'completion code', repr(result.get("msg"))) @@ -822,7 +856,7 @@ def test_negative_job_submit_local_jcl_no_dsn(ansible_zos_module): def test_negative_job_submit_local_jcl_invalid_user(ansible_zos_module): tmp_file = tempfile.NamedTemporaryFile(delete=True) - with open(tmp_file.name, "w") as f: + with open(tmp_file.name, "w",encoding="utf-8") as f: f.write(JCL_FILE_CONTENTS_INVALID_USER) hosts = ansible_zos_module results = hosts.all.zos_job_submit(src=tmp_file.name, location="local") @@ -832,12 +866,15 @@ def test_negative_job_submit_local_jcl_invalid_user(ansible_zos_module): assert re.search(r'please review the error for further details', repr(result.get("msg"))) assert re.search(r'please review the job log for status SEC', repr(result.get("msg"))) assert result.get("jobs")[0].get("job_id") is not None - assert re.search(r'please review the job log for status SEC', repr(result.get("jobs")[0].get("ret_code").get("msg_txt"))) + assert re.search( + r'please review the job log for status SEC', + repr(result.get("jobs")[0].get("ret_code").get("msg_txt")) + ) def test_job_submit_local_jcl_typrun_scan(ansible_zos_module): tmp_file = tempfile.NamedTemporaryFile(delete=True) - with open(tmp_file.name, "w") as f: + with open(tmp_file.name, "w",encoding="utf-8") as f: f.write(JCL_FILE_CONTENTS_TYPRUN_SCAN) hosts = ansible_zos_module results = hosts.all.zos_job_submit(src=tmp_file.name, @@ -850,7 +887,10 @@ def test_job_submit_local_jcl_typrun_scan(ansible_zos_module): for result in results.contacted.values(): assert result.get("changed") is False assert result.get("jobs")[0].get("job_id") is not None - assert re.search(r'run with special job processing TYPRUN=SCAN', repr(result.get("jobs")[0].get("ret_code").get("msg_txt"))) + assert re.search( + r'run with special job processing TYPRUN=SCAN', + repr(result.get("jobs")[0].get("ret_code").get("msg_txt")) + ) assert result.get("jobs")[0].get("ret_code").get("code") is None assert result.get("jobs")[0].get("ret_code").get("msg") == "TYPRUN=SCAN" assert result.get("jobs")[0].get("ret_code").get("msg_code") is None @@ -858,7 +898,7 @@ def test_job_submit_local_jcl_typrun_scan(ansible_zos_module): def test_job_submit_local_jcl_typrun_copy(ansible_zos_module): tmp_file = tempfile.NamedTemporaryFile(delete=True) - with open(tmp_file.name, "w") as f: + with open(tmp_file.name, "w",encoding="utf-8") as f: f.write(JCL_FILE_CONTENTS_TYPRUN_COPY) hosts = ansible_zos_module results = hosts.all.zos_job_submit(src=tmp_file.name, @@ -868,12 +908,13 @@ def test_job_submit_local_jcl_typrun_copy(ansible_zos_module): "from": "UTF-8", "to": "IBM-1047" },) - import pprint for result in results.contacted.values(): - pprint.pprint(result) assert result.get("changed") is False assert result.get("jobs")[0].get("job_id") is not None - assert re.search(r'please review the job log', repr(result.get("jobs")[0].get("ret_code").get("msg_txt"))) + assert re.search( + r'please review the job log', + repr(result.get("jobs")[0].get("ret_code").get("msg_txt")) + ) assert result.get("jobs")[0].get("ret_code").get("code") is None assert result.get("jobs")[0].get("ret_code").get("msg") is None assert result.get("jobs")[0].get("ret_code").get("msg_code") is None @@ -881,7 +922,7 @@ def test_job_submit_local_jcl_typrun_copy(ansible_zos_module): def test_job_submit_local_jcl_typrun_hold(ansible_zos_module): tmp_file = tempfile.NamedTemporaryFile(delete=True) - with open(tmp_file.name, "w") as f: + with open(tmp_file.name, "w",encoding="utf-8") as f: f.write(JCL_FILE_CONTENTS_TYPRUN_HOLD) hosts = ansible_zos_module results = hosts.all.zos_job_submit(src=tmp_file.name, @@ -894,7 +935,10 @@ def test_job_submit_local_jcl_typrun_hold(ansible_zos_module): for result in results.contacted.values(): assert result.get("changed") is False assert result.get("jobs")[0].get("job_id") is not None - assert re.search(r'long running job', repr(result.get("jobs")[0].get("ret_code").get("msg_txt"))) + assert re.search( + r'long running job', + repr(result.get("jobs")[0].get("ret_code").get("msg_txt")) + ) assert result.get("jobs")[0].get("ret_code").get("code") is None assert result.get("jobs")[0].get("ret_code").get("msg") == "AC" assert result.get("jobs")[0].get("ret_code").get("msg_code") is None @@ -902,7 +946,7 @@ def test_job_submit_local_jcl_typrun_hold(ansible_zos_module): def test_job_submit_local_jcl_typrun_jclhold(ansible_zos_module): tmp_file = tempfile.NamedTemporaryFile(delete=True) - with open(tmp_file.name, "w") as f: + with open(tmp_file.name, "w",encoding="utf-8") as f: f.write(JCL_FILE_CONTENTS_TYPRUN_JCLHOLD) hosts = ansible_zos_module results = hosts.all.zos_job_submit(src=tmp_file.name, @@ -915,7 +959,10 @@ def test_job_submit_local_jcl_typrun_jclhold(ansible_zos_module): for result in results.contacted.values(): assert result.get("changed") is False assert result.get("jobs")[0].get("job_id") is not None - assert re.search(r'long running job', repr(result.get("jobs")[0].get("ret_code").get("msg_txt"))) + assert re.search( + r'long running job', + repr(result.get("jobs")[0].get("ret_code").get("msg_txt")) + ) assert result.get("jobs")[0].get("ret_code").get("code") is None assert result.get("jobs")[0].get("ret_code").get("msg") == "AC" assert result.get("jobs")[0].get("ret_code").get("msg_code") is None @@ -1005,12 +1052,12 @@ def test_zoau_bugfix_invalid_utf8_chars(ansible_zos_module): # Copy C source and compile it. hosts.all.file(path=TEMP_PATH, state="directory") hosts.all.shell( - cmd="echo {0} > {1}/noprint.c".format(quote(C_SRC_INVALID_UTF8), TEMP_PATH) + cmd=f"echo {quote(C_SRC_INVALID_UTF8)} > {TEMP_PATH}/noprint.c" ) - hosts.all.shell(cmd="xlc -o {0}/noprint {0}/noprint.c".format(TEMP_PATH)) + hosts.all.shell(cmd=f"xlc -o {TEMP_PATH}/noprint {TEMP_PATH}/noprint.c") # Create local JCL and submit it. tmp_file = tempfile.NamedTemporaryFile(delete=True) - with open(tmp_file.name, "w") as f: + with open(tmp_file.name, "w",encoding="utf-8") as f: f.write(JCL_INVALID_UTF8_CHARS_EXC.format(TEMP_PATH)) results = hosts.all.zos_job_submit( @@ -1026,4 +1073,4 @@ def test_zoau_bugfix_invalid_utf8_chars(ansible_zos_module): assert result.get("jobs")[0].get("ret_code").get("code") == 0 assert result.get("changed") is True finally: - hosts.all.file(path=TEMP_PATH, state="absent") \ No newline at end of file + hosts.all.file(path=TEMP_PATH, state="absent") From 46c4f28ee36e960a5b7fdf2fe69292d664ccc44e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Thu, 13 Jun 2024 12:20:42 -0600 Subject: [PATCH 415/495] [bugfix][1520]mvs_raw_fix_verbose_and_first_character (#1543) * Fix verbose and first character * Add fragment * Update 1543-mvs_raw_fix_verbose_and_first_character.yml correcting section header. --------- Co-authored-by: Rich Parker <richp405@gmail.com> --- .../1543-mvs_raw_fix_verbose_and_first_character.yml | 4 ++++ plugins/module_utils/dd_statement.py | 2 +- plugins/module_utils/zos_mvs_raw.py | 4 ++++ 3 files changed, 9 insertions(+), 1 deletion(-) create mode 100644 changelogs/fragments/1543-mvs_raw_fix_verbose_and_first_character.yml diff --git a/changelogs/fragments/1543-mvs_raw_fix_verbose_and_first_character.yml b/changelogs/fragments/1543-mvs_raw_fix_verbose_and_first_character.yml new file mode 100644 index 000000000..0cbcc202e --- /dev/null +++ b/changelogs/fragments/1543-mvs_raw_fix_verbose_and_first_character.yml @@ -0,0 +1,4 @@ +bugfixes: + - zos_mvs_raw - DD_output first character from each line was missing. Change now includes the first character + of each line. + (https://github.com/ansible-collections/ibm_zos_core/pull/1543). diff --git a/plugins/module_utils/dd_statement.py b/plugins/module_utils/dd_statement.py index 5bef5d81a..e7a7a8c91 100644 --- a/plugins/module_utils/dd_statement.py +++ b/plugins/module_utils/dd_statement.py @@ -819,7 +819,7 @@ class OutputDefinition(DataDefinition): def __init__( self, tmphlq="", - record_format="FBA", + record_format="FB", space_primary=100, space_secondary=50, space_type="trk", diff --git a/plugins/module_utils/zos_mvs_raw.py b/plugins/module_utils/zos_mvs_raw.py index 5afe05c50..d6251a69d 100644 --- a/plugins/module_utils/zos_mvs_raw.py +++ b/plugins/module_utils/zos_mvs_raw.py @@ -60,6 +60,8 @@ def execute(pgm, dds, parm="", debug=False, verbose=False, tmp_hlq=None): MVSCmd._build_command(pgm, dds, parm), ) rc, out, err = module.run_command(command) + if rc == 0 and verbose: + out = err return MVSCmdResponse(rc, out, err) @staticmethod @@ -90,6 +92,8 @@ def execute_authorized(pgm, dds, parm="", debug=False, verbose=False, tmp_hlq=No MVSCmd._build_command(pgm, dds, parm), ) rc, out, err = module.run_command(command) + if rc == 0 and verbose: + out = err return MVSCmdResponse(rc, out, err) @staticmethod From cc108c7d9773099e994f5239d61cda4f0849d35d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Thu, 20 Jun 2024 11:11:00 -0600 Subject: [PATCH 416/495] Fix lineinfile (#1545) * Fix lineinfile * Fix typos * Add lineinfile solution * Updated force fail test cases * modified quotes * Updated test --------- Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> --- .../modules/test_zos_lineinfile_func.py | 81 +++++++++---------- 1 file changed, 40 insertions(+), 41 deletions(-) diff --git a/tests/functional/modules/test_zos_lineinfile_func.py b/tests/functional/modules/test_zos_lineinfile_func.py index 92b970040..6e83a2fed 100644 --- a/tests/functional/modules/test_zos_lineinfile_func.py +++ b/tests/functional/modules/test_zos_lineinfile_func.py @@ -31,7 +31,7 @@ { char dsname[ strlen(argv[1]) + 4]; sprintf(dsname, \\\"//'%s'\\\", argv[1]); - file* member; + FILE* member; member = fopen(dsname, \\\"rb,type=record\\\"); sleep(300); fclose(member); @@ -267,7 +267,7 @@ def test_uss_line_replace(ansible_zos_module): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - results = hosts.all.shell(cmd=f"cat {params["path"]}") + results = hosts.all.shell(cmd="cat {0}".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_REPLACE finally: @@ -290,7 +290,7 @@ def test_uss_line_insertafter_regex(ansible_zos_module): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - results = hosts.all.shell(cmd=f"cat {params["path"]}") + results = hosts.all.shell(cmd="cat {0}".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_INSERTAFTER_REGEX finally: @@ -313,7 +313,7 @@ def test_uss_line_insertbefore_regex(ansible_zos_module): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - results = hosts.all.shell(cmd=f"cat {params["path"]}") + results = hosts.all.shell(cmd="cat {0}".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_INSERTBEFORE_REGEX finally: @@ -336,7 +336,7 @@ def test_uss_line_insertafter_eof(ansible_zos_module): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - results = hosts.all.shell(cmd=f"cat {params["path"]}") + results = hosts.all.shell(cmd="cat {0}".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_INSERTAFTER_EOF finally: @@ -359,7 +359,7 @@ def test_uss_line_insertbefore_bof(ansible_zos_module): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - results = hosts.all.shell(cmd=f"cat {params["path"]}") + results = hosts.all.shell(cmd="cat {0}".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_INSERTBEFORE_BOF finally: @@ -383,7 +383,7 @@ def test_uss_line_replace_match_insertafter_ignore(ansible_zos_module): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - results = hosts.all.shell(cmd=f"cat {params["path"]}") + results = hosts.all.shell(cmd="cat {0}".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_REPLACE_INSERTAFTER_IGNORE finally: @@ -407,7 +407,7 @@ def test_uss_line_replace_match_insertbefore_ignore(ansible_zos_module): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - results = hosts.all.shell(cmd=f"cat {params["path"]}") + results = hosts.all.shell(cmd="cat {0}".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_REPLACE_INSERTBEFORE_IGNORE finally: @@ -431,7 +431,7 @@ def test_uss_line_replace_nomatch_insertafter_match(ansible_zos_module): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - results = hosts.all.shell(cmd=f"cat {params["path"]}") + results = hosts.all.shell(cmd="cat {0}".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_REPLACE_NOMATCH_INSERTAFTER finally: @@ -455,7 +455,7 @@ def test_uss_line_replace_nomatch_insertbefore_match(ansible_zos_module): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - results = hosts.all.shell(cmd=f"cat {params["path"]}") + results = hosts.all.shell(cmd="cat {0}".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_REPLACE_NOMATCH_INSERTBEFORE finally: @@ -479,7 +479,7 @@ def test_uss_line_replace_nomatch_insertafter_nomatch(ansible_zos_module): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - results = hosts.all.shell(cmd=f"cat {params["path"]}") + results = hosts.all.shell(cmd="cat {0}".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_REPLACE_NOMATCH_INSERTAFTER_NOMATCH finally: @@ -503,7 +503,7 @@ def test_uss_line_replace_nomatch_insertbefore_nomatch(ansible_zos_module): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - results = hosts.all.shell(cmd=f"cat {params["path"]}") + results = hosts.all.shell(cmd="cat {0}".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_REPLACE_NOMATCH_INSERTBEFORE_NOMATCH finally: @@ -527,7 +527,7 @@ def test_uss_line_absent(ansible_zos_module): for result in results.contacted.values(): print(result) assert result.get("changed") == 1 - results = hosts.all.shell(cmd=f"cat {params["path"]}") + results = hosts.all.shell(cmd="cat {0}".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_ABSENT finally: @@ -551,7 +551,7 @@ def test_uss_line_replace_quoted_escaped(ansible_zos_module): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - results = hosts.all.shell(cmd=f"cat {params["path"]}") + results = hosts.all.shell(cmd="cat {0}".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_QUOTED finally: @@ -575,7 +575,7 @@ def test_uss_line_replace_quoted_not_escaped(ansible_zos_module): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - results = hosts.all.shell(cmd=f"cat {params["path"]}") + results = hosts.all.shell(cmd="cat {0}".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_QUOTED finally: @@ -597,12 +597,12 @@ def test_uss_line_does_not_insert_repeated(ansible_zos_module): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - results = hosts.all.shell(cmd=f"cat {params["path"]}") + results = hosts.all.shell(cmd="cat {0}".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == TEST_CONTENT # Run lineinfle module with same params again, ensure duplicate entry is not made into file hosts.all.zos_lineinfile(**params) - results = hosts.all.shell(cmd=f"grep -c 'ZOAU_ROOT=/usr/lpp/zoautil/v10' {params["path"]} ") + results = hosts.all.shell(cmd="grep -c 'ZOAU_ROOT=/usr/lpp/zoautil/v10' {0} ".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == '1' finally: @@ -635,7 +635,7 @@ def test_ds_line_insertafter_regex(ansible_zos_module, dstype): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - results = hosts.all.shell(cmd=f"cat \"//'{params["path"]}'\" ") + results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_INSERTAFTER_REGEX finally: @@ -661,7 +661,7 @@ def test_ds_line_insertbefore_regex(ansible_zos_module, dstype): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - results = hosts.all.shell(cmd=f"cat \"//'{params["path"]}'\" ") + results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_INSERTBEFORE_REGEX finally: @@ -687,7 +687,7 @@ def test_ds_line_insertafter_eof(ansible_zos_module, dstype): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - results = hosts.all.shell(cmd=f"cat \"//'{params["path"]}'\" ") + results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_INSERTAFTER_EOF finally: @@ -712,7 +712,7 @@ def test_ds_line_insertbefore_bof(ansible_zos_module, dstype): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - results = hosts.all.shell(cmd=f"cat \"//'{params["path"]}'\" ") + results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_INSERTBEFORE_BOF finally: @@ -739,7 +739,7 @@ def test_ds_line_replace_match_insertafter_ignore(ansible_zos_module, dstype): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - results = hosts.all.shell(cmd=f"cat \"//'{params["path"]}'\" ") + results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_REPLACE_INSERTAFTER_IGNORE finally: @@ -766,7 +766,7 @@ def test_ds_line_replace_match_insertbefore_ignore(ansible_zos_module, dstype): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - results = hosts.all.shell(cmd=f"cat \"//'{params["path"]}'\" ") + results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_REPLACE_INSERTBEFORE_IGNORE finally: @@ -978,7 +978,7 @@ def test_ds_line_absent(ansible_zos_module, dstype): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - results = hosts.all.shell(cmd=f"cat \"//'{params["path"]}'\" ") + results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_ABSENT finally: @@ -1086,18 +1086,18 @@ def test_ds_line_force(ansible_zos_module, dstype): ) # write memeber to verify cases if ds_type in ["pds", "pdse"]: - cmd_str = f"cp -CM {quote(temp_file)} \"//'{params["path"]}'\"" + cmd_str = "cp -CM {0} \"//'{1}'\"".format(quote(temp_file), params["path"]) else: - cmd_str = f"cp {quote(temp_file)} \"//'{params["path"]}'\" " + cmd_str = "cp {0} \"//'{1}'\" ".format(quote(temp_file), params["path"]) hosts.all.shell(cmd=cmd_str) - results = hosts.all.shell(cmd=f"cat \"//'{params["path"]}'\" | wc -l ") + results = hosts.all.shell(cmd="cat \"//'{0}'\" | wc -l ".format(params["path"])) for result in results.contacted.values(): assert int(result.get("stdout")) != 0 # copy/compile c program and copy jcl to hold data set lock for n seconds in background(&) hosts.all.shell(cmd=f"echo \"{c_pgm}\" > /tmp/disp_shr/pdse-lock.c") - hosts.all.shell(cmd=f"echo \"{call_c_jcl.format( - default_data_set_name,member_1 - )}\" > /tmp/disp_shr/call_c_pgm.jcl") + hosts.all.shell(cmd="echo \"{0}\" > /tmp/disp_shr/call_c_pgm.jcl".format(call_c_jcl.format( + default_data_set_name,member_1)) + ) hosts.all.shell(cmd="xlc -o pdse-lock pdse-lock.c", chdir="/tmp/disp_shr/") hosts.all.shell(cmd="submit call_c_pgm.jcl", chdir="/tmp/disp_shr/") time.sleep(5) @@ -1128,7 +1128,7 @@ def test_ds_line_force_fail(ansible_zos_module, dstype): "regexp":"ZOAU_ROOT=", "line":"ZOAU_ROOT=/mvsutil-develop_dsed", "state":"present", - "force":"False" + "force":False } member_1, member_2 = "MEM1", "MEM2" temp_file = f"/tmp/{member_2}" @@ -1151,17 +1151,16 @@ def test_ds_line_force_fail(ansible_zos_module, dstype): "state": "present", "replace": True, }, ] ) - cmd_str = f"cp -CM {quote(temp_file)} \"//'{params["path"]}'\"" + cmd_str = "cp -CM {0} \"//'{1}'\"".format(quote(temp_file), params["path"]) hosts.all.shell(cmd=cmd_str) - results = hosts.all.shell(cmd=f"cat \"//'{params["path"]}'\" | wc -l ") + results = hosts.all.shell(cmd="cat \"//'{0}'\" | wc -l ".format(params["path"])) for result in results.contacted.values(): assert int(result.get("stdout")) != 0 # copy/compile c program and copy jcl to hold data set lock for n seconds in background(&) - hosts.all.file(path="/tmp/disp_shr", state='directory') - hosts.all.shell(cmd=f"echo \"{c_pgm}\" > /tmp/disp_shr/pdse-lock.c") - hosts.all.shell(cmd=f"echo \"{call_c_jcl.format( - default_data_set_name, - member_1)}\" > /tmp/disp_shr/call_c_pgm.jcl" + results = hosts.all.file(path="/tmp/disp_shr", state='directory') + hosts.all.shell(cmd=f"echo \"{c_pgm}\" > /tmp/disp_shr/pdse-lock.c") + hosts.all.shell(cmd="echo \"{0}\" > /tmp/disp_shr/call_c_pgm.jcl".format(call_c_jcl.format( + default_data_set_name,member_1)) ) hosts.all.shell(cmd="xlc -o pdse-lock pdse-lock.c", chdir="/tmp/disp_shr/") hosts.all.shell(cmd="submit call_c_pgm.jcl", chdir="/tmp/disp_shr/") @@ -1197,14 +1196,14 @@ def test_ds_line_does_not_insert_repeated(ansible_zos_module, dstype): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - results = hosts.all.shell(cmd=f"cat \"//'{params["path"]}'\" ") + results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == TEST_CONTENT # Run lineinfle module with same params again, ensure duplicate entry is not made into file hosts.all.zos_lineinfile(**params) results = hosts.all.shell( - cmd=f"dgrep -c 'ZOAU_ROOT=/usr/lpp/zoautil/v10' '{params["path"]}' " - ) + cmd="dgrep -c 'ZOAU_ROOT=/usr/lpp/zoautil/v10' '{0}' ".format(params["path"]) + ) response = params["path"] + " " + "1" for result in results.contacted.values(): assert result.get("stdout") == response From d910975b5c037b0fd4b1116b7192afb01e650616 Mon Sep 17 00:00:00 2001 From: Ivan Moreno <iamorenosoto@gmail.com> Date: Thu, 20 Jun 2024 10:40:00 -0700 Subject: [PATCH 417/495] [Enabler] [zos_encode] Support for GDS in zos_encode (#1531) * Update validation to use new data set class * Add support for GDG/GDS * Add negative GDS test * Add more GDS tests * Fix dest validations * Update docs * Add more negative tests * Add tests for GDS destinations * Add GDS support for backups * Add tests for GDS backup support * Add test for data set with special symbols * Add changelog fragment * Update docs --------- Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> --- .../fragments/1531-zos_encode_gdg_support.yml | 4 + docs/source/modules/zos_encode.rst | 28 +- plugins/module_utils/encode.py | 6 +- plugins/modules/zos_encode.py | 131 ++++++- .../modules/test_zos_encode_func.py | 366 ++++++++++++++++++ 5 files changed, 513 insertions(+), 22 deletions(-) create mode 100644 changelogs/fragments/1531-zos_encode_gdg_support.yml diff --git a/changelogs/fragments/1531-zos_encode_gdg_support.yml b/changelogs/fragments/1531-zos_encode_gdg_support.yml new file mode 100644 index 000000000..44735aff2 --- /dev/null +++ b/changelogs/fragments/1531-zos_encode_gdg_support.yml @@ -0,0 +1,4 @@ +minor_changes: + - zos_encode - add support for encoding generation data sets (GDS), as well + as using a GDS for backup. + (https://github.com/ansible-collections/ibm_zos_core/pull/1531). \ No newline at end of file diff --git a/docs/source/modules/zos_encode.rst b/docs/source/modules/zos_encode.rst index 1ae892acc..51bcca12d 100644 --- a/docs/source/modules/zos_encode.rst +++ b/docs/source/modules/zos_encode.rst @@ -54,12 +54,14 @@ encoding src - The location can be a UNIX System Services (USS) file or path, PS (sequential data set), PDS, PDSE, member of a PDS or PDSE, or KSDS (VSAM data set). + The location can be a UNIX System Services (USS) file or path, PS (sequential data set), PDS, PDSE, member of a PDS or PDSE, a generation data set (GDS) or KSDS (VSAM data set). The USS path or file must be an absolute pathname. If \ :emphasis:`src`\ is a USS directory, all files will be encoded. + Encoding a whole generation data group (GDG) is not supported. + | **required**: True | **type**: str @@ -67,7 +69,7 @@ src dest The location where the converted characters are output. - The destination \ :emphasis:`dest`\ can be a UNIX System Services (USS) file or path, PS (sequential data set), PDS, PDSE, member of a PDS or PDSE, or KSDS (VSAM data set). + The destination \ :emphasis:`dest`\ can be a UNIX System Services (USS) file or path, PS (sequential data set), PDS, PDSE, member of a PDS or PDSE, a generation data set (GDS) or KSDS (VSAM data set). If the length of the PDSE member name used in \ :emphasis:`dest`\ is greater than 8 characters, the member name will be truncated when written out. @@ -75,6 +77,8 @@ dest The USS file or path must be an absolute pathname. + If \ :emphasis:`dest`\ is a data set, it must be already allocated. + | **required**: False | **type**: str @@ -100,6 +104,8 @@ backup_name \ :literal:`backup\_name`\ will be returned on either success or failure of module execution such that data can be retrieved. + If \ :emphasis:`backup\_name`\ is a generation data set (GDS), it must be a relative positive name (for example, \ :literal:`HLQ.USER.GDG(+1)`\ ). + | **required**: False | **type**: str @@ -253,6 +259,24 @@ Examples from: ISO8859-1 to: IBM-1047 + - name: Convert file encoding from a USS file to a generation data set + zos_encode: + src: /zos_encode/test.data + dest: USER.TEST.GDG(0) + encoding: + from: ISO8859-1 + to: IBM-1047 + + - name: Convert file encoding from a USS file to a data set while using a GDG for backup + zos_encode: + src: /zos_encode/test.data + dest: USER.TEST.PS + encoding: + from: ISO8859-1 + to: IBM-1047 + backup: true + backup_name: USER.BACKUP.GDG(+1) + diff --git a/plugins/module_utils/encode.py b/plugins/module_utils/encode.py index 83e9746c0..c69e2ebd9 100644 --- a/plugins/module_utils/encode.py +++ b/plugins/module_utils/encode.py @@ -511,7 +511,7 @@ def mvs_convert_encoding( if src_type == "PO": temp_src = mkdtemp() rc, out, err = copy.copy_pds2uss(src, temp_src) - if src_type == "VSAM": + if src_type == "KSDS": reclen, space_u = self.listdsi_data_set(src.upper()) # RDW takes the first 4 bytes in the VB format, hence we need to add an extra buffer to the vsam max recl. reclen += 4 @@ -520,7 +520,7 @@ def mvs_convert_encoding( temp_src_fo = NamedTemporaryFile() temp_src = temp_src_fo.name rc, out, err = copy.copy_ps2uss(temp_ps, temp_src) - if dest_type == "PS" or dest_type == "VSAM": + if dest_type == "PS" or dest_type == "KSDS": temp_dest_fo = NamedTemporaryFile() temp_dest = temp_dest_fo.name if dest_type == "PO": @@ -530,7 +530,7 @@ def mvs_convert_encoding( if not dest_type: convert_rc = True else: - if dest_type == "VSAM": + if dest_type == "KSDS": reclen, space_u = self.listdsi_data_set(dest.upper()) # RDW takes the first 4 bytes or records in the VB format, hence we need to add an extra buffer to the vsam max recl. reclen += 4 diff --git a/plugins/modules/zos_encode.py b/plugins/modules/zos_encode.py index e9afa4994..40b70a0fd 100644 --- a/plugins/modules/zos_encode.py +++ b/plugins/modules/zos_encode.py @@ -55,24 +55,26 @@ src: description: - The location can be a UNIX System Services (USS) file or path, - PS (sequential data set), PDS, PDSE, member of a PDS or PDSE, or - KSDS (VSAM data set). + PS (sequential data set), PDS, PDSE, member of a PDS or PDSE, a + generation data set (GDS) or KSDS (VSAM data set). - The USS path or file must be an absolute pathname. - If I(src) is a USS directory, all files will be encoded. + - Encoding a whole generation data group (GDG) is not supported. required: true type: str dest: description: - The location where the converted characters are output. - The destination I(dest) can be a UNIX System Services (USS) file or path, - PS (sequential data set), PDS, PDSE, member of a PDS or PDSE, or - KSDS (VSAM data set). + PS (sequential data set), PDS, PDSE, member of a PDS or PDSE, a + generation data set (GDS) or KSDS (VSAM data set). - If the length of the PDSE member name used in I(dest) is greater than 8 characters, the member name will be truncated when written out. - If I(dest) is not specified, the I(src) will be used as the destination and will overwrite the I(src) with the character set in the option I(to_encoding). - The USS file or path must be an absolute pathname. + - If I(dest) is a data set, it must be already allocated. required: false type: str backup: @@ -99,6 +101,8 @@ by IBM Z Open Automation Utilities. - C(backup_name) will be returned on either success or failure of module execution such that data can be retrieved. + - If I(backup_name) is a generation data set (GDS), it must be a relative + positive name (for example, V(HLQ.USER.GDG(+1\))). required: false type: str backup_compress: @@ -249,6 +253,24 @@ encoding: from: ISO8859-1 to: IBM-1047 + +- name: Convert file encoding from a USS file to a generation data set + zos_encode: + src: /zos_encode/test.data + dest: USER.TEST.GDG(0) + encoding: + from: ISO8859-1 + to: IBM-1047 + +- name: Convert file encoding from a USS file to a data set while using a GDG for backup + zos_encode: + src: /zos_encode/test.data + dest: USER.TEST.PS + encoding: + from: ISO8859-1 + to: IBM-1047 + backup: true + backup_name: USER.BACKUP.GDG(+1) """ RETURN = r""" @@ -494,6 +516,8 @@ def run_module(): is_mvs_dest = False ds_type_src = None ds_type_dest = None + src_data_set = None + dest_data_set = None convert_rc = False changed = False @@ -503,20 +527,79 @@ def run_module(): try: # Check the src is a USS file/path or an MVS data set - is_uss_src, is_mvs_src, ds_type_src = check_file(src) - if is_uss_src: - verify_uss_path_exists(src) + # is_uss_src, is_mvs_src, ds_type_src = check_file(src) + + if path.sep in src: + is_uss_src = True + # ds_type_src = "USS" + verify_uss_path_exists(src) # This can raise an exception. + else: + is_mvs_src = True + src_data_set = data_set.MVSDataSet(src) + is_name_member = data_set.is_member(src_data_set.name) + dest_exists = False + + if not is_name_member: + dest_exists = data_set.DataSet.data_set_exists(src_data_set.name) + else: + dest_exists = data_set.DataSet.data_set_exists(data_set.extract_dsname(src_data_set.name)) + + if not dest_exists: + raise EncodeError( + "Data set {0} is not cataloged, please check data set provided in " + "the src option.".format(data_set.extract_dsname(src_data_set.raw_name)) + ) + + if is_name_member: + if not data_set.DataSet.data_set_member_exists(src_data_set.name): + raise EncodeError("Cannot find member {0} in {1}".format( + data_set.extract_member(src_data_set.raw_name), + data_set.extract_dsname(src_data_set.raw_name) + )) + ds_type_src = "PS" + else: + ds_type_src = data_set.DataSet.data_set_type(src_data_set.name) + + if not ds_type_src: + raise EncodeError("Unable to determine data set type of {0}".format(src_data_set.raw_name)) + result["src"] = src # Check the dest is a USS file/path or an MVS data set # if the dest is not specified, the value in the src will be used if not dest: - dest = src + if src_data_set: + dest = src_data_set.name + else: + dest = src + is_uss_dest = is_uss_src is_mvs_dest = is_mvs_src ds_type_dest = ds_type_src else: - is_uss_dest, is_mvs_dest, ds_type_dest = check_file(dest) + if path.sep in dest: + is_uss_dest = True + else: + is_mvs_dest = True + dest_data_set = data_set.MVSDataSet(dest) + is_name_member = data_set.is_member(dest_data_set.name) + + if not is_name_member: + dest_exists = data_set.DataSet.data_set_exists(dest_data_set.name) + else: + dest_exists = data_set.DataSet.data_set_exists(data_set.extract_dsname(dest_data_set.name)) + + if not dest_exists: + raise EncodeError( + "Data set {0} is not cataloged, please check data set provided in " + "the dest option.".format(data_set.extract_dsname(dest_data_set.raw_name)) + ) + + if is_name_member: + ds_type_dest = "PS" + else: + ds_type_dest = data_set.DataSet.data_set_type(dest_data_set.name) + if (not is_uss_dest) and (path.sep in dest): try: if path.isfile(src) or ds_type_src in ["PS", "VSAM"]: @@ -532,14 +615,28 @@ def run_module(): raise EncodeError("Failed when creating the {0}".format(dest)) result["dest"] = dest + if ds_type_dest == "GDG": + raise EncodeError("Encoding of a whole generation data group is not yet supported.") + + new_src = src_data_set.name if src_data_set else src + new_dest = dest_data_set.name if dest_data_set else dest + # Check if the dest is required to be backup before conversion if backup: + if backup_name: + backup_data_set = data_set.MVSDataSet(backup_name) + if backup_data_set.is_gds_active: + raise EncodeError( + f"The generation data set {backup_name} cannot be used as backup. " + "Please use a new generation for this purpose." + ) + if is_uss_dest: backup_name = zos_backup.uss_file_backup( - dest, backup_name, backup_compress + new_dest, backup_name, backup_compress ) if is_mvs_dest: - backup_name = zos_backup.mvs_file_backup(dest, backup_name, tmphlq) + backup_name = zos_backup.mvs_file_backup(new_dest, backup_name, tmphlq) result["backup_name"] = backup_name eu = encode.EncodeUtils() @@ -564,12 +661,12 @@ def run_module(): if is_uss_src and is_uss_dest: convert_rc = eu.uss_convert_encoding_prev( - src, dest, from_encoding, to_encoding + new_src, new_dest, from_encoding, to_encoding ) else: convert_rc = eu.mvs_convert_encoding( - src, - dest, + new_src, + new_dest, from_encoding, to_encoding, src_type=ds_type_src, @@ -578,12 +675,12 @@ def run_module(): if convert_rc: if is_uss_dest: - eu.uss_tag_encoding(dest, to_encoding) + eu.uss_tag_encoding(new_dest, to_encoding) changed = True - result = dict(changed=changed, src=src, dest=dest, backup_name=backup_name) + result = dict(changed=changed, src=new_src, dest=new_dest, backup_name=backup_name) else: - result = dict(src=src, dest=dest, changed=changed, backup_name=backup_name) + result = dict(src=new_src, dest=new_dest, changed=changed, backup_name=backup_name) except encode.TaggingError as e: module.fail_json( msg=e.msg, diff --git a/tests/functional/modules/test_zos_encode_func.py b/tests/functional/modules/test_zos_encode_func.py index cfb340fa4..9e7d40041 100644 --- a/tests/functional/modules/test_zos_encode_func.py +++ b/tests/functional/modules/test_zos_encode_func.py @@ -16,9 +16,12 @@ from shellescape import quote # pylint: disable-next=import-error from ibm_zos_core.tests.helpers.dataset import get_tmp_ds_name +import pytest +import re __metaclass__ = type +SHELL_EXECUTABLE = "/bin/sh" USS_FILE = "/tmp/encode_data" USS_NONE_FILE = "/tmp/none" USS_DEST_FILE = "/tmp/converted_data" @@ -720,6 +723,32 @@ def test_uss_encoding_conversion_mvs_ps_to_mvs_vsam(ansible_zos_module): hosts.all.zos_data_set(name=MVS_VS, state="absent") +def test_uss_encoding_conversion_src_with_special_chars(ansible_zos_module): + hosts = ansible_zos_module + + try: + src_data_set = get_tmp_ds_name(symbols=True) + hosts.all.zos_data_set(name=src_data_set, state="present", type="seq") + + results = hosts.all.zos_encode( + src=src_data_set, + encoding={ + "from": FROM_ENCODING, + "to": TO_ENCODING, + }, + ) + + for result in results.contacted.values(): + assert result.get("src") == src_data_set + assert result.get("dest") == src_data_set + assert result.get("backup_name") is None + assert result.get("changed") is True + assert result.get("msg") is None + + finally: + hosts.all.zos_data_set(name=src_data_set, state="absent") + + def test_pds_backup(ansible_zos_module): try: hosts = ansible_zos_module @@ -1025,3 +1054,340 @@ def test_return_backup_name_on_module_success_and_failure(ansible_zos_module): hosts.all.file(path=TEMP_JCL_PATH, state="absent") hosts.all.zos_data_set(name=MVS_PS, state="absent") hosts.all.zos_data_set(name=BACKUP_DATA_SET, state="absent") + + +@pytest.mark.parametrize("generation", ["-1", "+1"]) +def test_gdg_encoding_conversion_src_with_invalid_generation(ansible_zos_module, generation): + hosts = ansible_zos_module + ds_name = get_tmp_ds_name(3, 2) + + try: + hosts.all.shell(cmd=f"dtouch -tGDG -L3 {ds_name}") + hosts.all.shell(cmd=f"""dtouch -tseq "{ds_name}(+1)" """) + + results = hosts.all.zos_encode( + src=f"{ds_name}({generation})", + encoding={ + "from": FROM_ENCODING, + "to": TO_ENCODING, + }, + ) + + for result in results.contacted.values(): + assert result.get("msg") is not None + assert "not cataloged" in result.get("msg") + assert result.get("backup_name") is None + assert result.get("changed") is False + finally: + hosts.all.shell(cmd=f"""drm "{ds_name}(0)" """) + hosts.all.shell(cmd=f"drm {ds_name}") + + +def test_gdg_encoding_conversion_invalid_gdg(ansible_zos_module): + hosts = ansible_zos_module + ds_name = get_tmp_ds_name(3, 2) + + try: + hosts.all.shell(cmd=f"dtouch -tGDG -L3 {ds_name}") + hosts.all.shell(cmd=f"""dtouch -tseq "{ds_name}(+1)" """) + + results = hosts.all.zos_encode( + src=ds_name, + encoding={ + "from": FROM_ENCODING, + "to": TO_ENCODING, + }, + ) + + for result in results.contacted.values(): + assert result.get("msg") is not None + assert "not yet supported" in result.get("msg") + assert result.get("backup_name") is None + assert result.get("changed") is False + assert result.get("failed") is True + finally: + hosts.all.shell(cmd=f"""drm "{ds_name}(0)" """) + hosts.all.shell(cmd=f"drm {ds_name}") + + +def test_encoding_conversion_gds_to_uss_file(ansible_zos_module): + try: + hosts = ansible_zos_module + ds_name = get_tmp_ds_name() + gds_name = f"{ds_name}(0)" + + hosts.all.shell(cmd=f"dtouch -tGDG -L3 {ds_name}") + hosts.all.shell(cmd=f"""dtouch -tseq "{ds_name}(+1)" """) + + hosts.all.shell(cmd=f"decho \"{TEST_DATA}\" \"{gds_name}\"") + + results = hosts.all.zos_encode( + src=gds_name, + dest=USS_DEST_FILE, + encoding={ + "from": FROM_ENCODING, + "to": TO_ENCODING, + } + ) + + # Checking that we got a source of the form: ANSIBLE.DATA.SET.G0001V01. + gds_pattern = r"G[0-9]+V[0-9]+" + + for result in results.contacted.values(): + src = result.get("src", "") + assert ds_name in src + assert re.fullmatch(gds_pattern, src.split(".")[-1]) + + assert result.get("dest") == USS_DEST_FILE + assert result.get("changed") is True + + tag_results = hosts.all.shell(cmd="ls -T {0}".format(USS_DEST_FILE)) + for result in tag_results.contacted.values(): + assert TO_ENCODING in result.get("stdout") + finally: + hosts.all.file(path=USS_DEST_FILE, state="absent") + hosts.all.shell(cmd=f"""drm "{ds_name}(0)" """) + hosts.all.shell(cmd=f"drm {ds_name}") + + +def test_encoding_conversion_gds_no_dest(ansible_zos_module): + try: + hosts = ansible_zos_module + ds_name = get_tmp_ds_name() + gds_name = f"{ds_name}(0)" + + hosts.all.shell(cmd=f"dtouch -tGDG -L3 {ds_name}") + hosts.all.shell(cmd=f"""dtouch -tseq "{ds_name}(+1)" """) + hosts.all.shell(cmd=f"decho \"{TEST_DATA}\" \"{gds_name}\"") + + results = hosts.all.zos_encode( + src=gds_name, + encoding={ + "from": FROM_ENCODING, + "to": TO_ENCODING, + } + ) + + dest_existence_check = hosts.all.shell( + cmd=f"""dcat "{gds_name}" | wc -l """, + executable=SHELL_EXECUTABLE + ) + + # Checking that we got a dest of the form: ANSIBLE.DATA.SET.G0001V01. + gds_pattern = r"G[0-9]+V[0-9]+" + + for result in results.contacted.values(): + src = result.get("src", "") + dest = result.get("dest", "") + + assert ds_name in src + assert re.fullmatch(gds_pattern, src.split(".")[-1]) + assert src == dest + + assert result.get("changed") is True + + for result in dest_existence_check.contacted.values(): + assert result.get("rc") == 0 + assert int(result.get("stdout")) > 0 + + finally: + hosts.all.file(path=USS_FILE, state="absent") + hosts.all.shell(cmd=f"""drm "{gds_name}" """) + hosts.all.shell(cmd=f"drm {ds_name}") + + +def test_encoding_conversion_uss_file_to_gds(ansible_zos_module): + try: + hosts = ansible_zos_module + ds_name = get_tmp_ds_name() + gds_name = f"{ds_name}(0)" + + hosts.all.shell(cmd=f"dtouch -tGDG -L3 {ds_name}") + hosts.all.shell(cmd=f"""dtouch -tseq "{ds_name}(+1)" """) + + hosts.all.shell(cmd=f"echo \"{TEST_DATA}\" > {USS_FILE}") + + results = hosts.all.zos_encode( + src=USS_FILE, + dest=gds_name, + encoding={ + "from": FROM_ENCODING, + "to": TO_ENCODING, + } + ) + + dest_existence_check = hosts.all.shell( + cmd=f"""dcat "{gds_name}" | wc -l """, + executable=SHELL_EXECUTABLE + ) + + # Checking that we got a dest of the form: ANSIBLE.DATA.SET.G0001V01. + gds_pattern = r"G[0-9]+V[0-9]+" + + for result in results.contacted.values(): + dest = result.get("dest", "") + assert ds_name in dest + assert re.fullmatch(gds_pattern, dest.split(".")[-1]) + + assert result.get("src") == USS_FILE + assert result.get("changed") is True + + for result in dest_existence_check.contacted.values(): + assert result.get("rc") == 0 + assert int(result.get("stdout")) > 0 + + finally: + hosts.all.file(path=USS_FILE, state="absent") + hosts.all.shell(cmd=f"""drm "{gds_name}" """) + hosts.all.shell(cmd=f"drm {ds_name}") + + +def test_encoding_conversion_gds_to_mvs(ansible_zos_module): + try: + hosts = ansible_zos_module + src_name = get_tmp_ds_name() + dest_name = get_tmp_ds_name() + gds_name = f"{src_name}(0)" + + hosts.all.shell(cmd=f"dtouch -tGDG -L3 {src_name}") + hosts.all.shell(cmd=f"""dtouch -tseq "{src_name}(+1)" """) + hosts.all.shell(cmd=f"dtouch -tseq {dest_name}") + + hosts.all.shell(cmd=f"decho \"{TEST_DATA}\" \"{gds_name}\"") + + results = hosts.all.zos_encode( + src=gds_name, + dest=dest_name, + encoding={ + "from": FROM_ENCODING, + "to": TO_ENCODING, + } + ) + + dest_existence_check = hosts.all.shell( + cmd=f"""dcat "{dest_name}" | wc -l """, + executable=SHELL_EXECUTABLE + ) + + # Checking that we got a source of the form: ANSIBLE.DATA.SET.G0001V01. + gds_pattern = r"G[0-9]+V[0-9]+" + + for result in results.contacted.values(): + src = result.get("src", "") + assert src_name in src + assert re.fullmatch(gds_pattern, src.split(".")[-1]) + + assert result.get("dest") == dest_name + assert result.get("changed") is True + + for result in dest_existence_check.contacted.values(): + assert result.get("rc") == 0 + assert int(result.get("stdout")) > 0 + finally: + hosts.all.shell(cmd=f"""drm "{src_name}(0)" """) + hosts.all.shell(cmd=f"drm {src_name}") + hosts.all.shell(cmd=f"drm {dest_name}") + + +def test_gds_encoding_conversion_when_gds_does_not_exist(ansible_zos_module): + hosts = ansible_zos_module + try: + src = get_tmp_ds_name() + gdg_name = get_tmp_ds_name() + dest = f"{gdg_name}(+1)" + + hosts.all.shell(cmd=f"dtouch -tSEQ {src}") + hosts.all.shell(cmd=f"dtouch -tGDG -L3 {gdg_name}") + + results = hosts.all.zos_encode( + src=src, + dest=dest, + encoding={ + "from": FROM_ENCODING, + "to": TO_ENCODING, + }, + ) + + for result in results.contacted.values(): + assert result.get("src") == src + assert result.get("dest") == dest + assert result.get("backup_name") is None + assert result.get("changed") is False + assert result.get("failed") is True + assert "not cataloged" in result.get("msg", "") + finally: + hosts.all.zos_data_set(name=src, state="absent") + hosts.all.zos_data_set(name=gdg_name, state="absent") + + +def test_gds_backup(ansible_zos_module): + hosts = ansible_zos_module + + try: + src_data_set = get_tmp_ds_name() + backup_data_set = get_tmp_ds_name() + + hosts.all.shell(cmd=f"dtouch -tSEQ {src_data_set}") + hosts.all.shell(cmd=f"dtouch -tGDG -L3 {backup_data_set}") + hosts.all.shell(cmd=f"decho \"{TEST_DATA}\" \"{src_data_set}\"") + + results = hosts.all.zos_encode( + src=src_data_set, + encoding={ + "from": TO_ENCODING, + "to": FROM_ENCODING, + }, + backup=True, + backup_name=f"{backup_data_set}(+1)", + ) + + backup_check = hosts.all.shell( + cmd=f"""dcat "{backup_data_set}(0)" | wc -l """ + ) + + for result in results.contacted.values(): + assert result.get("changed") is True + assert result.get("msg") is None + + for result in backup_check.contacted.values(): + assert result.get("rc") == 0 + assert int(result.get("stdout")) > 0 + + finally: + hosts.all.shell(cmd=f"""drm "{backup_data_set}(0)" """) + hosts.all.shell(cmd=f"drm {backup_data_set}") + hosts.all.shell(cmd=f"drm {src_data_set}") + + +def test_gds_backup_invalid_generation(ansible_zos_module): + hosts = ansible_zos_module + + try: + src_data_set = get_tmp_ds_name() + backup_data_set = get_tmp_ds_name() + + hosts.all.shell(cmd=f"dtouch -tSEQ {src_data_set}") + hosts.all.shell(cmd=f"dtouch -tGDG -L3 {backup_data_set}") + hosts.all.shell(cmd=f"""dtouch -tSEQ "{backup_data_set}(+1)" """) + hosts.all.shell(cmd=f"decho \"{TEST_DATA}\" \"{src_data_set}\"") + + results = hosts.all.zos_encode( + src=src_data_set, + encoding={ + "from": TO_ENCODING, + "to": FROM_ENCODING, + }, + backup=True, + backup_name=f"{backup_data_set}(0)", + ) + + for result in results.contacted.values(): + assert result.get("failed") is True + assert result.get("changed") is False + assert result.get("msg") is not None + assert "cannot be used" in result.get("msg") + + finally: + hosts.all.shell(cmd=f"""drm "{backup_data_set}(0)" """) + hosts.all.shell(cmd=f"drm {backup_data_set}") + hosts.all.shell(cmd=f"drm {src_data_set}") From b74eb3d002149c21e8d34abad09864da4161b75c Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Thu, 20 Jun 2024 11:40:20 -0600 Subject: [PATCH 418/495] [Enabler][zos_archive][zos_unarchive] Add support for GDGs and validate symbols support (#1511) * Added test symbols * Added tests for GDG and sepcial chars * Added changelog * Adding support for gds in zos_unarchive * Added GDS resolve function into archive * Added * Added test, example and modified docs * Added docs * Added support for archive into a GDS * Removed comments * Fixed test * Updated changelog * Updated changelog --- ...1511-zos_archive_unarchive-gdg-support.yml | 7 ++ plugins/module_utils/data_set.py | 27 ++++-- plugins/modules/zos_archive.py | 96 ++++++++++++------- plugins/modules/zos_unarchive.py | 23 +++-- .../modules/test_zos_archive_func.py | 96 ++++++++++++++++++- .../modules/test_zos_unarchive_func.py | 94 +++++++++++++++++- 6 files changed, 285 insertions(+), 58 deletions(-) create mode 100644 changelogs/fragments/1511-zos_archive_unarchive-gdg-support.yml diff --git a/changelogs/fragments/1511-zos_archive_unarchive-gdg-support.yml b/changelogs/fragments/1511-zos_archive_unarchive-gdg-support.yml new file mode 100644 index 000000000..e94c81ec2 --- /dev/null +++ b/changelogs/fragments/1511-zos_archive_unarchive-gdg-support.yml @@ -0,0 +1,7 @@ +minor_changes: + - zos_archive - Added support for GDG and GDS relative name notation to archive data sets. + Added support for data set names with special characters like $, /#, /- and @. + (https://github.com/ansible-collections/ibm_zos_core/pull/1511). + - zos_unarchive - Added support for data set names with special characters + like $, /#, /- and @. + (https://github.com/ansible-collections/ibm_zos_core/pull/1511). \ No newline at end of file diff --git a/plugins/module_utils/data_set.py b/plugins/module_utils/data_set.py index 13cdf9af2..294debe64 100644 --- a/plugins/module_utils/data_set.py +++ b/plugins/module_utils/data_set.py @@ -1030,7 +1030,7 @@ def create( original_args = locals() formatted_args = DataSet._build_zoau_args(**original_args) try: - datasets.create(**formatted_args) + data_set = datasets.create(**formatted_args) except exceptions._ZOAUExtendableException as create_exception: raise DatasetCreateError( raw_name if raw_name else name, @@ -1046,9 +1046,8 @@ def create( raw_name if raw_name else name, msg="Unable to verify the data set was created. Received DatasetVerificationError from ZOAU.", ) - # With ZOAU 1.3 we switched from getting a ZOAUResponse obj to a Dataset obj, previously we returned - # response.rc now we just return 0 if nothing failed - return 0 + changed = data_set is not None + return changed @staticmethod def delete(name): @@ -1896,7 +1895,7 @@ def __init__( # with ZOAU self.record_format = None - def create(self): + def create(self, tmp_hlq=None, replace=True, force=False): """Creates the data set in question. Returns @@ -1907,7 +1906,6 @@ def create(self): arguments = { "name" : self.name, "raw_name" : self.raw_name, - "replace" : self.replace, "type" : self.data_set_type, "space_primary" : self.space_primary, "space_secondary" : self.space_secondary, @@ -1922,11 +1920,20 @@ def create(self): "sms_data_class" : self.sms_data_class, "sms_management_class" : self.sms_management_class, "volumes" : self.volumes, - "tmp_hlq" : self.tmp_hlq, - "force" : self.force, + "tmp_hlq" : tmp_hlq, + "force" : force, } - DataSet.create(**arguments) - self.set_state("present") + formatted_args = DataSet._build_zoau_args(**arguments) + changed = False + if DataSet.data_set_exists(self.name): + DataSet.delete(self.name) + changed = True + zoau_data_set = datasets.create(**formatted_args) + if zoau_data_set is not None: + self.set_state("present") + self.name = zoau_data_set.name + return True + return changed def ensure_present(self, tmp_hlq=None, replace=False, force=False): """ Make sure that the data set is created or fail creating it. diff --git a/plugins/modules/zos_archive.py b/plugins/modules/zos_archive.py index 713685bf9..08e2111a9 100644 --- a/plugins/modules/zos_archive.py +++ b/plugins/modules/zos_archive.py @@ -12,7 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. -from __future__ import (absolute_import, division, print_function) +from __future__ import absolute_import, division, print_function + __metaclass__ = type DOCUMENTATION = r''' @@ -36,6 +37,7 @@ - List of names or globs of UNIX System Services (USS) files, PS (sequential data sets), PDS, PDSE to compress or archive. - USS file paths should be absolute paths. + - GDS relative notation is supported. - "MVS data sets supported types are: C(SEQ), C(PDS), C(PDSE)." - VSAMs are not supported. type: list @@ -123,9 +125,9 @@ required: true exclude: description: - - Remote absolute path, glob, or list of paths, globs or data set name - patterns for the file, files or data sets to exclude from src list - and glob expansion. + - Remote absolute path, glob, or list of paths, globs, data set name + patterns or generation data sets (GDSs) in relative notation for the file, + files or data sets to exclude from src list and glob expansion. - "Patterns (wildcards) can contain one of the following, `?`, `*`." - "* matches everything." - "? matches any single character." @@ -331,7 +333,7 @@ name: tar # Archive multiple files -- name: Compress list of files into a zip +- name: Archive list of files into a zip zos_archive: src: - /tmp/archive/foo.txt @@ -341,7 +343,7 @@ name: zip # Archive one data set into terse -- name: Compress data set into a terse +- name: Archive data set into a terse zos_archive: src: "USER.ARCHIVE.TEST" dest: "USER.ARCHIVE.RESULT.TRS" @@ -349,7 +351,7 @@ name: terse # Use terse with different options -- name: Compress data set into a terse, specify pack algorithm and use adrdssu +- name: Archive data set into a terse, specify pack algorithm and use adrdssu zos_archive: src: "USER.ARCHIVE.TEST" dest: "USER.ARCHIVE.RESULT.TRS" @@ -360,13 +362,34 @@ use_adrdssu: true # Use a pattern to store -- name: Compress data set pattern using xmit +- name: Archive data set pattern using xmit zos_archive: src: "USER.ARCHIVE.*" exclude_sources: "USER.ARCHIVE.EXCLUDE.*" dest: "USER.ARCHIVE.RESULT.XMIT" format: name: xmit + +- name: Archive multiple GDSs into a terse + zos_archive: + src: + - "USER.GDG(0)" + - "USER.GDG(-1)" + - "USER.GDG(-2)" + dest: "USER.ARCHIVE.RESULT.TRS" + format: + name: terse + format_options: + use_adrdssu: True + +- name: Archive multiple data sets into a new GDS + zos_archive: + src: "USER.ARCHIVE.*" + dest: "USER.GDG(+1)" + format: + name: terse + format_options: + use_adrdssu: True ''' RETURN = r''' @@ -415,27 +438,22 @@ returned: always ''' -from ansible.module_utils.basic import AnsibleModule -from ansible.module_utils._text import to_bytes -from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import ( - better_arg_parser, - data_set, - validation, - mvs_cmd, -) -from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import ( - ZOAUImportError, -) -import os -import tarfile -import zipfile import abc import glob -import re import math +import os +import re +import tarfile import traceback +import zipfile from hashlib import sha256 +from ansible.module_utils._text import to_bytes +from ansible.module_utils.basic import AnsibleModule +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import ( + better_arg_parser, data_set, mvs_cmd, validation) +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import \ + ZOAUImportError try: from zoautil_py import datasets @@ -1278,11 +1296,17 @@ def expand_mvs_paths(self, paths): """ expanded_path = [] for path in paths: + e_path = [] if '*' in path: # list_dataset_names returns a list of data set names or empty. e_paths = datasets.list_dataset_names(path) else: e_paths = [path] + + # resolve GDS relative names + for index, e_path in enumerate(e_paths): + if data_set.DataSet.is_gds_relative_name(e_path): + e_paths[index] = data_set.DataSet.resolve_gds_absolute_name(e_path) expanded_path.extend(e_paths) return expanded_path @@ -1415,17 +1439,18 @@ def archive_targets(self): self.module.fail_json( msg="To archive multiple source data sets, you must use option 'use_adrdssu=True'.") source = self.targets[0] - # dest = self.create_dest_ds(self.dest) - dest, changed = self._create_dest_data_set( + dataset = data_set.MVSDataSet( name=self.dest, - replace=True, - type='seq', + data_set_type='seq', record_format='fb', record_length=AMATERSE_RECORD_LENGTH, space_primary=self.dest_data_set.get("space_primary"), - space_type=self.dest_data_set.get("space_type")) + space_type=self.dest_data_set.get("space_type") + ) + changed = dataset.create(replace=True) self.changed = self.changed or changed - self.add(source, dest) + self.dest = dataset.name + self.add(source, self.dest) self.clean_environment(data_sets=self.tmp_data_sets) @@ -1509,16 +1534,19 @@ def archive_targets(self): msg="To archive multiple source data sets, you must use option 'use_adrdssu=True'.") source = self.sources[0] # dest = self.create_dest_ds(self.dest) - dest, changed = self._create_dest_data_set( + dataset = data_set.MVSDataSet( name=self.dest, - replace=True, - type='seq', + data_set_type='seq', record_format='fb', record_length=XMIT_RECORD_LENGTH, space_primary=self.dest_data_set.get("space_primary"), - space_type=self.dest_data_set.get("space_type")) + space_type=self.dest_data_set.get("space_type") + ) + changed = dataset.create(replace=True) + self.changed = self.changed or changed self.changed = self.changed or changed - self.add(source, dest) + self.dest = dataset.name + self.add(source, self.dest) self.clean_environment(data_sets=self.tmp_data_sets) def get_error_hint(self, output): diff --git a/plugins/modules/zos_unarchive.py b/plugins/modules/zos_unarchive.py index cb587dc0e..8a5a53bc9 100644 --- a/plugins/modules/zos_unarchive.py +++ b/plugins/modules/zos_unarchive.py @@ -35,7 +35,7 @@ - The remote absolute path or data set of the archive to be uncompressed. - I(src) can be a USS file or MVS data set name. - USS file paths should be absolute paths. - - MVS data sets supported types are C(SEQ), C(PDS), C(PDSE). + - GDS relative names are supported C(e.g. USER.GDG(-1)). type: str required: true format: @@ -145,6 +145,7 @@ description: - A list of directories, files or data set names to extract from the archive. + - GDS relative names are supported C(e.g. USER.GDG(-1)). - When C(include) is set, only those files will we be extracted leaving the remaining files in the archive. - Mutually exclusive with exclude. @@ -155,6 +156,7 @@ description: - List the directory and file or data set names that you would like to exclude from the unarchive action. + - GDS relative names are supported C(e.g. USER.GDG(-1)). - Mutually exclusive with include. type: list elements: str @@ -349,6 +351,13 @@ - USER.ARCHIVE.TEST1 - USER.ARCHIVE.TEST2 +# Unarchive a GDS +- name: Unarchive a terse data set and excluding data sets from unpacking. + zos_unarchive: + src: "USER.ARCHIVE(0)" + format: + name: terse + # List option - name: List content from XMIT zos_unarchive: @@ -623,6 +632,8 @@ def __init__(self, module): self.dest_data_set = module.params.get("dest_data_set") self.dest_data_set = dict() if self.dest_data_set is None else self.dest_data_set self.source_size = 0 + if data_set.DataSet.is_gds_relative_name(self.src): + self.src = data_set.DataSet.resolve_gds_absolute_name(self.src) def dest_type(self): return "MVS" @@ -709,14 +720,14 @@ def _create_dest_data_set( def _get_include_data_sets_cmd(self): include_cmd = "INCL( " for include_ds in self.include: - include_cmd += " '{0}', - \n".format(include_ds) + include_cmd += " '{0}', - \n".format(include_ds.upper()) include_cmd += " ) - \n" return include_cmd def _get_exclude_data_sets_cmd(self): exclude_cmd = "EXCL( - \n" for exclude_ds in self.exclude: - exclude_cmd += " '{0}', - \n".format(exclude_ds) + exclude_cmd += " '{0}', - \n".format(exclude_ds.upper()) exclude_cmd += " ) - \n" return exclude_cmd @@ -1143,13 +1154,13 @@ def run_module(): module.fail_json(msg="Parameter verification failed", stderr=str(err)) unarchive = get_unarchive_handler(module) + if not unarchive.src_exists(): + module.fail_json(msg="{0} does not exists, please provide a valid src.".format(module.params.get("src"))) + if unarchive.list: unarchive.list_archive_content() module.exit_json(**unarchive.result) - if not unarchive.src_exists(): - module.fail_json(msg="{0} does not exists, please provide a valid src.".format(module.params.get("src"))) - unarchive.extract_src() if unarchive.dest_unarchived() and unarchive.dest_type() == "USS": diff --git a/tests/functional/modules/test_zos_archive_func.py b/tests/functional/modules/test_zos_archive_func.py index e01994138..92b5ab4fd 100644 --- a/tests/functional/modules/test_zos_archive_func.py +++ b/tests/functional/modules/test_zos_archive_func.py @@ -705,8 +705,8 @@ def test_mvs_archive_multiple_data_sets_with_exclusion(ansible_zos_module, forma def test_mvs_archive_multiple_data_sets_and_remove(ansible_zos_module, format, data_set): try: hosts = ansible_zos_module - archive_data_set = get_tmp_ds_name() - src_data_set = get_tmp_ds_name(5, 4) + archive_data_set = get_tmp_ds_name(symbols=True) + src_data_set = get_tmp_ds_name(5, 4, symbols=True) HLQ = "ANSIBLE" target_ds_list = create_multiple_data_sets(ansible_zos_module=hosts, base_name=src_data_set, @@ -751,8 +751,8 @@ def test_mvs_archive_multiple_data_sets_and_remove(ansible_zos_module, format, d assert ds.get("name") in result.get("archived") assert ds.get("name") not in c_result.get("stdout") finally: - hosts.all.shell(cmd="drm {0}*".format(src_data_set)) - hosts.all.zos_data_set(name=archive_data_set, state="absent") + hosts.all.shell(cmd="drm {0}.*".format(HLQ)) + @pytest.mark.ds @pytest.mark.parametrize( @@ -917,3 +917,91 @@ def test_mvs_archive_single_dataset_force_lock(ansible_zos_module, format, data_ hosts.all.shell(cmd='rm -r /tmp/disp_shr') hosts.all.zos_data_set(name=src_data_set, state="absent") hosts.all.zos_data_set(name=archive_data_set, state="absent") + + +@pytest.mark.ds +@pytest.mark.parametrize( + "format", [ + "terse", + "xmit", + ]) +@pytest.mark.parametrize("dstype", ["seq", "pds", "pdse"]) +def test_gdg_archive(ansible_zos_module, dstype, format): + try: + HLQ = "ANSIBLE" + hosts = ansible_zos_module + data_set_name = get_tmp_ds_name(symbols=True) + archive_data_set = get_tmp_ds_name(symbols=True) + results = hosts.all.zos_data_set(name=data_set_name, state="present", type="gdg", limit=3) + for result in results.contacted.values(): + assert result.get("changed") is True + assert result.get("module_stderr") is None + results = hosts.all.zos_data_set(name=f"{data_set_name}(+1)", state="present", type=dstype) + for result in results.contacted.values(): + assert result.get("changed") is True + assert result.get("module_stderr") is None + results = hosts.all.zos_data_set(name=f"{data_set_name}(+1)", state="present", type=dstype) + for result in results.contacted.values(): + assert result.get("changed") is True + assert result.get("module_stderr") is None + format_dict = dict(name=format, format_options=dict()) + if format == "terse": + format_dict["format_options"] = dict(terse_pack="spack") + format_dict["format_options"].update(use_adrdssu=True) + archive_result = hosts.all.zos_archive( + src=[f"{data_set_name}(0)",f"{data_set_name}(-1)" ], + dest=archive_data_set, + format=format_dict, + ) + for result in archive_result.contacted.values(): + assert result.get("changed") is True + assert result.get("dest") == archive_data_set + assert f"{data_set_name}.G0001V00" in result.get("archived") + assert f"{data_set_name}.G0002V00" in result.get("archived") + cmd_result = hosts.all.shell(cmd = "dls {0}.*".format(HLQ)) + for c_result in cmd_result.contacted.values(): + assert archive_data_set in c_result.get("stdout") + finally: + hosts.all.shell(cmd=f"drm {HLQ}.*") + + +@pytest.mark.ds +@pytest.mark.parametrize( + "format", [ + "terse", + "xmit", + ]) +@pytest.mark.parametrize("dstype", ["seq", "pds", "pdse"]) +def test_archive_into_gds(ansible_zos_module, dstype, format): + try: + HLQ = "ANSIBLE" + hosts = ansible_zos_module + data_set_name = get_tmp_ds_name(symbols=True) + archive_data_set = get_tmp_ds_name(symbols=True) + results = hosts.all.zos_data_set( + batch = [ + {"name":archive_data_set, "state":"present", "type":"gdg", "limit":3}, + {"name":data_set_name, "state":"present", "type":dstype} + ] + ) + for result in results.contacted.values(): + assert result.get("changed") is True + assert result.get("module_stderr") is None + format_dict = dict(name=format, format_options=dict()) + if format == "terse": + format_dict["format_options"] = dict(terse_pack="spack") + format_dict["format_options"].update(use_adrdssu=True) + archive_result = hosts.all.zos_archive( + src=data_set_name, + dest=f"{archive_data_set}(+1)", + format=format_dict, + ) + for result in archive_result.contacted.values(): + assert result.get("changed") is True + assert data_set_name in result.get("archived") + cmd_result = hosts.all.shell(cmd = "dls {0}.*".format(HLQ)) + for c_result in cmd_result.contacted.values(): + assert archive_data_set in c_result.get("stdout") + finally: + hosts.all.shell(cmd=f"drm {HLQ}.*") + diff --git a/tests/functional/modules/test_zos_unarchive_func.py b/tests/functional/modules/test_zos_unarchive_func.py index 37697da80..ea5f0521d 100644 --- a/tests/functional/modules/test_zos_unarchive_func.py +++ b/tests/functional/modules/test_zos_unarchive_func.py @@ -571,8 +571,8 @@ def test_mvs_unarchive_single_data_set_use_adrdssu(ansible_zos_module, format, d def test_mvs_unarchive_multiple_data_set_use_adrdssu(ansible_zos_module, format, data_set): try: hosts = ansible_zos_module - MVS_DEST_ARCHIVE = get_tmp_ds_name() - DATASET = get_tmp_ds_name(3,3) + MVS_DEST_ARCHIVE = get_tmp_ds_name(symbols=True) + DATASET = get_tmp_ds_name(3,3,symbols=True) HLQ ="ANSIBLE" target_ds_list = create_multiple_data_sets(ansible_zos_module=hosts, base_name=DATASET, @@ -593,7 +593,7 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu(ansible_zos_module, format, # Write some content into src test_line = "this is a test line" for ds in ds_to_write: - hosts.all.shell(cmd="decho '{0}' \"{1}\"".format(test_line, ds.get("name"))) + hosts.all.shell(cmd="decho '{0}' \"{1}\"".format(test_line, ds.get("name").replace('$', '\\$'))) format_dict = dict(name=format, format_options=dict()) if format == "terse": @@ -605,7 +605,7 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu(ansible_zos_module, format, format=format_dict, ) # remote data_sets from host - hosts.all.shell(cmd="drm {0}*".format(DATASET)) + hosts.all.shell(cmd="drm {0}*".format(DATASET.replace("$", "/$"))) if format == "terse": del format_dict["format_options"]["terse_pack"] @@ -1099,3 +1099,89 @@ def test_mvs_unarchive_fail_copy_remote_src(ansible_zos_module): assert result.get("failed", False) is True finally: tmp_folder.cleanup() + +@pytest.mark.ds +@pytest.mark.parametrize( + "format", [ + "terse", + "xmit", + ]) +@pytest.mark.parametrize("dstype", ["seq", "pds", "pdse"]) +def test_gdg_unarchive(ansible_zos_module, dstype, format): + try: + HLQ = "ANSIBLE" + hosts = ansible_zos_module + data_set_name = get_tmp_ds_name(symbols=True) + archive_data_set = get_tmp_ds_name(symbols=True) + results = hosts.all.zos_data_set( + batch = [ + { "name":data_set_name, "state":"present", "type":"gdg", "limit":3}, + { "name":f"{data_set_name}(+1)", "state":"present", "type":dstype}, + { "name":f"{data_set_name}(+1)", "state":"present", "type":dstype}, + ] + ) + for result in results.contacted.values(): + assert result.get("changed") is True + assert result.get("module_stderr") is None + + target_ds_list = [f"{data_set_name}.G0001V00", f"{data_set_name}.G0002V00"] + ds_to_write = target_ds_list + if dstype in ["pds", "pdse"]: + target_member_list = [] + for ds in target_ds_list: + target_member_list.extend( + create_multiple_members(ansible_zos_module=hosts, + pds_name=ds, + member_base_name="MEM", + n=2 + ) + ) + ds_to_write = target_member_list + # Write some content into src + test_line = "this is a test line" + for ds in ds_to_write: + hosts.all.shell(cmd="decho '{0}' \"{1}\"".format(test_line, ds)) + + format_dict = dict(name=format, format_options=dict()) + if format == "terse": + format_dict["format_options"] = dict(terse_pack="spack") + format_dict["format_options"].update(use_adrdssu=True) + if format == "terse": + del format_dict["format_options"]["terse_pack"] + archive_result = hosts.all.zos_archive( + src=[f"{data_set_name}(0)",f"{data_set_name}(-1)" ], + dest=archive_data_set, + format=format_dict, + ) + for result in archive_result.contacted.values(): + assert result.get("changed") is True + assert result.get("dest") == archive_data_set + assert f"{data_set_name}.G0001V00" in result.get("archived") + assert f"{data_set_name}.G0002V00" in result.get("archived") + cmd_result = hosts.all.shell(cmd = "dls {0}.*".format(HLQ)) + for c_result in cmd_result.contacted.values(): + assert archive_data_set in c_result.get("stdout") + + hosts.all.zos_data_set( + batch=[ + {"name": f"{data_set_name}(-1)", "state": "absent", "type": "gdg"}, + {"name": f"{data_set_name}(0)", "state": "absent", "type": "gdg"}, + ] + ) + unarchive_result = hosts.all.zos_unarchive( + src=archive_data_set, + format=format_dict, + remote_src=True + ) + for result in unarchive_result.contacted.values(): + assert result.get("changed") is True + assert len(result.get("missing")) == 0 + assert f"{data_set_name}.G0001V00" in result.get("targets") + assert f"{data_set_name}.G0002V00" in result.get("targets") + cmd_result = hosts.all.shell(cmd = "dls {0}.*".format(HLQ)) + for c_result in cmd_result.contacted.values(): + assert f"{data_set_name}.G0001V00" in c_result.get("stdout") + assert f"{data_set_name}.G0002V00" in c_result.get("stdout") + finally: + hosts.all.shell(cmd=f"drm {HLQ}.*") + From 65c5588b2a99a78355f0651291032d31a3516eba Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Fri, 28 Jun 2024 10:39:54 -0600 Subject: [PATCH 419/495] Merged cherry pick (#1548) --- .ansible-lint | 1 + CHANGELOG.rst | 348 +----------------- README.md | 110 +++--- changelogs/changelog.yaml | 14 + changelogs/fragments/v1.10.0_summary.yml | 6 + docs/source/modules/zos_copy.rst | 4 + docs/source/modules/zos_operator.rst | 2 +- docs/source/modules/zos_script.rst | 6 + docs/source/release_notes.rst | 20 +- .../source/resources/releases_maintenance.rst | 14 +- galaxy.yml | 2 +- meta/ibm_zos_core_meta.yml | 2 +- plugins/modules/zos_copy.py | 9 + plugins/modules/zos_operator.py | 2 +- plugins/modules/zos_script.py | 6 + 15 files changed, 118 insertions(+), 428 deletions(-) create mode 100644 changelogs/fragments/v1.10.0_summary.yml diff --git a/.ansible-lint b/.ansible-lint index 9d40faf3b..562033475 100644 --- a/.ansible-lint +++ b/.ansible-lint @@ -20,6 +20,7 @@ exclude_paths: - changelogs/ - collections/ - docs/ + - importer_result.json - scripts/ - test_config.yml - tests/*.ini diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 74556dc14..9efc1ea61 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -4,13 +4,13 @@ ibm.ibm\_zos\_core Release Notes .. contents:: Topics -v1.10.0-beta.1 -============== +v1.10.0 +======= Release Summary --------------- -Release Date: '2024-05-08' +Release Date: '2024-06-11' This changelog describes all changes made to the modules and plugins included in this collection. The release date is the date the changelog is created. For additional details such as required dependencies and availability review @@ -237,348 +237,6 @@ New Modules - ibm.ibm_zos_core.zos_archive - Archive files and data sets on z/OS. - ibm.ibm_zos_core.zos_unarchive - Unarchive files and data sets in z/OS. -v1.9.0 -====== - -Release Summary ---------------- - -Release Date: '2024-03-11' -This changelog describes all changes made to the modules and plugins included -in this collection. The release date is the date the changelog is created. -For additional details such as required dependencies and availability review -the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__ - -Major Changes -------------- - -- zos_job_submit - when job statuses were read, were limited to AC (active), CC (completed normally), ABEND (ended abnormally) and ? (error unknown), SEC (security error), JCLERROR (job had a jcl error). Now the additional statuses are supported, CANCELLED (job was cancelled), CAB (converter abend), CNV (converter error), SYS (system failure) and FLU (job was flushed). (https://github.com/ansible-collections/ibm_zos_core/pull/1283). - -Minor Changes -------------- - -- zos_apf - Improves exception handling if there is a failure parsing the command response when operation selected is list. (https://github.com/ansible-collections/ibm_zos_core/pull/1036). -- zos_copy - Improve zos_copy performance when copying multiple members from one PDS/E to another. (https://github.com/ansible-collections/ibm_zos_core/pull/1176). -- zos_job_output - When passing a job ID and owner the module take as mutually exclusive. Change now allows the use of a job ID and owner at the same time. (https://github.com/ansible-collections/ibm_zos_core/pull/1078). -- zos_job_submit - Improve error messages in zos_job_submit to be clearer. (https://github.com/ansible-collections/ibm_zos_core/pull/1074). -- zos_job_submit - The module had undocumented parameter and uses as temporary file when the location of the file is LOCAL. Change now uses the same name as the src for the temporary file removing the addition of tmp_file to the arguments. (https://github.com/ansible-collections/ibm_zos_core/pull/1091). -- zos_job_submit - The module handling ZOAU import errors obscured the original traceback when an import error ocurred. Fix now passes correctly the context to the user. (https://github.com/ansible-collections/ibm_zos_core/pull/1091). -- zos_mvs_raw - when using the dd_input content option for instream-data, if the content was not properly indented according to the program which is generally a blank in columns 1 & 2, those columns would be truncated. Now, when setting instream-data, the module will ensure that all lines contain a blank in columns 1 and 2 and add blanks when not present while retaining a maximum length of 80 columns for any line. This is true for all content types; string, list of strings and when using a YAML block indicator. (https://github.com/ansible-collections/ibm_zos_core/pull/1057). - zos_mvs_raw - no examples were included with the module that demonstrated using a YAML block indicator, this now includes examples using a YAML block indicator. -- zos_tso_command - add example for executing explicitly a REXX script from a data set. (https://github.com/ansible-collections/ibm_zos_core/pull/1065). - -Bugfixes --------- - -- module_utils/job.py - job output containing non-printable characters would crash modules. Fix now handles the error gracefully and returns a message to the user inside `content` of the `ddname` that failed. (https://github.com/ansible-collections/ibm_zos_core/pull/1288). -- zos_apf - When operation=list was selected and more than one data set entry was fetched, the module only returned one data set. Fix now returns the complete list. (https://github.com/ansible-collections/ibm_zos_core/pull/1236). -- zos_copy - When copying an executable data set with aliases and destination did not exist, destination data set was created with wrong attributes. Fix now creates destination data set with the same attributes as the source. (https://github.com/ansible-collections/ibm_zos_core/pull/1066). -- zos_copy - When performing a copy operation to an existing file, the copied file resulted in having corrupted contents. Fix now implements a workaround to not use the specific copy routine that corrupts the file contents. (https://github.com/ansible-collections/ibm_zos_core/pull/1064). -- zos_data_set - Fixes a small parsing bug in module_utils/data_set function which extracts volume serial(s) from a LISTCAT command output. Previously a leading '-' was left behind for volser strings under 6 chars. (https://github.com/ansible-collections/ibm_zos_core/pull/1247). -- zos_job_output - When passing a job ID or name less than 8 characters long, the module sent the full stack trace as the module's message. Change now allows the use of a shorter job ID or name, as well as wildcards. (https://github.com/ansible-collections/ibm_zos_core/pull/1078). -- zos_job_query - The module handling ZOAU import errors obscured the original traceback when an import error ocurred. Fix now passes correctly the context to the user. (https://github.com/ansible-collections/ibm_zos_core/pull/1042). -- zos_job_query - When passing a job ID or name less than 8 characters long, the module sent the full stack trace as the module's message. Change now allows the use of a shorter job ID or name, as well as wildcards. (https://github.com/ansible-collections/ibm_zos_core/pull/1078). -- zos_job_submit - Was ignoring the default value for location=DATA_SET, now when location is not specified it will default to DATA_SET. (https://github.com/ansible-collections/ibm_zos_core/pull/1120). -- zos_job_submit - when a JCL error occurred, the ret_code[msg_code] contained JCLERROR followed by an integer where the integer appeared to be a reason code when actually it is a multi line marker used to coordinate errors spanning more than one line. Now when a JCLERROR occurs, only the JCLERROR is returned for property ret_code[msg_code]. (https://github.com/ansible-collections/ibm_zos_core/pull/1283). -- zos_job_submit - when a response was returned, it contained an undocumented property; ret_code[msg_text]. Now when a response is returned, it correctly returns property ret_code[msg_txt]. (https://github.com/ansible-collections/ibm_zos_core/pull/1283). -- zos_job_submit - when typrun=copy was used in JCL it would fail the module with an improper message and error condition. While this case continues to be considered a failure, the message has been corrected and it fails under the condition that not enough time has been added to the modules execution. (https://github.com/ansible-collections/ibm_zos_core/pull/1283). -- zos_job_submit - when typrun=hold was used in JCL it would fail the module with an improper message and error condition. While this case continues to be considered a failure, the message has been corrected and it fails under the condition that not enough time has been added to the modules execution. (https://github.com/ansible-collections/ibm_zos_core/pull/1283). -- zos_job_submit - when typrun=jchhold was used in JCL it would fail the module with an improper message and error condition. While this case continues to be considered a failure, the message has been corrected and it fails under the condition that not enough time has been added to the modules execution. (https://github.com/ansible-collections/ibm_zos_core/pull/1283). -- zos_job_submit - when typrun=scan was used in JCL, it would fail the module. Now typrun=scan no longer fails the module and an appropriate message is returned with appropriate return code values. (https://github.com/ansible-collections/ibm_zos_core/pull/1283). -- zos_job_submit - when wait_time_s was used, the duration would run approximately 5 second longer than reported in the duration. Now the when duration is returned, it is the actual accounting from when the job is submitted to when the module reads the job output. (https://github.com/ansible-collections/ibm_zos_core/pull/1283). -- zos_operator - The module handling ZOAU import errors obscured the original traceback when an import error ocurred. Fix now passes correctly the context to the user. (https://github.com/ansible-collections/ibm_zos_core/pull/1042). -- zos_unarchive - Using a local file with a USS format option failed when sending to remote because dest_data_set option had an empty dictionary. Fix now leaves dest_data_set as None when using a USS format option. (https://github.com/ansible-collections/ibm_zos_core/pull/1045). -- zos_unarchive - When unarchiving USS files, the module left temporary files on the remote. Change now removes temporary files. (https://github.com/ansible-collections/ibm_zos_core/pull/1073). - -v1.8.0 -====== - -Release Summary ---------------- - -Release Date: '2023-12-08' -This changelog describes all changes made to the modules and plugins included -in this collection. The release date is the date the changelog is created. -For additional details such as required dependencies and availability review -the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__ - -Minor Changes -------------- - -- module_utils/template - Add validation into path joins to detect unauthorized path traversals. (https://github.com/ansible-collections/ibm_zos_core/pull/1029) -- zos_archive - Add validation into path joins to detect unauthorized path traversals. (https://github.com/ansible-collections/ibm_zos_core/pull/1029) -- zos_archive - Enhanced test cases to use test lines the same length of the record length. (https://github.com/ansible-collections/ibm_zos_core/pull/965) -- zos_copy - Add validation into path joins to detect unauthorized path traversals. (https://github.com/ansible-collections/ibm_zos_core/pull/962) -- zos_copy - Add new option `force_lock` that can copy into data sets that are already in use by other processes (DISP=SHR). User needs to use with caution because this is subject to race conditions and can lead to data loss. (https://github.com/ansible-collections/ibm_zos_core/pull/980). -- zos_copy - includes a new option `executable` that enables copying of executables such as load modules or program objects to both USS and partitioned data sets. When the `dest` option contains a non-existent data set, `zos_copy` will create a data set with the appropriate attributes for an executable. (https://github.com/ansible-collections/ibm_zos_core/pull/804) -- zos_copy - introduces a new option 'aliases' to enable preservation of member aliases when copying data to partitioned data sets (PDS) destinations from USS or other PDS sources. Copying aliases of text based members to/from USS is not supported. (https://github.com/ansible-collections/ibm_zos_core/pull/1014) -- zos_fetch - Add validation into path joins to detect unauthorized path traversals. (https://github.com/ansible-collections/ibm_zos_core/pull/962) -- zos_job_submit - Change action plugin call from copy to zos_copy. (https://github.com/ansible-collections/ibm_zos_core/pull/951) -- zos_job_submit - Previous code did not return output, but still requested job data from the target system. This changes to honor return_output=false by not querying the job dd segments at all. (https://github.com/ansible-collections/ibm_zos_core/pull/1063). -- zos_operator - Changed system to call 'wait=true' parameter to zoau call. Requires zoau 1.2.5 or later. (https://github.com/ansible-collections/ibm_zos_core/pull/976) -- zos_operator_action_query - Add a max delay of 5 seconds on each part of the operator_action_query. Requires zoau 1.2.5 or later. (https://github.com/ansible-collections/ibm_zos_core/pull/976) -- zos_script - Add support for remote_tmp from the Ansible configuration to setup where temporary files will be created, replacing the module option tmp_path. (https://github.com/ansible-collections/ibm_zos_core/pull/1068). -- zos_tso_command - Add example for executing explicitly a REXX script from a data set. (https://github.com/ansible-collections/ibm_zos_core/pull/1072). -- zos_unarchive - Add validation into path joins to detect unauthorized path traversals. (https://github.com/ansible-collections/ibm_zos_core/pull/1029) -- zos_unarchive - Enhanced test cases to use test lines the same length of the record length. (https://github.com/ansible-collections/ibm_zos_core/pull/965) - -Deprecated Features -------------------- - -- zos_blockinfile debug - is deprecated in favor of 'as_json' (https://github.com/ansible-collections/ibm_zos_core/pull/904). - -Bugfixes --------- - -- zos_copy - Update option limit to include LIBRARY as dest_dataset/suboption value. Documentation updated to reflect this change. (https://github.com/ansible-collections/ibm_zos_core/pull/968). -- zos_copy - When copying an executable data set from controller to managed node, copy operation failed with an encoding error. Fix now avoids encoding when executable option is selected. (https://github.com/ansible-collections/ibm_zos_core/pull/1079). -- zos_copy - When copying an executable data set with aliases and destination did not exist, destination data set was created with wrong attributes. Fix now creates destination data set with the same attributes as the source. (https://github.com/ansible-collections/ibm_zos_core/pull/1067). -- zos_copy - When performing a copy operation to an existing file, the copied file resulted in having corrupted contents. Fix now implements a workaround to not use the specific copy routine that corrupts the file contents. (https://github.com/ansible-collections/ibm_zos_core/pull/1069). -- zos_job_submit - Temporary files were created in tmp directory. Fix now ensures the deletion of files every time the module run. (https://github.com/ansible-collections/ibm_zos_core/pull/951) -- zos_job_submit - The last line of the jcl was missing in the input. Fix now ensures the presence of the full input in job_submit. (https://github.com/ansible-collections/ibm_zos_core/pull/952) -- zos_lineinfile - A duplicate entry was made even if line was already present in the target file. Fix now prevents a duplicate entry if the line already exists in the target file. (https://github.com/ansible-collections/ibm_zos_core/pull/916) -- zos_operator - The last line of the operator was missing in the response of the module. The fix now ensures the presence of the full output of the operator. https://github.com/ansible-collections/ibm_zos_core/pull/918) -- zos_operator - The module was ignoring the wait time argument. The module now passes the wait time argument to ZOAU. (https://github.com/ansible-collections/ibm_zos_core/pull/1063). -- zos_operator_action_query - The module was ignoring the wait time argument. The module now passes the wait time argument to ZOAU. (https://github.com/ansible-collections/ibm_zos_core/pull/1063). -- zos_unarchive - When zos_unarchive fails during unpack either with xmit or terse it does not clean the temporary data sets created. Fix now removes the temporary data sets. (https://github.com/ansible-collections/ibm_zos_core/pull/1054). - -Known Issues ------------- - -- Several modules have reported UTF8 decoding errors when interacting with results that contain non-printable UTF8 characters in the response. This occurs when a module receives content that does not correspond to a UTF-8 value. These include modules `zos_job_submit`, `zos_job_output`, `zos_operator_action_query` but are not limited to this list. This will be addressed in `ibm_zos_core` version 1.10.0-beta.1. Each case is unique, some options to work around the error are below. - Specify that the ASA assembler option be enabled to instruct the assembler to use ANSI control characters instead of machine code control characters. - Add `ignore_errors:true` to the playbook task so the task error will not fail the playbook. - If the error is resulting from a batch job, add `ignore_errors:true` to the task and capture the output into a variable and extract the job ID with a regular expression and then use `zos_job_output` to display the DD without the non-printable character such as the DD `JESMSGLG`. (https://github.com/ansible-collections/ibm_zos_core/issues/677) (https://github.com/ansible-collections/ibm_zos_core/issues/776) (https://github.com/ansible-collections/ibm_zos_core/issues/972) -- With later versions of `ansible-core` used with `ibm_zos_core` collection a warning has started to appear "Module "ansible.builtin.command" returned non UTF-8 data in the JSON response" that is currently being reviewed. There are no recommendations at this point. (https://github.com/ansible-collections/ibm_zos_core/issues/983) - -New Modules ------------ - -- ibm.ibm_zos_core.zos_script - Run scripts in z/OS - -v1.7.0 -====== - -Release Summary ---------------- - -Release Date: '2023-10-09' -This changelog describes all changes made to the modules and plugins included -in this collection. The release date is the date the changelog is created. -For additional details such as required dependencies and availability review -the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__ - -Major Changes -------------- - -- zos_copy - Previously, backups were taken when force was set to false; whether or not a user specified this operation which caused allocation issues with space and permissions. This removes the automatic backup performed and reverts to the original logic in that backups must be initiated by the user. (https://github.com/ansible-collections/ibm_zos_core/pull/896) - -Minor Changes -------------- - -- Add support for Jinja2 templates in zos_copy and zos_job_submit when using local source files. (https://github.com/ansible-collections/ibm_zos_core/pull/667) -- zos_archive - If destination data set space is not provided then the module computes it based on the src list and/or expanded src list based on pattern provided. (https://github.com/ansible-collections/ibm_zos_core/pull/930). -- zos_archive - When xmit faces a space error in xmit operation because of dest or log data set are filled raises an appropriate error hint. (https://github.com/ansible-collections/ibm_zos_core/pull/930). -- zos_copy - Adds block_size, record_format, record_length, space_primary, space_secondary, space_type and type in the return output when the destination data set does not exist and has to be created by the module. (https://github.com/ansible-collections/ibm_zos_core/pull/773) -- zos_data_set - record format = 'F' has been added to support 'fixed' block records. This allows records that can use the entire block. (https://github.com/ansible-collections/ibm_zos_core/pull/821) -- zos_job_output - zoau added 'program_name' to their field output starting with v1.2.4. This enhancement checks for that version and passes the extra column through. (https://github.com/ansible-collections/ibm_zos_core/pull/841) -- zos_job_query - Adds new fields job_class, svc_class, priority, asid, creation_datetime, and queue_position to the return output when querying or submitting a job. Available when using ZOAU v1.2.3 or greater. (https://github.com/ansible-collections/ibm_zos_core/pull/778) -- zos_job_query - unnecessary calls were made to find a jobs DDs that incurred unnecessary overhead. This change removes those resulting in a performance increase in job related queries. (https://github.com/ansible-collections/ibm_zos_core/pull/911) -- zos_job_query - zoau added 'program_name' to their field output starting with v1.2.4. This enhancement checks for that version and passes the extra column through. (https://github.com/ansible-collections/ibm_zos_core/pull/841) -- zos_job_submit - zoau added 'program_name' to their field output starting with v1.2.4. This enhancement checks for that version and passes the extra column through. (https://github.com/ansible-collections/ibm_zos_core/pull/841) -- zos_unarchive - When copying to remote fails now a proper error message is displayed. (https://github.com/ansible-collections/ibm_zos_core/pull/930). -- zos_unarchive - When copying to remote if space_primary is not defined, then is defaulted to 5M. (https://github.com/ansible-collections/ibm_zos_core/pull/930). - -Bugfixes --------- - -- module_utils - data_set.py - Reported a failure caused when cataloging a VSAM data set. Fix now corrects how VSAM data sets are cataloged. (https://github.com/ansible-collections/ibm_zos_core/pull/791). -- zos_archive - Module did not return the proper src state after archiving. Fix now displays the status of the src after the operation. (https://github.com/ansible-collections/ibm_zos_core/pull/930). -- zos_blockinfile - Test case generate a data set that was not correctly removed. Changes delete the correct data set not only member. (https://github.com/ansible-collections/ibm_zos_core/pull/840) -- zos_copy - Module returned the dynamic values created with the same dataset type and record format. Fix validate the correct dataset type and record format of target created. (https://github.com/ansible-collections/ibm_zos_core/pull/824) -- zos_copy - Reported a false positive such that the response would have `changed=true` when copying from a source (src) or destination (dest) data set that was in use (DISP=SHR). This change now displays an appropriate error message and returns `changed=false`. (https://github.com/ansible-collections/ibm_zos_core/pull/794). -- zos_copy - Reported a warning about the use of _play_context.verbosity.This change corrects the module action to prevent the warning message. (https://github.com/ansible-collections/ibm_zos_core/pull/806). -- zos_copy - Test case for recursive encoding directories reported a UTF-8 failure. This change ensures proper test coverage for nested directories and file permissions. (https://github.com/ansible-collections/ibm_zos_core/pull/806). -- zos_copy - Zos_copy did not encode inner content inside subdirectories once the source was copied to the destination. Fix now encodes all content in a source directory, including subdirectories. (https://github.com/ansible-collections/ibm_zos_core/pull/772). -- zos_copy - kept permissions on target directory when copy overwrote files. The fix now set permissions when mode is given. (https://github.com/ansible-collections/ibm_zos_core/pull/795) -- zos_data_set - Reported a failure caused when `present=absent` for a VSAM data set leaving behind cluster components. Fix introduces a new logical flow that will evaluate the volumes, compare it to the provided value and if necessary catalog and delete. (https://github.com/ansible-collections/ibm_zos_core/pull/791). -- zos_fetch - Reported a warning about the use of _play_context.verbosity.This change corrects the module action to prevent the warning message. (https://github.com/ansible-collections/ibm_zos_core/pull/806). -- zos_job_output - Error message did not specify the job not found. Fix now specifies the job_id or job_name being searched to ensure more information is given back to the user. (https://github.com/ansible-collections/ibm_zos_core/pull/747) -- zos_operator - Reported a failure caused by unrelated error response. Fix now gives a transparent response of the operator to avoid false negatives. (https://github.com/ansible-collections/ibm_zos_core/pull/762). - -New Modules ------------ - -- ibm.ibm_zos_core.zos_archive - Archive files and data sets on z/OS. -- ibm.ibm_zos_core.zos_unarchive - Unarchive files and data sets in z/OS. - -v1.9.0 -====== - -Release Summary ---------------- - -Release Date: '2024-03-11' -This changelog describes all changes made to the modules and plugins included -in this collection. The release date is the date the changelog is created. -For additional details such as required dependencies and availability review -the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__ - -Major Changes -------------- - -- zos_job_submit - when job statuses were read, were limited to AC (active), CC (completed normally), ABEND (ended abnormally) and ? (error unknown), SEC (security error), JCLERROR (job had a jcl error). Now the additional statuses are supported, CANCELLED (job was cancelled), CAB (converter abend), CNV (converter error), SYS (system failure) and FLU (job was flushed). (https://github.com/ansible-collections/ibm_zos_core/pull/1283). - -Minor Changes -------------- - -- zos_apf - Improves exception handling if there is a failure parsing the command response when operation selected is list. (https://github.com/ansible-collections/ibm_zos_core/pull/1036). -- zos_copy - Improve zos_copy performance when copying multiple members from one PDS/E to another. (https://github.com/ansible-collections/ibm_zos_core/pull/1176). -- zos_job_output - When passing a job ID and owner the module take as mutually exclusive. Change now allows the use of a job ID and owner at the same time. (https://github.com/ansible-collections/ibm_zos_core/pull/1078). -- zos_job_submit - Improve error messages in zos_job_submit to be clearer. (https://github.com/ansible-collections/ibm_zos_core/pull/1074). -- zos_job_submit - The module had undocumented parameter and uses as temporary file when the location of the file is LOCAL. Change now uses the same name as the src for the temporary file removing the addition of tmp_file to the arguments. (https://github.com/ansible-collections/ibm_zos_core/pull/1091). -- zos_job_submit - The module handling ZOAU import errors obscured the original traceback when an import error ocurred. Fix now passes correctly the context to the user. (https://github.com/ansible-collections/ibm_zos_core/pull/1091). -- zos_mvs_raw - when using the dd_input content option for instream-data, if the content was not properly indented according to the program which is generally a blank in columns 1 & 2, those columns would be truncated. Now, when setting instream-data, the module will ensure that all lines contain a blank in columns 1 and 2 and add blanks when not present while retaining a maximum length of 80 columns for any line. This is true for all content types; string, list of strings and when using a YAML block indicator. (https://github.com/ansible-collections/ibm_zos_core/pull/1057). - zos_mvs_raw - no examples were included with the module that demonstrated using a YAML block indicator, this now includes examples using a YAML block indicator. -- zos_tso_command - add example for executing explicitly a REXX script from a data set. (https://github.com/ansible-collections/ibm_zos_core/pull/1065). - -Bugfixes --------- - -- module_utils/job.py - job output containing non-printable characters would crash modules. Fix now handles the error gracefully and returns a message to the user inside `content` of the `ddname` that failed. (https://github.com/ansible-collections/ibm_zos_core/pull/1288). -- zos_apf - When operation=list was selected and more than one data set entry was fetched, the module only returned one data set. Fix now returns the complete list. (https://github.com/ansible-collections/ibm_zos_core/pull/1236). -- zos_copy - When copying an executable data set with aliases and destination did not exist, destination data set was created with wrong attributes. Fix now creates destination data set with the same attributes as the source. (https://github.com/ansible-collections/ibm_zos_core/pull/1066). -- zos_copy - When performing a copy operation to an existing file, the copied file resulted in having corrupted contents. Fix now implements a workaround to not use the specific copy routine that corrupts the file contents. (https://github.com/ansible-collections/ibm_zos_core/pull/1064). -- zos_data_set - Fixes a small parsing bug in module_utils/data_set function which extracts volume serial(s) from a LISTCAT command output. Previously a leading '-' was left behind for volser strings under 6 chars. (https://github.com/ansible-collections/ibm_zos_core/pull/1247). -- zos_job_output - When passing a job ID or name less than 8 characters long, the module sent the full stack trace as the module's message. Change now allows the use of a shorter job ID or name, as well as wildcards. (https://github.com/ansible-collections/ibm_zos_core/pull/1078). -- zos_job_query - The module handling ZOAU import errors obscured the original traceback when an import error ocurred. Fix now passes correctly the context to the user. (https://github.com/ansible-collections/ibm_zos_core/pull/1042). -- zos_job_query - When passing a job ID or name less than 8 characters long, the module sent the full stack trace as the module's message. Change now allows the use of a shorter job ID or name, as well as wildcards. (https://github.com/ansible-collections/ibm_zos_core/pull/1078). -- zos_job_submit - Was ignoring the default value for location=DATA_SET, now when location is not specified it will default to DATA_SET. (https://github.com/ansible-collections/ibm_zos_core/pull/1120). -- zos_job_submit - when a JCL error occurred, the ret_code[msg_code] contained JCLERROR followed by an integer where the integer appeared to be a reason code when actually it is a multi line marker used to coordinate errors spanning more than one line. Now when a JCLERROR occurs, only the JCLERROR is returned for property ret_code[msg_code]. (https://github.com/ansible-collections/ibm_zos_core/pull/1283). -- zos_job_submit - when a response was returned, it contained an undocumented property; ret_code[msg_text]. Now when a response is returned, it correctly returns property ret_code[msg_txt]. (https://github.com/ansible-collections/ibm_zos_core/pull/1283). -- zos_job_submit - when typrun=copy was used in JCL it would fail the module with an improper message and error condition. While this case continues to be considered a failure, the message has been corrected and it fails under the condition that not enough time has been added to the modules execution. (https://github.com/ansible-collections/ibm_zos_core/pull/1283). -- zos_job_submit - when typrun=hold was used in JCL it would fail the module with an improper message and error condition. While this case continues to be considered a failure, the message has been corrected and it fails under the condition that not enough time has been added to the modules execution. (https://github.com/ansible-collections/ibm_zos_core/pull/1283). -- zos_job_submit - when typrun=jchhold was used in JCL it would fail the module with an improper message and error condition. While this case continues to be considered a failure, the message has been corrected and it fails under the condition that not enough time has been added to the modules execution. (https://github.com/ansible-collections/ibm_zos_core/pull/1283). -- zos_job_submit - when typrun=scan was used in JCL, it would fail the module. Now typrun=scan no longer fails the module and an appropriate message is returned with appropriate return code values. (https://github.com/ansible-collections/ibm_zos_core/pull/1283). -- zos_job_submit - when wait_time_s was used, the duration would run approximately 5 second longer than reported in the duration. Now the when duration is returned, it is the actual accounting from when the job is submitted to when the module reads the job output. (https://github.com/ansible-collections/ibm_zos_core/pull/1283). -- zos_operator - The module handling ZOAU import errors obscured the original traceback when an import error ocurred. Fix now passes correctly the context to the user. (https://github.com/ansible-collections/ibm_zos_core/pull/1042). -- zos_unarchive - Using a local file with a USS format option failed when sending to remote because dest_data_set option had an empty dictionary. Fix now leaves dest_data_set as None when using a USS format option. (https://github.com/ansible-collections/ibm_zos_core/pull/1045). -- zos_unarchive - When unarchiving USS files, the module left temporary files on the remote. Change now removes temporary files. (https://github.com/ansible-collections/ibm_zos_core/pull/1073). - -v1.8.0 -====== - -Release Summary ---------------- - -Release Date: '2023-12-08' -This changelog describes all changes made to the modules and plugins included -in this collection. The release date is the date the changelog is created. -For additional details such as required dependencies and availability review -the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__ - -Minor Changes -------------- - -- module_utils/template - Add validation into path joins to detect unauthorized path traversals. (https://github.com/ansible-collections/ibm_zos_core/pull/1029) -- zos_archive - Add validation into path joins to detect unauthorized path traversals. (https://github.com/ansible-collections/ibm_zos_core/pull/1029) -- zos_archive - Enhanced test cases to use test lines the same length of the record length. (https://github.com/ansible-collections/ibm_zos_core/pull/965) -- zos_copy - Add validation into path joins to detect unauthorized path traversals. (https://github.com/ansible-collections/ibm_zos_core/pull/962) -- zos_copy - Add new option `force_lock` that can copy into data sets that are already in use by other processes (DISP=SHR). User needs to use with caution because this is subject to race conditions and can lead to data loss. (https://github.com/ansible-collections/ibm_zos_core/pull/980). -- zos_copy - includes a new option `executable` that enables copying of executables such as load modules or program objects to both USS and partitioned data sets. When the `dest` option contains a non-existent data set, `zos_copy` will create a data set with the appropriate attributes for an executable. (https://github.com/ansible-collections/ibm_zos_core/pull/804) -- zos_copy - introduces a new option 'aliases' to enable preservation of member aliases when copying data to partitioned data sets (PDS) destinations from USS or other PDS sources. Copying aliases of text based members to/from USS is not supported. (https://github.com/ansible-collections/ibm_zos_core/pull/1014) -- zos_fetch - Add validation into path joins to detect unauthorized path traversals. (https://github.com/ansible-collections/ibm_zos_core/pull/962) -- zos_job_submit - Change action plugin call from copy to zos_copy. (https://github.com/ansible-collections/ibm_zos_core/pull/951) -- zos_job_submit - Previous code did not return output, but still requested job data from the target system. This changes to honor return_output=false by not querying the job dd segments at all. (https://github.com/ansible-collections/ibm_zos_core/pull/1063). -- zos_operator - Changed system to call 'wait=true' parameter to zoau call. Requires zoau 1.2.5 or later. (https://github.com/ansible-collections/ibm_zos_core/pull/976) -- zos_operator_action_query - Add a max delay of 5 seconds on each part of the operator_action_query. Requires zoau 1.2.5 or later. (https://github.com/ansible-collections/ibm_zos_core/pull/976) -- zos_script - Add support for remote_tmp from the Ansible configuration to setup where temporary files will be created, replacing the module option tmp_path. (https://github.com/ansible-collections/ibm_zos_core/pull/1068). -- zos_tso_command - Add example for executing explicitly a REXX script from a data set. (https://github.com/ansible-collections/ibm_zos_core/pull/1072). -- zos_unarchive - Add validation into path joins to detect unauthorized path traversals. (https://github.com/ansible-collections/ibm_zos_core/pull/1029) -- zos_unarchive - Enhanced test cases to use test lines the same length of the record length. (https://github.com/ansible-collections/ibm_zos_core/pull/965) - -Deprecated Features -------------------- - -- zos_blockinfile debug - is deprecated in favor of 'as_json' (https://github.com/ansible-collections/ibm_zos_core/pull/904). - -Bugfixes --------- - -- zos_copy - Update option limit to include LIBRARY as dest_dataset/suboption value. Documentation updated to reflect this change. (https://github.com/ansible-collections/ibm_zos_core/pull/968). -- zos_copy - When copying an executable data set from controller to managed node, copy operation failed with an encoding error. Fix now avoids encoding when executable option is selected. (https://github.com/ansible-collections/ibm_zos_core/pull/1079). -- zos_copy - When copying an executable data set with aliases and destination did not exist, destination data set was created with wrong attributes. Fix now creates destination data set with the same attributes as the source. (https://github.com/ansible-collections/ibm_zos_core/pull/1067). -- zos_copy - When performing a copy operation to an existing file, the copied file resulted in having corrupted contents. Fix now implements a workaround to not use the specific copy routine that corrupts the file contents. (https://github.com/ansible-collections/ibm_zos_core/pull/1069). -- zos_job_submit - Temporary files were created in tmp directory. Fix now ensures the deletion of files every time the module run. (https://github.com/ansible-collections/ibm_zos_core/pull/951) -- zos_job_submit - The last line of the jcl was missing in the input. Fix now ensures the presence of the full input in job_submit. (https://github.com/ansible-collections/ibm_zos_core/pull/952) -- zos_lineinfile - A duplicate entry was made even if line was already present in the target file. Fix now prevents a duplicate entry if the line already exists in the target file. (https://github.com/ansible-collections/ibm_zos_core/pull/916) -- zos_operator - The last line of the operator was missing in the response of the module. The fix now ensures the presence of the full output of the operator. https://github.com/ansible-collections/ibm_zos_core/pull/918) -- zos_operator - The module was ignoring the wait time argument. The module now passes the wait time argument to ZOAU. (https://github.com/ansible-collections/ibm_zos_core/pull/1063). -- zos_operator_action_query - The module was ignoring the wait time argument. The module now passes the wait time argument to ZOAU. (https://github.com/ansible-collections/ibm_zos_core/pull/1063). -- zos_unarchive - When zos_unarchive fails during unpack either with xmit or terse it does not clean the temporary data sets created. Fix now removes the temporary data sets. (https://github.com/ansible-collections/ibm_zos_core/pull/1054). - -Known Issues ------------- - -- Several modules have reported UTF8 decoding errors when interacting with results that contain non-printable UTF8 characters in the response. This occurs when a module receives content that does not correspond to a UTF-8 value. These include modules `zos_job_submit`, `zos_job_output`, `zos_operator_action_query` but are not limited to this list. This will be addressed in `ibm_zos_core` version 1.10.0-beta.1. Each case is unique, some options to work around the error are below. - Specify that the ASA assembler option be enabled to instruct the assembler to use ANSI control characters instead of machine code control characters. - Add `ignore_errors:true` to the playbook task so the task error will not fail the playbook. - If the error is resulting from a batch job, add `ignore_errors:true` to the task and capture the output into a variable and extract the job ID with a regular expression and then use `zos_job_output` to display the DD without the non-printable character such as the DD `JESMSGLG`. (https://github.com/ansible-collections/ibm_zos_core/issues/677) (https://github.com/ansible-collections/ibm_zos_core/issues/776) (https://github.com/ansible-collections/ibm_zos_core/issues/972) -- With later versions of `ansible-core` used with `ibm_zos_core` collection a warning has started to appear "Module "ansible.builtin.command" returned non UTF-8 data in the JSON response" that is currently being reviewed. There are no recommendations at this point. (https://github.com/ansible-collections/ibm_zos_core/issues/983) - -New Modules ------------ - -- ibm.ibm_zos_core.zos_script - Run scripts in z/OS - -v1.7.0 -====== - -Release Summary ---------------- - -Release Date: '2023-10-09' -This changelog describes all changes made to the modules and plugins included -in this collection. The release date is the date the changelog is created. -For additional details such as required dependencies and availability review -the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__ - -Major Changes -------------- - -- zos_copy - Previously, backups were taken when force was set to false; whether or not a user specified this operation which caused allocation issues with space and permissions. This removes the automatic backup performed and reverts to the original logic in that backups must be initiated by the user. (https://github.com/ansible-collections/ibm_zos_core/pull/896) - -Minor Changes -------------- - -- Add support for Jinja2 templates in zos_copy and zos_job_submit when using local source files. (https://github.com/ansible-collections/ibm_zos_core/pull/667) -- zos_archive - If destination data set space is not provided then the module computes it based on the src list and/or expanded src list based on pattern provided. (https://github.com/ansible-collections/ibm_zos_core/pull/930). -- zos_archive - When xmit faces a space error in xmit operation because of dest or log data set are filled raises an appropriate error hint. (https://github.com/ansible-collections/ibm_zos_core/pull/930). -- zos_copy - Adds block_size, record_format, record_length, space_primary, space_secondary, space_type and type in the return output when the destination data set does not exist and has to be created by the module. (https://github.com/ansible-collections/ibm_zos_core/pull/773) -- zos_data_set - record format = 'F' has been added to support 'fixed' block records. This allows records that can use the entire block. (https://github.com/ansible-collections/ibm_zos_core/pull/821) -- zos_job_output - zoau added 'program_name' to their field output starting with v1.2.4. This enhancement checks for that version and passes the extra column through. (https://github.com/ansible-collections/ibm_zos_core/pull/841) -- zos_job_query - Adds new fields job_class, svc_class, priority, asid, creation_datetime, and queue_position to the return output when querying or submitting a job. Available when using ZOAU v1.2.3 or greater. (https://github.com/ansible-collections/ibm_zos_core/pull/778) -- zos_job_query - unnecessary calls were made to find a jobs DDs that incurred unnecessary overhead. This change removes those resulting in a performance increase in job related queries. (https://github.com/ansible-collections/ibm_zos_core/pull/911) -- zos_job_query - zoau added 'program_name' to their field output starting with v1.2.4. This enhancement checks for that version and passes the extra column through. (https://github.com/ansible-collections/ibm_zos_core/pull/841) -- zos_job_submit - zoau added 'program_name' to their field output starting with v1.2.4. This enhancement checks for that version and passes the extra column through. (https://github.com/ansible-collections/ibm_zos_core/pull/841) -- zos_unarchive - When copying to remote fails now a proper error message is displayed. (https://github.com/ansible-collections/ibm_zos_core/pull/930). -- zos_unarchive - When copying to remote if space_primary is not defined, then is defaulted to 5M. (https://github.com/ansible-collections/ibm_zos_core/pull/930). - -Bugfixes --------- - -- module_utils - data_set.py - Reported a failure caused when cataloging a VSAM data set. Fix now corrects how VSAM data sets are cataloged. (https://github.com/ansible-collections/ibm_zos_core/pull/791). -- zos_archive - Module did not return the proper src state after archiving. Fix now displays the status of the src after the operation. (https://github.com/ansible-collections/ibm_zos_core/pull/930). -- zos_blockinfile - Test case generate a data set that was not correctly removed. Changes delete the correct data set not only member. (https://github.com/ansible-collections/ibm_zos_core/pull/840) -- zos_copy - Module returned the dynamic values created with the same dataset type and record format. Fix validate the correct dataset type and record format of target created. (https://github.com/ansible-collections/ibm_zos_core/pull/824) -- zos_copy - Reported a false positive such that the response would have `changed=true` when copying from a source (src) or destination (dest) data set that was in use (DISP=SHR). This change now displays an appropriate error message and returns `changed=false`. (https://github.com/ansible-collections/ibm_zos_core/pull/794). -- zos_copy - Reported a warning about the use of _play_context.verbosity.This change corrects the module action to prevent the warning message. (https://github.com/ansible-collections/ibm_zos_core/pull/806). -- zos_copy - Test case for recursive encoding directories reported a UTF-8 failure. This change ensures proper test coverage for nested directories and file permissions. (https://github.com/ansible-collections/ibm_zos_core/pull/806). -- zos_copy - Zos_copy did not encode inner content inside subdirectories once the source was copied to the destination. Fix now encodes all content in a source directory, including subdirectories. (https://github.com/ansible-collections/ibm_zos_core/pull/772). -- zos_copy - kept permissions on target directory when copy overwrote files. The fix now set permissions when mode is given. (https://github.com/ansible-collections/ibm_zos_core/pull/795) -- zos_data_set - Reported a failure caused when `present=absent` for a VSAM data set leaving behind cluster components. Fix introduces a new logical flow that will evaluate the volumes, compare it to the provided value and if necessary catalog and delete. (https://github.com/ansible-collections/ibm_zos_core/pull/791). -- zos_fetch - Reported a warning about the use of _play_context.verbosity.This change corrects the module action to prevent the warning message. (https://github.com/ansible-collections/ibm_zos_core/pull/806). -- zos_job_output - Error message did not specify the job not found. Fix now specifies the job_id or job_name being searched to ensure more information is given back to the user. (https://github.com/ansible-collections/ibm_zos_core/pull/747) -- zos_operator - Reported a failure caused by unrelated error response. Fix now gives a transparent response of the operator to avoid false negatives. (https://github.com/ansible-collections/ibm_zos_core/pull/762). - -New Modules ------------ - -- ibm.ibm_zos_core.zos_archive - Archive files and data sets on z/OS. -- ibm.ibm_zos_core.zos_unarchive - Unarchive files and data sets in z/OS. - v1.6.0 ====== diff --git a/README.md b/README.md index 9b33194eb..0146087a0 100644 --- a/README.md +++ b/README.md @@ -6,25 +6,14 @@ The **IBM z/OS core** collection enables Ansible to interact with z/OS Data Sets The **IBM z/OS core** collection is part of the **Red Hat® Ansible Certified Content for IBM Z®** offering that brings Ansible automation to IBM Z®. This collection brings forward the possibility to manage batch jobs, perform program authorizations, run operator operations, and execute both JES and MVS commands as well as execute shell, python, and REXX scripts. It supports data set creation, searching, copying, fetching, and encoding. It provides both archiving and unarchiving of data sets, initializing volumes, performing backups and supports Jinja templating. -System programmers can enable pipelines to setup, tear down and deploy applications while system administrators can automate time consuming repetitive tasks inevitably freeing up their time. New z/OS users can find comfort in Ansible's familiarity and expedite their proficiency in record time. + +<br/>System programmers can enable pipelines to setup, tear down and deploy applications while system administrators can automate time consuming repetitive tasks inevitably freeing up their time. New z/OS users can find comfort in Ansible's familiarity and expedite their proficiency in record time. ## Requirements -Before you install the IBM z/OS core collection, you must configure a control node and managed node with a minimum set of requirements. +Before you install the IBM z/OS core collection, you must configure the control node and z/OS managed node with a minimum set of requirements. The following [table](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/resources/releases_maintenance.html) details the specific software requirements for the controller and managed node. -### Ansible Controller - -This release of the collection requires **ansible-core >=2.15** (Ansible >=8.x), for additional requirements such as Python, review the [support matrix](https://docs.ansible.com/ansible/latest/reference_appendices/release_and_maintenance.html#ansible-core-support-matrix). - -### Managed Node - -This release of the collection requires the following -* [z/OS](https://www.ibm.com/docs/en/zos) V2R4 (or later) but prior to version V3R1. -* [z/OS shell](https://www.ibm.com/support/knowledgecenter/en/SSLTBW_2.4.0/com.ibm.zos.v2r4.bpxa400/part1.htm). -* [IBM Open Enterprise SDK for Python](https://www.ibm.com/products/open-enterprise-python-zos) 3.9 - 3.11. -* [IBM Z Open Automation Utilities](https://www.ibm.com/docs/en/zoau) 1.3.0 or later. - ## Installation Before using this collection, you need to install it with the Ansible Galaxy command-line tool: @@ -33,53 +22,44 @@ Before using this collection, you need to install it with the Ansible Galaxy com ansible-galaxy collection install ibm.ibm_zos_core ``` -<p> </p> -You can also include it in a requirements.yml file and install it with `ansible-galaxy collection install -r requirements.yml`, using the format: +<br/>You can also include it in a requirements.yml file and install it with `ansible-galaxy collection install -r requirements.yml`, using the format: ```sh collections: - name: ibm.ibm_zos_core ``` -<p> </p> -Note that if you install the collection from Ansible Galaxy, it will not be upgraded automatically when you upgrade the Ansible package. +<br/>Note that if you install the collection from Ansible Galaxy, it will not be upgraded automatically when you upgrade the Ansible package. To upgrade the collection to the latest available version, run the following command: ```sh ansible-galaxy collection install ibm.ibm_zos_core --upgrade ``` -<p> </p> -You can also install a specific version of the collection, for example, if you need to downgrade when something is broken in the latest version (please report an issue in this repository). Use the following syntax to install version 1.0.0: +<br/>You can also install a specific version of the collection, for example, if you need to downgrade when something is broken in the latest version (please report an issue in this repository). Use the following syntax to install version 1.0.0: ```sh ansible-galaxy collection install ibm.ibm_zos_core:1.0.0 ``` -<p> </p> -You can also install a beta version of the collection. A beta version is only available on Galaxy and is only supported by the community until it is promoted to General Availability (GA). Use the following syntax to install a beta version: +<br/>You can also install a beta version of the collection. A beta version is only available on Galaxy and is only supported by the community until it is promoted to General Availability (GA). Use the following syntax to install a beta version: ```sh ansible-galaxy collection install ibm.ibm_zos_core:1.10.0-beta.1 ``` -<p> </p> -As part of the installation, the collection [requirements](#Requirements) must be made available to Ansible through the use of [environment variables](https://github.com/IBM/z_ansible_collections_samples/blob/main/docs/share/zos_core/configuration_guide.md#environment-variables). The preferred configuration is to place the environment variables in `group_vars` and `host_vars`, you can find examples of this configuration under any [playbook project](https://github.com/IBM/z_ansible_collections_samples), for example, review the **data set** example [configuration](https://github.com/IBM/z_ansible_collections_samples/tree/main/zos_concepts/data_sets/data_set_basics#configuration) documentation. +<br/>As part of the installation, the collection [requirements](#Requirements) must be made available to Ansible through the use of [environment variables](https://github.com/IBM/z_ansible_collections_samples/blob/main/docs/share/zos_core/configuration_guide.md#environment-variables). The preferred configuration is to place the environment variables in `group_vars` and `host_vars`, you can find examples of this configuration under any [playbook project](https://github.com/IBM/z_ansible_collections_samples), for example, review the **data set** example [configuration](https://github.com/IBM/z_ansible_collections_samples/tree/main/zos_concepts/data_sets/data_set_basics#configuration) documentation. -<p> </p> -If you are testing a configuration, it can be helpful to set the environment variables in a playbook, an example of that can be reviewed [here](https://github.com/ansible-collections/ibm_zos_core/discussions/657). +<br/>If you are testing a configuration, it can be helpful to set the environment variables in a playbook, an example of that can be reviewed [here](https://github.com/ansible-collections/ibm_zos_core/discussions/657). -<p> </p> -To learn more about the ZOAU Python wheel installation method, review the [documentation](https://www.ibm.com/docs/en/zoau/1.3.x?topic=installing-zoau#python-wheel-installation-method). +<br/>To learn more about the ZOAU Python wheel installation method, review the [documentation](https://www.ibm.com/docs/en/zoau/1.3.x?topic=installing-zoau#python-wheel-installation-method). -<p> </p> -If the wheel is installed using the `--target` option, it will install the package into the specified target directory. The environment variable `PYTHONPATH` will have to be configured to where the packages is installed, e.g; `PYTHONPATH: /usr/zoau/wheels`. Using `--target` is recommended, else the wheel will be installed in Python's home directory which may not have write permissions or persist +<br/>If the wheel is installed using the `--target` option, it will install the package into the specified target directory. The environment variable `PYTHONPATH` will have to be configured to where the packages is installed, e.g; `PYTHONPATH: /usr/zoau/wheels`. Using `--target` is recommended, else the wheel will be installed in Python's home directory which may not have write permissions or persist after an update. -<p> </p> -If the wheel is installed using the `--user` option, it will install the package into the user directory. The environment variable `PYTHONPATH` will have to be configured to where the packages is installed, e.g; `PYTHONPATH: /u/user`. +<br/>If the wheel is installed using the `--user` option, it will install the package into the user directory. The environment variable `PYTHONPATH` will have to be configured to where the packages is installed, e.g; `PYTHONPATH: /u/user`. -Environment variables: +<br/>Environment variables: ```sh PYZ: "path_to_python_installation_on_zos_target" @@ -145,19 +125,18 @@ environment_vars: All releases, including betas will meet the following test criteria. - * 100% success for [Functional](https://github.com/ansible-collections/ibm_zos_core/tree/dev/tests/functional) tests. - * 100% success for [Sanity](https://docs.ansible.com/ansible/latest/dev_guide/testing/sanity/index.html#all-sanity-tests) tests as part of [ansible-test](https://docs.ansible.com/ansible/latest/dev_guide/testing.html#run-sanity-tests). - * 100% success for [pyflakes](https://github.com/PyCQA/pyflakes/blob/main/README.rst). - * 100% success for [ansible-lint](https://ansible.readthedocs.io/projects/lint/) allowing only false positives. +* 100% success for [Functional](https://github.com/ansible-collections/ibm_zos_core/tree/dev/tests/functional) tests. +* 100% success for [Sanity](https://docs.ansible.com/ansible/latest/dev_guide/testing/sanity/index.html#all-sanity-tests) tests as part of[ansible-test](https://docs.ansible.com/ansible/latest/dev_guide/testing.html#run-sanity-tests). +* 100% success for [pyflakes](https://github.com/PyCQA/pyflakes/blob/main/README.rst). +* 100% success for [ansible-lint](https://ansible.readthedocs.io/projects/lint/) allowing only false positives. -<p> </p> -This release of the collection was tested with following dependencies. +<br/>This release of the collection was tested with following dependencies. - * ansible-core v2.15.x - * Python 3.9.x - * IBM Open Enterprise SDK for Python 3.11.x - * IBM Z Open Automation Utilities (ZOAU) 1.3.0.x - * z/OS V2R5 +* ansible-core v2.15.x +* Python 3.9.x +* IBM Open Enterprise SDK for Python 3.11.x +* IBM Z Open Automation Utilities (ZOAU) 1.3.0.x +* z/OS V2R5 This release introduces case sensitivity for option values and includes a porting guide in the [release notes](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html) to assist with which option values will need to be updated. @@ -165,28 +144,26 @@ This release introduces case sensitivity for option values and includes a portin This community is not currently accepting contributions. However, we encourage you to open [git issues](https://github.com/ansible-collections/ibm_zos_core/issues) for bugs, comments or feature requests and check back periodically for when community contributions will be accepted in the near future. -Review the [development docs](https://ibm.github.io/z_ansible_collections_doc/zhmc-ansible-modules/docs/source/development.html#development) to learn how you can create an environment and test the collections modules. +<br/>Review the [development docs](https://ibm.github.io/z_ansible_collections_doc/zhmc-ansible-modules/docs/source/development.html#development) to learn how you can create an environment and test the collections modules. ## Communication If you would like to communicate with this community, you can do so through the following options. - * GitHub [discussions](https://github.com/ansible-collections/ibm_zos_core/discussions). - * GitHub [issues](https://github.com/ansible-collections/ibm_zos_core/issues/new/choose). - * [Ansible Forum](https://forum.ansible.com/), please use the `zos` tag to ensure proper awareness. - * Discord [System Z Enthusiasts](https://forum.ansible.com/) room [ansible](https://discord.gg/nKC8F89v). - * Matrix Ansible room [ansible-zos](#ansible-zos:matrix.org). - * Ansible community Matrix [rooms](https://docs.ansible.com/ansible/latest/community/communication.html#general-channels). +* GitHub [discussions](https://github.com/ansible-collections/ibm_zos_core/discussions). +* GitHub [issues](https://github.com/ansible-collections/ibm_zos_core/issues/new/choose). +* [Ansible Forum](https://forum.ansible.com/), please use the `zos` tag to ensure proper awareness. +* Discord [System Z Enthusiasts](https://forum.ansible.com/) room [ansible](https://discord.gg/nKC8F89v). +* Matrix Ansible room [ansible-zos](#ansible-zos:matrix.org). +* Ansible community Matrix [rooms](https://docs.ansible.com/ansible/latest/community/communication.html#general-channels). ## Support As Red Hat Ansible [Certified Content](https://catalog.redhat.com/software/search?target_platforms=Red%20Hat%20Ansible%20Automation%20Platform), this collection is entitled to [support](https://access.redhat.com/support/) through [Ansible Automation Platform](https://www.redhat.com/en/technologies/management/ansible) (AAP). After creating a Red Hat support case, if it is determined the issue belongs to IBM, Red Hat will instruct you to create an [IBM support case](https://www.ibm.com/mysupport/s/createrecord/NewCase) and share the case number with Red Hat so that a collaboration can begin between Red Hat and IBM. -<p> </p> -If a support case cannot be opened with Red Hat and the collection has been obtained either from [Galaxy](https://galaxy.ansible.com/ui/) or [GitHub](https://github.com/ansible-collections/ibm_zos_core), there is community support available at no charge. Community support is limited to the collection; community support does not include any of the Ansible Automation Platform components, [IBM Z Open Automation Utilities](https://www.ibm.com/docs/en/zoau), [IBM Open Enterprise SDK for Python](https://www.ibm.com/products/open-enterprise-python-zos) or [ansible-core](https://github.com/ansible/ansible). +<br/>If a support case cannot be opened with Red Hat and the collection has been obtained either from [Galaxy](https://galaxy.ansible.com/ui/) or [GitHub](https://github.com/ansible-collections/ibm_zos_core), there is community support available at no charge. Community support is limited to the collection; community support does not include any of the Ansible Automation Platform components, [IBM Z Open Automation Utilities](https://www.ibm.com/docs/en/zoau), [IBM Open Enterprise SDK for Python](https://www.ibm.com/products/open-enterprise-python-zos) or [ansible-core](https://github.com/ansible/ansible). -<p> </p> -The current supported versions of this collection can be found listed under the [release section](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html). +<br/>The current supported versions of this collection can be found listed under the [release section](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html). ## Release Notes and Roadmap @@ -196,18 +173,17 @@ For AAP users, to see the supported ansible-core versions, review the [AAP Life For Galaxy and GitHub users, to see the supported ansible-core versions, review the [ansible-core support matrix](https://docs.ansible.com/ansible/latest/reference_appendices/release_and_maintenance.html#ansible-core-support-matrix). -<p> </p> -The collection's changelogs can be reviewed in the following table. +<br/>The collection's changelogs can be reviewed in the following table. -| Version | ansible-core | Ansible | Status | -|---------|--------------|---------|----------------------------| -| 1.11.x | >=2.16.x | >=9.0.x | In development (unreleased)| -| [1.10.x](https://github.com/ansible-collections/ibm_zos_core/blob/v1.10.0-beta.1/CHANGELOG.rst) | >=2.15.x | >=8.0.x | In preview | -| [1.9.x](https://github.com/ansible-collections/ibm_zos_core/blob/v1.9.0/CHANGELOG.rst) | >=2.14.x | >=7.0.x | 05 Feb 2024 | -| [1.8.x](https://github.com/ansible-collections/ibm_zos_core/blob/v1.8.0/CHANGELOG.rst) | >=2.14.x | >=7.0.x | 13 Dec 2023 | -| [1.7.x](https://github.com/ansible-collections/ibm_zos_core/blob/v1.7.0/CHANGELOG.rst) | >=2.14.x | >=7.0.x | 10 Oct 2023 | -| [1.6.x](https://github.com/ansible-collections/ibm_zos_core/blob/v1.6.0/CHANGELOG.rst) | >=2.9.x | >=2.9.x | 28 June 2023 | -| [1.5.x](https://github.com/ansible-collections/ibm_zos_core/blob/v1.5.0/CHANGELOG.rst) | >=2.9.x | >=2.9.x | 25 April 2023 | +| Version | Status | Release notes | Changelogs | +|----------|----------------------------|---------------|------------| +| 1.11.x | In development (unreleased)| unreleased | unreleased | +| 1.10.x | Current | [Release notes](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html#version-1-10-0) | [Changelogs](https://github.com/ansible-collections/ibm_zos_core/blob/v1.10.0/CHANGELOG.rst) | +| 1.9.x | Released | [Release notes](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html#version-1-9-0) | [Changelogs](https://github.com/ansible-collections/ibm_zos_core/blob/v1.9.0/CHANGELOG.rst) | +| 1.8.x | Released | [Release notes](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html#version-1-8-0) | [Changelogs](https://github.com/ansible-collections/ibm_zos_core/blob/v1.8.0/CHANGELOG.rst) | +| 1.7.x | Released | [Release notes](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html#version-1-7-0) | [Changelogs](https://github.com/ansible-collections/ibm_zos_core/blob/v1.7.0/CHANGELOG.rst) | +| 1.6.x | Released | [Release notes](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html#version-1-6-0) | [Changelogs](https://github.com/ansible-collections/ibm_zos_core/blob/v1.6.0/CHANGELOG.rst) | +| 1.5.x | Released | [Release notes](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html#version-1-5-0) | [Changelogs](https://github.com/ansible-collections/ibm_zos_core/blob/v1.5.0/CHANGELOG.rst) | ## Related Information @@ -217,4 +193,4 @@ Supplemental content on getting started with Ansible, architecture and use cases ## License Information Some portions of this collection are licensed under [GNU General Public License, Version 3.0](https://opensource.org/licenses/GPL-3.0), and other portions of this collection are licensed under [Apache License, Version 2.0](http://www.apache.org/licenses/LICENSE-2.0). -See individual files for applicable licenses. \ No newline at end of file +See individual files for applicable licenses. diff --git a/changelogs/changelog.yaml b/changelogs/changelog.yaml index 6e034e91c..4d9648079 100644 --- a/changelogs/changelog.yaml +++ b/changelogs/changelog.yaml @@ -78,6 +78,20 @@ releases: name: zos_tso_command namespace: '' release_date: '2022-06-07' + 1.10.0: + changes: + release_summary: 'Release Date: ''2024-06-11'' + + This changelog describes all changes made to the modules and plugins included + + in this collection. The release date is the date the changelog is created. + + For additional details such as required dependencies and availability review + + the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__' + fragments: + - v1.10.0_summary.yml + release_date: '2024-06-11' 1.10.0-beta.1: changes: breaking_changes: diff --git a/changelogs/fragments/v1.10.0_summary.yml b/changelogs/fragments/v1.10.0_summary.yml new file mode 100644 index 000000000..129c40746 --- /dev/null +++ b/changelogs/fragments/v1.10.0_summary.yml @@ -0,0 +1,6 @@ +release_summary: | + Release Date: '2024-06-11' + This changelog describes all changes made to the modules and plugins included + in this collection. The release date is the date the changelog is created. + For additional details such as required dependencies and availability review + the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__ \ No newline at end of file diff --git a/docs/source/modules/zos_copy.rst b/docs/source/modules/zos_copy.rst index 01647e010..b63b39562 100644 --- a/docs/source/modules/zos_copy.rst +++ b/docs/source/modules/zos_copy.rst @@ -815,6 +815,10 @@ Notes Beginning in version 1.8.x, zos\_copy will no longer attempt to correct a copy of a data type member into a PDSE that contains program objects. You can control this behavior using module option \ :literal:`executable`\ that will signify an executable is being copied into a PDSE with other executables. Mixing data type members with program objects will result in a (FSUM8976,./zos\_copy.html) error. + It is the playbook author or user's responsibility to ensure they have appropriate authority to the RACF FACILITY resource class. A user is described as the remote user, configured either for the playbook or playbook tasks, who can also obtain escalated privileges to execute as root or another user. + + To use this module, you must define the RACF FACILITY class profile and allow READ access to RACF FACILITY profile MVS.MCSOPER.ZOAU. If your system uses a different security product, consult that product's documentation to configure the required security classes. + See Also diff --git a/docs/source/modules/zos_operator.rst b/docs/source/modules/zos_operator.rst index 6509ac286..8f7e76df1 100644 --- a/docs/source/modules/zos_operator.rst +++ b/docs/source/modules/zos_operator.rst @@ -35,7 +35,7 @@ cmd If the command contains any special characters ($, &, etc), they must be escaped using double backslashes like \\\\\\$. - For example, to display job by job name the command would be \ :literal:`cmd:"\\\\$dj''HELLO''"`\ + For example, to display job by job name the command would be ``cmd:"\\$dj''HELLO''"`` | **required**: True | **type**: str diff --git a/docs/source/modules/zos_script.rst b/docs/source/modules/zos_script.rst index 6f36e05e2..d2977c486 100644 --- a/docs/source/modules/zos_script.rst +++ b/docs/source/modules/zos_script.rst @@ -275,6 +275,12 @@ Examples cmd: ./scripts/PROGRAM removes: /u/user/pgm_input.txt + - name: Run a shell script on the remote system + zos_script: + cmd: ./scripts/program.sh + executable: /bin/sh + remote_src: true + diff --git a/docs/source/release_notes.rst b/docs/source/release_notes.rst index 1fde47fab..418edafa0 100644 --- a/docs/source/release_notes.rst +++ b/docs/source/release_notes.rst @@ -6,8 +6,8 @@ Releases ======== -Version 1.10.0-beta.1 -===================== +Version 1.10.0 +============== Major Changes ------------- @@ -95,6 +95,7 @@ It is intended to assist in updating your playbooks so this collection will cont Availability ------------ +* `Automation Hub`_ * `Galaxy`_ * `GitHub`_ @@ -106,12 +107,13 @@ controller and z/OS managed node dependencies. Known Issues ------------ - +- ``zos_job_submit`` - when setting 'location' to 'local' and not specifying the from and to encoding, the modules defaults are not read leaving the file in its original encoding; explicitly set the encodings instead of relying on the default. - ``zos_job_submit`` - when submitting JCL, the response value returned for **byte_count** is incorrect. +- ``zos_data_set`` - When data set creation fails, exception can throw a bad import error instead of data set creation error. +- ``zos_copy`` - To use this module, you must define the RACF FACILITY class profile and allow READ access to RACF FACILITY profile MVS.MCSOPER.ZOAU. If your system uses a different security product, consult that product's documentation to configure the required security classes. +- ``zos_job_submit``, ``zos_job_output``, ``zos_operator_action_query`` - encounters JSON decoding (DecodeError, TypeError, KeyError) errors when interacting with results that contain non-printable UTF-8 characters in the response. This will be addressed in **ZOAU version 1.3.2** and later. -- ``zos_job_submit``, ``zos_job_output``, ``zos_operator_action_query`` - encounters UTF-8 decoding errors when interacting with results that contain non-printable UTF-8 characters in the response. - - - If you encounter this, some options are to: + - Some options to work around this known issue are: - Specify that the ASA assembler option be enabled to instruct the assembler to use ANSI control characters instead of machine code control characters. - Ignore module errors by using **ignore_errors:true** for a specific playbook task. @@ -119,6 +121,10 @@ Known Issues job ID with a regular expression. Then use ``zos_job_output`` to display the DD without the non-printable character such as the DD **JESMSGLG**. - If the error is the result of a batch job, set option **return_output** to false so that no DDs are read which could contain the non-printable UTF-8 characters. +- In the past, choices could be defined in either lower or upper case. Now, only the case that is identified in the docs can be set, this is so that the collection can continue to maintain certified status. +- Use of special characters (#, @, $, \- ) in different options like data set names and commands is not fully supported, some modules support them but is the user responsibility to escape them. Read each module documentation for further details. + + Version 1.9.1 ============= @@ -144,7 +150,7 @@ controller and z/OS managed node dependencies. Known Issues ------------ -- ``zos_job_submit`` - when setting 'location' to 'LOCAL' and not specifying the from and to encoding, the modules defaults are not read leaving the file in its original encoding; explicitly set the encodings instead of relying on the default. +- ``zos_job_submit`` - when setting 'location' to 'local' and not specifying the from and to encoding, the modules defaults are not read leaving the file in its original encoding; explicitly set the encodings instead of relying on the default. - ``zos_job_submit`` - when submitting JCL, the response value returned for **byte_count** is incorrect. - ``zos_job_submit``, ``zos_job_output``, ``zos_operator_action_query`` - encounters UTF-8 decoding errors when interacting with results that contain non-printable UTF-8 characters in the response. This has been addressed in this release and corrected with **ZOAU version 1.2.5.6** or later. diff --git a/docs/source/resources/releases_maintenance.rst b/docs/source/resources/releases_maintenance.rst index acb0e6559..70fa46e03 100644 --- a/docs/source/resources/releases_maintenance.rst +++ b/docs/source/resources/releases_maintenance.rst @@ -23,22 +23,22 @@ currently supported. For example, if a collection releases with a minimum version of ``ansible-core`` 2.14.0 (Ansible 7.0) and later this enters into EOL, then a newer supported version of ``ansible-core`` (Ansible) must be selected. When choosing a newer ``ansible-core`` (Ansible) version, review the `ansible-core support matrix`_ to select the appropriate dependencies. -This is important to note, different releases of ``ansible-core`` can require newer controller and managed node +This is important to note, different releases of ``ansible-core`` can require newer control node and managed node dependencies such as is the case with Python. -If the controller is Ansible Automation Platform (AAP), review the `Red Hat Ansible Automation Platform Life Cycle`_ +If the control node is Ansible Automation Platform (AAP), review the `Red Hat Ansible Automation Platform Life Cycle`_ to select a supported AAP version. For IBM product lifecycle information, you can search for products using a product name, version or ID. For example, -to view IBM's **Open Enterprise SDK for Python** lifecycle, search on product ID `5655-PYT`_, and for **Z Open Automation Utilities**, -search on product ID `5698-PA1`_. +to view IBM's `Open Enterprise SDK for Python lifecycle`_, search on product ID `5655-PYT`_, and for +`Z Open Automation Utilities lifecycle`_, search on product ID `5698-PA1`_. Support Matrix ============== +---------+----------------------------+---------------------------------------------------+---------------+---------------+ | Version | Controller | Managed Node | GA | End of Life | +=========+============================+===================================================+===============+===============+ -| 1.10.x |- `ansible-core`_ >=2.15.x |- `z/OS`_ V2R4 - V2Rx | In preview | TBD | +| 1.10.x |- `ansible-core`_ >=2.15.x |- `z/OS`_ V2R4 - V2Rx | 21 June 2024 | 21 June 2026 | | |- `Ansible`_ >=8.0.x |- `z/OS shell`_ | | | | |- `AAP`_ >=2.4 |- IBM `Open Enterprise SDK for Python`_ | | | | | |- IBM `Z Open Automation Utilities`_ >=1.3.0 | | | @@ -88,8 +88,12 @@ Support Matrix https://www.ibm.com/support/knowledgecenter/en/SSLTBW_2.4.0/com.ibm.zos.v2r4.bpxa400/part1.htm .. _z/OS: https://www.ibm.com/docs/en/zos +.. _Open Enterprise SDK for Python lifecycle: + https://www.ibm.com/support/pages/lifecycle/search?q=5655-PYT .. _5655-PYT: https://www.ibm.com/support/pages/lifecycle/search?q=5655-PYT +.. _Z Open Automation Utilities lifecycle: + https://www.ibm.com/support/pages/lifecycle/search?q=5698-PA1 .. _5698-PA1: https://www.ibm.com/support/pages/lifecycle/search?q=5698-PA1 .. _ansible-core: diff --git a/galaxy.yml b/galaxy.yml index c408424aa..2e9d280dc 100644 --- a/galaxy.yml +++ b/galaxy.yml @@ -6,7 +6,7 @@ namespace: ibm name: ibm_zos_core # The collection version -version: 1.10.0-beta.1 +version: "1.10.0" # Collection README file readme: README.md diff --git a/meta/ibm_zos_core_meta.yml b/meta/ibm_zos_core_meta.yml index 9b4dfde5e..5bc58ec94 100644 --- a/meta/ibm_zos_core_meta.yml +++ b/meta/ibm_zos_core_meta.yml @@ -1,5 +1,5 @@ name: ibm_zos_core -version: "1.10.0-beta.1" +version: "1.10.0" managed_requirements: - name: "IBM Open Enterprise SDK for Python" diff --git a/plugins/modules/zos_copy.py b/plugins/modules/zos_copy.py index 38a1542b5..e9766bd22 100644 --- a/plugins/modules/zos_copy.py +++ b/plugins/modules/zos_copy.py @@ -481,6 +481,15 @@ behavior using module option C(executable) that will signify an executable is being copied into a PDSE with other executables. Mixing data type members with program objects will result in a (FSUM8976,./zos_copy.html) error. + - It is the playbook author or user's responsibility to ensure they have + appropriate authority to the RACF FACILITY resource class. A user is + described as the remote user, configured either for the playbook or + playbook tasks, who can also obtain escalated privileges to execute as + root or another user. + - To use this module, you must define the RACF FACILITY class profile + and allow READ access to RACF FACILITY profile MVS.MCSOPER.ZOAU. If + your system uses a different security product, consult that product's + documentation to configure the required security classes. seealso: - module: zos_fetch - module: zos_data_set diff --git a/plugins/modules/zos_operator.py b/plugins/modules/zos_operator.py index d34781fac..54817936d 100644 --- a/plugins/modules/zos_operator.py +++ b/plugins/modules/zos_operator.py @@ -36,7 +36,7 @@ - If the command contains single-quotations, another set of single quotes must be added. - For example, change the command "...,P='DSN3EPX,-DBC1,S'" to "...,P=''DSN3EPX,-DBC1,S'' ". - If the command contains any special characters ($, &, etc), they must be escaped using - double backslashes like \\\$. + double backslashes like \\\\\\$. - For example, to display job by job name the command would be C(cmd:"\\$dj''HELLO''") type: str required: true diff --git a/plugins/modules/zos_script.py b/plugins/modules/zos_script.py index 580773219..b07853617 100644 --- a/plugins/modules/zos_script.py +++ b/plugins/modules/zos_script.py @@ -168,6 +168,12 @@ zos_script: cmd: ./scripts/PROGRAM removes: /u/user/pgm_input.txt + +- name: Run a shell script on the remote system + zos_script: + cmd: ./scripts/program.sh + executable: /bin/sh + remote_src: true """ RETURN = r""" From c2fc1c3fadfee4c86f6d0173850457a7643a8b9b Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Thu, 27 Jun 2024 23:22:31 -0700 Subject: [PATCH 420/495] Update README with new terminology Signed-off-by: ddimatos <dimatos@gmail.com> --- README.md | 28 ++++++++++++++-------------- 1 file changed, 14 insertions(+), 14 deletions(-) diff --git a/README.md b/README.md index 0146087a0..629ce15b4 100644 --- a/README.md +++ b/README.md @@ -36,7 +36,7 @@ To upgrade the collection to the latest available version, run the following com ansible-galaxy collection install ibm.ibm_zos_core --upgrade ``` -<br/>You can also install a specific version of the collection, for example, if you need to downgrade when something is broken in the latest version (please report an issue in this repository). Use the following syntax to install version 1.0.0: +<br/>You can also install a specific version of the collection, for example, if you need to downgrade for some reason. Use the following syntax to install version 1.0.0: ```sh ansible-galaxy collection install ibm.ibm_zos_core:1.0.0 @@ -123,9 +123,10 @@ environment_vars: ## Testing -All releases, including betas will meet the following test criteria. +All releases, will meet the following test criteria. * 100% success for [Functional](https://github.com/ansible-collections/ibm_zos_core/tree/dev/tests/functional) tests. +* 100% success for [Unit](https://github.com/ansible-collections/ibm_zos_core/tree/dev/tests/unit) tests. * 100% success for [Sanity](https://docs.ansible.com/ansible/latest/dev_guide/testing/sanity/index.html#all-sanity-tests) tests as part of[ansible-test](https://docs.ansible.com/ansible/latest/dev_guide/testing.html#run-sanity-tests). * 100% success for [pyflakes](https://github.com/PyCQA/pyflakes/blob/main/README.rst). * 100% success for [ansible-lint](https://ansible.readthedocs.io/projects/lint/) allowing only false positives. @@ -153,9 +154,8 @@ If you would like to communicate with this community, you can do so through the * GitHub [discussions](https://github.com/ansible-collections/ibm_zos_core/discussions). * GitHub [issues](https://github.com/ansible-collections/ibm_zos_core/issues/new/choose). * [Ansible Forum](https://forum.ansible.com/), please use the `zos` tag to ensure proper awareness. -* Discord [System Z Enthusiasts](https://forum.ansible.com/) room [ansible](https://discord.gg/nKC8F89v). -* Matrix Ansible room [ansible-zos](#ansible-zos:matrix.org). -* Ansible community Matrix [rooms](https://docs.ansible.com/ansible/latest/community/communication.html#general-channels). +* Discord [System Z Enthusiasts](https://discord.gg/Kmy5QaUGbB) room [ansible](https://discord.gg/nHrDdRTC). +* Matrix general usage questions [room](https://matrix.to/#/#users:ansible.com). ## Support @@ -175,15 +175,15 @@ For Galaxy and GitHub users, to see the supported ansible-core versions, review <br/>The collection's changelogs can be reviewed in the following table. -| Version | Status | Release notes | Changelogs | -|----------|----------------------------|---------------|------------| -| 1.11.x | In development (unreleased)| unreleased | unreleased | -| 1.10.x | Current | [Release notes](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html#version-1-10-0) | [Changelogs](https://github.com/ansible-collections/ibm_zos_core/blob/v1.10.0/CHANGELOG.rst) | -| 1.9.x | Released | [Release notes](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html#version-1-9-0) | [Changelogs](https://github.com/ansible-collections/ibm_zos_core/blob/v1.9.0/CHANGELOG.rst) | -| 1.8.x | Released | [Release notes](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html#version-1-8-0) | [Changelogs](https://github.com/ansible-collections/ibm_zos_core/blob/v1.8.0/CHANGELOG.rst) | -| 1.7.x | Released | [Release notes](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html#version-1-7-0) | [Changelogs](https://github.com/ansible-collections/ibm_zos_core/blob/v1.7.0/CHANGELOG.rst) | -| 1.6.x | Released | [Release notes](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html#version-1-6-0) | [Changelogs](https://github.com/ansible-collections/ibm_zos_core/blob/v1.6.0/CHANGELOG.rst) | -| 1.5.x | Released | [Release notes](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html#version-1-5-0) | [Changelogs](https://github.com/ansible-collections/ibm_zos_core/blob/v1.5.0/CHANGELOG.rst) | +| Version | Status | Release notes | Changelogs | +|----------|----------------|---------------|------------| +| 1.11.x | In development | unreleased | unreleased | +| 1.10.x | Current | [Release notes](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html#version-1-10-0) | [Changelogs](https://github.com/ansible-collections/ibm_zos_core/blob/v1.10.0/CHANGELOG.rst) | +| 1.9.x | Released | [Release notes](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html#version-1-9-0) | [Changelogs](https://github.com/ansible-collections/ibm_zos_core/blob/v1.9.0/CHANGELOG.rst) | +| 1.8.x | Released | [Release notes](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html#version-1-8-0) | [Changelogs](https://github.com/ansible-collections/ibm_zos_core/blob/v1.8.0/CHANGELOG.rst) | +| 1.7.x | Released | [Release notes](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html#version-1-7-0) | [Changelogs](https://github.com/ansible-collections/ibm_zos_core/blob/v1.7.0/CHANGELOG.rst) | +| 1.6.x | Released | [Release notes](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html#version-1-6-0) | [Changelogs](https://github.com/ansible-collections/ibm_zos_core/blob/v1.6.0/CHANGELOG.rst) | +| 1.5.x | Released | [Release notes](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html#version-1-5-0) | [Changelogs](https://github.com/ansible-collections/ibm_zos_core/blob/v1.5.0/CHANGELOG.rst) | ## Related Information From 005d8ac8ef09a1aa306a13035081878415ea3e11 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Thu, 27 Jun 2024 23:24:55 -0700 Subject: [PATCH 421/495] Update support matrix page with much additional content Signed-off-by: ddimatos <dimatos@gmail.com> --- .../source/resources/releases_maintenance.rst | 52 +++++++++++++++++-- 1 file changed, 48 insertions(+), 4 deletions(-) diff --git a/docs/source/resources/releases_maintenance.rst b/docs/source/resources/releases_maintenance.rst index 70fa46e03..ac38b756b 100644 --- a/docs/source/resources/releases_maintenance.rst +++ b/docs/source/resources/releases_maintenance.rst @@ -6,16 +6,62 @@ Releases and maintenance ======================== -This table describes the collections release dates, dependency versions and End of Life dates (EOL). +This section describes the collections release dates, dependency versions and End of Life dates (EOL) +and support coverage. The ``ibm_zos_core`` collection is developed and released on a flexible release cycle; generally, each quarter a beta is released followed by a GA version. Occasionally, the cycle may be extended to properly implement and test larger changes before a new release is made available. End of Life for this collection is generally a 2-year cycle unless a dependency reaches EOL prior to the 2 years. -For example, if a collection has released and its dependency reaches EOL 1 year later, then the collection will EOL +For example, if a collection has released and a dependency reaches EOL 1 year later, then the collection will EOL at the same time as the dependency, 1 year later. +Life Cycle Phase +================ + +To encourage the adoption of new features while keeping the high standard of stability inherent, +support is divided into life cycle phases; **full support** which covers the first year +and **maintenance support** which covers the second year. + ++--------------------------+------------------------------------+---------------------------+ +| Life Cycle Phase | Full Support | Maintenance Support | ++==========================+====================================+===========================+ +| Critical security fixes | Yes | Yes | ++--------------------------+------------------------------------+---------------------------+ +| Bug fixes by severity | Critical and high severity issues | Critical severity issues | ++--------------------------+------------------------------------+---------------------------+ + +Severities +========== + +Severity 1 (Critical): +A problem that severely impacts your use of the software in a productionenvironment (such as loss +of production data or in which your production systems are not functioning). The situation halts +your business operations and no procedural workaround exists. + +Severity 2 (high): +A problem where the software is functioning but your use in a production environment is severely +reduced. The situation is causing a high impact to portions of your business operations and no +procedural workaround exists. + +Severity 3 (medium): +A problem that involves partial, non-critical loss of use of the software in a production environment +or development environment. For production environments, there is a medium-to-low impact on your +business, but your business continues to function, including by using a procedural workaround. For +development environments, where the situation is causing your project to no longer continue or +migrate into production. + +Severity 4 (low): +A general usage question, reporting of a documentation error, or recommendation for a future product +enhancement or modification. For production environments, there is low-to-no impact on your business +or the performance or functionality of your system. For development environments, there is +a medium-to-low impact on your business, but your business continues to function, including by +using a procedural workaround. + +Support Matrix +============== + These are the component versions available when the collection was made generally available (GA). The underlying component version is likely to change as it reaches EOL, thus components must be a version that is currently supported. @@ -33,8 +79,6 @@ For IBM product lifecycle information, you can search for products using a produ to view IBM's `Open Enterprise SDK for Python lifecycle`_, search on product ID `5655-PYT`_, and for `Z Open Automation Utilities lifecycle`_, search on product ID `5698-PA1`_. -Support Matrix -============== +---------+----------------------------+---------------------------------------------------+---------------+---------------+ | Version | Controller | Managed Node | GA | End of Life | +=========+============================+===================================================+===============+===============+ From 667f9acffadc6c06786087275ce1420854323f21 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Fri, 28 Jun 2024 00:00:02 -0700 Subject: [PATCH 422/495] Update doc with shell requirement. Signed-off-by: ddimatos <dimatos@gmail.com> --- docs/source/resources/releases_maintenance.rst | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/docs/source/resources/releases_maintenance.rst b/docs/source/resources/releases_maintenance.rst index ac38b756b..39a208ac7 100644 --- a/docs/source/resources/releases_maintenance.rst +++ b/docs/source/resources/releases_maintenance.rst @@ -79,6 +79,14 @@ For IBM product lifecycle information, you can search for products using a produ to view IBM's `Open Enterprise SDK for Python lifecycle`_, search on product ID `5655-PYT`_, and for `Z Open Automation Utilities lifecycle`_, search on product ID `5698-PA1`_. +The z/OS managed node includes several shells, currently the only supported shell is the z/OS Shell located in path +`/bin/sh`_. To configure which shell the ansible control node will use on the target machine, set inventory variable + **ansible_shell_executable**. + +``` +ansible_shell_executable: /bin/sh +``` + +---------+----------------------------+---------------------------------------------------+---------------+---------------+ | Version | Controller | Managed Node | GA | End of Life | +=========+============================+===================================================+===============+===============+ @@ -143,4 +151,6 @@ to view IBM's `Open Enterprise SDK for Python lifecycle`_, search on product ID .. _ansible-core: https://docs.ansible.com/ansible/latest/reference_appendices/release_and_maintenance.html#ansible-core-support-matrix .. _Ansible: - https://docs.ansible.com/ansible/latest/reference_appendices/release_and_maintenance.html#ansible-core-support-matrix \ No newline at end of file + https://docs.ansible.com/ansible/latest/reference_appendices/release_and_maintenance.html#ansible-core-support-matrix +.. _/bin/sh: + https://www.ibm.com/docs/en/zos/3.1.0?topic=descriptions-sh-invoke-shell \ No newline at end of file From be5be5d44cf0f1b060a223fb0463222966822766 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Fri, 28 Jun 2024 00:01:43 -0700 Subject: [PATCH 423/495] Update doc with shell requirement. Signed-off-by: ddimatos <dimatos@gmail.com> --- docs/source/resources/releases_maintenance.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/source/resources/releases_maintenance.rst b/docs/source/resources/releases_maintenance.rst index 39a208ac7..f8db03935 100644 --- a/docs/source/resources/releases_maintenance.rst +++ b/docs/source/resources/releases_maintenance.rst @@ -81,7 +81,7 @@ to view IBM's `Open Enterprise SDK for Python lifecycle`_, search on product ID The z/OS managed node includes several shells, currently the only supported shell is the z/OS Shell located in path `/bin/sh`_. To configure which shell the ansible control node will use on the target machine, set inventory variable - **ansible_shell_executable**. +**ansible_shell_executable**. ``` ansible_shell_executable: /bin/sh From 0fe89a23a01938e72bfdb63f57718622c7580d55 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Fri, 28 Jun 2024 00:04:44 -0700 Subject: [PATCH 424/495] Update doc with shell requirement. Signed-off-by: ddimatos <dimatos@gmail.com> --- docs/source/resources/releases_maintenance.rst | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/source/resources/releases_maintenance.rst b/docs/source/resources/releases_maintenance.rst index f8db03935..2ff48c36c 100644 --- a/docs/source/resources/releases_maintenance.rst +++ b/docs/source/resources/releases_maintenance.rst @@ -83,9 +83,9 @@ The z/OS managed node includes several shells, currently the only supported shel `/bin/sh`_. To configure which shell the ansible control node will use on the target machine, set inventory variable **ansible_shell_executable**. -``` -ansible_shell_executable: /bin/sh -``` +.. code-block:: sh + + ansible_shell_executable: /bin/sh +---------+----------------------------+---------------------------------------------------+---------------+---------------+ | Version | Controller | Managed Node | GA | End of Life | From 73d52792687790bf6a9b49f8344580bf287de0f2 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Fri, 28 Jun 2024 00:05:46 -0700 Subject: [PATCH 425/495] Update doc with shell requirement. Signed-off-by: ddimatos <dimatos@gmail.com> --- docs/source/resources/releases_maintenance.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/source/resources/releases_maintenance.rst b/docs/source/resources/releases_maintenance.rst index 2ff48c36c..7e8753102 100644 --- a/docs/source/resources/releases_maintenance.rst +++ b/docs/source/resources/releases_maintenance.rst @@ -87,6 +87,7 @@ The z/OS managed node includes several shells, currently the only supported shel ansible_shell_executable: /bin/sh + +---------+----------------------------+---------------------------------------------------+---------------+---------------+ | Version | Controller | Managed Node | GA | End of Life | +=========+============================+===================================================+===============+===============+ From 29ca027a16a448f4e28c12257114114a94c9d850 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Fri, 28 Jun 2024 11:57:47 -0700 Subject: [PATCH 426/495] Remove incorrect note for release 1.9.x Signed-off-by: ddimatos <dimatos@gmail.com> --- docs/source/release_notes.rst | 3 --- 1 file changed, 3 deletions(-) diff --git a/docs/source/release_notes.rst b/docs/source/release_notes.rst index 418edafa0..c8c2f6e96 100644 --- a/docs/source/release_notes.rst +++ b/docs/source/release_notes.rst @@ -276,9 +276,6 @@ Several modules have reported UTF-8 decoding errors when interacting with result An undocumented option **size** was defined in module **zos_data_set**, this has been removed to satisfy collection certification, use the intended and documented **space_primary** option. -In the past, choices could be defined in either lower or upper case. Now, only the case that is identified in the docs can be set, -this is so that the collection can continue to maintain certified status. - Availability ------------ From d617ccb3bfe3ac5d734a5660b67d2d34385f97c7 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Fri, 28 Jun 2024 12:13:18 -0700 Subject: [PATCH 427/495] added fragment Signed-off-by: ddimatos <dimatos@gmail.com> --- changelogs/fragments/1552-readme-support-updates.yml | 10 ++++++++++ 1 file changed, 10 insertions(+) create mode 100644 changelogs/fragments/1552-readme-support-updates.yml diff --git a/changelogs/fragments/1552-readme-support-updates.yml b/changelogs/fragments/1552-readme-support-updates.yml new file mode 100644 index 000000000..43611e88e --- /dev/null +++ b/changelogs/fragments/1552-readme-support-updates.yml @@ -0,0 +1,10 @@ +trivial: + - README - updated formatting and how it was written to be clearer. + (https://github.com/ansible-collections/ibm_zos_core/pull/1559). + + - release_notes.rst - removed known issue that was incorrect for v1.9.x. + (https://github.com/ansible-collections/ibm_zos_core/pull/1559). + + - releases_maintenance.rst - Added new lifecycle stages and supported + shell types. + (https://github.com/ansible-collections/ibm_zos_core/pull/1559). From c7f71ae8b6e719f20e53e2a8dff7cf63b93e2d18 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Fri, 28 Jun 2024 14:55:54 -0700 Subject: [PATCH 428/495] Update serverities Signed-off-by: ddimatos <dimatos@gmail.com> --- docs/source/resources/releases_maintenance.rst | 18 ++++++++---------- 1 file changed, 8 insertions(+), 10 deletions(-) diff --git a/docs/source/resources/releases_maintenance.rst b/docs/source/resources/releases_maintenance.rst index 7e8753102..99a24c539 100644 --- a/docs/source/resources/releases_maintenance.rst +++ b/docs/source/resources/releases_maintenance.rst @@ -22,7 +22,7 @@ Life Cycle Phase To encourage the adoption of new features while keeping the high standard of stability inherent, support is divided into life cycle phases; **full support** which covers the first year -and **maintenance support** which covers the second year. +and **maintenance support** which covers the second year. +--------------------------+------------------------------------+---------------------------+ | Life Cycle Phase | Full Support | Maintenance Support | @@ -36,7 +36,7 @@ Severities ========== Severity 1 (Critical): -A problem that severely impacts your use of the software in a productionenvironment (such as loss +A problem that severely impacts your use of the software in a production environment (such as loss of production data or in which your production systems are not functioning). The situation halts your business operations and no procedural workaround exists. @@ -47,17 +47,15 @@ procedural workaround exists. Severity 3 (medium): A problem that involves partial, non-critical loss of use of the software in a production environment -or development environment. For production environments, there is a medium-to-low impact on your -business, but your business continues to function, including by using a procedural workaround. For -development environments, where the situation is causing your project to no longer continue or -migrate into production. +or development environment and your business continues to function, including by using a procedural +workaround. Severity 4 (low): A general usage question, reporting of a documentation error, or recommendation for a future product -enhancement or modification. For production environments, there is low-to-no impact on your business -or the performance or functionality of your system. For development environments, there is -a medium-to-low impact on your business, but your business continues to function, including by -using a procedural workaround. +enhancement or modification. + +Severities 3 and 4 are generally addressed in subsequent releases to ensure a high standard of stability +remains available for production environments. Support Matrix ============== From db33f97240dde1982a66ed4268619458044f8480 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Fri, 28 Jun 2024 14:57:10 -0700 Subject: [PATCH 429/495] Update serverities Signed-off-by: ddimatos <dimatos@gmail.com> --- docs/source/resources/releases_maintenance.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/source/resources/releases_maintenance.rst b/docs/source/resources/releases_maintenance.rst index 99a24c539..391456769 100644 --- a/docs/source/resources/releases_maintenance.rst +++ b/docs/source/resources/releases_maintenance.rst @@ -22,7 +22,7 @@ Life Cycle Phase To encourage the adoption of new features while keeping the high standard of stability inherent, support is divided into life cycle phases; **full support** which covers the first year -and **maintenance support** which covers the second year. +and **maintenance support** which covers the second year. +--------------------------+------------------------------------+---------------------------+ | Life Cycle Phase | Full Support | Maintenance Support | From 0a5ef1603659d97ae96a3f4e079c125c1da3b191 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Tue, 2 Jul 2024 15:29:17 -0600 Subject: [PATCH 430/495] [Enhancement] [zos_job_submit, zos_script, zos_unarchive] Remove use of deep copy when calling action modules (#1561) * add workaround fix * Use correct action loader * Changed action plugin * Added changelog fragment * Added unarchive docs * Added changelog * Added changelog * Fixed sanity issues * fixed pyflakes --- .../fragments/1561-remove_deep_copy.yml | 10 ++++++++++ plugins/action/zos_job_submit.py | 20 +++++++++---------- plugins/action/zos_script.py | 17 +++++++--------- plugins/action/zos_unarchive.py | 20 +++++++++---------- plugins/modules/zos_job_submit.py | 1 - plugins/modules/zos_unarchive.py | 1 + 6 files changed, 38 insertions(+), 31 deletions(-) create mode 100644 changelogs/fragments/1561-remove_deep_copy.yml diff --git a/changelogs/fragments/1561-remove_deep_copy.yml b/changelogs/fragments/1561-remove_deep_copy.yml new file mode 100644 index 000000000..b6cdd4c75 --- /dev/null +++ b/changelogs/fragments/1561-remove_deep_copy.yml @@ -0,0 +1,10 @@ +minor_changes: + - zos_job_submit - Improved the copy to remote mechanic to avoid using deepcopy that could + result in failure for some systems. + (https://github.com/ansible-collections/ibm_zos_core/pull/1561). + - zos_script - Improved the copy to remote mechanic to avoid using deepcopy that could + result in failure for some systems. + (https://github.com/ansible-collections/ibm_zos_core/pull/1561). + - zos_unarchive - Improved the copy to remote mechanic to avoid using deepcopy that could + result in failure for some systems. + (https://github.com/ansible-collections/ibm_zos_core/pull/1561). \ No newline at end of file diff --git a/plugins/action/zos_job_submit.py b/plugins/action/zos_job_submit.py index 67047b648..20c8e28db 100644 --- a/plugins/action/zos_job_submit.py +++ b/plugins/action/zos_job_submit.py @@ -20,10 +20,8 @@ from ansible.module_utils.common.text.converters import to_bytes, to_text from ansible.module_utils.parsing.convert_bool import boolean import os -import copy from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import template -from ansible_collections.ibm.ibm_zos_core.plugins.action.zos_copy import ActionModule as ZosCopyActionModule display = Display() @@ -151,15 +149,17 @@ def run(self, tmp=None, task_vars=None): remote_src=True, ) ) - copy_task = copy.deepcopy(self._task) + copy_task = self._task.copy() copy_task.args = copy_module_args - zos_copy_action_module = ZosCopyActionModule(task=copy_task, - connection=self._connection, - play_context=self._play_context, - loader=self._loader, - templar=self._templar, - shared_loader_obj=self._shared_loader_obj) - result.update(zos_copy_action_module.run(task_vars=task_vars)) + copy_action = self._shared_loader_obj.action_loader.get( + 'ibm.ibm_zos_core.zos_copy', + task=copy_task, + connection=self._connection, + play_context=self._play_context, + loader=self._loader, + templar=self._templar, + shared_loader_obj=self._shared_loader_obj) + result.update(copy_action.run(task_vars=task_vars)) if result.get("msg") is None: module_args["src"] = dest_path result.update( diff --git a/plugins/action/zos_script.py b/plugins/action/zos_script.py index 36345810b..e481052a5 100644 --- a/plugins/action/zos_script.py +++ b/plugins/action/zos_script.py @@ -12,13 +12,11 @@ from __future__ import absolute_import, division, print_function __metaclass__ = type -import copy import shlex from os import path from ansible.plugins.action import ActionBase from ansible.module_utils.parsing.convert_bool import boolean -from ansible_collections.ibm.ibm_zos_core.plugins.action.zos_copy import ActionModule as ZosCopyActionModule from ansible.utils.display import Display display = Display() @@ -90,7 +88,7 @@ def run(self, tmp=None, task_vars=None): tempfile_path = tempfile_result.get('path') # Letting zos_copy handle the transfer of the script. - zos_copy_args = dict( + copy_module_args = dict( src=script_path, dest=tempfile_path, force=True, @@ -99,18 +97,17 @@ def run(self, tmp=None, task_vars=None): use_template=module_args.get('use_template', False), template_parameters=module_args.get('template_parameters', dict()) ) - copy_task = copy.deepcopy(self._task) - copy_task.args = zos_copy_args - zos_copy_action_plugin = ZosCopyActionModule( + copy_task = self._task.copy() + copy_task.args = copy_module_args + copy_action = self._shared_loader_obj.action_loader.get( + 'ibm.ibm_zos_core.zos_copy', task=copy_task, connection=self._connection, play_context=self._play_context, loader=self._loader, templar=self._templar, - shared_loader_obj=self._shared_loader_obj - ) - - zos_copy_result = zos_copy_action_plugin.run(task_vars=task_vars) + shared_loader_obj=self._shared_loader_obj) + zos_copy_result = copy_action.run(task_vars=task_vars) result.update(zos_copy_result) if not result.get("changed") or result.get("failed"): diff --git a/plugins/action/zos_unarchive.py b/plugins/action/zos_unarchive.py index b0a1fa466..529346b6c 100644 --- a/plugins/action/zos_unarchive.py +++ b/plugins/action/zos_unarchive.py @@ -17,8 +17,6 @@ from ansible.utils.display import Display from ansible.module_utils.parsing.convert_bool import boolean import os -import copy -from ansible_collections.ibm.ibm_zos_core.plugins.action.zos_copy import ActionModule as ZosCopyActionModule USS_SUPPORTED_FORMATS = ['tar', 'zip', 'bz2', 'pax', 'gz'] @@ -102,15 +100,17 @@ def run(self, tmp=None, task_vars=None): is_binary=True, ) ) - copy_task = copy.deepcopy(self._task) + copy_task = self._task.copy() copy_task.args = copy_module_args - zos_copy_action_module = ZosCopyActionModule(task=copy_task, - connection=self._connection, - play_context=self._play_context, - loader=self._loader, - templar=self._templar, - shared_loader_obj=self._shared_loader_obj) - result.update(zos_copy_action_module.run(task_vars=task_vars)) + copy_action = self._shared_loader_obj.action_loader.get( + 'ibm.ibm_zos_core.zos_copy', + task=copy_task, + connection=self._connection, + play_context=self._play_context, + loader=self._loader, + templar=self._templar, + shared_loader_obj=self._shared_loader_obj) + result.update(copy_action.run(task_vars=task_vars)) display.vvv(u"Copy result {0}".format(result), host=self._play_context.remote_addr) if result.get("msg") is None: module_args["src"] = dest diff --git a/plugins/modules/zos_job_submit.py b/plugins/modules/zos_job_submit.py index ddbb069ff..e6e191060 100644 --- a/plugins/modules/zos_job_submit.py +++ b/plugins/modules/zos_job_submit.py @@ -710,7 +710,6 @@ def submit_src_jcl(module, src, src_name=None, timeout=0, is_unix=True, start_ti "fetch_max_retries": timeout, } - present = False duration = 0 job_submitted = None result = {} diff --git a/plugins/modules/zos_unarchive.py b/plugins/modules/zos_unarchive.py index 8a5a53bc9..43312f449 100644 --- a/plugins/modules/zos_unarchive.py +++ b/plugins/modules/zos_unarchive.py @@ -35,6 +35,7 @@ - The remote absolute path or data set of the archive to be uncompressed. - I(src) can be a USS file or MVS data set name. - USS file paths should be absolute paths. + - MVS data sets supported types are C(SEQ), C(PDS), C(PDSE). - GDS relative names are supported C(e.g. USER.GDG(-1)). type: str required: true From 248ba8700bc9cda6ee5d9563694188470101167e Mon Sep 17 00:00:00 2001 From: Ketan Kelkar <ktnklkr@gmail.com> Date: Mon, 8 Jul 2024 21:36:44 -0700 Subject: [PATCH 431/495] [module_utils/data_set] surface errors in `data_set_cataloged` function (#1535) * raise exception when mvscmd fails; add supporting unit tests Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add changelog fragment Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * re-categorize changelog fragment Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add full message text for rc=4 stdout Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update data_set_cataloged function to resolve gdg/gds to absolute names before passing them into listcat Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update data_set_cataloged function return false if GDSNameResolveError is caught Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename changelog fragment file Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * Update test_zos_blockinfile_func.py upper case 'FILE*' in c pgm * update data set func test case to use shell commands instead of zos_copy Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add escape chars to c_pgm Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * re-add cleanup steps, remove commented out call to zos_copy Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> --- ...l-data_set-function-data_set_cataloged.yml | 5 + plugins/module_utils/data_set.py | 28 +++++ .../modules/test_zos_blockinfile_func.py | 2 +- .../modules/test_zos_data_set_func.py | 15 ++- tests/unit/test_module_utils_data_set_unit.py | 117 ++++++++++++++++++ 5 files changed, 158 insertions(+), 9 deletions(-) create mode 100644 changelogs/fragments/1535-raise-error-in-module-util-data_set-function-data_set_cataloged.yml create mode 100644 tests/unit/test_module_utils_data_set_unit.py diff --git a/changelogs/fragments/1535-raise-error-in-module-util-data_set-function-data_set_cataloged.yml b/changelogs/fragments/1535-raise-error-in-module-util-data_set-function-data_set_cataloged.yml new file mode 100644 index 000000000..b1501b050 --- /dev/null +++ b/changelogs/fragments/1535-raise-error-in-module-util-data_set-function-data_set_cataloged.yml @@ -0,0 +1,5 @@ +bugfixes: + - module_util/data_set.py - DataSet.data_set_cataloged function previously only returned + True or False, but failed to account for exceptions which occurred during the LISTCAT. + The fix now raises an MVSCmdExecError if the return code from LISTCAT is too high. + (https://github.com/ansible-collections/ibm_zos_core/pull/1535). diff --git a/plugins/module_utils/data_set.py b/plugins/module_utils/data_set.py index 294debe64..568fbe4a6 100644 --- a/plugins/module_utils/data_set.py +++ b/plugins/module_utils/data_set.py @@ -352,7 +352,19 @@ def data_set_cataloged(name, volumes=None): Returns: bool -- If data is is cataloged. + + Raise: + MVSCmdExecError: When the call to IDCAMS fails with rc greater than 4. """ + + # Resolve GDS names before passing it into listcat + if DataSet.is_gds_relative_name(name): + try: + name = DataSet.resolve_gds_absolute_name(name) + except GDSNameResolveError: + # if GDS name cannot be resolved, it's not in the catalog. + return False + # We need to unescape because this calls to system can handle # special characters just fine. name = name.upper().replace("\\", '') @@ -363,6 +375,13 @@ def data_set_cataloged(name, volumes=None): "mvscmdauth --pgm=idcams --sysprint=* --sysin=stdin", data=stdin ) + # The above 'listcat entries' command to idcams returns: + # rc=0 if data set found in catalog + # rc=4 if data set NOT found in catalog + # rc>4 for other errors + if rc > 4: + raise MVSCmdExecError(rc, stdout, stderr) + if volumes: cataloged_volume_list = DataSet.data_set_cataloged_volume_list(name) or [] if bool(set(volumes) & set(cataloged_volume_list)): @@ -380,6 +399,8 @@ def data_set_cataloged_volume_list(name): name (str) -- The data set name to check if cataloged. Returns: list{str} -- A list of volumes where the dataset is cataloged. + Raise: + MVSCmdExecError: When the call to IDCAMS fails with rc greater than 4. """ name = name.upper() module = AnsibleModuleHelper(argument_spec={}) @@ -387,6 +408,13 @@ def data_set_cataloged_volume_list(name): rc, stdout, stderr = module.run_command( "mvscmdauth --pgm=idcams --sysprint=* --sysin=stdin", data=stdin ) + # The above 'listcat entries all' command to idcams returns: + # rc=0 if data set found in catalog + # rc=4 if data set NOT found in catalog + # rc>4 for other errors + if rc > 4: + raise MVSCmdExecError(rc, stdout, stderr) + delimiter = 'VOLSER------------' arr = stdout.split(delimiter)[1:] # throw away header diff --git a/tests/functional/modules/test_zos_blockinfile_func.py b/tests/functional/modules/test_zos_blockinfile_func.py index 0bc40866d..2f9e6d3c2 100644 --- a/tests/functional/modules/test_zos_blockinfile_func.py +++ b/tests/functional/modules/test_zos_blockinfile_func.py @@ -30,7 +30,7 @@ { char dsname[ strlen(argv[1]) + 4]; sprintf(dsname, \\\"//'%s'\\\", argv[1]); - file* member; + FILE* member; member = fopen(dsname, \\\"rb,type=record\\\"); sleep(300); fclose(member); diff --git a/tests/functional/modules/test_zos_data_set_func.py b/tests/functional/modules/test_zos_data_set_func.py index 16e3c0051..495e53f9e 100644 --- a/tests/functional/modules/test_zos_data_set_func.py +++ b/tests/functional/modules/test_zos_data_set_func.py @@ -514,9 +514,9 @@ def test_batch_data_set_and_member_creation(ansible_zos_module): int main(int argc, char** argv) { char dsname[ strlen(argv[1]) + 4]; - sprintf(dsname, "//'%s'", argv[1]); + sprintf(dsname, \\\"//'%s'\\\", argv[1]); FILE* member; - member = fopen(dsname, "rb,type=record"); + member = fopen(dsname, \\\"rb,type=record\\\"); sleep(300); fclose(member); return 0; @@ -581,12 +581,11 @@ def test_data_member_force_delete(ansible_zos_module): for result in results.contacted.values(): assert result.get("changed") is True - # copy/compile c program and copy jcl to hold data set lock for n seconds in background(&) - hosts.all.zos_copy(content=c_pgm, dest='/tmp/disp_shr/pdse-lock.c', force=True) - hosts.all.zos_copy( - content=call_c_jcl.format(default_data_set_name, member_1), - dest='/tmp/disp_shr/call_c_pgm.jcl', - force=True + hosts.all.file(path="/tmp/disp_shr/", state="directory") + hosts.all.shell(cmd=f"echo \"{c_pgm}\" > /tmp/disp_shr/pdse-lock.c") + hosts.all.shell( + cmd=f"echo \"{call_c_jcl.format(default_data_set_name, member_1)}\""+ + " > /tmp/disp_shr/call_c_pgm.jcl" ) hosts.all.shell(cmd="xlc -o pdse-lock pdse-lock.c", chdir="/tmp/disp_shr/") diff --git a/tests/unit/test_module_utils_data_set_unit.py b/tests/unit/test_module_utils_data_set_unit.py new file mode 100644 index 000000000..1d5878766 --- /dev/null +++ b/tests/unit/test_module_utils_data_set_unit.py @@ -0,0 +1,117 @@ +# -*- coding: utf-8 -*- + +# Copyright (c) IBM Corporation 2024 +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +import pytest + +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.data_set import MVSCmdExecError + +IMPORT_NAME = "ansible_collections.ibm.ibm_zos_core.plugins.module_utils.data_set" + + +class DummyModule(object): + """Used in place of Ansible's module + so we can easily mock the desired behavior.""" + + def __init__(self, rc=0, stdout="", stderr=""): + self.rc = rc + self.stdout = stdout + self.stderr = stderr + + def run_command(self, *args, **kwargs): + return (self.rc, self.stdout, self.stderr) + + +# Unit tests are intended to exercise code paths (not test for functionality). + +# These unit tests are NOT run on any z/OS system, so hard-coded data set names will not matter. +data_set_name = "USER.PRIVATE.TESTDS" + +stdout_ds_in_catatlog = """0 + LISTCAT ENTRIES('{0}') +0NONVSAM ------- {0} + IN-CAT --- CATALOG.SVPLEX9.MASTER +1IDCAMS SYSTEM SERVICES """.format(data_set_name) + +stdout_ds_not_in_catalog=""" +1IDCAMS SYSTEM SERVICES TIME: 13:34:18 06/06/24 PAGE 1 +0 + LISTCAT ENTRIES('{0}') +0IDC3012I ENTRY {0} NOT FOUND + IDC3009I ** VSAM CATALOG RETURN CODE IS 8 - REASON CODE IS IGG0CLEG-42 + IDC1566I ** {0} NOT LISTED +1IDCAMS SYSTEM SERVICES TIME: 13:34:18 06/06/24 PAGE 2 +0 THE NUMBER OF ENTRIES PROCESSED WAS: +0 AIX -------------------0 + ALIAS -----------------0 + CLUSTER ---------------0 + DATA ------------------0 + GDG -------------------0 + INDEX -----------------0 + NONVSAM ---------------0 + PAGESPACE -------------0 + PATH ------------------0 + SPACE -----------------0 + USERCATALOG -----------0 + TAPELIBRARY -----------0 + TAPEVOLUME ------------0 + TOTAL -----------------0 +0 THE NUMBER OF PROTECTED ENTRIES SUPPRESSED WAS 0 +0IDC0001I FUNCTION COMPLETED, HIGHEST CONDITION CODE WAS 4 +0 +0IDC0002I IDCAMS PROCESSING COMPLETE. MAXIMUM CONDITION CODE WAS 4 +""".format(data_set_name) + +# passing in a lowercase data set causes idcams to fail. +# this behavior isn't possible via ansible because we upper-case the input. +stdout_mvscmd_failed="""0 + LISTCAT ENTRIES('...................') +0IDC3203I ITEM '...................' DOES NOT ADHERE TO RESTRICTIONS +0IDC3202I ABOVE TEXT BYPASSED UNTIL NEXT COMMAND. CONDITION CODE IS 12 +0 +0IDC0002I IDCAMS PROCESSING COMPLETE. MAXIMUM CONDITION CODE WAS 12""" + + +@pytest.mark.parametrize( + ("rc, stdout, expected_return, expected_exception_type"), + [ + (0, stdout_ds_in_catatlog, True, None), + (4, stdout_ds_not_in_catalog, False, None), + (12, stdout_mvscmd_failed, None, MVSCmdExecError) + ], +) +def test_dataset_cataloged_unit(zos_import_mocker, rc, stdout, expected_return, expected_exception_type): + mocker, importer = zos_import_mocker + zos_module_util_data_set = importer(IMPORT_NAME) + mocker.patch( + "{0}.AnsibleModuleHelper".format(IMPORT_NAME), + create=True, + return_value=DummyModule(rc=rc, stdout=stdout), + ) + + + results = None + error_raised = False + try: + results = zos_module_util_data_set.DataSet.data_set_cataloged(data_set_name) + except Exception as e: + error_raised = True + assert type(e) == expected_exception_type + finally: + if not expected_exception_type: + assert not error_raised + assert results == expected_return \ No newline at end of file From 0d9faa6a83d708f8558b10dfec40bca07cfa8f81 Mon Sep 17 00:00:00 2001 From: Ivan Moreno <iamorenosoto@gmail.com> Date: Tue, 9 Jul 2024 11:31:11 -0600 Subject: [PATCH 432/495] [Enabler] [zos_copy] Add GDG/GDS support to zos_copy (#1564) * Add use of new data set class * Add tests for GDS source * Add support for a GDS destination * Add more GDS tests * Fix name resolution when allocating a new GDS * Fix GDS validations * Add more type validations * Remove unnecessary name resolution * Add support to copy a GDG to USS * Add copy of complete GDGs * Add support for a GDS as backup * Add special symbols test and fix backup ones * Add changelog fragment * Update docs * Add GDG attributes for dest_data_set * Update module RST * Fix pep8 issue * Fix backups without backup names * Fix non-GDG dest dataset allocation * Fix GDS allocation --- .../fragments/1564-zos_copy_gdg_support.yml | 4 + docs/source/modules/zos_copy.rst | 89 ++- plugins/module_utils/copy.py | 52 ++ plugins/module_utils/data_set.py | 67 ++ plugins/modules/zos_copy.py | 375 ++++++++++- .../functional/modules/test_zos_copy_func.py | 614 +++++++++++++++++- 6 files changed, 1165 insertions(+), 36 deletions(-) create mode 100644 changelogs/fragments/1564-zos_copy_gdg_support.yml diff --git a/changelogs/fragments/1564-zos_copy_gdg_support.yml b/changelogs/fragments/1564-zos_copy_gdg_support.yml new file mode 100644 index 000000000..b9f908bdb --- /dev/null +++ b/changelogs/fragments/1564-zos_copy_gdg_support.yml @@ -0,0 +1,4 @@ +minor_changes: + - zos_copy - add support for copying generation data sets (GDS) and + generation data groups (GDG), as well as using a GDS for backup. + (https://github.com/ansible-collections/ibm_zos_core/pull/1564). \ No newline at end of file diff --git a/docs/source/modules/zos_copy.rst b/docs/source/modules/zos_copy.rst index b63b39562..69639e39a 100644 --- a/docs/source/modules/zos_copy.rst +++ b/docs/source/modules/zos_copy.rst @@ -67,6 +67,8 @@ backup_name If \ :literal:`dest`\ is a data set member and \ :literal:`backup\_name`\ is not provided, the data set member will be backed up to the same partitioned data set with a randomly generated member name. + If \ :emphasis:`backup\_name`\ is a generation data set (GDS), it must be a relative positive name (for example, \ :literal:`HLQ.USER.GDG(+1)`\ ). + | **required**: False | **type**: str @@ -105,6 +107,10 @@ dest When \ :literal:`dest`\ is and existing VSAM (LDS), then source must be an LDS. The VSAM (LDS) will be deleted and recreated following the process outlined in the \ :literal:`volume`\ option. + \ :literal:`dest`\ can be a previously allocated generation data set (GDS) or a new GDS. + + When \ :literal:`dest`\ is a generation data group (GDG), \ :literal:`src`\ must be a GDG too. The copy will allocate successive new generations in \ :literal:`dest`\ , the module will verify it has enough available generations before starting the copy operations. + When \ :literal:`dest`\ is a data set, you can override storage management rules by specifying \ :literal:`volume`\ if the storage class being used has GUARANTEED\_SPACE=YES specified, otherwise, the allocation will fail. See \ :literal:`volume`\ for more volume related processes. | **required**: True @@ -298,6 +304,10 @@ src If \ :literal:`src`\ is a VSAM data set, \ :literal:`dest`\ must also be a VSAM. + If \ :literal:`src`\ is a generation data set (GDS), it must be a previously allocated one. + + If \ :literal:`src`\ is a generation data group (GDG), \ :literal:`dest`\ can be another GDG or a USS directory. + Wildcards can be used to copy multiple PDS/PDSE members to another PDS/PDSE. Required unless using \ :literal:`content`\ . @@ -334,6 +344,8 @@ volume dest_data_set Data set attributes to customize a \ :literal:`dest`\ data set to be copied into. + Some attributes only apply when \ :literal:`dest`\ is a generation data group (GDG). + | **required**: False | **type**: dict @@ -343,7 +355,7 @@ dest_data_set | **required**: True | **type**: str - | **choices**: ksds, esds, rrds, lds, seq, pds, pdse, member, basic, library + | **choices**: ksds, esds, rrds, lds, seq, pds, pdse, member, basic, library, gdg space_primary @@ -470,6 +482,68 @@ dest_data_set | **type**: str + limit + Sets the \ :emphasis:`limit`\ attribute for a GDG. + + Specifies the maximum number, from 1 to 255(up to 999 if extended), of generations that can be associated with the GDG being defined. + + \ :emphasis:`limit`\ is required when \ :emphasis:`type=gdg`\ . + + | **required**: False + | **type**: int + + + empty + Sets the \ :emphasis:`empty`\ attribute for a GDG. + + If false, removes only the oldest GDS entry when a new GDS is created that causes GDG limit to be exceeded. + + If true, removes all GDS entries from a GDG base when a new GDS is created that causes the GDG limit to be exceeded. + + | **required**: False + | **type**: bool + + + scratch + Sets the \ :emphasis:`scratch`\ attribute for a GDG. + + Specifies what action is to be taken for a generation data set located on disk volumes when the data set is uncataloged from the GDG base as a result of EMPTY/NOEMPTY processing. + + | **required**: False + | **type**: bool + + + purge + Sets the \ :emphasis:`purge`\ attribute for a GDG. + + Specifies whether to override expiration dates when a generation data set (GDS) is rolled off and the \ :literal:`scratch`\ option is set. + + | **required**: False + | **type**: bool + + + extended + Sets the \ :emphasis:`extended`\ attribute for a GDG. + + If false, allow up to 255 generation data sets (GDSs) to be associated with the GDG. + + If true, allow up to 999 generation data sets (GDS) to be associated with the GDG. + + | **required**: False + | **type**: bool + + + fifo + Sets the \ :emphasis:`fifo`\ attribute for a GDG. + + If false, the order is the newest GDS defined to the oldest GDS. This is the default value. + + If true, the order is the oldest GDS defined to the newest GDS. + + | **required**: False + | **type**: bool + + use_template Whether the module should treat \ :literal:`src`\ as a Jinja2 template and render it before continuing with the rest of the module. @@ -794,6 +868,19 @@ Examples dest: HLQ.PRINT.NEW asa_text: true + - name: Copy a file to a new generation data set. + zos_copy: + src: /path/to/uss/src + dest: HLQ.TEST.GDG(+1) + remote_src: true + + - name: Copy a local file and take a backup of the existing file with a GDS. + zos_copy: + src: /path/to/local/file + dest: /path/to/dest + backup: true + backup_name: HLQ.BACKUP.GDG(+1) + diff --git a/plugins/module_utils/copy.py b/plugins/module_utils/copy.py index 13559258e..8172ca0bf 100644 --- a/plugins/module_utils/copy.py +++ b/plugins/module_utils/copy.py @@ -15,6 +15,8 @@ __metaclass__ = type +import traceback +from os import path from ansible.module_utils.six import PY3 from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.ansible_module import ( AnsibleModuleHelper, @@ -25,12 +27,19 @@ from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.mvs_cmd import ( ikjeft01 ) +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import \ + ZOAUImportError if PY3: from shlex import quote else: from pipes import quote +try: + from zoautil_py import datasets, gdgs +except Exception: + datasets = ZOAUImportError(traceback.format_exc()) + gdgs = ZOAUImportError(traceback.format_exc()) REPRO = """ REPRO INDATASET({}) - OUTDATASET({}) REPLACE """ @@ -216,6 +225,49 @@ def copy_pds2uss(src, dest, is_binary=False, asa_text=False): return rc, out, err +def copy_gdg2uss(src, dest, is_binary=False, asa_text=False): + """Copy a whole GDG to a USS path. + + Parameters + ---------- + src : str + The MVS data set to be copied, it must be a generation data group. + dest : str + The destination USS path. + + Keyword Parameters + ------------------ + is_binary : bool + Whether the file to be copied contains binary data. + asa_text : bool + Whether the file to be copied contains ASA control + characters. + + Returns + ------- + bool + True if all copies were successful, False otherwise. + """ + src_view = gdgs.GenerationDataGroupView(src) + generations = src_view.generations() + + copy_args = { + "options": "" + } + + if is_binary or asa_text: + copy_args["options"] = "-B" + + for gds in generations: + dest_file = path.join(dest, gds.name) + rc = datasets.copy(gds.name, dest_file, **copy_args) + + if rc != 0: + return False + + return True + + def copy_uss2uss_binary(src, dest): """Copy a USS file to a USS location in binary mode. diff --git a/plugins/module_utils/data_set.py b/plugins/module_utils/data_set.py index 568fbe4a6..60cf56061 100644 --- a/plugins/module_utils/data_set.py +++ b/plugins/module_utils/data_set.py @@ -343,6 +343,73 @@ def allocate_model_data_set(ds_name, model, executable=False, asa_text=False, vo if rc != 0: raise MVSCmdExecError(rc, out, err) + @staticmethod + def allocate_gds_model_data_set(ds_name, model, executable=False, asa_text=False, vol=None): + """ + Allocates a new current generation of a generation data group using a model + data set to set its attributes. + + Parameters + ---------- + ds_name : str + Name of the data set that will be allocated. It must be a GDS + relative name. + model : str + The name of the data set whose allocation parameters + should be used to allocate the new data set. + executable : bool, optional + Whether the new data set should support executables. + asa_text : bool, optional + Whether the new data set should support ASA control + characters (have record format FBA). + vol : str, optional + The volume where the new data set should be allocated. + + Returns + ------- + str + Absolute name of the newly allocated generation data set. + + Raises + ------ + DatasetCreateError + When the allocation fails. + """ + model_attributes = datasets.list_datasets(model)[0] + dataset_type = model_attributes.organization + record_format = model_attributes.record_format + + if executable: + dataset_type = "library" + elif dataset_type in DataSet.MVS_SEQ: + dataset_type = "seq" + elif dataset_type in DataSet.MVS_PARTITIONED: + dataset_type = "pdse" + + if asa_text: + record_format = "fba" + elif executable: + record_format = "u" + + data_set_object = MVSDataSet( + name=ds_name, + data_set_type=dataset_type, + state="absent", + record_format=record_format, + volumes=vol, + block_size=model_attributes.block_size, + record_length=model_attributes.record_length, + space_primary=model_attributes.total_space, + space_type='' + ) + + success = data_set_object.ensure_present() + if not success: + raise DatasetCreateError( + data_set=ds_name, + msg=f"Error while trying to allocate {ds_name}." + ) + @staticmethod def data_set_cataloged(name, volumes=None): """Determine if a data set is in catalog. diff --git a/plugins/modules/zos_copy.py b/plugins/modules/zos_copy.py index e9766bd22..ebd6eb722 100644 --- a/plugins/modules/zos_copy.py +++ b/plugins/modules/zos_copy.py @@ -76,6 +76,8 @@ - If C(dest) is a data set member and C(backup_name) is not provided, the data set member will be backed up to the same partitioned data set with a randomly generated member name. + - If I(backup_name) is a generation data set (GDS), it must be a relative + positive name (for example, V(HLQ.USER.GDG(+1\))). required: false type: str content: @@ -122,6 +124,10 @@ - When C(dest) is and existing VSAM (LDS), then source must be an LDS. The VSAM (LDS) will be deleted and recreated following the process outlined in the C(volume) option. + - C(dest) can be a previously allocated generation data set (GDS) or a new GDS. + - When C(dest) is a generation data group (GDG), C(src) must be a GDG too. The copy + will allocate successive new generations in C(dest), the module will verify + it has enough available generations before starting the copy operations. - When C(dest) is a data set, you can override storage management rules by specifying C(volume) if the storage class being used has GUARANTEED_SPACE=YES specified, otherwise, the allocation will @@ -308,6 +314,9 @@ - If C(src) is a directory or a file, file names will be truncated and/or modified to ensure a valid name for a data set or member. - If C(src) is a VSAM data set, C(dest) must also be a VSAM. + - If C(src) is a generation data set (GDS), it must be a previously allocated one. + - If C(src) is a generation data group (GDG), C(dest) can be another GDG or a USS + directory. - Wildcards can be used to copy multiple PDS/PDSE members to another PDS/PDSE. - Required unless using C(content). @@ -338,6 +347,7 @@ dest_data_set: description: - Data set attributes to customize a C(dest) data set to be copied into. + - Some attributes only apply when C(dest) is a generation data group (GDG). required: false type: dict suboptions: @@ -357,6 +367,7 @@ - member - basic - library + - gdg space_primary: description: - If the destination I(dest) data set does not exist , this sets the @@ -452,6 +463,55 @@ - Note that all non-linear VSAM datasets are SMS-managed. type: str required: false + limit: + description: + - Sets the I(limit) attribute for a GDG. + - Specifies the maximum number, from 1 to 255(up to 999 if extended), of + generations that can be associated with the GDG being defined. + - I(limit) is required when I(type=gdg). + type: int + required: false + empty: + description: + - Sets the I(empty) attribute for a GDG. + - If false, removes only the oldest GDS entry when a new GDS is created + that causes GDG limit to be exceeded. + - If true, removes all GDS entries from a GDG base when a new GDS is + created that causes the GDG limit to be exceeded. + type: bool + required: false + scratch: + description: + - Sets the I(scratch) attribute for a GDG. + - Specifies what action is to be taken for a generation data set located + on disk volumes when the data set is uncataloged from the GDG base as + a result of EMPTY/NOEMPTY processing. + type: bool + required: false + purge: + description: + - Sets the I(purge) attribute for a GDG. + - Specifies whether to override expiration dates when a generation data + set (GDS) is rolled off and the C(scratch) option is set. + type: bool + required: false + extended: + description: + - Sets the I(extended) attribute for a GDG. + - If false, allow up to 255 generation data sets (GDSs) to be associated + with the GDG. + - If true, allow up to 999 generation data sets (GDS) to be associated + with the GDG. + type: bool + required: false + fifo: + description: + - Sets the I(fifo) attribute for a GDG. + - If false, the order is the newest GDS defined to the oldest GDS. + This is the default value. + - If true, the order is the oldest GDS defined to the newest GDS. + type: bool + required: false extends_documentation_fragment: - ibm.ibm_zos_core.template @@ -679,6 +739,19 @@ src: ./files/print.txt dest: HLQ.PRINT.NEW asa_text: true + +- name: Copy a file to a new generation data set. + zos_copy: + src: /path/to/uss/src + dest: HLQ.TEST.GDG(+1) + remote_src: true + +- name: Copy a local file and take a backup of the existing file with a GDS. + zos_copy: + src: /path/to/local/file + dest: /path/to/dest + backup: true + backup_name: HLQ.BACKUP.GDG(+1) """ RETURN = r""" @@ -868,10 +941,11 @@ from re import match as fullmatch try: - from zoautil_py import datasets, opercmd + from zoautil_py import datasets, opercmd, gdgs except Exception: datasets = ZOAUImportError(traceback.format_exc()) opercmd = ZOAUImportError(traceback.format_exc()) + gdgs = ZOAUImportError(traceback.format_exc()) try: from zoautil_py import exceptions as zoau_exceptions @@ -1057,6 +1131,41 @@ def copy_to_vsam(self, src, dest): cmd=repro_cmd, ) + def copy_to_gdg(self, src, dest): + """ + Copy each allocated generation in src to dest. + + Parameters + ---------- + src : str + Name of the source GDG. + dest : str + Name of the destination GDG. + + Returns + ------ + bool + True if every copy operation was successful, False otherwise. + """ + src_view = gdgs.GenerationDataGroupView(src) + generations = src_view.generations() + dest_generation = f"{dest}(+1)" + + copy_args = { + "options": "" + } + + if self.is_binary or self.asa_text: + copy_args["options"] = "-B" + + for gds in generations: + rc = datasets.copy(gds.name, dest_generation, **copy_args) + + if rc != 0: + return False + + return True + def _copy_tree(self, entries, src, dest, dirs_exist_ok=False): """Recursively copy USS directory to another USS directory. This function was created to circumvent using shutil.copytree @@ -1451,7 +1560,7 @@ def copy_to_uss( """ changed_files = None - if src_ds_type in data_set.DataSet.MVS_SEQ.union(data_set.DataSet.MVS_PARTITIONED): + if src_ds_type in data_set.DataSet.MVS_SEQ.union(data_set.DataSet.MVS_PARTITIONED) or src_ds_type == "GDG": self._mvs_copy_to_uss( src, dest, src_ds_type, src_member, member_name=member_name ) @@ -1743,7 +1852,7 @@ def _mvs_copy_to_uss( # the same name as the member. dest = "{0}/{1}".format(dest, member_name or src) - if src_ds_type in data_set.DataSet.MVS_PARTITIONED and not src_member: + if (src_ds_type in data_set.DataSet.MVS_PARTITIONED and not src_member) or src_ds_type == "GDG": try: os.mkdir(dest) except FileExistsError: @@ -1775,7 +1884,19 @@ def _mvs_copy_to_uss( stderr=response.stderr_response ) else: - if self.executable: + if src_ds_type == "GDG": + result = copy.copy_gdg2uss( + src, + dest, + is_binary=self.is_binary, + asa_text=self.asa_text + ) + + if not result: + raise CopyOperationError( + msg=f"Error while copying GDG {src} to {dest}" + ) + elif self.executable: try: datasets.copy(src, dest, alias=True, executable=True) except zoau_exceptions.ZOAUException as copy_exception: @@ -2309,7 +2430,9 @@ def is_compatible( executable, asa_text, src_has_asa_chars, - dest_has_asa_chars + dest_has_asa_chars, + is_src_gds, + is_dest_gds ): """Determine whether the src and dest are compatible and src can be copied to dest. @@ -2336,6 +2459,10 @@ def is_compatible( Whether the src contains ASA control characters. dest_has_asa_chars : bool Whether the dest contains ASA control characters. + is_src_gds : bool + Whether the src is a generation data set. + is_dest_gds : bool + Whether the dest is a generation data set. Returns ------- @@ -2365,6 +2492,38 @@ def is_compatible( if asa_text: return src_has_asa_chars or dest_has_asa_chars + # ******************************************************************** + # When either the src or dest are GDSs, the other cannot be a VSAM + # data set, since GDGs don't support VSAMs. + # ******************************************************************** + if is_src_gds and dest_type in data_set.DataSet.MVS_VSAM: + return False + if is_dest_gds and src_type in data_set.DataSet.MVS_VSAM: + return False + + # ******************************************************************** + # When copying a complete GDG, we'll only allow a copy to another GDG + # or to a USS directory. + # ******************************************************************** + if src_type == "GDG": + if dest_type == "GDG" or dest_type == "USS": + return True + else: + return False + + # ******************************************************************** + # And when copying into a GDG (not GDS), we'll only allow the copy of + # another GDG. To allow copy from a USS directory would require making + # sure the path contains only one sublevel and then deciding if every + # subdir will represent a PDS/E as a generation, which for now will be + # left as another item for future discussion/development. + # ******************************************************************** + if dest_type == "GDG": + if src_type == "GDG": + return True + else: + return False + # ******************************************************************** # If source is a sequential data set, then destination must be # partitioned data set member, other sequential data sets or USS files. @@ -2490,6 +2649,18 @@ def does_destination_allow_copy( if dest_type in data_set.DataSet.MVS_PARTITIONED and dest_exists and member_exists and not force: return False + # When the destination is an existing GDG, we'll check that we have enough free generations + # to copy the complete source. + if dest_exists and dest_type == "GDG": + src_view = gdgs.GenerationDataGroupView(src) + dest_view = gdgs.GenerationDataGroupView(dest) + + src_allocated_gens = len(src_view.generations()) + dest_allocated_gens = len(dest_view.generations()) + + if src_allocated_gens > (dest_view.limit - dest_allocated_gens): + return False + return True @@ -2654,6 +2825,8 @@ def allocate_destination_data_set( is_binary, executable, asa_text, + is_gds, + is_active_gds, dest_data_set=None, volume=None ): @@ -2681,6 +2854,10 @@ def allocate_destination_data_set( Whether the data to copy is an executable dataset or file. asa_text : bool Whether the data to copy has ASA control characters. + is_gds : bool + Whether the destination is a generation data set. + is_gds_active : bool + Whether the destination GDS is already allocated. dest_data_set : dict, optional Parameters containing a full definition of the new data set; they will take precedence over any other allocation logic. @@ -2705,15 +2882,41 @@ def allocate_destination_data_set( # Create the dict that will contains the values created by the module if it's empty action module will # not display the content. dest_params = {} - if dest_exists and is_dest_empty: - return False, dest_params + if dest_exists and (is_dest_empty or dest_ds_type == "GDG"): + return False, dest_params, dest # Giving more priority to the parameters given by the user. # Cover case the user set executable to true to create dataset valid. if dest_data_set: - dest_params = dest_data_set - dest_params["name"] = dest - data_set.DataSet.ensure_present(replace=force, **dest_params) + if dest_ds_type == "GDG": + if not dest_data_set.get("limit"): + raise CopyOperationError(msg=f"Destination {dest} is missing its 'limit' attribute.") + + gdgs.create( + dest, + dest_data_set.get("limit"), + empty=dest_data_set.get("empty", False), + scratch=dest_data_set.get("scratch", False), + purge=dest_data_set.get("purge", False), + extended=dest_data_set.get("extended", False), + fifo=dest_data_set.get("fifo", False) + ) + + # Checking the new GDG was allocated. + results = gdgs.list_gdg_names(dest) + if len(results) == 0: + raise CopyOperationError(msg=f"Error while allocating GDG {dest}.") + else: + dest_params = dest_data_set + dest_params["name"] = dest + # Removing GDG specific options. + del dest_params["limit"] + del dest_params["empty"] + del dest_params["scratch"] + del dest_params["purge"] + del dest_params["extended"] + del dest_params["fifo"] + data_set.DataSet.ensure_present(replace=force, **dest_params) elif dest_ds_type in data_set.DataSet.MVS_SEQ: volumes = [volume] if volume else None data_set.DataSet.ensure_absent(dest, volumes=volumes) @@ -2722,7 +2925,11 @@ def allocate_destination_data_set( # Taking the temp file when a local file was copied with sftp. create_seq_dataset_from_file(src, dest, force, is_binary, asa_text, volume=volume) elif src_ds_type in data_set.DataSet.MVS_SEQ: - data_set.DataSet.allocate_model_data_set(ds_name=dest, model=src_name, asa_text=asa_text, vol=volume) + # Only applying the GDS special case when we don't have an absolute name. + if is_gds and not is_active_gds: + data_set.DataSet.allocate_gds_model_data_set(ds_name=dest, model=src_name, asa_text=asa_text, vol=volume) + else: + data_set.DataSet.allocate_model_data_set(ds_name=dest, model=src_name, asa_text=asa_text, vol=volume) else: temp_dump = None try: @@ -2746,7 +2953,11 @@ def allocate_destination_data_set( elif dest_ds_type in data_set.DataSet.MVS_PARTITIONED and not dest_exists: # Taking the src as model if it's also a PDSE. if src_ds_type in data_set.DataSet.MVS_PARTITIONED: - data_set.DataSet.allocate_model_data_set(ds_name=dest, model=src_name, executable=executable, asa_text=asa_text, vol=volume) + # Only applying the GDS special case when we don't have an absolute name. + if is_gds and not is_active_gds: + data_set.DataSet.allocate_gds_model_data_set(ds_name=dest, model=src_name, asa_text=asa_text, vol=volume) + else: + data_set.DataSet.allocate_model_data_set(ds_name=dest, model=src_name, executable=executable, asa_text=asa_text, vol=volume) elif src_ds_type in data_set.DataSet.MVS_SEQ: src_attributes = datasets.list_datasets(src_name)[0] # The size returned by listing is in bytes. @@ -2828,7 +3039,29 @@ def allocate_destination_data_set( volumes = [volume] if volume else None data_set.DataSet.ensure_absent(dest, volumes=volumes) data_set.DataSet.allocate_model_data_set(ds_name=dest, model=src_name, vol=volume) - if dest_ds_type not in data_set.DataSet.MVS_VSAM: + elif dest_ds_type == "GDG": + src_view = gdgs.GenerationDataGroupView(src) + + gdgs.create( + dest, + src_view.limit, + empty=src_view.empty, + scratch=src_view.scratch, + purge=src_view.purge, + extended=src_view.extended, + fifo=True if src_view.order.upper() == "FIFO" else False + ) + + # Checking the new GDG was allocated. + results = gdgs.list_gdg_names(dest) + if len(results) == 0: + raise CopyOperationError(msg=f"Error while allocating GDG {dest}.") + + if is_gds and not is_active_gds: + gdg_name = data_set.extract_dsname(dest) + dest = data_set.DataSet.resolve_gds_absolute_name(f"{gdg_name}(0)") + + if dest_ds_type not in data_set.DataSet.MVS_VSAM and dest_ds_type != "GDG": dest_params = get_attributes_of_any_dataset_created( dest, src_ds_type, @@ -2842,7 +3075,8 @@ def allocate_destination_data_set( record_format = dest_attributes.record_format dest_params["type"] = dest_ds_type dest_params["record_format"] = record_format - return True, dest_params + + return True, dest_params, dest def normalize_line_endings(src, encoding=None): @@ -3016,9 +3250,28 @@ def run_module(module, arg_def): # that we used to pass from the action plugin. is_src_dir = os.path.isdir(src) is_uss = "/" in dest - is_mvs_dest = is_data_set(dest) + is_mvs_src = is_data_set(data_set.extract_dsname(src)) + is_src_gds = data_set.DataSet.is_gds_relative_name(src) + is_mvs_dest = is_data_set(data_set.extract_dsname(dest)) + is_dest_gds = data_set.DataSet.is_gds_relative_name(dest) + is_dest_gds_active = False is_pds = is_src_dir and is_mvs_dest src_member = is_member(src) + raw_src = src + raw_dest = dest + + # Implementing the new MVSDataSet class by masking the values of + # src/raw_src and dest/raw_dest. + if is_mvs_src: + src_data_set_object = data_set.MVSDataSet(src) + src = src_data_set_object.name + raw_src = src_data_set_object.raw_name + + if is_mvs_dest: + dest_data_set_object = data_set.MVSDataSet(dest) + dest = dest_data_set_object.name + raw_dest = dest_data_set_object.raw_name + is_dest_gds_active = dest_data_set_object.is_gds_active # ******************************************************************** # When copying to and from a data set member, 'dest' or 'src' will be @@ -3048,9 +3301,9 @@ def run_module(module, arg_def): src = os.path.realpath(src) if not os.path.exists(src): - module.fail_json(msg="Source {0} does not exist".format(src)) + module.fail_json(msg="Source {0} does not exist".format(raw_src)) if not os.access(src, os.R_OK): - module.fail_json(msg="Source {0} is not readable".format(src)) + module.fail_json(msg="Source {0} is not readable".format(raw_src)) if mode == "preserve": mode = "0{0:o}".format(stat.S_IMODE(os.stat(src).st_mode)) @@ -3103,12 +3356,13 @@ def run_module(module, arg_def): copy_handler = CopyHandler(module, is_binary=is_binary) copy_handler._tag_file_encoding(converted_src, "UTF-8") else: - if data_set.DataSet.data_set_exists(src_name): + if (is_src_gds and data_set.DataSet.data_set_exists(src)) or ( + not is_src_gds and data_set.DataSet.data_set_exists(src_name)): if src_member and not data_set.DataSet.data_set_member_exists(src): raise NonExistentSourceError(src) src_ds_type = data_set.DataSet.data_set_type(src_name) - if src_ds_type not in data_set.DataSet.MVS_VSAM: + if src_ds_type not in data_set.DataSet.MVS_VSAM and src_ds_type != "GDG": src_attributes = datasets.list_datasets(src_name)[0] if src_attributes.record_format == 'FBA' or src_attributes.record_format == 'VBA': src_has_asa_chars = True @@ -3143,26 +3397,33 @@ def run_module(module, arg_def): dest_exists = os.path.exists(dest) if dest_exists and not os.access(dest, os.W_OK): - module.fail_json(msg="Destination {0} is not writable".format(dest)) + module.fail_json(msg="Destination {0} is not writable".format(raw_dest)) else: dest_exists = data_set.DataSet.data_set_exists(dest_name, volume) dest_ds_type = data_set.DataSet.data_set_type(dest_name, volume) + # When dealing with a new generation, we'll override its type to None + # so it will be the same type as the source (or whatever dest_data_set has) + # a couple lines down. + if is_dest_gds and not is_dest_gds_active: + dest_exists = False + dest_ds_type = None + # dest_data_set.type overrides `dest_ds_type` given precedence rules if dest_data_set and dest_data_set.get("type"): dest_ds_type = dest_data_set.get("type").upper() elif executable: - """ When executable is selected and dest_exists is false means an executable PDSE was copied to remote, - so we need to provide the correct dest_ds_type that will later be transformed into LIBRARY. - Not using LIBRARY at this step since there are many checks with dest_ds_type in data_set.DataSet.MVS_PARTITIONED - and LIBRARY is not in MVS_PARTITIONED frozen set.""" + # When executable is selected and dest_exists is false means an executable PDSE was copied to remote, + # so we need to provide the correct dest_ds_type that will later be transformed into LIBRARY. + # Not using LIBRARY at this step since there are many checks with dest_ds_type in data_set.DataSet.MVS_PARTITIONED + # and LIBRARY is not in MVS_PARTITIONED frozen set. dest_ds_type = "PDSE" if dest_data_set and (dest_data_set.get('record_format', '') == 'fba' or dest_data_set.get('record_format', '') == 'vba'): dest_has_asa_chars = True elif not dest_exists and asa_text: dest_has_asa_chars = True - elif dest_exists and dest_ds_type not in data_set.DataSet.MVS_VSAM: + elif dest_exists and dest_ds_type not in data_set.DataSet.MVS_VSAM and dest_ds_type != "GDG": dest_attributes = datasets.list_datasets(dest_name)[0] if dest_attributes.record_format == 'FBA' or dest_attributes.record_format == 'VBA': dest_has_asa_chars = True @@ -3181,6 +3442,14 @@ def run_module(module, arg_def): except Exception as err: module.fail_json(msg=str(err)) + # Checking that we're dealing with a positive generation when dest does not + # exist. + if is_dest_gds and not is_dest_gds_active: + # extract_member_name also works to extract the generation. + dest_generation = int(data_set.extract_member_name(dest)) + if dest_generation < 1: + module.fail_json(msg=f"Cannot copy to {dest}, the generation data set is not allocated.") + # ******************************************************************** # Some src and dest combinations are incompatible. For example, it is # not possible to copy a PDS member to a VSAM data set or a USS file @@ -3197,7 +3466,9 @@ def run_module(module, arg_def): executable, asa_text, src_has_asa_chars, - dest_has_asa_chars + dest_has_asa_chars, + is_src_gds, + is_dest_gds ): error_msg = "Incompatible target type '{0}' for source '{1}'".format( dest_ds_type, src_ds_type @@ -3220,7 +3491,10 @@ def run_module(module, arg_def): is_dest_lock = data_set_locked(dest_name) if is_dest_lock: module.fail_json( - msg="Unable to write to dest '{0}' because a task is accessing the data set.".format(dest_name)) + msg="Unable to write to dest '{0}' because a task is accessing the data set.".format( + data_set.extract_dsname(raw_dest) + ) + ) # ******************************************************************** # Alias support is not avaiable to and from USS for text-based data sets. @@ -3254,6 +3528,16 @@ def run_module(module, arg_def): # The partitioned data set is empty res_args["note"] = "Destination is empty, backup request ignored" else: + if backup_name: + backup_data_set = data_set.MVSDataSet(backup_name) + if backup_data_set.is_gds_active: + module.fail_json( + msg=( + f"The generation data set {backup_name} cannot be used as backup. " + "Please use a new generation for this purpose." + ) + ) + backup_name = backup_data(dest, dest_ds_type, backup_name, tmphlq) # ******************************************************************** @@ -3281,7 +3565,7 @@ def run_module(module, arg_def): or (src and os.path.isdir(src) and is_mvs_dest) ): dest_ds_type = "PDSE" - elif src_ds_type in data_set.DataSet.MVS_VSAM: + elif src_ds_type in data_set.DataSet.MVS_VSAM or src_ds_type == "GDG": dest_ds_type = src_ds_type elif not is_uss: dest_ds_type = "SEQ" @@ -3305,7 +3589,7 @@ def run_module(module, arg_def): volume ): module.fail_json( - msg="{0} already exists on the system, unable to overwrite unless force=True is specified.".format(dest), + msg="{0} already exists on the system, unable to overwrite unless force=True is specified.".format(raw_dest), changed=False, dest=dest ) @@ -3319,15 +3603,18 @@ def run_module(module, arg_def): try: if not is_uss: - res_args["changed"], res_args["dest_data_set_attrs"] = allocate_destination_data_set( + res_args["changed"], res_args["dest_data_set_attrs"], resolved_dest = allocate_destination_data_set( src, - dest_name, src_ds_type, + dest_name if not is_dest_gds else dest, + src_ds_type, dest_ds_type, dest_exists, force, is_binary, executable, asa_text, + is_dest_gds, + is_dest_gds_active, dest_data_set=dest_data_set, volume=volume ) @@ -3342,6 +3629,10 @@ def run_module(module, arg_def): if converted_src: src = original_src + # Overriding the dest name with the current generation just allocated. + if not dest_exists and is_dest_gds: + dest = dest_name = resolved_dest + # ******************************************************************** # Encoding conversion is only valid if the source is a local file, # local directory or a USS file/directory. @@ -3456,6 +3747,13 @@ def run_module(module, arg_def): res_args["changed"] = True dest = dest.upper() + # ------------------------------- o ----------------------------------- + # Copy to a GDG + # --------------------------------------------------------------------- + elif dest_ds_type == "GDG": + copy_handler.copy_to_gdg(src, dest) + res_args["changed"] = True + # ------------------------------- o ----------------------------------- # Copy to VSAM data set # --------------------------------------------------------------------- @@ -3524,7 +3822,8 @@ def main(): type=dict( type='str', choices=['basic', 'ksds', 'esds', 'rrds', - 'lds', 'seq', 'pds', 'pdse', 'member', 'library'], + 'lds', 'seq', 'pds', 'pdse', 'member', + 'library', 'gdg'], required=True, ), space_primary=dict( @@ -3549,6 +3848,12 @@ def main(): sms_storage_class=dict(type="str", required=False), sms_data_class=dict(type="str", required=False), sms_management_class=dict(type="str", required=False), + limit=dict(type="int", required=False), + empty=dict(type="bool", required=False), + scratch=dict(type="bool", required=False), + purge=dict(type="bool", required=False), + extended=dict(type="bool", required=False), + fifo=dict(type="bool", required=False), ) ), use_template=dict(type='bool', default=False), @@ -3619,6 +3924,12 @@ def main(): sms_storage_class=dict(arg_type="str", required=False), sms_data_class=dict(arg_type="str", required=False), sms_management_class=dict(arg_type="str", required=False), + limit=dict(arg_type="int", required=False), + empty=dict(arg_type="bool", required=False), + scratch=dict(arg_type="bool", required=False), + purge=dict(arg_type="bool", required=False), + extended=dict(arg_type="bool", required=False), + fifo=dict(arg_type="bool", required=False), ) ), diff --git a/tests/functional/modules/test_zos_copy_func.py b/tests/functional/modules/test_zos_copy_func.py index 086b7d27e..86ba441a6 100644 --- a/tests/functional/modules/test_zos_copy_func.py +++ b/tests/functional/modules/test_zos_copy_func.py @@ -133,9 +133,6 @@ STOP RUN.\n """ - - - # format params for LINK_JCL: # {0} - cobol src pds dsn # {1} - cobol src pds member @@ -4698,3 +4695,614 @@ def test_display_verbosity_in_zos_copy_plugin(ansible_zos_module, options): finally: hosts.all.file(path=options["dest"], state="absent") + +@pytest.mark.parametrize("generation", ["0", "+1"]) +def test_copy_seq_gds_inexistent_src(ansible_zos_module, generation): + hosts = ansible_zos_module + + try: + src_data_set = get_tmp_ds_name() + dest_data_set = get_tmp_ds_name() + + hosts.all.shell(cmd=f"dtouch -tGDG -L3 {src_data_set}") + + copy_results = hosts.all.zos_copy( + src=f"{src_data_set}({generation})", + dest=dest_data_set, + remote_src=True + ) + + for cp_res in copy_results.contacted.values(): + assert cp_res.get("msg") is not None + assert cp_res.get("changed") is False + assert cp_res.get("failed") is True + finally: + hosts.all.shell(cmd=f"drm {src_data_set}") + + +def test_copy_seq_gds_to_data_set(ansible_zos_module): + hosts = ansible_zos_module + + try: + src_data_set = get_tmp_ds_name() + dest_data_set = get_tmp_ds_name() + + hosts.all.shell(cmd=f"dtouch -tGDG -L3 {src_data_set}") + hosts.all.shell(cmd=f"""dtouch -tSEQ "{src_data_set}(+1)" """) + hosts.all.shell(cmd=f"""decho "{DUMMY_DATA}" "{src_data_set}(0)" """) + + copy_results = hosts.all.zos_copy( + src=f"{src_data_set}(0)", + dest=dest_data_set, + remote_src=True + ) + + verify_copy = hosts.all.shell(cmd=f"""dcat "{dest_data_set}" """) + + for cp_res in copy_results.contacted.values(): + assert cp_res.get("msg") is None + assert cp_res.get("changed") is True + assert cp_res.get("dest") == dest_data_set + for v_cp in verify_copy.contacted.values(): + assert v_cp.get("rc") == 0 + assert v_cp.get("stdout") != "" + finally: + hosts.all.shell(cmd=f"""drm "{src_data_set}(0)" """) + hosts.all.shell(cmd=f"drm {src_data_set}") + hosts.all.shell(cmd=f"drm {dest_data_set}") + + +def test_copy_data_set_to_new_gds(ansible_zos_module): + hosts = ansible_zos_module + + try: + src_data_set = get_tmp_ds_name() + dest_data_set = get_tmp_ds_name() + + hosts.all.shell(cmd=f"dtouch -tSEQ {src_data_set}") + hosts.all.shell(cmd=f"dtouch -tGDG -L3 {dest_data_set}") + hosts.all.shell(cmd=f"""decho "{DUMMY_DATA}" "{src_data_set}" """) + + copy_results = hosts.all.zos_copy( + src=src_data_set, + dest=f"{dest_data_set}(+1)", + remote_src=True + ) + + verify_copy = hosts.all.shell(cmd=f"""dcat "{dest_data_set}(0)" """) + + # Checking that we got a source of the form: ANSIBLE.DATA.SET.G0001V01. + gds_pattern = r"G[0-9]+V[0-9]+" + + for cp_res in copy_results.contacted.values(): + dest = cp_res.get("dest", "") + + assert cp_res.get("msg") is None + assert cp_res.get("changed") is True + assert re.fullmatch(gds_pattern, dest.split(".")[-1]) + for v_cp in verify_copy.contacted.values(): + assert v_cp.get("rc") == 0 + assert v_cp.get("stdout") != "" + finally: + hosts.all.shell(cmd=f"drm {src_data_set}") + hosts.all.shell(cmd=f"""drm "{dest_data_set}(0)" """) + hosts.all.shell(cmd=f"drm {dest_data_set}") + + +def test_copy_uss_file_to_new_gds(ansible_zos_module): + hosts = ansible_zos_module + + try: + src_file = "/etc/profile" + dest_data_set = get_tmp_ds_name() + + hosts.all.shell(cmd=f"dtouch -tGDG -L3 {dest_data_set}") + + copy_results = hosts.all.zos_copy( + src=src_file, + dest=f"{dest_data_set}(+1)", + remote_src=True + ) + + verify_copy = hosts.all.shell(cmd=f"""dcat "{dest_data_set}(0)" """) + + # Checking that we got a source of the form: ANSIBLE.DATA.SET.G0001V01. + gds_pattern = r"G[0-9]+V[0-9]+" + + for cp_res in copy_results.contacted.values(): + dest = cp_res.get("dest", "") + + assert cp_res.get("msg") is None + assert cp_res.get("changed") is True + assert re.fullmatch(gds_pattern, dest.split(".")[-1]) + for v_cp in verify_copy.contacted.values(): + assert v_cp.get("rc") == 0 + assert v_cp.get("stdout") != "" + finally: + hosts.all.shell(cmd=f"""drm "{dest_data_set}(0)" """) + hosts.all.shell(cmd=f"drm {dest_data_set}") + + +def test_copy_pds_to_new_gds(ansible_zos_module): + hosts = ansible_zos_module + + try: + src_data_set = get_tmp_ds_name() + member_src = f"{src_data_set}(MEMBER)" + dest_data_set = get_tmp_ds_name() + + hosts.all.shell(cmd=f"dtouch -tPDS {src_data_set}") + hosts.all.shell(cmd=f"""decho "{DUMMY_DATA}" "{member_src}" """) + + hosts.all.shell(cmd=f"dtouch -tGDG -L3 {dest_data_set}") + + copy_results = hosts.all.zos_copy( + src=src_data_set, + dest=f"{dest_data_set}(+1)", + remote_src=True + ) + + verify_copy = hosts.all.shell(cmd=f"""mls "{dest_data_set}(0)" """) + + # Checking that we got a source of the form: ANSIBLE.DATA.SET.G0001V01. + gds_pattern = r"G[0-9]+V[0-9]+" + + for cp_res in copy_results.contacted.values(): + dest = cp_res.get("dest", "") + + assert cp_res.get("msg") is None + assert cp_res.get("changed") is True + assert re.fullmatch(gds_pattern, dest.split(".")[-1]) + for v_cp in verify_copy.contacted.values(): + assert v_cp.get("rc") == 0 + assert v_cp.get("stdout") != "" + finally: + hosts.all.shell(cmd=f"drm {src_data_set}") + hosts.all.shell(cmd=f"""drm "{dest_data_set}(0)" """) + hosts.all.shell(cmd=f"drm {dest_data_set}") + + +def test_copy_data_set_to_previous_gds(ansible_zos_module): + hosts = ansible_zos_module + + try: + src_data_set = get_tmp_ds_name() + dest_data_set = get_tmp_ds_name() + + hosts.all.shell(cmd=f"dtouch -tSEQ {src_data_set}") + hosts.all.shell(cmd=f"dtouch -tGDG -L3 {dest_data_set}") + hosts.all.shell(cmd=f"""dtouch -tSEQ "{dest_data_set}(+1)" """) + + hosts.all.shell(cmd=f"""decho "{DUMMY_DATA}" "{src_data_set}" """) + hosts.all.shell(cmd=f"""decho "A record." "{dest_data_set}(0)" """) + + copy_results = hosts.all.zos_copy( + src=src_data_set, + dest=f"{dest_data_set}(0)", + remote_src=True, + force=True + ) + + verify_copy = hosts.all.shell(cmd=f"""dcat "{dest_data_set}(0)" """) + + # Checking that we got a source of the form: ANSIBLE.DATA.SET.G0001V01. + gds_pattern = r"G[0-9]+V[0-9]+" + + for cp_res in copy_results.contacted.values(): + dest = cp_res.get("dest", "") + + assert cp_res.get("msg") is None + assert cp_res.get("changed") is True + assert re.fullmatch(gds_pattern, dest.split(".")[-1]) + for v_cp in verify_copy.contacted.values(): + assert v_cp.get("rc") == 0 + assert v_cp.get("stdout") != "" + finally: + hosts.all.shell(cmd=f"drm {src_data_set}") + hosts.all.shell(cmd=f"""drm "{dest_data_set}(0)" """) + hosts.all.shell(cmd=f"drm {dest_data_set}") + + +def test_copy_uss_file_to_previous_gds(ansible_zos_module): + hosts = ansible_zos_module + + try: + src_file = "/etc/profile" + dest_data_set = get_tmp_ds_name() + + hosts.all.shell(cmd=f"dtouch -tGDG -L3 {dest_data_set}") + hosts.all.shell(cmd=f"""dtouch -tSEQ "{dest_data_set}(+1)" """) + hosts.all.shell(cmd=f"""decho "A record." "{dest_data_set}(0)" """) + + copy_results = hosts.all.zos_copy( + src=src_file, + dest=f"{dest_data_set}(0)", + remote_src=True, + force=True + ) + + verify_copy = hosts.all.shell(cmd=f"""dcat "{dest_data_set}(0)" """) + + # Checking that we got a source of the form: ANSIBLE.DATA.SET.G0001V01. + gds_pattern = r"G[0-9]+V[0-9]+" + + for cp_res in copy_results.contacted.values(): + dest = cp_res.get("dest", "") + + assert cp_res.get("msg") is None + assert cp_res.get("changed") is True + assert re.fullmatch(gds_pattern, dest.split(".")[-1]) + for v_cp in verify_copy.contacted.values(): + assert v_cp.get("rc") == 0 + assert v_cp.get("stdout") != "" + finally: + hosts.all.shell(cmd=f"""drm "{dest_data_set}(0)" """) + hosts.all.shell(cmd=f"drm {dest_data_set}") + + +def test_copy_pds_member_to_previous_gds(ansible_zos_module): + hosts = ansible_zos_module + + try: + src_data_set = get_tmp_ds_name() + member_src = f"{src_data_set}(MEMBER)" + dest_data_set = get_tmp_ds_name() + + hosts.all.shell(cmd=f"dtouch -tPDS {src_data_set}") + hosts.all.shell(cmd=f"""decho "{DUMMY_DATA}" "{member_src}" """) + + hosts.all.shell(cmd=f"dtouch -tGDG -L3 {dest_data_set}") + hosts.all.shell(cmd=f"""dtouch -tSEQ "{dest_data_set}(+1)" """) + hosts.all.shell(cmd=f"""decho "A record." "{dest_data_set}(0)" """) + + copy_results = hosts.all.zos_copy( + src=member_src, + dest=f"{dest_data_set}(0)", + remote_src=True, + force=True + ) + + verify_copy = hosts.all.shell(cmd=f"""dcat "{dest_data_set}(0)" """) + + # Checking that we got a source of the form: ANSIBLE.DATA.SET.G0001V01. + gds_pattern = r"G[0-9]+V[0-9]+" + + for cp_res in copy_results.contacted.values(): + dest = cp_res.get("dest", "") + + assert cp_res.get("msg") is None + assert cp_res.get("changed") is True + assert re.fullmatch(gds_pattern, dest.split(".")[-1]) + for v_cp in verify_copy.contacted.values(): + assert v_cp.get("rc") == 0 + assert v_cp.get("stdout") != "" + finally: + hosts.all.shell(cmd=f"drm {src_data_set}") + hosts.all.shell(cmd=f"""drm "{dest_data_set}(0)" """) + hosts.all.shell(cmd=f"drm {dest_data_set}") + + +def test_copy_pds_to_previous_gds(ansible_zos_module): + hosts = ansible_zos_module + + try: + src_data_set = get_tmp_ds_name() + member_src = f"{src_data_set}(MEMBER)" + dest_data_set = get_tmp_ds_name() + + hosts.all.shell(cmd=f"dtouch -tPDSE {src_data_set}") + hosts.all.shell(cmd=f"""decho "{DUMMY_DATA}" "{member_src}" """) + + hosts.all.shell(cmd=f"dtouch -tGDG -L3 {dest_data_set}") + hosts.all.shell(cmd=f"""dtouch -tPDS "{dest_data_set}(+1)" """) + + copy_results = hosts.all.zos_copy( + src=src_data_set, + dest=f"{dest_data_set}(0)", + remote_src=True, + force=True + ) + + verify_copy = hosts.all.shell(cmd=f"""mls "{dest_data_set}(0)" """) + + # Checking that we got a source of the form: ANSIBLE.DATA.SET.G0001V01. + gds_pattern = r"G[0-9]+V[0-9]+" + + for cp_res in copy_results.contacted.values(): + dest = cp_res.get("dest", "") + + assert cp_res.get("msg") is None + assert cp_res.get("changed") is True + assert re.fullmatch(gds_pattern, dest.split(".")[-1]) + for v_cp in verify_copy.contacted.values(): + assert v_cp.get("rc") == 0 + assert v_cp.get("stdout") != "" + finally: + hosts.all.shell(cmd=f"drm {src_data_set}") + hosts.all.shell(cmd=f"""drm "{dest_data_set}(0)" """) + hosts.all.shell(cmd=f"drm {dest_data_set}") + + +def test_copy_data_set_to_previous_gds_no_force(ansible_zos_module): + hosts = ansible_zos_module + + try: + src_data_set = get_tmp_ds_name() + dest_data_set = get_tmp_ds_name() + + hosts.all.shell(cmd=f"dtouch -tSEQ {src_data_set}") + hosts.all.shell(cmd=f"dtouch -tGDG -L3 {dest_data_set}") + hosts.all.shell(cmd=f"""dtouch -tSEQ "{dest_data_set}(+1)" """) + + hosts.all.shell(cmd=f"""decho "{DUMMY_DATA}" "{src_data_set}" """) + hosts.all.shell(cmd=f"""decho "A record." "{dest_data_set}(0)" """) + + copy_results = hosts.all.zos_copy( + src=src_data_set, + dest=f"{dest_data_set}(0)", + remote_src=True, + force=False + ) + + for cp_res in copy_results.contacted.values(): + assert cp_res.get("msg") is not None + assert cp_res.get("changed") is False + assert cp_res.get("failed") is True + finally: + hosts.all.shell(cmd=f"drm {src_data_set}") + hosts.all.shell(cmd=f"""drm "{dest_data_set}(0)" """) + hosts.all.shell(cmd=f"drm {dest_data_set}") + + +@pytest.mark.parametrize("generation", [0, -1]) +def test_copy_data_set_to_previous_non_existent_gds(ansible_zos_module, generation): + hosts = ansible_zos_module + + try: + src_data_set = get_tmp_ds_name() + dest_data_set = get_tmp_ds_name() + + hosts.all.shell(cmd=f"dtouch -tSEQ {src_data_set}") + hosts.all.shell(cmd=f"dtouch -tGDG -L3 {dest_data_set}") + if generation < 0: + hosts.all.shell(cmd=f"""dtouch -tSEQ "{dest_data_set}(+1)" """) + + hosts.all.shell(cmd=f"""decho "{DUMMY_DATA}" "{src_data_set}" """) + + copy_results = hosts.all.zos_copy( + src=src_data_set, + # Copying to a previous generation that doesn't exist. + dest=f"{dest_data_set}({generation})", + remote_src=True, + force=True + ) + + for cp_res in copy_results.contacted.values(): + assert cp_res.get("msg") is not None + assert "generation data set is not allocated" in cp_res.get("msg") + assert cp_res.get("changed") is False + assert cp_res.get("failed") is True + finally: + hosts.all.shell(cmd=f"drm {src_data_set}") + hosts.all.shell(cmd=f"""drm "{dest_data_set}(0)" """) + hosts.all.shell(cmd=f"drm {dest_data_set}") + + +def test_copy_gdg_to_uss_dir(ansible_zos_module): + hosts = ansible_zos_module + + try: + src_data_set = get_tmp_ds_name() + dest = "/tmp/zos_copy_gdg" + + hosts.all.shell(cmd=f"dtouch -tGDG -L3 {src_data_set}") + hosts.all.shell(cmd=f"""dtouch -tSEQ "{src_data_set}(+1)" """) + hosts.all.shell(cmd=f"""decho "{DUMMY_DATA}" "{src_data_set}(0)" """) + + hosts.all.file(path=dest, state="directory") + + copy_results = hosts.all.zos_copy( + src=src_data_set, + dest=dest, + remote_src=True + ) + + verify_dest = hosts.all.shell(cmd=f"ls {dest}/{src_data_set}") + + for cp_res in copy_results.contacted.values(): + assert cp_res.get("msg") is None + assert cp_res.get("changed") is True + for v_res in verify_dest.contacted.values(): + assert v_res.get("rc") == 0 + assert len(v_res.get("stdout_lines", [])) > 0 + finally: + hosts.all.shell(cmd=f"""drm "{src_data_set}(0)" """) + hosts.all.shell(cmd=f"drm {src_data_set}") + hosts.all.file(path=dest, state="absent") + + +@pytest.mark.parametrize("new_gdg", [True, False]) +def test_copy_gdg_to_gdg(ansible_zos_module, new_gdg): + hosts = ansible_zos_module + + try: + src_data_set = get_tmp_ds_name() + dest_data_set = get_tmp_ds_name() + + hosts.all.shell(cmd=f"dtouch -tGDG -L3 {src_data_set}") + hosts.all.shell(cmd=f"""dtouch -tSEQ "{src_data_set}(+1)" """) + hosts.all.shell(cmd=f"""decho "{DUMMY_DATA}" "{src_data_set}(0)" """) + hosts.all.shell(cmd=f"""dtouch -tSEQ "{src_data_set}(+1)" """) + hosts.all.shell(cmd=f"""decho "{DUMMY_DATA}" "{src_data_set}(0)" """) + + if not new_gdg: + hosts.all.shell(cmd=f"dtouch -tGDG -L3 {dest_data_set}") + hosts.all.shell(cmd=f"""dtouch -tSEQ "{dest_data_set}(+1)" """) + + copy_results = hosts.all.zos_copy( + src=src_data_set, + dest=dest_data_set, + remote_src=True + ) + + for cp_res in copy_results.contacted.values(): + assert cp_res.get("msg") is None + assert cp_res.get("changed") is True + finally: + hosts.all.shell(cmd=f"""drm "{src_data_set}(-1)" """) + hosts.all.shell(cmd=f"""drm "{src_data_set}(0)" """) + hosts.all.shell(cmd=f"drm {src_data_set}") + + if not new_gdg: + hosts.all.shell(cmd=f"""drm "{dest_data_set}(-2)" """) + hosts.all.shell(cmd=f"""drm "{dest_data_set}(-1)" """) + hosts.all.shell(cmd=f"""drm "{dest_data_set}(0)" """) + hosts.all.shell(cmd=f"drm {dest_data_set}") + + +def test_copy_gdg_to_gdg_dest_attributes(ansible_zos_module): + hosts = ansible_zos_module + + try: + src_data_set = get_tmp_ds_name() + dest_data_set = get_tmp_ds_name() + + hosts.all.shell(cmd=f"dtouch -tGDG -L3 {src_data_set}") + hosts.all.shell(cmd=f"""dtouch -tSEQ "{src_data_set}(+1)" """) + hosts.all.shell(cmd=f"""decho "{DUMMY_DATA}" "{src_data_set}(0)" """) + hosts.all.shell(cmd=f"""dtouch -tSEQ "{src_data_set}(+1)" """) + hosts.all.shell(cmd=f"""decho "{DUMMY_DATA}" "{src_data_set}(0)" """) + + copy_results = hosts.all.zos_copy( + src=src_data_set, + dest=dest_data_set, + remote_src=True, + dest_data_set={ + "type": "gdg", + "limit": 5, + "empty": False, + "scratch": True, + "purge": True, + "extended": False, + "fifo": False + } + ) + + for cp_res in copy_results.contacted.values(): + assert cp_res.get("msg") is None + assert cp_res.get("changed") is True + finally: + hosts.all.shell(cmd=f"""drm "{src_data_set}(-1)" """) + hosts.all.shell(cmd=f"""drm "{src_data_set}(0)" """) + hosts.all.shell(cmd=f"drm {src_data_set}") + + hosts.all.shell(cmd=f"""drm "{dest_data_set}(-1)" """) + hosts.all.shell(cmd=f"""drm "{dest_data_set}(0)" """) + hosts.all.shell(cmd=f"drm {dest_data_set}") + + +def test_backup_gds(ansible_zos_module): + hosts = ansible_zos_module + + try: + src_data_set = get_tmp_ds_name() + dest_data_set = get_tmp_ds_name() + backup_data_set = get_tmp_ds_name() + + hosts.all.shell(cmd=f"dtouch -tSEQ {src_data_set}") + hosts.all.shell(cmd=f"dtouch -tSEQ {dest_data_set}") + hosts.all.shell(cmd=f"decho \"{DUMMY_DATA}\" \"{src_data_set}\"") + hosts.all.shell(cmd=f"decho \"A record\" \"{dest_data_set}\"") + + hosts.all.shell(cmd=f"dtouch -tGDG -L3 {backup_data_set}") + + results = hosts.all.zos_copy( + src=src_data_set, + dest=dest_data_set, + remote_src=True, + force=True, + backup=True, + backup_name=f"{backup_data_set}(+1)", + ) + + backup_check = hosts.all.shell( + cmd=f"""dcat "{backup_data_set}(0)" | wc -l """ + ) + + for result in results.contacted.values(): + assert result.get("changed") is True + assert result.get("msg") is None + + for result in backup_check.contacted.values(): + assert result.get("rc") == 0 + assert int(result.get("stdout")) > 0 + + finally: + hosts.all.shell(cmd=f"""drm "{backup_data_set}(0)" """) + hosts.all.shell(cmd=f"drm {backup_data_set}") + hosts.all.shell(cmd=f"drm {dest_data_set}") + hosts.all.shell(cmd=f"drm {src_data_set}") + + +def test_backup_gds_invalid_generation(ansible_zos_module): + hosts = ansible_zos_module + + try: + src_data_set = get_tmp_ds_name() + dest_data_set = get_tmp_ds_name() + backup_data_set = get_tmp_ds_name() + + hosts.all.shell(cmd=f"dtouch -tSEQ {src_data_set}") + hosts.all.shell(cmd=f"dtouch -tSEQ {dest_data_set}") + + hosts.all.shell(cmd=f"decho \"{DUMMY_DATA}\" \"{src_data_set}\"") + hosts.all.shell(cmd=f"decho \"{DUMMY_DATA}\" \"{dest_data_set}\"") + + hosts.all.shell(cmd=f"dtouch -tGDG -L3 {backup_data_set}") + hosts.all.shell(cmd=f"""dtouch -tSEQ "{backup_data_set}(+1)" """) + + results = hosts.all.zos_copy( + src=src_data_set, + dest=dest_data_set, + remote_src=True, + force=True, + backup=True, + backup_name=f"{backup_data_set}(0)", + ) + + for result in results.contacted.values(): + assert result.get("failed") is True + assert result.get("changed") is False + assert result.get("msg") is not None + assert "cannot be used" in result.get("msg") + + finally: + hosts.all.shell(cmd=f"""drm "{backup_data_set}(0)" """) + hosts.all.shell(cmd=f"drm {backup_data_set}") + hosts.all.shell(cmd=f"drm {dest_data_set}") + hosts.all.shell(cmd=f"drm {src_data_set}") + + +def test_copy_to_dataset_with_special_symbols(ansible_zos_module): + hosts = ansible_zos_module + + try: + src_data_set = get_tmp_ds_name() + dest_data_set = get_tmp_ds_name(symbols=True) + + hosts.all.shell(cmd=f"dtouch -tSEQ {src_data_set}") + hosts.all.shell(cmd=f"decho \"{DUMMY_DATA}\" \"{src_data_set}\"") + + results = hosts.all.zos_copy( + src=src_data_set, + dest=dest_data_set, + remote_src=True + ) + + for result in results.contacted.values(): + assert result.get("changed") is True + assert result.get("msg") is None + + finally: + hosts.all.zos_data_set(name=src_data_set, state="absent") + hosts.all.zos_data_set(name=dest_data_set, state="absent") From 43c7272fcd126d31c48c5384d05e999ce15c10a5 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Thu, 11 Jul 2024 12:34:12 -0600 Subject: [PATCH 433/495] Enabler/1379/zos tso command gdg support (#1563) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Add test case and support for gds * Add functional test cases and gds suport for 0 and - notation * modified check for data sets function * Documented tso_command functions * Simplified the data set name find * Removed unused fullmatch import * Added changelog * Adding profile noprefix to zos_tso_command tests * Added changelog fragment * Fixed test about GDS * Modified tests * Fixed pep issue * Added gds relative name check --------- Co-authored-by: André Marcel Gutiérrez Benítez <amgutierrezbenitez@hotmail.com> --- .../1563-zos_tso_command-gdg-support.yml | 4 +++ plugins/modules/zos_tso_command.py | 28 ++++++++++++++++ .../modules/test_zos_tso_command_func.py | 33 +++++++++++++++++++ 3 files changed, 65 insertions(+) create mode 100644 changelogs/fragments/1563-zos_tso_command-gdg-support.yml diff --git a/changelogs/fragments/1563-zos_tso_command-gdg-support.yml b/changelogs/fragments/1563-zos_tso_command-gdg-support.yml new file mode 100644 index 000000000..aadbbfa60 --- /dev/null +++ b/changelogs/fragments/1563-zos_tso_command-gdg-support.yml @@ -0,0 +1,4 @@ +minor_changes: + - zos_mvs_raw - Added support for GDG and GDS relative name notation to use a data set name. + Added support for data set names with special characters like $, /#, /- and @. + (https://github.com/ansible-collections/ibm_zos_core/pull/1563). \ No newline at end of file diff --git a/plugins/modules/zos_tso_command.py b/plugins/modules/zos_tso_command.py index 2ac4a9d32..017e88cf1 100644 --- a/plugins/modules/zos_tso_command.py +++ b/plugins/modules/zos_tso_command.py @@ -126,7 +126,9 @@ """ from ansible.module_utils.basic import AnsibleModule +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import data_set from os import chmod +import re from tempfile import NamedTemporaryFile from stat import S_IEXEC, S_IREAD, S_IWRITE from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.better_arg_parser import ( @@ -253,6 +255,31 @@ def list_or_str_type(contents, dependencies): return contents +def preprocess_data_set_names(command): + """ + Applies necessary preprocessing to the data set names, such as converting + a GDS relative name into an absolute one. + + Parameters + ---------- + command : str + command in which to look for a data set name. + + Returns + ------- + str + The command with the modified data set names if any. + + """ + pattern = r"(?:(?:[A-Z$#@]{1}[A-Z0-9$#@-]{0,7})(?:[.]{1})){1,21}[A-Z$#@]{1}[A-Z0-9$#@-]{0,7}(?:\([A-Z$#@]{1}[A-Z0-9$#@]{0,7}\)|\((?:[-+]?[0-9]+)\)){0,1}" + data_set_list = re.findall(pattern, command) + for name in data_set_list: + if data_set.DataSet.is_gds_relative_name(name): + dataset_name = data_set.DataSet.resolve_gds_absolute_name(name) + command = command.replace(name, dataset_name) + return command + + def run_module(): """Initialize module. @@ -287,6 +314,7 @@ def run_module(): module.fail_json(msg=repr(e), **result) commands = parsed_args.get("commands") + commands = list(map(preprocess_data_set_names, commands)) max_rc = parsed_args.get("max_rc") if max_rc is None: max_rc = 0 diff --git a/tests/functional/modules/test_zos_tso_command_func.py b/tests/functional/modules/test_zos_tso_command_func.py index e4665bb71..52e12032b 100644 --- a/tests/functional/modules/test_zos_tso_command_func.py +++ b/tests/functional/modules/test_zos_tso_command_func.py @@ -141,3 +141,36 @@ def test_zos_tso_command_maxrc(ansible_zos_module): for item in result.get("output"): assert item.get("rc") < 5 assert result.get("changed") is True + + +def test_zos_tso_command_gds(ansible_zos_module): + try: + hosts = ansible_zos_module + default_data_set = get_tmp_ds_name(3, 3, symbols=True) + hosts.all.shell(cmd="dtouch -tGDG -L2 '{0}'".format(default_data_set)) + hosts.all.shell(cmd="dtouch -tseq '{0}(+1)' ".format(default_data_set)) + hosts.all.shell(cmd="dtouch -tseq '{0}(+1)' ".format(default_data_set)) + print(f"data set name {default_data_set}") + hosts = ansible_zos_module + results = hosts.all.zos_tso_command( + commands=["""LISTDSD DATASET('{0}(0)') ALL GENERIC""".format(default_data_set)], + max_rc=4 + ) + for result in results.contacted.values(): + for item in result.get("output"): + assert result.get("changed") is True + results = hosts.all.zos_tso_command( + commands=["""LISTDSD DATASET('{0}(-1)') ALL GENERIC""".format(default_data_set)], + max_rc=4 + ) + for result in results.contacted.values(): + for item in result.get("output"): + assert result.get("changed") is True + results = hosts.all.zos_tso_command( + commands=["""LISTDS '{0}(-1)'""".format(default_data_set)] + ) + for result in results.contacted.values(): + assert result.get("changed") is True + finally: + None + # hosts.all.shell(cmd="drm ANSIBLE.*".format(default_data_set)) \ No newline at end of file From b7a1b1b75bab2858ba083cfbcfae51e27fa49a3e Mon Sep 17 00:00:00 2001 From: Ivan Moreno <iamorenosoto@gmail.com> Date: Fri, 12 Jul 2024 09:13:21 -0600 Subject: [PATCH 434/495] [Bugfix] [zos_copy] Copy from file to member when dest is just PDS/E name (#1570) * Compute member name for implicit copies * Add test for implicit member copy * Update module doc * Add changelog fragment --- .../1570-compute-member-name-zos_copy.yml | 6 +++ plugins/modules/zos_copy.py | 10 +++++ .../functional/modules/test_zos_copy_func.py | 40 +++++++++++++++++++ 3 files changed, 56 insertions(+) create mode 100644 changelogs/fragments/1570-compute-member-name-zos_copy.yml diff --git a/changelogs/fragments/1570-compute-member-name-zos_copy.yml b/changelogs/fragments/1570-compute-member-name-zos_copy.yml new file mode 100644 index 000000000..d57a94a8f --- /dev/null +++ b/changelogs/fragments/1570-compute-member-name-zos_copy.yml @@ -0,0 +1,6 @@ +bugfixes: + - zos_copy - a regression in version 1.4.0 made the module stop automatically + computing member names when copying a single file into a PDS/E. Fix now + lets a user copy a single file into a PDS/E without adding a member in the + dest option. + (https://github.com/ansible-collections/ibm_zos_core/pull/1570). \ No newline at end of file diff --git a/plugins/modules/zos_copy.py b/plugins/modules/zos_copy.py index ebd6eb722..46f399360 100644 --- a/plugins/modules/zos_copy.py +++ b/plugins/modules/zos_copy.py @@ -109,6 +109,9 @@ attributes will be computed. If I(executable=true),C(dest) will have an Undefined (U) record format with a record length of 0, block size of 32760, and the remaining attributes will be computed. + - If C(src) is a file and C(dest) a partitioned data set, C(dest) does not need to include + a member in its value, the module can automatically compute the resulting member name from + C(src). - When C(dest) is a data set, precedence rules apply. If C(dest_data_set) is set, this will take precedence over an existing data set. If C(dest) is an empty data set, the empty data set will be written with the @@ -3429,6 +3432,13 @@ def run_module(module, arg_def): dest_has_asa_chars = True if dest_ds_type in data_set.DataSet.MVS_PARTITIONED: + # Checking if we need to copy a member when the user requests it implicitly. + # src is a file and dest was just the PDS/E dataset name. + if not copy_member and src_ds_type == "USS" and os.path.isfile(src): + copy_member = True + dest_member = data_set.DataSet.get_member_name_from_file(os.path.basename(src)) + dest = f"{dest_name}({dest_member})" + # Checking if the members that would be created from the directory files # are already present on the system. if copy_member: diff --git a/tests/functional/modules/test_zos_copy_func.py b/tests/functional/modules/test_zos_copy_func.py index 86ba441a6..e8e37375c 100644 --- a/tests/functional/modules/test_zos_copy_func.py +++ b/tests/functional/modules/test_zos_copy_func.py @@ -2595,6 +2595,46 @@ def test_copy_file_to_non_existing_member(ansible_zos_module, src): hosts.all.zos_data_set(name=data_set, state="absent") +# Test related to issue #774: https://github.com/ansible-collections/ibm_zos_core/issues/774. +@pytest.mark.uss +@pytest.mark.pdse +def test_copy_file_to_non_existing_member_implicit(ansible_zos_module): + hosts = ansible_zos_module + dest_data_set = get_tmp_ds_name() + dest_member = f"{dest_data_set}(PROFILE)" + + try: + hosts.all.zos_data_set( + name=dest_data_set, + type="pdse", + space_primary=5, + space_type="m", + record_format="fba", + record_length=80, + replace=True + ) + + copy_result = hosts.all.zos_copy( + src="/etc/profile", + dest=dest_data_set, + remote_src=True + ) + + verify_copy = hosts.all.shell( + cmd="cat \"//'{0}'\" > /dev/null 2>/dev/null".format(dest_member), + executable=SHELL_EXECUTABLE, + ) + + for cp_res in copy_result.contacted.values(): + assert cp_res.get("msg") is None + assert cp_res.get("changed") is True + assert cp_res.get("dest") == dest_member + for v_cp in verify_copy.contacted.values(): + assert v_cp.get("rc") == 0 + finally: + hosts.all.zos_data_set(name=dest_data_set, state="absent") + + @pytest.mark.uss @pytest.mark.pdse @pytest.mark.parametrize("src", [ From 48b10af44968f31f8a4f1a5057fac0843f6cbc15 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Mon, 15 Jul 2024 10:24:40 -0600 Subject: [PATCH 435/495] [Bugfix][822]lower_case_idcams_utility (#1550) * Fix * Fix * Add fragment * Modify test case * Change fragment --------- Co-authored-by: Rich Parker <richp405@gmail.com> --- changelogs/fragments/1550-lower_case_idcams_utility.yml | 3 +++ plugins/module_utils/mvs_cmd.py | 2 +- tests/functional/modules/test_zos_find_func.py | 2 +- 3 files changed, 5 insertions(+), 2 deletions(-) create mode 100644 changelogs/fragments/1550-lower_case_idcams_utility.yml diff --git a/changelogs/fragments/1550-lower_case_idcams_utility.yml b/changelogs/fragments/1550-lower_case_idcams_utility.yml new file mode 100644 index 000000000..121ca56ae --- /dev/null +++ b/changelogs/fragments/1550-lower_case_idcams_utility.yml @@ -0,0 +1,3 @@ +trivial: + - zos_operator - Pass capital letters to the command to idcams utility. + (https://github.com/ansible-collections/ibm_zos_core/pull/1550). \ No newline at end of file diff --git a/plugins/module_utils/mvs_cmd.py b/plugins/module_utils/mvs_cmd.py index 49511d725..b24bdaf5b 100644 --- a/plugins/module_utils/mvs_cmd.py +++ b/plugins/module_utils/mvs_cmd.py @@ -171,7 +171,7 @@ def idcams(cmd, dds=None, authorized=False): tuple(int, str, str) A tuple of return code, stdout and stderr. """ - return _run_mvs_command("IDCAMS", cmd, dds, authorized) + return _run_mvs_command("IDCAMS", cmd.upper(), dds, authorized) def ikjeft01(cmd, dds=None, authorized=False): diff --git a/tests/functional/modules/test_zos_find_func.py b/tests/functional/modules/test_zos_find_func.py index 067a2f192..618f54f6b 100644 --- a/tests/functional/modules/test_zos_find_func.py +++ b/tests/functional/modules/test_zos_find_func.py @@ -210,7 +210,7 @@ def test_exclude_members_from_matched_list(ansible_zos_module): def test_find_data_sets_older_than_age(ansible_zos_module): hosts = ansible_zos_module find_res = hosts.all.zos_find( - patterns=['IMSTESTL.IMS01.RESTART', 'IMSTESTL.IMS01.LGMSGL'], + patterns=['IMSTESTL.IMS01.RESTART', "IMSTESTL.IMS01.LGMSGL".lower()], age='2d' ) print(vars(find_res)) From 24d693ae77bf258ffc63a61cfbf260ac8a2897cf Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Wed, 17 Jul 2024 12:08:38 -0400 Subject: [PATCH 436/495] [zos_find] Support GDG and special characters in data set names for zos_find (#1518) * initial fragment and added test case * added changelog file modified new test, added limit to test value * needed to add limit parameter for gdg in the batch suboption of data_set * added limit as a dependancy to batch/datasettpe * added detail to the error raised on missing limit * changing test to eliminate batch call * fixed parenthesis * update test constructor to create gds (no paren) and use force=true * detailing the decho response * trying to escape the gdg number * updated zos_data_set definition for batch mode of gdg in dataset altered test to re-test the new definition * fixed parenthesis issue * data set creator is now working. removed escapes around gdg version indicator in test. * expanded test to write content generation entry * updating test to remove prior output and truncate the final version info * removed cars print statement. * corrected join statement in string split * updated join statement * added cleanup to gdg test routine * adding detail to pds writer * updated test updated for specific file type, not just top 2 names * added special character tests for zos_find * changed an assertion in special/symbol test * added force=True to gdg deletion in that test, so the subs would all get removed * added escape to decho command * updated changelog fragment to include PR number removed print statements from test * added dataset import back into test function * working on re-establishing zos_data_set in the test form * correcting import for zos_data_set * Modified test cases to use random generated names and improve performance * Removed hard coded names * Adding support for GDG filter in zos_find * Modified tests for filtering GDGs * Added documentation samples and fixed sanity issues * fixed test suite --------- Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> --- ...ncement-zos-find-gdg-gds-special-chars.yml | 3 + plugins/modules/zos_find.py | 154 +++++++++++++++++- .../functional/modules/test_zos_find_func.py | 117 +++++++++++-- 3 files changed, 253 insertions(+), 21 deletions(-) create mode 100644 changelogs/fragments/1374-enhancement-zos-find-gdg-gds-special-chars.yml diff --git a/changelogs/fragments/1374-enhancement-zos-find-gdg-gds-special-chars.yml b/changelogs/fragments/1374-enhancement-zos-find-gdg-gds-special-chars.yml new file mode 100644 index 000000000..33d43e56e --- /dev/null +++ b/changelogs/fragments/1374-enhancement-zos-find-gdg-gds-special-chars.yml @@ -0,0 +1,3 @@ +minor_changes: + - zos_find - added support for GDG/GDS and special characters + (https://github.com/ansible-collections/ibm_zos_core/pull/1518). diff --git a/plugins/modules/zos_find.py b/plugins/modules/zos_find.py index b269c472d..bc03d0b74 100644 --- a/plugins/modules/zos_find.py +++ b/plugins/modules/zos_find.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2020, 2023 +# Copyright (c) IBM Corporation 2020, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -109,11 +109,13 @@ - C(nonvsam) refers to one of SEQ, LIBRARY (PDSE), PDS, LARGE, BASIC, EXTREQ, or EXTPREF. - C(cluster) refers to a VSAM cluster. The C(data) and C(index) are the data and index components of a VSAM cluster. + - C(gdg) refers to Generation Data Groups. The module searches based on the GDG base name. choices: - nonvsam - cluster - data - index + - gdg type: str required: false default: "nonvsam" @@ -126,6 +128,43 @@ required: false aliases: - volumes + empty: + description: + - A GDG attribute, only valid when C(resource_type=gdg). + - If provided, will search for data sets with I(empty) attribute set as provided. + type: bool + required: false + extended: + description: + - A GDG attribute, only valid when C(resource_type=gdg). + - If provided, will search for data sets with I(extended) attribute set as provided. + type: bool + required: false + fifo: + description: + - A GDG attribute, only valid when C(resource_type=gdg). + - If provided, will search for data sets with I(fifo) attribute set as provided. + type: bool + required: false + limit: + description: + - A GDG attribute, only valid when C(resource_type=gdg). + - If provided, will search for data sets with I(limit) attribute set as provided. + type: int + required: false + purge: + description: + - A GDG attribute, only valid when C(resource_type=gdg). + - If provided, will search for data sets with I(purge) attribute set as provided. + type: bool + required: false + scratch: + description: + - A GDG attribute, only valid when C(resource_type=gdg). + - If provided, will search for data sets with I(scratch) attribute set as provided. + type: bool + required: false + notes: - Only cataloged data sets will be searched. If an uncataloged data set needs to be searched, it should be cataloged first. The L(zos_data_set,./zos_data_set.html) module can be @@ -186,6 +225,16 @@ patterns: - USER.* resource_type: cluster + +- name: Find all Generation Data Groups starting with the word 'USER' and specific GDG attributes. + zos_find: + patterns: + - USER.* + resource_type: gdg + limit: 30 + scratch: true + purge: true + """ @@ -249,6 +298,7 @@ import time import datetime import math +import json from copy import deepcopy from re import match as fullmatch @@ -535,6 +585,66 @@ def data_set_attribute_filter( return filtered_data_sets +def gdg_filter(module, data_sets, limit, empty, fifo, purge, scratch, extended): + """ Filter Generation Data Groups based on their attributes. + + Parameters + ---------- + module : AnsibleModule + The Ansible module object being used. + data_sets : set[str] + A set of data set names. + limit : int + The limit GDG attribute that should be used to filter GDGs. + empty : bool + The empty GDG attribute, that should be used to filter GDGs. + fifo : bool + The fifo GDG attribute, that should be used to filter GDGs. + purge : bool + The purge GDG attribute, that should be used to filter GDGs. + scratch : bool + The scratch GDG attribute, that should be used to filter GDGs. + extended : bool + The extended GDG attribute, that should be used to filter GDGs. + + Returns + ------- + set[str] + Matched GDG base names. + + Raises + ------ + fail_json + Non-zero return code received while executing ZOAU shell command 'dls'. + """ + filtered_data_sets = set() + for ds in data_sets: + rc, out, err = _dls_wrapper(ds, data_set_type='gdg', list_details=True, json=True) + + if rc != 0: + module.fail_json( + msg="Non-zero return code received while executing ZOAU shell command 'dls'", + rc=rc, stdout=out, stderr=err + ) + try: + response = json.loads(out) + gdgs = response['data']['gdgs'] + for gdg in gdgs: + if ( + gdg['limit'] == (gdg['limit'] if limit is None else limit) and + gdg['empty'] == (gdg['empty'] if empty is None else empty) and + gdg['purge'] == (gdg['purge'] if purge is None else purge) and + gdg['fifo'] == (gdg['fifo'] if fifo is None else fifo) and + gdg['scratch'] == (gdg['scratch'] if scratch is None else scratch) and + gdg['extended'] == (gdg['extended'] if extended is None else extended) + ): + filtered_data_sets.add(gdg['base']) + except Exception as e: + module.fail_json(repr(e)) + + return filtered_data_sets + + # TODO: # Implement volume_filter() using "vtocls" shell command from ZOAU # when it becomes available. @@ -779,7 +889,9 @@ def _dls_wrapper( u_time=False, size=False, verbose=False, - migrated=False + migrated=False, + data_set_type="", + json=False, ): """A wrapper for ZOAU 'dls' shell command. @@ -797,6 +909,10 @@ def _dls_wrapper( Display verbose information. migrated : bool Display migrated data sets. + data_set_type : str + Data set type to look for. + json : bool + Return content in json format. Returns ------- @@ -815,6 +931,10 @@ def _dls_wrapper( dls_cmd += " -s" if verbose: dls_cmd += " -v" + if data_set_type: + dls_cmd += f" -t{data_set_type}" + if json: + dls_cmd += " -j" dls_cmd += " {0}".format(quote(data_set_pattern)) return AnsibleModuleHelper(argument_spec={}).run_command(dls_cmd) @@ -926,6 +1046,12 @@ def run_module(module): ) resource_type = module.params.get('resource_type').upper() volume = module.params.get('volume') or module.params.get('volumes') + limit = module.params.get('limit') + empty = module.params.get('empty') + scratch = module.params.get('scratch') + purge = module.params.get('purge') + extended = module.params.get('extended') + fifo = module.params.get('fifo') res_args = dict(data_sets=[]) filtered_data_sets = set() @@ -983,9 +1109,11 @@ def run_module(module): res_args['examined'] = init_filtered_data_sets.get("searched") - else: + elif resource_type == "CLUSTER": filtered_data_sets = vsam_filter(module, patterns, resource_type, age=age) res_args['examined'] = len(filtered_data_sets) + elif resource_type == "GDG": + filtered_data_sets = gdg_filter(module, patterns, limit, empty, fifo, purge, scratch, extended) # Filter out data sets that match one of the patterns in 'excludes' if excludes and not pds_paths: @@ -1045,14 +1173,20 @@ def main(): ), resource_type=dict( type="str", required=False, default="nonvsam", - choices=["cluster", "data", "index", "nonvsam"] + choices=["cluster", "data", "index", "nonvsam", "gdg"] ), volume=dict( type="list", elements="str", required=False, aliases=["volumes"] - ) + ), + limit=dict(type="int", required=False), + empty=dict(type="bool", required=False), + purge=dict(type="bool", required=False), + scratch=dict(type="bool", required=False), + extended=dict(type="bool", required=False), + fifo=dict(type="bool", required=False), ) ) @@ -1077,9 +1211,15 @@ def main(): arg_type="str", required=False, default="nonvsam", - choices=["cluster", "data", "index", "nonvsam"] + choices=["cluster", "data", "index", "nonvsam", "gdg"] ), - volume=dict(arg_type="list", required=False, aliases=["volumes"]) + volume=dict(arg_type="list", required=False, aliases=["volumes"]), + limit=dict(type="int", required=False), + empty=dict(type="bool", required=False), + purge=dict(type="bool", required=False), + scratch=dict(type="bool", required=False), + extended=dict(type="bool", required=False), + fifo=dict(type="bool", required=False), ) try: BetterArgParser(arg_def).parse_args(module.params) diff --git a/tests/functional/modules/test_zos_find_func.py b/tests/functional/modules/test_zos_find_func.py index 618f54f6b..3263a63fb 100644 --- a/tests/functional/modules/test_zos_find_func.py +++ b/tests/functional/modules/test_zos_find_func.py @@ -15,6 +15,10 @@ __metaclass__ = type from ibm_zos_core.tests.helpers.volumes import Volume_Handler + +from ibm_zos_core.tests.helpers.dataset import get_tmp_ds_name + + import pytest SEQ_NAMES = [ @@ -34,6 +38,8 @@ ] DATASET_TYPES = ['seq', 'pds', 'pdse'] +# hlq used across the test suite. +TEST_SUITE_HLQ = "ANSIBLE" def create_vsam_ksds(ds_name, ansible_zos_module, volume="000000"): @@ -58,6 +64,69 @@ def create_vsam_ksds(ds_name, ansible_zos_module, volume="000000"): ) +def test_find_gdg_data_sets(ansible_zos_module): + hosts = ansible_zos_module + try: + gdg_a = get_tmp_ds_name() + gdg_b = get_tmp_ds_name() + gdg_c = get_tmp_ds_name() + gdg_names = [gdg_a, gdg_b, gdg_c] + + """ + Purge can only be true when scratch is, hence only one gdg for both. + FIFO is disabled in the ECs and results in failure when trying to + create a data set. + one without flags and limit 3 + """ + hosts.all.shell(cmd=f"dtouch -tgdg -L3 {gdg_a}") + # one with EXTENDED flag -X + hosts.all.shell(cmd=f"dtouch -tgdg -L1 -X {gdg_b}") + # one with PURGE flag -P and SCRATCH flag -S + hosts.all.shell(cmd=f"dtouch -tgdg -L1 -P -S {gdg_c}") + + find_res = hosts.all.zos_find( + patterns=[f'{TEST_SUITE_HLQ}.*.*'], + resource_type="gdg", + limit=3, + ) + + for val in find_res.contacted.values(): + assert val.get('msg') is None + assert len(val.get('data_sets')) == 1 + assert {"name":gdg_a, "type": "GDG"} in val.get('data_sets') + assert val.get('matched') == len(val.get('data_sets')) + + find_res = hosts.all.zos_find( + patterns=[f'{TEST_SUITE_HLQ}.*.*'], + resource_type="gdg", + extended=True, + ) + + for val in find_res.contacted.values(): + assert val.get('msg') is None + assert len(val.get('data_sets')) == 1 + assert {"name":gdg_b, "type": "GDG"} in val.get('data_sets') + assert val.get('matched') == len(val.get('data_sets')) + + find_res = hosts.all.zos_find( + patterns=[f'{TEST_SUITE_HLQ}.*.*'], + resource_type="gdg", + purge=True, + scratch=True, + ) + + for val in find_res.contacted.values(): + assert val.get('msg') is None + assert len(val.get('data_sets')) == 1 + assert {"name":gdg_c, "type": "GDG"} in val.get('data_sets') + assert val.get('matched') == len(val.get('data_sets')) + + finally: + # Remove one by one to avoid using an HLQ.* cuz it could cause bugs when running in parallel. + for ds in gdg_names: + hosts.all.shell(cmd=f"drm {ds}") + + def test_find_sequential_data_sets_containing_single_string(ansible_zos_module): hosts = ansible_zos_module search_string = "hello" @@ -72,7 +141,6 @@ def test_find_sequential_data_sets_containing_single_string(ansible_zos_module): patterns=['TEST.FIND.SEQ.*.*'], contains=search_string ) - print(vars(find_res)) for val in find_res.contacted.values(): assert val.get('msg') is None assert len(val.get('data_sets')) != 0 @@ -134,14 +202,13 @@ def test_find_pds_members_containing_string(ansible_zos_module): ] ) for ds in PDS_NAMES: - hosts.all.shell(cmd=f"decho '{search_string}' \"{ds}(MEMBER)\" ") + result = hosts.all.shell(cmd=f"decho '{search_string}' \"{ds}(MEMBER)\" ") find_res = hosts.all.zos_find( pds_paths=['TEST.FIND.PDS.FUNCTEST.*'], contains=search_string, patterns=['.*'] ) - print(vars(find_res)) for val in find_res.contacted.values(): assert len(val.get('data_sets')) != 0 for ds in val.get('data_sets'): @@ -170,7 +237,6 @@ def test_exclude_data_sets_from_matched_list(ansible_zos_module): patterns=['TEST.FIND.SEQ.*.*'], excludes=['.*THIRD$'] ) - print(vars(find_res)) for val in find_res.contacted.values(): assert len(val.get('data_sets')) == 2 for ds in val.get('data_sets'): @@ -196,7 +262,6 @@ def test_exclude_members_from_matched_list(ansible_zos_module): find_res = hosts.all.zos_find( pds_paths=['TEST.FIND.PDS.FUNCTEST.*'], excludes=['.*FILE$'], patterns=['.*'] ) - print(vars(find_res)) for val in find_res.contacted.values(): assert len(val.get('data_sets')) == 3 for ds in val.get('data_sets'): @@ -213,7 +278,6 @@ def test_find_data_sets_older_than_age(ansible_zos_module): patterns=['IMSTESTL.IMS01.RESTART', "IMSTESTL.IMS01.LGMSGL".lower()], age='2d' ) - print(vars(find_res)) for val in find_res.contacted.values(): assert len(val.get('data_sets')) == 2 assert val.get('matched') == 2 @@ -259,7 +323,6 @@ def test_find_data_sets_in_volume(ansible_zos_module): find_res = hosts.all.zos_find( patterns=[data_set_name], volumes=[volume] ) - print(vars(find_res)) for val in find_res.contacted.values(): assert len(val.get('data_sets')) >= 1 assert val.get('matched') >= 1 @@ -274,9 +337,8 @@ def test_find_vsam_pattern(ansible_zos_module): for vsam in VSAM_NAMES: create_vsam_ksds(vsam, hosts) find_res = hosts.all.zos_find( - patterns=['TEST.FIND.VSAM.*.*'], resource_type='cluster' + patterns=['TEST.FIND.VSAM.FUNCTEST.*'], resource_type='cluster' ) - print(vars(find_res)) for val in find_res.contacted.values(): assert len(val.get('data_sets')) == 1 assert val.get('matched') == len(val.get('data_sets')) @@ -297,7 +359,7 @@ def test_find_vsam_in_volume(ansible_zos_module, volumes_on_systems): create_vsam_ksds(vsam, hosts, volume=volume_1) create_vsam_ksds(alternate_vsam, hosts, volume=volume_2) find_res = hosts.all.zos_find( - patterns=['TEST.FIND.*.*.*'], volumes=[volume_1], resource_type='cluster' + patterns=['TEST.FIND.VSAM.*.*'], volumes=[volume_1], resource_type='cluster' ) for val in find_res.contacted.values(): assert len(val.get('data_sets')) == 1 @@ -326,7 +388,6 @@ def test_find_invalid_size_indicator_fails(ansible_zos_module): def test_find_non_existent_data_sets(ansible_zos_module): hosts = ansible_zos_module find_res = hosts.all.zos_find(patterns=['TEST.FIND.NONE.*.*']) - print(vars(find_res)) for val in find_res.contacted.values(): assert len(val.get('data_sets')) == 0 assert val.get('matched') == 0 @@ -335,7 +396,6 @@ def test_find_non_existent_data_sets(ansible_zos_module): def test_find_non_existent_data_set_members(ansible_zos_module): hosts = ansible_zos_module find_res = hosts.all.zos_find(pds_paths=['TEST.NONE.PDS.*'], patterns=['.*']) - print(vars(find_res)) for val in find_res.contacted.values(): assert len(val.get('data_sets')) == 0 assert val.get('matched') == 0 @@ -356,7 +416,6 @@ def test_find_mixed_members_from_pds_paths(ansible_zos_module): find_res = hosts.all.zos_find( pds_paths=['TEST.NONE.PDS.*','TEST.FIND.PDS.FUNCTEST.*'], excludes=['.*FILE$'], patterns=['.*'] ) - print(vars(find_res)) for val in find_res.contacted.values(): assert len(val.get('data_sets')) == 3 for ds in val.get('data_sets'): @@ -364,4 +423,34 @@ def test_find_mixed_members_from_pds_paths(ansible_zos_module): finally: hosts.all.zos_data_set( batch=[dict(name=i, state='absent') for i in PDS_NAMES] - ) \ No newline at end of file + ) + + +def test_find_sequential_special_data_sets_containing_single_string(ansible_zos_module): + hosts = ansible_zos_module + search_string = "hello" + try: + + special_chars = ["$", "-", "@", "#"] + special_names = [ "".join([get_tmp_ds_name(mlq_size=7, llq_size=6, symbols=True), special_chars[i]]) for i in range(4)] + # Creates a command like dtouch dsname && dtouch dsname && dtouch dsname to avoid multiple ssh calls and improve test performance + dtouch_command = " && ".join([f"dtouch -tseq '{item}'" for item in special_names]) + hosts.all.shell(cmd=dtouch_command) + + # Creates a command like decho dsname && decho dsname && decho dsname to avoid multiple ssh calls and improve test performance + decho_command = " && ".join([f"decho '{search_string}' '{item}'" for item in special_names]) + hosts.all.shell(cmd=decho_command) + + find_res = hosts.all.zos_find( + patterns=[f'{TEST_SUITE_HLQ}.*.*'], + contains=search_string + ) + for val in find_res.contacted.values(): + assert val.get('msg') is None + assert len(val.get('data_sets')) != 0 + for ds in special_names: + assert {"name":ds, "type": "NONVSAM"} in val.get('data_sets') + assert val.get('matched') == len(val.get('data_sets')) + finally: + for ds in special_names: + hosts.all.shell(cmd=f"drm '{ds}'") From 7c94c05dea92443960ba8dde448efd3198dd3b3b Mon Sep 17 00:00:00 2001 From: Ivan Moreno <iamorenosoto@gmail.com> Date: Fri, 19 Jul 2024 18:10:10 -0600 Subject: [PATCH 437/495] [Enabler] [Various modules] Remove use of deprecated pipes library (#1565) * Replace use of pipes for shlex * Remove import of pipes due to use of Python 2 * Add changelog fragment * Remove import of PY3 * Add special symbols workaround * Fix syntax error * Fix another syntax error * Fix pep8 issues * Add ZOAUResponse to keep compatibility * Fix ZOAU exception * Update changed attribute in response * Add cmd to result * Add single quotes to data set names * Add workaround for batches * Fix pylint issue * Fix cat issue --- .../1565-remove-deprecated-pipes-library.yml | 11 ++ plugins/module_utils/backup.py | 7 +- plugins/module_utils/copy.py | 6 +- plugins/module_utils/encode.py | 7 +- plugins/modules/zos_apf.py | 121 +++++++++++++++++- plugins/modules/zos_find.py | 6 +- plugins/modules/zos_mvs_raw.py | 6 +- .../modules/test_module_security.py | 2 +- tests/functional/modules/test_zos_apf_func.py | 82 ++++++------ .../modules/test_zos_data_set_func.py | 2 +- 10 files changed, 179 insertions(+), 71 deletions(-) create mode 100644 changelogs/fragments/1565-remove-deprecated-pipes-library.yml diff --git a/changelogs/fragments/1565-remove-deprecated-pipes-library.yml b/changelogs/fragments/1565-remove-deprecated-pipes-library.yml new file mode 100644 index 000000000..e031caa50 --- /dev/null +++ b/changelogs/fragments/1565-remove-deprecated-pipes-library.yml @@ -0,0 +1,11 @@ +trivial: + - zos_find - remove deprecated library pipes in favor of shlex. + (https://github.com/ansible-collections/ibm_zos_core/pull/1565). + - zos_mvs_raw - remove deprecated library pipes in favor of shlex. + (https://github.com/ansible-collections/ibm_zos_core/pull/1565). + - module_utils/backup.py - remove deprecated library pipes in favor of shlex. + (https://github.com/ansible-collections/ibm_zos_core/pull/1565). + - module_utils/copy.py - remove deprecated library pipes in favor of shlex. + (https://github.com/ansible-collections/ibm_zos_core/pull/1565). + - module_utils/encode.py - remove deprecated library pipes in favor of shlex. + (https://github.com/ansible-collections/ibm_zos_core/pull/1565). diff --git a/plugins/module_utils/backup.py b/plugins/module_utils/backup.py index f409323d0..b881d6321 100644 --- a/plugins/module_utils/backup.py +++ b/plugins/module_utils/backup.py @@ -15,7 +15,6 @@ __metaclass__ = type import os -from ansible.module_utils.six import PY3 from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.ansible_module import ( AnsibleModuleHelper, ) @@ -44,10 +43,8 @@ except Exception: datasets = ZOAUImportError(traceback.format_exc()) exceptions = ZOAUImportError(traceback.format_exc()) -if PY3: - from shlex import quote -else: - from pipes import quote + +from shlex import quote def _validate_data_set_name(ds): diff --git a/plugins/module_utils/copy.py b/plugins/module_utils/copy.py index 8172ca0bf..c42482b4f 100644 --- a/plugins/module_utils/copy.py +++ b/plugins/module_utils/copy.py @@ -17,7 +17,6 @@ import traceback from os import path -from ansible.module_utils.six import PY3 from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.ansible_module import ( AnsibleModuleHelper, ) @@ -30,10 +29,7 @@ from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import \ ZOAUImportError -if PY3: - from shlex import quote -else: - from pipes import quote +from shlex import quote try: from zoautil_py import datasets, gdgs diff --git a/plugins/module_utils/encode.py b/plugins/module_utils/encode.py index c69e2ebd9..895cfb785 100644 --- a/plugins/module_utils/encode.py +++ b/plugins/module_utils/encode.py @@ -17,7 +17,6 @@ from tempfile import NamedTemporaryFile, mkstemp, mkdtemp from math import floor, ceil from os import path, walk, makedirs, unlink -from ansible.module_utils.six import PY3 import shutil import errno @@ -42,11 +41,7 @@ except Exception: datasets = ZOAUImportError(traceback.format_exc()) - -if PY3: - from shlex import quote -else: - from pipes import quote +from shlex import quote class Defaults: diff --git a/plugins/modules/zos_apf.py b/plugins/modules/zos_apf.py index 36156cdd9..024ef8baa 100644 --- a/plugins/modules/zos_apf.py +++ b/plugins/modules/zos_apf.py @@ -305,8 +305,10 @@ try: from zoautil_py import zsystem + from zoautil_py import ztypes except Exception: zsystem = ZOAUImportError(traceback.format_exc()) + ztypes = ZOAUImportError(traceback.format_exc()) # supported data set types @@ -361,6 +363,107 @@ def backupOper(module, src, backup, tmphlq=None): return backup_name +def make_apf_command(library, opt, volume=None, sms=None, force_dynamic=None, persistent=None): + """Returns a string that can run an APF command in a shell. + + Parameters + ---------- + library : str + Name of the data set that will be operated on. + opt : str + APF operation (either add or del) + volume : str + Volume of library. + sms : bool + Whether library is managed by SMS. + force_dynamic : bool + Whether the APF list format should be dynamic. + persistent : dict + Options for persistent entries that should be modified by APF. + + Returns + ------- + str + APF command. + """ + operation = "-A" if opt == "add" else "-D" + operation_args = library + + if volume: + operation_args = f"{operation_args},{volume}" + elif sms: + operation_args = f"{operation_args},SMS" + + command = f"apfadm {operation} '{operation_args}'" + + if force_dynamic: + command = f"{command} -f" + + if persistent: + if opt == "add": + persistent_args = f""" -P '{persistent.get("addDataset")}' """ + else: + persistent_args = f""" -R '{persistent.get("delDataset")}' """ + + if persistent.get("marker"): + persistent_args = f""" {persistent_args} -M '{persistent.get("marker")}' """ + + command = f"{command} {persistent_args}" + + return command + + +def make_apf_batch_command(batch, force_dynamic=None, persistent=None): + """Returns a string that can run an APF command for multiple operations + in a shell. + + Parameters + ---------- + batch : list + List of dicts containing different APF add/del operations. + force_dynamic : bool + Whether the APF list format should be dynamic. + persistent : dict + Options for persistent entries that should be modified by APF. + + Returns + ------- + str + APF command. + """ + command = "apfadm" + + for item in batch: + operation = "-A" if item["opt"] == "add" else "-D" + operation_args = item["dsname"] + + volume = item.get("volume") + sms = item.get("sms") + + if volume: + operation_args = f"{operation_args},{volume}" + elif sms: + operation_args = f"{operation_args},SMS" + + command = f"{command} {operation} '{operation_args}'" + + if force_dynamic: + command = f"{command} -f" + + if persistent: + if persistent.get("addDataset"): + persistent_args = f""" -P '{persistent.get("addDataset")}' """ + else: + persistent_args = f""" -R '{persistent.get("delDataset")}' """ + + if persistent.get("marker"): + persistent_args = f""" {persistent_args} -M '{persistent.get("marker")}' """ + + command = f"{command} {persistent_args}" + + return command + + def main(): """Initialize the module. @@ -551,11 +654,21 @@ def main(): item['opt'] = opt item['dsname'] = item.get('library') del item['library'] - ret = zsystem.apf(batch=batch, forceDynamic=force_dynamic, persistent=persistent) + # Commenting this line to implement a workaround for names with '$'. ZOAU should + # release a fix soon so we can uncomment this Python API call. + # ret = zsystem.apf(batch=batch, forceDynamic=force_dynamic, persistent=persistent) + apf_command = make_apf_batch_command(batch, force_dynamic=force_dynamic, persistent=persistent) + rc, out, err = module.run_command(apf_command) + ret = ztypes.ZOAUResponse(rc, out, err, apf_command, 'utf-8') else: if not library: module.fail_json(msg='library is required') - ret = zsystem.apf(opt=opt, dsname=library, volume=volume, sms=sms, forceDynamic=force_dynamic, persistent=persistent) + # Commenting this line to implement a workaround for names with '$'. ZOAU should + # release a fix soon so we can uncomment this Python API call. + # ret = zsystem.apf(opt=opt, dsname=library, volume=volume, sms=sms, forceDynamic=force_dynamic, persistent=persistent) + apf_command = make_apf_command(library, opt, volume=volume, sms=sms, force_dynamic=force_dynamic, persistent=persistent) + rc, out, err = module.run_command(apf_command) + ret = ztypes.ZOAUResponse(rc, out, err, apf_command, 'utf-8') operOut = ret.stdout_response operErr = ret.stderr_response @@ -563,6 +676,10 @@ def main(): result['stderr'] = operErr result['rc'] = operRc result['stdout'] = operOut + + if operation != 'list' and operRc == 0: + result['changed'] = True + if operation == 'list': try: data = json.loads(operOut) diff --git a/plugins/modules/zos_find.py b/plugins/modules/zos_find.py index bc03d0b74..de272bfd0 100644 --- a/plugins/modules/zos_find.py +++ b/plugins/modules/zos_find.py @@ -304,7 +304,6 @@ from re import match as fullmatch -from ansible.module_utils.six import PY3 from ansible.module_utils.basic import AnsibleModule from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import ( @@ -319,10 +318,7 @@ AnsibleModuleHelper ) -if PY3: - from shlex import quote -else: - from pipes import quote +from shlex import quote def content_filter(module, patterns, content): diff --git a/plugins/modules/zos_mvs_raw.py b/plugins/modules/zos_mvs_raw.py index b06dd2ce0..b382baf25 100644 --- a/plugins/modules/zos_mvs_raw.py +++ b/plugins/modules/zos_mvs_raw.py @@ -1645,12 +1645,8 @@ ) import re import traceback -from ansible.module_utils.six import PY3 -if PY3: - from shlex import quote -else: - from pipes import quote +from shlex import quote try: from zoautil_py import datasets diff --git a/tests/functional/modules/test_module_security.py b/tests/functional/modules/test_module_security.py index 46a4ad01e..744d8f595 100644 --- a/tests/functional/modules/test_module_security.py +++ b/tests/functional/modules/test_module_security.py @@ -17,7 +17,7 @@ import pytest from pprint import pprint -from pipes import quote +from shlex import quote import unittest # TODO: remove some of the logic from tests and make pytest fixtures diff --git a/tests/functional/modules/test_zos_apf_func.py b/tests/functional/modules/test_zos_apf_func.py index 56ed39c30..4bb0e9041 100644 --- a/tests/functional/modules/test_zos_apf_func.py +++ b/tests/functional/modules/test_zos_apf_func.py @@ -42,10 +42,10 @@ /*ENDAPFLIST*/""" def clean_test_env(hosts, test_info): - cmd_str = f"drm {test_info['library']}" + cmd_str = f"drm '{test_info['library']}' " hosts.all.shell(cmd=cmd_str) if test_info.get('persistent'): - cmd_str = f"drm {test_info['persistent']['data_set_name']}" + cmd_str = f"drm '{test_info['persistent']['data_set_name']}' " hosts.all.shell(cmd=cmd_str) @@ -60,10 +60,10 @@ def test_add_del(ansible_zos_module, volumes_with_vvds): "force_dynamic":True } ds = get_tmp_ds_name(3,2) - hosts.all.shell(f"dtouch -tseq -V{volume} {ds} ") + hosts.all.shell(f"dtouch -tseq -V{volume} '{ds}' ") test_info['library'] = ds if test_info.get('volume') is not None: - cmd_str = "dls -l " + ds + " | awk '{print $5}' " + cmd_str = "dls -l '" + ds + "' | awk '{print $5}' " results = hosts.all.shell(cmd=cmd_str) for result in results.contacted.values(): vol = result.get("stdout") @@ -74,7 +74,7 @@ def test_add_del(ansible_zos_module, volumes_with_vvds): for result in results.contacted.values(): prstds = result.get("stdout") prstds = prstds[:30] - cmd_str = f"dtouch -tseq {prstds}" + cmd_str = f"dtouch -tseq '{prstds}' " hosts.all.shell(cmd=cmd_str) test_info['persistent']['data_set_name'] = prstds results = hosts.all.zos_apf(**test_info) @@ -106,10 +106,10 @@ def test_add_del_with_tmp_hlq_option(ansible_zos_module, volumes_with_vvds): } test_info['tmp_hlq'] = tmphlq ds = get_tmp_ds_name(3,2,True) - hosts.all.shell(cmd=f"dtouch -tseq -V{volume} {ds} ") + hosts.all.shell(cmd=f"dtouch -tseq -V{volume} '{ds}' ") test_info['library'] = ds if test_info.get('volume') is not None: - cmd_str = "dls -l " + ds + " | awk '{print $5}' " + cmd_str = "dls -l '" + ds + "' | awk '{print $5}' " results = hosts.all.shell(cmd=cmd_str) for result in results.contacted.values(): vol = result.get("stdout") @@ -120,7 +120,7 @@ def test_add_del_with_tmp_hlq_option(ansible_zos_module, volumes_with_vvds): for result in results.contacted.values(): prstds = result.get("stdout") prstds = prstds[:30] - cmd_str = f"dtouch -tseq {prstds}" + cmd_str = f"dtouch -tseq '{prstds}' " hosts.all.shell(cmd=cmd_str) test_info['persistent']['data_set_name'] = prstds results = hosts.all.zos_apf(**test_info) @@ -148,10 +148,10 @@ def test_add_del_volume(ansible_zos_module, volumes_with_vvds): } ds = get_tmp_ds_name(1,1,True) - hosts.all.shell(cmd=f"dtouch -tseq -V{volume} {ds} ") + hosts.all.shell(cmd=f"dtouch -tseq -V{volume} '{ds}' ") test_info['library'] = ds if test_info.get('volume') is not None: - cmd_str = "dls -l " + ds + " | awk '{print $5}' " + cmd_str = "dls -l '" + ds + "' | awk '{print $5}' " results = hosts.all.shell(cmd=cmd_str) for result in results.contacted.values(): vol = result.get("stdout") @@ -162,7 +162,7 @@ def test_add_del_volume(ansible_zos_module, volumes_with_vvds): for result in results.contacted.values(): prstds = result.get("stdout") prstds = prstds[:30] - cmd_str = f"dtouch -tseq {prstds}" + cmd_str = f"dtouch -tseq '{prstds}' " hosts.all.shell(cmd=cmd_str) test_info['persistent']['data_set_name'] = prstds results = hosts.all.zos_apf(**test_info) @@ -221,10 +221,10 @@ def test_add_del_volume_persist(ansible_zos_module, volumes_with_vvds): "force_dynamic":True } ds = get_tmp_ds_name(1,1,True) - hosts.all.shell(cmd=f"dtouch -tseq -V{volume} {ds} ") + hosts.all.shell(cmd=f"dtouch -tseq -V{volume} '{ds}' ") test_info['library'] = ds if test_info.get('volume') is not None: - cmd_str = "dls -l " + ds + " | awk '{print $5}' " + cmd_str = "dls -l '" + ds + "' | awk '{print $5}' " results = hosts.all.shell(cmd=cmd_str) for result in results.contacted.values(): vol = result.get("stdout") @@ -235,7 +235,7 @@ def test_add_del_volume_persist(ansible_zos_module, volumes_with_vvds): for result in results.contacted.values(): prstds = result.get("stdout") prstds = prstds[:30] - cmd_str = f"dtouch -tseq {prstds}" + cmd_str = f"dtouch -tseq '{prstds}' " hosts.all.shell(cmd=cmd_str) test_info['persistent']['data_set_name'] = prstds results = hosts.all.zos_apf(**test_info) @@ -298,15 +298,15 @@ def test_batch_add_del(ansible_zos_module, volumes_with_vvds): } for item in test_info['batch']: ds = get_tmp_ds_name(1,1,True) - hosts.all.shell(cmd=f"dtouch -tseq -V{volume} {ds} ") + hosts.all.shell(cmd=f"dtouch -tseq -V{volume} '{ds}' ") item['library'] = ds - cmd_str = "dls -l " + ds + " | awk '{print $5}' " + cmd_str = "dls -l '" + ds + "' | awk '{print $5}' " results = hosts.all.shell(cmd=cmd_str) for result in results.contacted.values(): vol = result.get("stdout") item['volume'] = vol prstds = get_tmp_ds_name(5,5,True) - cmd_str = f"dtouch -tseq {prstds}" + cmd_str = f"dtouch -tseq '{prstds}' " hosts.all.shell(cmd=cmd_str) test_info['persistent']['data_set_name'] = prstds @@ -322,7 +322,7 @@ def test_batch_add_del(ansible_zos_module, volumes_with_vvds): test_info['batch'][2]['volume'] ) add_exptd = add_exptd.replace(" ", "") - cmd_str = f"cat \"//'{test_info['persistent']['data_set_name']}'\" " + cmd_str = f"""dcat '{test_info["persistent"]["data_set_name"]}' """ results = hosts.all.shell(cmd=cmd_str) for result in results.contacted.values(): actual = result.get("stdout") @@ -333,7 +333,7 @@ def test_batch_add_del(ansible_zos_module, volumes_with_vvds): for result in results.contacted.values(): assert result.get("rc") == 0 del_exptd = DEL_EXPECTED.replace(" ", "") - cmd_str = f"cat \"//'{test_info['persistent']['data_set_name']}'\" " + cmd_str = f"""dcat '{test_info["persistent"]["data_set_name"]}' """ results = hosts.all.shell(cmd=cmd_str) for result in results.contacted.values(): actual = result.get("stdout") @@ -342,7 +342,7 @@ def test_batch_add_del(ansible_zos_module, volumes_with_vvds): finally: for item in test_info['batch']: clean_test_env(hosts, item) - hosts.all.shell(cmd=f"drm {test_info['persistent']['data_set_name']}") + hosts.all.shell(cmd=f"drm '{test_info['persistent']['data_set_name']}' ") def test_operation_list(ansible_zos_module): @@ -371,10 +371,10 @@ def test_operation_list_with_filter(ansible_zos_module, volumes_with_vvds): } test_info['state'] = 'present' ds = get_tmp_ds_name(3,2,True) - hosts.all.shell(cmd=f"dtouch -tseq -V{volume} {ds} ") + hosts.all.shell(cmd=f"dtouch -tseq -V{volume} '{ds}' ") test_info['library'] = ds if test_info.get('volume') is not None: - cmd_str = "dls -l " + ds + " | awk '{print $5}' " + cmd_str = "dls -l '" + ds + "' | awk '{print $5}' " results = hosts.all.shell(cmd=cmd_str) for result in results.contacted.values(): vol = result.get("stdout") @@ -385,7 +385,7 @@ def test_operation_list_with_filter(ansible_zos_module, volumes_with_vvds): for result in results.contacted.values(): prstds = result.get("stdout") prstds = prstds[:30] - cmd_str = f"dtouch -tseq {prstds}" + cmd_str = f"dtouch -tseq '{prstds}' " hosts.all.shell(cmd=cmd_str) test_info['persistent']['data_set_name'] = prstds hosts.all.zos_apf(**test_info) @@ -420,10 +420,10 @@ def test_add_already_present(ansible_zos_module, volumes_with_vvds): } test_info['state'] = 'present' ds = get_tmp_ds_name(3,2,True) - hosts.all.shell(cmd=f"dtouch -tseq -V{volume} {ds} ") + hosts.all.shell(cmd=f"dtouch -tseq -V{volume} '{ds}' ") test_info['library'] = ds if test_info.get('volume') is not None: - cmd_str = "dls -l " + ds + " | awk '{print $5}' " + cmd_str = "dls -l '" + ds + "' | awk '{print $5}' " results = hosts.all.shell(cmd=cmd_str) for result in results.contacted.values(): vol = result.get("stdout") @@ -434,7 +434,7 @@ def test_add_already_present(ansible_zos_module, volumes_with_vvds): for result in results.contacted.values(): prstds = result.get("stdout") prstds = prstds[:30] - cmd_str = f"dtouch -tseq {prstds}" + cmd_str = f"dtouch -tseq '{prstds}' " hosts.all.shell(cmd=cmd_str) test_info['persistent']['data_set_name'] = prstds results = hosts.all.zos_apf(**test_info) @@ -461,10 +461,10 @@ def test_del_not_present(ansible_zos_module, volumes_with_vvds): "force_dynamic":True } ds = get_tmp_ds_name(1,1,True) - hosts.all.shell(cmd=f"dtouch -tseq -V{volume} {ds} ") + hosts.all.shell(cmd=f"dtouch -tseq -V{volume} '{ds}' ") test_info['library'] = ds if test_info.get('volume') is not None: - cmd_str = "dls -l " + ds + " | awk '{print $5}' " + cmd_str = "dls -l '" + ds + "' | awk '{print $5}' " results = hosts.all.shell(cmd=cmd_str) for result in results.contacted.values(): vol = result.get("stdout") @@ -475,7 +475,7 @@ def test_del_not_present(ansible_zos_module, volumes_with_vvds): for result in results.contacted.values(): prstds = result.get("stdout") prstds = prstds[:30] - cmd_str = f"dtouch -tseq {prstds}" + cmd_str = f"dtouch -tseq '{prstds}' " hosts.all.shell(cmd=cmd_str) test_info['persistent']['data_set_name'] = prstds test_info['state'] = 'absent' @@ -514,10 +514,10 @@ def test_add_with_wrong_volume(ansible_zos_module, volumes_with_vvds): } test_info['state'] = 'present' ds = get_tmp_ds_name(3,2,True) - hosts.all.shell(cmd=f"dtouch -tseq -V{volume} {ds} ") + hosts.all.shell(cmd=f"dtouch -tseq -V{volume} '{ds}' ") test_info['library'] = ds if test_info.get('volume') is not None: - cmd_str = "dls -l " + ds + " | awk '{print $5}' " + cmd_str = "dls -l '" + ds + "' | awk '{print $5}' " results = hosts.all.shell(cmd=cmd_str) for result in results.contacted.values(): vol = result.get("stdout") @@ -528,7 +528,7 @@ def test_add_with_wrong_volume(ansible_zos_module, volumes_with_vvds): for result in results.contacted.values(): prstds = result.get("stdout") prstds = prstds[:30] - cmd_str = f"dtouch -tseq {prstds}" + cmd_str = f"dtouch -tseq '{prstds}' " hosts.all.shell(cmd=cmd_str) test_info['persistent']['data_set_name'] = prstds test_info['volume'] = 'T12345' @@ -556,10 +556,10 @@ def test_persist_invalid_ds_format(ansible_zos_module, volumes_with_vvds): } test_info['state'] = 'present' ds = get_tmp_ds_name(3,2,True) - hosts.all.shell(cmd=f"dtouch -tseq -V{volume} {ds} ") + hosts.all.shell(cmd=f"dtouch -tseq -V{volume} '{ds}' ") test_info['library'] = ds if test_info.get('volume') is not None: - cmd_str = "dls -l " + ds + " | awk '{print $5}' " + cmd_str = "dls -l '" + ds + "' | awk '{print $5}' " results = hosts.all.shell(cmd=cmd_str) for result in results.contacted.values(): vol = result.get("stdout") @@ -570,7 +570,7 @@ def test_persist_invalid_ds_format(ansible_zos_module, volumes_with_vvds): for result in results.contacted.values(): prstds = result.get("stdout") prstds = prstds[:30] - cmd_str = f"dtouch -tseq {prstds}" + cmd_str = f"dtouch -tseq '{prstds}' " hosts.all.shell(cmd=cmd_str) test_info['persistent']['data_set_name'] = prstds ds_name = test_info['persistent']['data_set_name'] @@ -599,10 +599,10 @@ def test_persist_invalid_marker(ansible_zos_module, volumes_with_vvds): } test_info['state'] = 'present' ds = get_tmp_ds_name(3,2,True) - hosts.all.shell(cmd=f"dtouch -tseq -V{volume} {ds} ") + hosts.all.shell(cmd=f"dtouch -tseq -V{volume} '{ds}' ") test_info['library'] = ds if test_info.get('volume') is not None: - cmd_str = "dls -l " + ds + " | awk '{print $5}' " + cmd_str = "dls -l '" + ds + "' | awk '{print $5}' " results = hosts.all.shell(cmd=cmd_str) for result in results.contacted.values(): vol = result.get("stdout") @@ -613,7 +613,7 @@ def test_persist_invalid_marker(ansible_zos_module, volumes_with_vvds): for result in results.contacted.values(): prstds = result.get("stdout") prstds = prstds[:30] - cmd_str = f"dtouch -tseq {prstds}" + cmd_str = f"dtouch -tseq '{prstds}' " hosts.all.shell(cmd=cmd_str) test_info['persistent']['data_set_name'] = prstds test_info['persistent']['marker'] = "# Invalid marker format" @@ -640,10 +640,10 @@ def test_persist_invalid_marker_len(ansible_zos_module, volumes_with_vvds): } test_info['state'] = 'present' ds = get_tmp_ds_name(3,2,True) - hosts.all.shell(cmd=f"dtouch -tseq -V{volume} {ds} ") + hosts.all.shell(cmd=f"dtouch -tseq -V{volume} '{ds}' ") test_info['library'] = ds if test_info.get('volume') is not None: - cmd_str = "dls -l " + ds + " | awk '{print $5}' " + cmd_str = "dls -l '" + ds + "' | awk '{print $5}' " results = hosts.all.shell(cmd=cmd_str) for result in results.contacted.values(): vol = result.get("stdout") @@ -654,7 +654,7 @@ def test_persist_invalid_marker_len(ansible_zos_module, volumes_with_vvds): for result in results.contacted.values(): prstds = result.get("stdout") prstds = prstds[:30] - cmd_str = f"dtouch -tseq {prstds}" + cmd_str = f"dtouch -tseq '{prstds}' " hosts.all.shell(cmd=cmd_str) test_info['persistent']['data_set_name'] = prstds test_info['persistent']['marker'] = "/* {mark} This is a awfully lo%70sng marker */" % ("o") diff --git a/tests/functional/modules/test_zos_data_set_func.py b/tests/functional/modules/test_zos_data_set_func.py index 495e53f9e..f210377d9 100644 --- a/tests/functional/modules/test_zos_data_set_func.py +++ b/tests/functional/modules/test_zos_data_set_func.py @@ -16,7 +16,7 @@ __metaclass__ = type import time -from pipes import quote +from shlex import quote from pprint import pprint import pytest From 417ef1bc69397d0b216d8eb0c20089cd961cc3f3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Sat, 20 Jul 2024 16:51:14 -0600 Subject: [PATCH 438/495] [BGYSC0822E][1308][Zos_Operator]Console_parallel (#1553) * Probe solution * Add fragment * Fix comment * Update conftest.py Delete imports * Test all times * Avoid uses of users * Add fix * Update test_zos_operator_func.py --- .../fragments/1553-Console_parallel.yml | 3 + tests/conftest.py | 9 ++- .../modules/test_zos_operator_func.py | 72 +++++++++++++++++++ 3 files changed, 83 insertions(+), 1 deletion(-) create mode 100644 changelogs/fragments/1553-Console_parallel.yml diff --git a/changelogs/fragments/1553-Console_parallel.yml b/changelogs/fragments/1553-Console_parallel.yml new file mode 100644 index 000000000..3c879a0ce --- /dev/null +++ b/changelogs/fragments/1553-Console_parallel.yml @@ -0,0 +1,3 @@ +trivial: + - zos_operator - Validate the use of two terminals with a parallel call of zos_operator. + (https://github.com/ansible-collections/ibm_zos_core/pull/1553). \ No newline at end of file diff --git a/tests/conftest.py b/tests/conftest.py index 7fea5ac0d..9a9cc9ad6 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -128,4 +128,11 @@ def perform_imports(imports): newimp = [importlib.import_module(x) for x in imports] return newimp - yield (mocker, perform_imports) \ No newline at end of file + yield (mocker, perform_imports) + + +@pytest.fixture(scope="function") +def get_config(request): + """ Call the pytest-ansible plugin to check volumes on the system and work properly a list by session.""" + path = request.config.getoption("--zinventory") + yield path diff --git a/tests/functional/modules/test_zos_operator_func.py b/tests/functional/modules/test_zos_operator_func.py index 123aeb57a..eb1bf1f60 100644 --- a/tests/functional/modules/test_zos_operator_func.py +++ b/tests/functional/modules/test_zos_operator_func.py @@ -16,6 +16,9 @@ __metaclass__ = type import pytest +import yaml +import os +from shellescape import quote from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import ( zoau_version_checker, @@ -24,6 +27,44 @@ __metaclass__ = type +PARALLEL_RUNNING = """- hosts : zvm + collections : + - ibm.ibm_zos_core + gather_facts: False + vars: + ZOAU: "{0}" + PYZ: "{1}" + environment: + _BPXK_AUTOCVT: "ON" + ZOAU_HOME: "{0}" + PYTHONPATH: "{0}/lib" + LIBPATH: "{0}/lib:{1}/lib:/lib:/usr/lib:." + PATH: "{0}/bin:/bin:/usr/lpp/rsusr/ported/bin:/var/bin:/usr/lpp/rsusr/ported/bin:/usr/lpp/java/java180/J8.0_64/bin:{1}/bin:" + _CEE_RUNOPTS: "FILETAG(AUTOCVT,AUTOTAG) POSIX(ON)" + _TAG_REDIR_ERR: "txt" + _TAG_REDIR_IN: "txt" + _TAG_REDIR_OUT: "txt" + LANG: "C" + tasks: + - name: zos_operator + zos_operator: + cmd: 'd a,all' + wait_time_s: 3 + verbose: true + register: output + + - name: print output + debug: + var: output""" + +INVENTORY = """all: + hosts: + zvm: + ansible_host: {0} + ansible_ssh_private_key_file: {1} + ansible_user: {2} + ansible_python_interpreter: /allpython/3.9/usr/lpp/IBM/cyp/v3r9/pyz/bin/python3.9""" + def test_zos_operator_various_command(ansible_zos_module): test_data = [ @@ -141,3 +182,34 @@ def test_response_come_back_complete(ansible_zos_module): # HASP646 Only appears in the last line that before did not appears last_line = len(stdout) assert "HASP646" in stdout[last_line - 1] + + +def test_zos_operator_parallel_terminal(get_config): + path = get_config + with open(path, 'r') as file: + enviroment = yaml.safe_load(file) + ssh_key = enviroment["ssh_key"] + hosts = enviroment["host"].upper() + user = enviroment["user"].upper() + python_path = enviroment["python_path"] + cut_python_path = python_path[:python_path.find('/bin')].strip() + zoau = enviroment["environment"]["ZOAU_ROOT"] + try: + playbook = "playbook.yml" + inventory = "inventory.yml" + os.system("echo {0} > {1}".format(quote(PARALLEL_RUNNING.format( + zoau, + cut_python_path, + )), playbook)) + os.system("echo {0} > {1}".format(quote(INVENTORY.format( + hosts, + ssh_key, + user, + )), inventory)) + command = "(ansible-playbook -i {0} {1}) & (ansible-playbook -i {0} {1})".format(inventory, playbook) + stdout = os.system(command) + assert stdout == 0 + finally: + os.remove("inventory.yml") + os.remove("playbook.yml") + From b329b0010015abc47380702769b47fe629f30e36 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Wed, 24 Jul 2024 15:01:35 -0600 Subject: [PATCH 439/495] [Bug][zos_copy] Avoid using opercmd if data set does not exist (#1623) * Replace use of pipes for shlex * Remove import of pipes due to use of Python 2 * Add changelog fragment * Remove import of PY3 * Add special symbols workaround * Fix syntax error * Fix another syntax error * Fix pep8 issues * Add ZOAUResponse to keep compatibility * Fix ZOAU exception * Added code change * Added changelog --------- Co-authored-by: Ivan Moreno <iamorenosoto@gmail.com> --- changelogs/fragments/1623-zos_copy-avoid-opercmd.yml | 5 +++++ plugins/modules/zos_copy.py | 2 +- 2 files changed, 6 insertions(+), 1 deletion(-) create mode 100644 changelogs/fragments/1623-zos_copy-avoid-opercmd.yml diff --git a/changelogs/fragments/1623-zos_copy-avoid-opercmd.yml b/changelogs/fragments/1623-zos_copy-avoid-opercmd.yml new file mode 100644 index 000000000..4b7546fcb --- /dev/null +++ b/changelogs/fragments/1623-zos_copy-avoid-opercmd.yml @@ -0,0 +1,5 @@ +bugfixes: + - zos_copy - module would use opercmd to check if a non existent + destination data set is locked. Fix now only checks if the destination + is already present. + (https://github.com/ansible-collections/ibm_zos_core/pull/1623). \ No newline at end of file diff --git a/plugins/modules/zos_copy.py b/plugins/modules/zos_copy.py index 46f399360..6da0232a2 100644 --- a/plugins/modules/zos_copy.py +++ b/plugins/modules/zos_copy.py @@ -3496,7 +3496,7 @@ def run_module(module, arg_def): # the machine and not generate a false positive check the disposition # for try to write in dest and if both src and dest are in lock. # ******************************************************************** - if dest_ds_type != "USS": + if dest_exists and dest_ds_type != "USS": if not force_lock: is_dest_lock = data_set_locked(dest_name) if is_dest_lock: From 7b6becbe025b91ac68d0feaf451a8861628dc0b7 Mon Sep 17 00:00:00 2001 From: IsaacVRey <isaac.vega.rey@gmail.com> Date: Fri, 26 Jul 2024 00:09:33 -0600 Subject: [PATCH 440/495] [Enabler][test_zos_find_func] Update test suites on functional/modules/test_zos_find_func.py to be pylint correct (#1459) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Update test suites on functional/module-utils/test_zos_data_set_func.py to be pylint correct * Correct functional/modules/test_zos_data_set_func.py * Correct functional/modules/test_zos_data_set_func.py * Update test suites on functional/modules/test_zos_find_func.py to be pylint correct * Fix typo --------- Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> Co-authored-by: André Marcel Gutiérrez Benítez <amgutierrezbenitez@hotmail.com> Co-authored-by: Demetri <dimatos@gmail.com> --- .../functional/modules/test_zos_find_func.py | 160 ++++++++++++++---- 1 file changed, 124 insertions(+), 36 deletions(-) diff --git a/tests/functional/modules/test_zos_find_func.py b/tests/functional/modules/test_zos_find_func.py index 3263a63fb..5f0227878 100644 --- a/tests/functional/modules/test_zos_find_func.py +++ b/tests/functional/modules/test_zos_find_func.py @@ -44,19 +44,16 @@ def create_vsam_ksds(ds_name, ansible_zos_module, volume="000000"): hosts = ansible_zos_module - alloc_cmd = """ DEFINE CLUSTER (NAME({0}) - + alloc_cmd = f""" DEFINE CLUSTER (NAME({ds_name}) - INDEXED - RECSZ(80,80) - TRACKS(1,1) - KEYS(5,0) - CISZ(4096) - - VOLUMES({1}) - + VOLUMES({volume}) - FREESPACE(3,3) ) - - DATA (NAME({0}.DATA)) - - INDEX (NAME({0}.INDEX))""".format( - ds_name, volume - ) - + DATA (NAME({ds_name}.DATA)) - + INDEX (NAME({ds_name}.INDEX))""" return hosts.all.shell( cmd="mvscmdauth --pgm=idcams --sysprint=* --sysin=stdin", executable='/bin/sh', @@ -132,7 +129,13 @@ def test_find_sequential_data_sets_containing_single_string(ansible_zos_module): search_string = "hello" try: hosts.all.zos_data_set( - batch=[dict(name=i, type='seq', state='present') for i in SEQ_NAMES] + batch=[ + { + "name":i, + "type":'seq', + "state":'present' + } for i in SEQ_NAMES + ] ) for ds in SEQ_NAMES: hosts.all.shell(cmd=f"decho '{search_string}' \"{ds}\" ") @@ -149,7 +152,12 @@ def test_find_sequential_data_sets_containing_single_string(ansible_zos_module): assert val.get('matched') == len(val.get('data_sets')) finally: hosts.all.zos_data_set( - batch=[dict(name=i, state='absent') for i in SEQ_NAMES] + batch=[ + { + "name":i, + "state":'absent' + } for i in SEQ_NAMES + ] ) @@ -159,7 +167,13 @@ def test_find_sequential_data_sets_multiple_patterns(ansible_zos_module): new_ds = "TEST.FIND.SEQ.FUNCTEST.FOURTH" try: hosts.all.zos_data_set( - batch=[dict(name=i, type='seq', state='present') for i in SEQ_NAMES] + batch=[ + { + "name":i, + "type":'seq', + "state":'present' + } for i in SEQ_NAMES + ] ) hosts.all.zos_data_set(name=new_ds, type='seq', state='present') hosts.all.shell(cmd=f"decho 'incorrect string' \"{new_ds}\" ") @@ -177,7 +191,12 @@ def test_find_sequential_data_sets_multiple_patterns(ansible_zos_module): assert val.get('matched') == len(val.get('data_sets')) finally: hosts.all.zos_data_set( - batch=[dict(name=i, state='absent') for i in SEQ_NAMES] + batch=[ + { + "name":i, + "state":'absent' + } for i in SEQ_NAMES + ] ) hosts.all.zos_data_set( name=new_ds, state='absent' @@ -189,16 +208,23 @@ def test_find_pds_members_containing_string(ansible_zos_module): search_string = "hello" try: hosts.all.zos_data_set( - batch=[dict(name=i, type='pds', space_primary=1, space_type="m") for i in PDS_NAMES] + batch=[ + { + "name":i, + "type":'pds', + "space_primary":1, + "space_type":"m", + } for i in PDS_NAMES + ] ) hosts.all.zos_data_set( batch=[ - dict( - name=i + "(MEMBER)", - type="member", - state='present', - replace='yes' - ) for i in PDS_NAMES + { + "name":i + "(MEMBER)", + "type":"member", + "state":'present', + "replace":'yes', + } for i in PDS_NAMES ] ) for ds in PDS_NAMES: @@ -216,7 +242,12 @@ def test_find_pds_members_containing_string(ansible_zos_module): assert len(ds.get('members')) == 1 finally: hosts.all.zos_data_set( - batch=[dict(name=i, state='absent') for i in PDS_NAMES] + batch=[ + { + "name":i, + "state":'absent' + } for i in PDS_NAMES + ] ) @@ -225,12 +256,12 @@ def test_exclude_data_sets_from_matched_list(ansible_zos_module): try: hosts.all.zos_data_set( batch=[ - dict( - name=i, - type='seq', - record_length=80, - state='present' - ) for i in SEQ_NAMES + { + "name":i, + "type":'seq', + "record_length":80, + "state":'present' + } for i in SEQ_NAMES ] ) find_res = hosts.all.zos_find( @@ -243,7 +274,12 @@ def test_exclude_data_sets_from_matched_list(ansible_zos_module): assert ds.get('name') in SEQ_NAMES finally: hosts.all.zos_data_set( - batch=[dict(name=i, state='absent') for i in SEQ_NAMES] + batch=[ + { + "name":i, + "state":'absent' + } for i in SEQ_NAMES + ] ) @@ -251,13 +287,29 @@ def test_exclude_members_from_matched_list(ansible_zos_module): hosts = ansible_zos_module try: hosts.all.zos_data_set( - batch=[dict(name=i, type='pds', state='present') for i in PDS_NAMES] + batch=[ + { + "name":i, + "type":'pds', + "state":'present' + } for i in PDS_NAMES + ] ) hosts.all.zos_data_set( - batch=[dict(name=i + "(MEMBER)", type="member") for i in PDS_NAMES] + batch=[ + { + "name":i + "(MEMBER)", + "type":"member" + } for i in PDS_NAMES + ] ) hosts.all.zos_data_set( - batch=[dict(name=i + "(FILE)", type="member") for i in PDS_NAMES] + batch=[ + { + "name":i + "(FILE)", + "type":"member" + } for i in PDS_NAMES + ] ) find_res = hosts.all.zos_find( pds_paths=['TEST.FIND.PDS.FUNCTEST.*'], excludes=['.*FILE$'], patterns=['.*'] @@ -268,7 +320,12 @@ def test_exclude_members_from_matched_list(ansible_zos_module): assert len(ds.get('members')) == 1 finally: hosts.all.zos_data_set( - batch=[dict(name=i, state='absent') for i in PDS_NAMES] + batch=[ + { + "name":i, + "state":'absent' + } for i in PDS_NAMES + ] ) @@ -344,7 +401,12 @@ def test_find_vsam_pattern(ansible_zos_module): assert val.get('matched') == len(val.get('data_sets')) finally: hosts.all.zos_data_set( - batch=[dict(name=i, state='absent') for i in VSAM_NAMES] + batch=[ + { + "name":i, + "state":'absent' + } for i in VSAM_NAMES + ] ) @@ -366,7 +428,12 @@ def test_find_vsam_in_volume(ansible_zos_module, volumes_on_systems): assert val.get('matched') == len(val.get('data_sets')) finally: hosts.all.zos_data_set( - batch=[dict(name=i, state='absent') for i in VSAM_NAMES] + batch=[ + { + "name":i, + "state":'absent' + } for i in VSAM_NAMES + ] ) hosts.all.zos_data_set(name=alternate_vsam, state='absent') @@ -405,13 +472,29 @@ def test_find_mixed_members_from_pds_paths(ansible_zos_module): hosts = ansible_zos_module try: hosts.all.zos_data_set( - batch=[dict(name=i, type='pds', state='present') for i in PDS_NAMES] + batch=[ + { + "name":i, + "type":'pds', + "state":'present' + } for i in PDS_NAMES + ] ) hosts.all.zos_data_set( - batch=[dict(name=i + "(MEMBER)", type="member") for i in PDS_NAMES] + batch=[ + { + "name":i + "(MEMBER)", + "type":"member" + } for i in PDS_NAMES + ] ) hosts.all.zos_data_set( - batch=[dict(name=i + "(FILE)", type="member") for i in PDS_NAMES] + batch=[ + { + "name":i + "(FILE)", + "type":"member" + } for i in PDS_NAMES + ] ) find_res = hosts.all.zos_find( pds_paths=['TEST.NONE.PDS.*','TEST.FIND.PDS.FUNCTEST.*'], excludes=['.*FILE$'], patterns=['.*'] @@ -422,7 +505,12 @@ def test_find_mixed_members_from_pds_paths(ansible_zos_module): assert len(ds.get('members')) == 1 finally: hosts.all.zos_data_set( - batch=[dict(name=i, state='absent') for i in PDS_NAMES] + batch=[ + { + "name":i, + "state":'absent' + } for i in PDS_NAMES + ] ) From 9fc0d97368a1fa0d91403335d2843adec2c2be9c Mon Sep 17 00:00:00 2001 From: Demetri <dimatos@gmail.com> Date: Fri, 26 Jul 2024 13:27:24 -0700 Subject: [PATCH 441/495] Update code to share exception types UnicodeDecodeError, JSONDecodeError, TypeError, KeyError (#1560) * update code to share exception type Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> * Fix pyflakes unused variable Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> --- .../fragments/1512-bugfix-zos_job_submit-error-type.yml | 5 +++++ plugins/module_utils/job.py | 7 ++++--- 2 files changed, 9 insertions(+), 3 deletions(-) create mode 100644 changelogs/fragments/1512-bugfix-zos_job_submit-error-type.yml diff --git a/changelogs/fragments/1512-bugfix-zos_job_submit-error-type.yml b/changelogs/fragments/1512-bugfix-zos_job_submit-error-type.yml new file mode 100644 index 000000000..5078064ec --- /dev/null +++ b/changelogs/fragments/1512-bugfix-zos_job_submit-error-type.yml @@ -0,0 +1,5 @@ +bugfixes: + - zos_job_submit - Was not propagating any error types UnicodeDecodeError, + JSONDecodeError, TypeError, KeyError when encountered, now the error + message shares the type error. + (https://github.com/ansible-collections/ibm_zos_core/pull/1560). \ No newline at end of file diff --git a/plugins/module_utils/job.py b/plugins/module_utils/job.py index 8d9ac3a5c..ab2c98a62 100644 --- a/plugins/module_utils/job.py +++ b/plugins/module_utils/job.py @@ -463,10 +463,11 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=T single_dd["step_name"], single_dd["dd_name"] ) - except (UnicodeDecodeError, JSONDecodeError, TypeError, KeyError): + except (UnicodeDecodeError, JSONDecodeError, TypeError, KeyError) as e: + error_type = e.__class__.__name__ tmpcont = ( - "Non-printable UTF-8 characters were present in this output. " - "Please access it from the job log." + f"Non-printable UTF-8 characters were present in this output, a {error_type} error has occurred." + "Please access the content from the job log." ) dd["content"] = tmpcont.split("\n") From b1179b301c65c5f97733ac151cd6fc0e89403a68 Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Wed, 31 Jul 2024 17:27:26 -0400 Subject: [PATCH 442/495] [enabler] [1378] [zos_mount] special charactersupport (#1631) * initial commit to get pr number * updated pr number, added innersource references to zos_mount * added dataset class to enable escape function * added a series of symbols to test-aggregate file creation This should test all scenarios for all 4 characters. * added escape call on generated data set name * trying to return base/non-escaped string to pass into mount. working around a better-arg-parser error. * added pretty print to test output to check on remount pre-test * corrected pretty print call * expanding display of results to first mount and unmount. * it looks like the test assumes absent=unmounted, which is not always true I'm switching prior testst absent->unmounted, to see if that moves the issue. * found issue where 'absent' did not delete the aggregate set tests back to use absent, and left print/pp in * changing test/create source file * removing sybols in test to see if the mount still has issues. * expanding output of pre-process files * removed escape function from main zos_mount in one or more of the calls, it is interfering with tso command generation * adding delete into end of first test. * changed dataset.delete to shell drm in first test * corrected ALL tests to use shell/drm to make sure system stays cleaned up * added symbols in test * the shell delete required the dsname to be escaped, while mount did not All mount tests now escape dsname for removal * cleanup of test output messages cleanup of escape calls in zos_mount * removed unused import in mount. Updated text in the changelog. * removed unused 'innersource' variables from mount routine, since they were no longer needed. * removed self-reference of src=src, cause when removing innersrc * renamed fragment to contain PR# --- ...er-zos_mount-special-character-support.yml | 4 ++ plugins/modules/zos_mount.py | 3 +- .../functional/modules/test_zos_mount_func.py | 61 ++++++++++++++++--- 3 files changed, 57 insertions(+), 11 deletions(-) create mode 100644 changelogs/fragments/1631-enabler-zos_mount-special-character-support.yml diff --git a/changelogs/fragments/1631-enabler-zos_mount-special-character-support.yml b/changelogs/fragments/1631-enabler-zos_mount-special-character-support.yml new file mode 100644 index 000000000..7880390a1 --- /dev/null +++ b/changelogs/fragments/1631-enabler-zos_mount-special-character-support.yml @@ -0,0 +1,4 @@ +minor_changes: + - zos_mount - Added support for data set names with special characters ($, /#, /- and @). + This is for both src and backup data set names. + (https://github.com/ansible-collections/ibm_zos_core/pull/1631). \ No newline at end of file diff --git a/plugins/modules/zos_mount.py b/plugins/modules/zos_mount.py index 7b4b04654..cabf94b0f 100644 --- a/plugins/modules/zos_mount.py +++ b/plugins/modules/zos_mount.py @@ -546,9 +546,7 @@ better_arg_parser, data_set, backup as Backup, - ) - from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.copy import ( copy_ps2uss, copy_uss2mvs, @@ -740,6 +738,7 @@ def run_module(module, arg_def): res_args = dict() src = parsed_args.get("src") + path = parsed_args.get("path") fs_type = parsed_args.get("fs_type").upper() state = parsed_args.get("state") diff --git a/tests/functional/modules/test_zos_mount_func.py b/tests/functional/modules/test_zos_mount_func.py index 1e593c3ff..7adb888a8 100644 --- a/tests/functional/modules/test_zos_mount_func.py +++ b/tests/functional/modules/test_zos_mount_func.py @@ -11,7 +11,9 @@ from ibm_zos_core.tests.helpers.volumes import Volume_Handler from ibm_zos_core.tests.helpers.dataset import get_tmp_ds_name - +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.data_set import ( + DataSet, +) INITIAL_PRM_MEMBER = """/* Initial file to look like BPXPRM */ /* some settings at the top */ @@ -29,7 +31,6 @@ AUTOMOVE """ -# SHELL_EXECUTABLE = "/usr/lpp/rsusr/ported/bin/bash" SHELL_EXECUTABLE = "/bin/sh" @@ -57,29 +58,34 @@ def populate_tmpfile(): def create_sourcefile(hosts, volume): + # returns un-escaped source file name, but uses escaped file name for shell commands + # this is intentionally done to test escaping of data set names starter = get_sysname(hosts).split(".")[0].upper() if len(starter) < 2: starter = "IMSTESTU" - thisfile = starter + ".TTT.MNT.ZFS" + basefile = starter + ".A@$#TO.MNT.ZFS" + thisfile = DataSet.escape_data_set_name(basefile) print( - "csf: starter={0} thisfile={1} is type {2}".format( + "\ncsf: starter={0} thisfile={1} is type {2}".format( starter, thisfile, str(type(thisfile)) ) ) - hosts.all.shell( + mount_result = hosts.all.shell( cmd="zfsadm define -aggregate " + thisfile + " -volumes {0} -cylinders 200 1".format(volume), executable=SHELL_EXECUTABLE, stdin="", ) - hosts.all.shell( + + mount_result = hosts.all.shell( cmd="zfsadm format -aggregate " + thisfile, executable=SHELL_EXECUTABLE, stdin="", ) - return thisfile + + return basefile def test_basic_mount(ansible_zos_module, volumes_on_systems): @@ -102,8 +108,13 @@ def test_basic_mount(ansible_zos_module, volumes_on_systems): fs_type="zfs", state="absent", ) - hosts.all.file(path="/pythonx/", state="absent") + hosts.all.shell( + cmd="drm " + DataSet.escape_data_set_name(srcfn), + executable=SHELL_EXECUTABLE, + stdin="", + ) + hosts.all.file(path="/pythonx/", state="absent") def test_double_mount(ansible_zos_module, volumes_on_systems): @@ -128,6 +139,12 @@ def test_double_mount(ansible_zos_module, volumes_on_systems): fs_type="zfs", state="absent", ) + hosts.all.shell( + cmd="drm " + DataSet.escape_data_set_name(srcfn), + executable=SHELL_EXECUTABLE, + stdin="", + ) + hosts.all.file(path="/pythonx/", state="absent") @@ -137,7 +154,10 @@ def test_remount(ansible_zos_module, volumes_on_systems): volume_1 = volumes.get_available_vol() srcfn = create_sourcefile(hosts, volume_1) try: + mount_results = hosts.all.zos_mount(src=srcfn, path="/pythonx", fs_type="zfs", state="mounted") + hosts.all.zos_mount(src=srcfn, path="/pythonx", fs_type="zfs", state="mounted") + mount_result = hosts.all.zos_mount( src=srcfn, path="/pythonx", fs_type="zfs", state="remounted" ) @@ -145,12 +165,18 @@ def test_remount(ansible_zos_module, volumes_on_systems): assert result.get("rc") == 0 assert result.get("changed") is True finally: - hosts.all.zos_mount( + mount_result = hosts.all.zos_mount( src=srcfn, path="/pythonx", fs_type="zfs", state="absent", ) + hosts.all.shell( + cmd="drm " + DataSet.escape_data_set_name(srcfn), + executable=SHELL_EXECUTABLE, + stdin="", + ) + hosts.all.file(path="/pythonx/", state="absent") @@ -212,6 +238,11 @@ def test_basic_mount_with_bpx_nocomment_nobackup(ansible_zos_module, volumes_on_ fs_type="zfs", state="absent", ) + hosts.all.shell( + cmd="drm " + DataSet.escape_data_set_name(srcfn), + executable=SHELL_EXECUTABLE, + stdin="", + ) hosts.all.file(path=tmp_file_filename, state="absent") hosts.all.file(path="/pythonx/", state="absent") hosts.all.zos_data_set( @@ -329,6 +360,12 @@ def test_basic_mount_with_bpx_comment_backup(ansible_zos_module, volumes_on_syst fs_type="zfs", state="absent", ) + hosts.all.shell( + cmd="drm " + DataSet.escape_data_set_name(srcfn), + executable=SHELL_EXECUTABLE, + stdin="", + ) + hosts.all.file(path=tmp_file_filename, state="absent") hosts.all.file(path=test_tmp_file_filename, state="absent") hosts.all.file(path="/pythonx/", state="absent") @@ -367,6 +404,12 @@ def test_basic_mount_with_tmp_hlq_option(ansible_zos_module, volumes_on_systems) tmp_hlq=tmphlq, persistent=dict(data_store=persist_data_set, backup=True) ) + hosts.all.shell( + cmd="drm " + DataSet.escape_data_set_name(srcfn), + executable=SHELL_EXECUTABLE, + stdin="", + ) + hosts.all.zos_data_set(name=persist_data_set, state="absent") for result in unmount_result.values(): assert result.get("rc") == 0 From 10ac10cd7d08ec6cbcde8380ff501f1274ed6b52 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Tue, 6 Aug 2024 14:36:22 -0600 Subject: [PATCH 443/495] [Enabler][1588]zos_tso_command_portability (#1639) * Fix problem * Add fragment --- .../fragments/1639-zos_tso_command_portability.yml | 3 +++ .../functional/modules/test_zos_tso_command_func.py | 12 +++++++++--- 2 files changed, 12 insertions(+), 3 deletions(-) create mode 100644 changelogs/fragments/1639-zos_tso_command_portability.yml diff --git a/changelogs/fragments/1639-zos_tso_command_portability.yml b/changelogs/fragments/1639-zos_tso_command_portability.yml new file mode 100644 index 000000000..8dbb40376 --- /dev/null +++ b/changelogs/fragments/1639-zos_tso_command_portability.yml @@ -0,0 +1,3 @@ +trivial: + - test_zos_tso_command_func.py - Remove the use of hard coded user. + (https://github.com/ansible-collections/ibm_zos_core/pull/1639). \ No newline at end of file diff --git a/tests/functional/modules/test_zos_tso_command_func.py b/tests/functional/modules/test_zos_tso_command_func.py index 52e12032b..cacfd99c1 100644 --- a/tests/functional/modules/test_zos_tso_command_func.py +++ b/tests/functional/modules/test_zos_tso_command_func.py @@ -30,10 +30,13 @@ def test_zos_tso_command_run_help(ansible_zos_module): # Run a long tso command to allocate a dataset. def test_zos_tso_command_long_command_128_chars(ansible_zos_module): hosts = ansible_zos_module + results = hosts.all.shell(cmd="echo $USER") + for result in results.contacted.values(): + user = result.get("stdout") command_string = [ ( "send 'Hello, this is a test message from zos_tso_command module. " - "Im sending a command exceed 80 chars. Thank you.' user(omvsadm)" + "Im sending a command exceed 80 chars. Thank you.' user({0})".format(user) ) ] results = hosts.all.zos_tso_command(commands=command_string) @@ -118,11 +121,14 @@ def test_zos_tso_command_invalid_command(ansible_zos_module): # The multiple commands def test_zos_tso_command_multiple_commands(ansible_zos_module): hosts = ansible_zos_module - commands_list = ["LU omvsadm", "LISTGRP"] + results = hosts.all.shell(cmd="echo $USER") + for result in results.contacted.values(): + user = result.get("stdout") + commands_list = ["LU {0}".format(user), "LISTGRP"] results = hosts.all.zos_tso_command(commands=commands_list) for result in results.contacted.values(): for item in result.get("output"): - if item.get("command") == "LU omvsadm": + if item.get("command") == "LU {0}".format(user): assert item.get("rc") == 0 if item.get("command") == "LISTGRP": assert item.get("rc") == 0 From e2828a030a7d25b36542bf1d2dde12561036b2c9 Mon Sep 17 00:00:00 2001 From: Ivan Moreno <iamorenosoto@gmail.com> Date: Wed, 7 Aug 2024 20:06:29 -0600 Subject: [PATCH 444/495] [Enabler] [zos_mvs_raw] Changes to ensure tests portability (#1633) * Add IDCAMS dataset creation * Update most tests * Fix list and concat tests * Shorten IBM-1047 expected string in test * Add changelog fragment * Remove hardcoded var and renamed its replacement for clarity * Fix pylint issues --- .../1633-zos_mvs_raw_tests_portability.yml | 4 + .../modules/test_zos_mvs_raw_func.py | 627 ++++++++++++------ 2 files changed, 427 insertions(+), 204 deletions(-) create mode 100644 changelogs/fragments/1633-zos_mvs_raw_tests_portability.yml diff --git a/changelogs/fragments/1633-zos_mvs_raw_tests_portability.yml b/changelogs/fragments/1633-zos_mvs_raw_tests_portability.yml new file mode 100644 index 000000000..b6ecfca69 --- /dev/null +++ b/changelogs/fragments/1633-zos_mvs_raw_tests_portability.yml @@ -0,0 +1,4 @@ +trivial: + - test_zos_mvs_raw_func.py - Remove the use of hard coded dataset + names. + (https://github.com/ansible-collections/ibm_zos_core/pull/1633). \ No newline at end of file diff --git a/tests/functional/modules/test_zos_mvs_raw_func.py b/tests/functional/modules/test_zos_mvs_raw_func.py index a7a5667f5..00dd56e31 100644 --- a/tests/functional/modules/test_zos_mvs_raw_func.py +++ b/tests/functional/modules/test_zos_mvs_raw_func.py @@ -27,10 +27,19 @@ DEFAULT_DD = "MYDD" SYSIN_DD = "SYSIN" SYSPRINT_DD = "SYSPRINT" -IDCAMS_STDIN = f" LISTCAT ENTRIES('{EXISTING_DATA_SET.upper()}')" IDCAMS_INVALID_STDIN = " hello world #$!@%!#$!@``~~^$*%" +def get_temp_idcams_dataset(hosts): + """Returns IDCAMS args that use a newly created PDS. + """ + dataset_name = get_tmp_ds_name() + + hosts.all.shell(f"""dtouch -tPDS -l80 -rFB '{dataset_name}' """) + hosts.all.shell(f"""decho 'A record' '{dataset_name}(MEMBER)' """) + + return dataset_name, f" LISTCAT ENTRIES('{dataset_name.upper()}')" + # ---------------------------------------------------------------------------- # # Data set DD tests # # ---------------------------------------------------------------------------- # @@ -52,9 +61,12 @@ def test_failing_name_format(ansible_zos_module): def test_disposition_new(ansible_zos_module): + idcams_dataset = None try: hosts = ansible_zos_module default_data_set = get_tmp_ds_name() + idcams_dataset, idcams_listcat_dataset_cmd = get_temp_idcams_dataset(hosts) + hosts.all.zos_data_set(name=default_data_set, state="absent") results = hosts.all.zos_mvs_raw( program_name="idcams", @@ -74,7 +86,7 @@ def test_disposition_new(ansible_zos_module): { "dd_input":{ "dd_name":SYSIN_DD, - "content":IDCAMS_STDIN + "content":idcams_listcat_dataset_cmd } }, ], @@ -83,7 +95,9 @@ def test_disposition_new(ansible_zos_module): assert result.get("ret_code", {}).get("code", -1) == 0 assert len(result.get("dd_names", [])) > 0 finally: - results = hosts.all.zos_data_set(name=default_data_set, state="absent") + hosts.all.zos_data_set(name=default_data_set, state="absent") + if idcams_dataset: + hosts.all.zos_data_set(name=idcams_dataset, state="absent") @pytest.mark.parametrize( @@ -91,12 +105,15 @@ def test_disposition_new(ansible_zos_module): ["shr", "mod", "old"], ) def test_dispositions_for_existing_data_set(ansible_zos_module, disposition): + idcams_dataset = None try: hosts = ansible_zos_module default_data_set = get_tmp_ds_name() hosts.all.zos_data_set( name=default_data_set, type="seq", state="present", replace=True ) + idcams_dataset, idcams_listcat_dataset_cmd = get_temp_idcams_dataset(hosts) + results = hosts.all.zos_mvs_raw( program_name="idcams", auth=True, @@ -114,7 +131,7 @@ def test_dispositions_for_existing_data_set(ansible_zos_module, disposition): { "dd_input":{ "dd_name":SYSIN_DD, - "content":IDCAMS_STDIN + "content":idcams_listcat_dataset_cmd } }, ], @@ -123,66 +140,78 @@ def test_dispositions_for_existing_data_set(ansible_zos_module, disposition): assert result.get("ret_code", {}).get("code", -1) == 0 assert len(result.get("dd_names", [])) > 0 finally: - results = hosts.all.zos_data_set(name=default_data_set, state="absent") + hosts.all.zos_data_set(name=default_data_set, state="absent") + if idcams_dataset: + hosts.all.zos_data_set(name=idcams_dataset, state="absent") def test_list_cat_for_existing_data_set_with_tmp_hlq_option(ansible_zos_module, volumes_on_systems): - hosts = ansible_zos_module - tmphlq = "TMPHLQ" - volumes = Volume_Handler(volumes_on_systems) - default_volume = volumes.get_available_vol() - default_data_set = get_tmp_ds_name()[:25] - hosts.all.zos_data_set( - name=default_data_set, type="seq", state="present", replace=True - ) - results = hosts.all.zos_mvs_raw( - program_name="idcams", - auth=True, - tmp_hlq=tmphlq, - dds=[ - { - "dd_data_set":{ - "dd_name":SYSPRINT_DD, - "data_set_name":default_data_set, - "disposition":"new", - "return_content":{ - "type":"text" + idcams_dataset = None + try: + hosts = ansible_zos_module + tmphlq = "TMPHLQ" + volumes = Volume_Handler(volumes_on_systems) + default_volume = volumes.get_available_vol() + default_data_set = get_tmp_ds_name()[:25] + hosts.all.zos_data_set( + name=default_data_set, type="seq", state="present", replace=True + ) + idcams_dataset, idcams_listcat_dataset_cmd = get_temp_idcams_dataset(hosts) + + results = hosts.all.zos_mvs_raw( + program_name="idcams", + auth=True, + tmp_hlq=tmphlq, + dds=[ + { + "dd_data_set":{ + "dd_name":SYSPRINT_DD, + "data_set_name":default_data_set, + "disposition":"new", + "return_content":{ + "type":"text" + }, + "replace":True, + "backup":True, + "type":"seq", + "space_primary":5, + "space_secondary":1, + "space_type":"m", + "volumes":default_volume, + "record_format":"fb" }, - "replace":True, - "backup":True, - "type":"seq", - "space_primary":5, - "space_secondary":1, - "space_type":"m", - "volumes":default_volume, - "record_format":"fb" - }, - }, - { - "dd_input":{ - "dd_name":SYSIN_DD, - "content":IDCAMS_STDIN - } - }, - ], - ) - for result in results.contacted.values(): - assert result.get("ret_code", {}).get("code", -1) == 0 - assert len(result.get("dd_names", [])) > 0 - for backup in result.get("backups"): - backup.get("backup_name")[:6] == tmphlq - results = hosts.all.zos_data_set(name=default_data_set, state="absent") - for result in results.contacted.values(): - assert result.get("changed", False) is True + }, + { + "dd_input":{ + "dd_name":SYSIN_DD, + "content":idcams_listcat_dataset_cmd + } + }, + ], + ) + for result in results.contacted.values(): + assert result.get("ret_code", {}).get("code", -1) == 0 + assert len(result.get("dd_names", [])) > 0 + for backup in result.get("backups"): + backup.get("backup_name")[:6] == tmphlq + for result in results.contacted.values(): + assert result.get("changed", False) is True + finally: + results = hosts.all.zos_data_set(name=default_data_set, state="absent") + if idcams_dataset: + hosts.all.zos_data_set(name=idcams_dataset, state="absent") # * new data set and append to member in one step not currently supported def test_new_disposition_for_data_set_members(ansible_zos_module): + idcams_dataset = None try: hosts = ansible_zos_module default_data_set = get_tmp_ds_name() default_data_set_with_member = default_data_set + '(MEM)' hosts.all.zos_data_set(name=default_data_set, state="absent") + idcams_dataset, idcams_listcat_dataset_cmd = get_temp_idcams_dataset(hosts) + results = hosts.all.zos_mvs_raw( program_name="idcams", auth=True, @@ -202,7 +231,7 @@ def test_new_disposition_for_data_set_members(ansible_zos_module): { "dd_input":{ "dd_name":SYSIN_DD, - "content":IDCAMS_STDIN + "content":idcams_listcat_dataset_cmd } }, ], @@ -211,6 +240,8 @@ def test_new_disposition_for_data_set_members(ansible_zos_module): assert result.get("ret_code", {}).get("code", -1) == 8 finally: hosts.all.zos_data_set(name=default_data_set, state="absent") + if idcams_dataset: + hosts.all.zos_data_set(name=idcams_dataset, state="absent") @pytest.mark.parametrize( @@ -218,6 +249,7 @@ def test_new_disposition_for_data_set_members(ansible_zos_module): ["shr", "mod", "old"], ) def test_dispositions_for_existing_data_set_members(ansible_zos_module, disposition): + idcams_dataset = None try: hosts = ansible_zos_module default_data_set = get_tmp_ds_name() @@ -225,6 +257,8 @@ def test_dispositions_for_existing_data_set_members(ansible_zos_module, disposit hosts.all.zos_data_set( name=default_data_set, type="pds", state="present", replace=True ) + idcams_dataset, idcams_listcat_dataset_cmd = get_temp_idcams_dataset(hosts) + results = hosts.all.zos_mvs_raw( program_name="idcams", auth=True, @@ -242,7 +276,7 @@ def test_dispositions_for_existing_data_set_members(ansible_zos_module, disposit { "dd_input":{ "dd_name":SYSIN_DD, - "content":IDCAMS_STDIN + "content":idcams_listcat_dataset_cmd } }, ], @@ -252,6 +286,8 @@ def test_dispositions_for_existing_data_set_members(ansible_zos_module, disposit assert len(result.get("dd_names", [])) > 0 finally: hosts.all.zos_data_set(name=default_data_set, state="absent") + if idcams_dataset: + hosts.all.zos_data_set(name=idcams_dataset, state="absent") @pytest.mark.parametrize( @@ -264,11 +300,14 @@ def test_normal_dispositions_data_set( changed, volumes_on_systems ): + idcams_dataset = None try: hosts = ansible_zos_module volumes = Volume_Handler(volumes_on_systems) volume_1 = volumes.get_available_vol() default_data_set = get_tmp_ds_name() + idcams_dataset, idcams_listcat_dataset_cmd = get_temp_idcams_dataset(hosts) + results = hosts.all.zos_data_set( name=default_data_set, type="seq", @@ -295,7 +334,7 @@ def test_normal_dispositions_data_set( { "dd_input":{ "dd_name":SYSIN_DD, - "content":IDCAMS_STDIN + "content":idcams_listcat_dataset_cmd } }, ], @@ -304,7 +343,9 @@ def test_normal_dispositions_data_set( assert result.get("ret_code", {}).get("code", -1) == 0 assert len(result.get("dd_names", [])) > 0 finally: - results = hosts.all.zos_data_set(name=default_data_set, state="absent") + hosts.all.zos_data_set(name=default_data_set, state="absent") + if idcams_dataset: + hosts.all.zos_data_set(name=idcams_dataset, state="absent") @pytest.mark.parametrize( @@ -318,10 +359,13 @@ def test_normal_dispositions_data_set( ], ) def test_space_types(ansible_zos_module, space_type, primary, secondary, expected): + idcams_dataset = None try: hosts = ansible_zos_module default_data_set = get_tmp_ds_name() hosts.all.zos_data_set(name=default_data_set, state="absent") + idcams_dataset, idcams_listcat_dataset_cmd = get_temp_idcams_dataset(hosts) + results = hosts.all.zos_mvs_raw( program_name="idcams", auth=True, @@ -343,7 +387,7 @@ def test_space_types(ansible_zos_module, space_type, primary, secondary, expecte { "dd_input":{ "dd_name":SYSIN_DD, - "content":IDCAMS_STDIN + "content":idcams_listcat_dataset_cmd } }, ], @@ -359,6 +403,8 @@ def test_space_types(ansible_zos_module, space_type, primary, secondary, expecte assert str(expected) in result.get("stdout", "") finally: hosts.all.zos_data_set(name=default_data_set, state="absent") + if idcams_dataset: + hosts.all.zos_data_set(name=idcams_dataset, state="absent") @pytest.mark.parametrize( @@ -366,12 +412,15 @@ def test_space_types(ansible_zos_module, space_type, primary, secondary, expecte ["pds", "pdse", "large", "basic", "seq"], ) def test_data_set_types_non_vsam(ansible_zos_module, data_set_type, volumes_on_systems): + idcams_dataset = None try: hosts = ansible_zos_module volumes = Volume_Handler(volumes_on_systems) volume_1 = volumes.get_available_vol() default_data_set = get_tmp_ds_name() hosts.all.zos_data_set(name=default_data_set, state="absent") + idcams_dataset, idcams_listcat_dataset_cmd = get_temp_idcams_dataset(hosts) + results = hosts.all.zos_mvs_raw( program_name="idcams", auth=True, @@ -388,7 +437,7 @@ def test_data_set_types_non_vsam(ansible_zos_module, data_set_type, volumes_on_s { "dd_input":{ "dd_name":SYSIN_DD, - "content":IDCAMS_STDIN + "content":idcams_listcat_dataset_cmd } }, ], @@ -399,6 +448,8 @@ def test_data_set_types_non_vsam(ansible_zos_module, data_set_type, volumes_on_s assert "BGYSC1103E" not in result.get("stderr", "") finally: hosts.all.zos_data_set(name=default_data_set, state="absent") + if idcams_dataset: + hosts.all.zos_data_set(name=idcams_dataset, state="absent") @pytest.mark.parametrize( @@ -406,12 +457,15 @@ def test_data_set_types_non_vsam(ansible_zos_module, data_set_type, volumes_on_s ["ksds", "rrds", "lds", "esds"], ) def test_data_set_types_vsam(ansible_zos_module, data_set_type, volumes_on_systems): + idcams_dataset = None try: hosts = ansible_zos_module volumes = Volume_Handler(volumes_on_systems) volume_1 = volumes.get_available_vol() default_data_set = get_tmp_ds_name() hosts.all.zos_data_set(name=default_data_set, state="absent") + idcams_dataset, idcams_listcat_dataset_cmd = get_temp_idcams_dataset(hosts) + results = hosts.all.zos_mvs_raw( program_name="idcams", auth=True, @@ -441,7 +495,7 @@ def test_data_set_types_vsam(ansible_zos_module, data_set_type, volumes_on_syste { "dd_input":{ "dd_name":SYSIN_DD, - "content":IDCAMS_STDIN + "content":idcams_listcat_dataset_cmd } }, ], @@ -450,9 +504,11 @@ def test_data_set_types_vsam(ansible_zos_module, data_set_type, volumes_on_syste # * because that means data set exists and is VSAM so we can't read it results = hosts.all.command(cmd=f"head \"//'{default_data_set}'\"") for result in results.contacted.values(): - assert "EDC5041I" or "EDC5049I" in result.get("stderr", "") + assert "EDC5041I" in result.get("stderr", "") or "EDC5049I" in result.get("stderr", "") finally: hosts.all.zos_data_set(name=default_data_set, state="absent") + if idcams_dataset: + hosts.all.zos_data_set(name=idcams_dataset, state="absent") @pytest.mark.parametrize( @@ -460,12 +516,15 @@ def test_data_set_types_vsam(ansible_zos_module, data_set_type, volumes_on_syste ["u", "vb", "vba", "fb", "fba"], ) def test_record_formats(ansible_zos_module, record_format, volumes_on_systems): + idcams_dataset = None try: hosts = ansible_zos_module volumes = Volume_Handler(volumes_on_systems) volume_1 = volumes.get_available_vol() default_data_set = get_tmp_ds_name() hosts.all.zos_data_set(name=default_data_set, state="absent") + idcams_dataset, idcams_listcat_dataset_cmd = get_temp_idcams_dataset(hosts) + results = hosts.all.zos_mvs_raw( program_name="idcams", auth=True, @@ -482,7 +541,7 @@ def test_record_formats(ansible_zos_module, record_format, volumes_on_systems): { "dd_input":{ "dd_name":SYSIN_DD, - "content":IDCAMS_STDIN + "content":idcams_listcat_dataset_cmd } }, ], @@ -494,6 +553,8 @@ def test_record_formats(ansible_zos_module, record_format, volumes_on_systems): assert str(f" {record_format.upper()} ") in result.get("stdout", "") finally: hosts.all.zos_data_set(name=default_data_set, state="absent") + if idcams_dataset: + hosts.all.zos_data_set(name=idcams_dataset, state="absent") @pytest.mark.parametrize( @@ -507,11 +568,13 @@ def test_record_formats(ansible_zos_module, record_format, volumes_on_systems): ], ) def test_return_content_type(ansible_zos_module, return_content_type, expected, volumes_on_systems): + idcams_dataset = None try: hosts = ansible_zos_module volumes = Volume_Handler(volumes_on_systems) volume_1 = volumes.get_available_vol() default_data_set = get_tmp_ds_name() + idcams_dataset, idcams_listcat_dataset_cmd = get_temp_idcams_dataset(hosts) results = hosts.all.zos_data_set( name=default_data_set, type="seq", @@ -519,6 +582,7 @@ def test_return_content_type(ansible_zos_module, return_content_type, expected, replace=True, volumes=[volume_1], ) + results = hosts.all.zos_mvs_raw( program_name="idcams", auth=True, @@ -537,7 +601,7 @@ def test_return_content_type(ansible_zos_module, return_content_type, expected, { "dd_input":{ "dd_name":SYSIN_DD, - "content":IDCAMS_STDIN + "content":idcams_listcat_dataset_cmd } }, ], @@ -549,6 +613,8 @@ def test_return_content_type(ansible_zos_module, return_content_type, expected, assert expected in "\n".join(result.get("dd_names")[0].get("content", [])) finally: hosts.all.zos_data_set(name=default_data_set, state="absent", volumes=[volume_1]) + if idcams_dataset: + hosts.all.zos_data_set(name=idcams_dataset, state="absent") @pytest.mark.parametrize( @@ -565,11 +631,13 @@ def test_return_content_type(ansible_zos_module, return_content_type, expected, def test_return_text_content_encodings( ansible_zos_module, src_encoding, response_encoding, expected, volumes_on_systems ): + idcams_dataset = None try: volumes = Volume_Handler(volumes_on_systems) volume_1 = volumes.get_available_vol() hosts = ansible_zos_module default_data_set = get_tmp_ds_name() + idcams_dataset, idcams_listcat_dataset_cmd = get_temp_idcams_dataset(hosts) results = hosts.all.zos_data_set( name=default_data_set, type="seq", @@ -597,7 +665,7 @@ def test_return_text_content_encodings( { "dd_input":{ "dd_name":SYSIN_DD, - "content":IDCAMS_STDIN + "content":idcams_listcat_dataset_cmd } }, ], @@ -608,15 +676,20 @@ def test_return_text_content_encodings( assert expected in "\n".join(result.get("dd_names")[0].get("content", [])) finally: hosts.all.zos_data_set(name=default_data_set, state="absent", volumes=[volume_1]) + if idcams_dataset: + hosts.all.zos_data_set(name=idcams_dataset, state="absent") def test_reuse_existing_data_set(ansible_zos_module): + idcams_dataset = None try: hosts = ansible_zos_module default_data_set = get_tmp_ds_name() hosts.all.zos_data_set( name=default_data_set, type="seq", state="present", replace=True ) + idcams_dataset, idcams_listcat_dataset_cmd = get_temp_idcams_dataset(hosts) + results = hosts.all.zos_mvs_raw( program_name="IDCAMS", auth=True, @@ -636,7 +709,7 @@ def test_reuse_existing_data_set(ansible_zos_module): { "dd_input":{ "dd_name":SYSIN_DD, - "content":IDCAMS_STDIN + "content":idcams_listcat_dataset_cmd } }, ], @@ -647,15 +720,20 @@ def test_reuse_existing_data_set(ansible_zos_module): assert len(result.get("dd_names", [])) > 0 finally: hosts.all.zos_data_set(name=default_data_set, state="absent") + if idcams_dataset: + hosts.all.zos_data_set(name=idcams_dataset, state="absent") def test_replace_existing_data_set(ansible_zos_module): + idcams_dataset = None try: hosts = ansible_zos_module default_data_set = get_tmp_ds_name() hosts.all.zos_data_set( name=default_data_set, type="seq", state="present", replace=True ) + idcams_dataset, idcams_listcat_dataset_cmd = get_temp_idcams_dataset(hosts) + results = hosts.all.zos_mvs_raw( program_name="IDCAMS", auth=True, @@ -675,7 +753,7 @@ def test_replace_existing_data_set(ansible_zos_module): { "dd_input":{ "dd_name":SYSIN_DD, - "content":IDCAMS_STDIN + "content":idcams_listcat_dataset_cmd } }, ], @@ -686,13 +764,18 @@ def test_replace_existing_data_set(ansible_zos_module): assert len(result.get("dd_names", [])) > 0 finally: hosts.all.zos_data_set(name=default_data_set, state="absent") + if idcams_dataset: + hosts.all.zos_data_set(name=idcams_dataset, state="absent") def test_replace_existing_data_set_make_backup(ansible_zos_module): + idcams_dataset = None try: hosts = ansible_zos_module default_data_set = get_tmp_ds_name() hosts.all.zos_data_set(name=default_data_set, state="absent") + idcams_dataset, idcams_listcat_dataset_cmd = get_temp_idcams_dataset(hosts) + hosts.all.zos_mvs_raw( program_name="IDCAMS", auth=True, @@ -712,7 +795,7 @@ def test_replace_existing_data_set_make_backup(ansible_zos_module): { "dd_input":{ "dd_name":SYSIN_DD, - "content":IDCAMS_STDIN + "content":idcams_listcat_dataset_cmd } }, ], @@ -737,7 +820,7 @@ def test_replace_existing_data_set_make_backup(ansible_zos_module): { "dd_input":{ "dd_name":SYSIN_DD, - "content":IDCAMS_STDIN + "content":idcams_listcat_dataset_cmd } }, ], @@ -748,7 +831,7 @@ def test_replace_existing_data_set_make_backup(ansible_zos_module): assert len(result.get("backups", [])) > 0 assert result.get("backups")[0].get("backup_name") is not None results2 = hosts.all.command( - cmd="head \"//'{0}'\"".format(result.get("backups")[0].get("backup_name")) + cmd=f"head \"//'{result.get('backups')[0].get('backup_name')}'\""#.format() ) hosts.all.zos_data_set( name=result.get("backups")[0].get("backup_name"), state="absent" @@ -761,15 +844,20 @@ def test_replace_existing_data_set_make_backup(ansible_zos_module): assert "IDCAMS" in result.get("stdout", "") finally: hosts.all.zos_data_set(name=default_data_set, state="absent") + if idcams_dataset: + hosts.all.zos_data_set(name=idcams_dataset, state="absent") def test_data_set_name_gdgs(ansible_zos_module): + idcams_dataset = None try: hosts = ansible_zos_module default_data_set = get_tmp_ds_name(3, 3) hosts.all.shell(cmd="dtouch -tGDG -L4 {0}".format(default_data_set)) hosts.all.shell(cmd="""dtouch -tseq "{0}(+1)" """.format(default_data_set)) hosts.all.shell(cmd="""dtouch -tseq "{0}(+1)" """.format(default_data_set)) + idcams_dataset, idcams_listcat_dataset_cmd = get_temp_idcams_dataset(hosts) + results = hosts.all.zos_mvs_raw( program_name="idcams", auth=True, @@ -781,7 +869,7 @@ def test_data_set_name_gdgs(ansible_zos_module): return_content=dict(type="text"), ), ), - dict(dd_input=dict(dd_name=SYSIN_DD, content=IDCAMS_STDIN)), + dict(dd_input=dict(dd_name=SYSIN_DD, content=idcams_listcat_dataset_cmd)), ], ) for result in results.contacted.values(): @@ -799,7 +887,7 @@ def test_data_set_name_gdgs(ansible_zos_module): return_content=dict(type="text"), ), ), - dict(dd_input=dict(dd_name=SYSIN_DD, content=IDCAMS_STDIN)), + dict(dd_input=dict(dd_name=SYSIN_DD, content=idcams_listcat_dataset_cmd)), ], ) for result in results.contacted.values(): @@ -818,7 +906,7 @@ def test_data_set_name_gdgs(ansible_zos_module): return_content=dict(type="text"), ), ), - dict(dd_input=dict(dd_name=SYSIN_DD, content=IDCAMS_STDIN)), + dict(dd_input=dict(dd_name=SYSIN_DD, content=idcams_listcat_dataset_cmd)), ], ) for result in results.contacted.values(): @@ -837,20 +925,25 @@ def test_data_set_name_gdgs(ansible_zos_module): return_content=dict(type="text"), ), ), - dict(dd_input=dict(dd_name=SYSIN_DD, content=IDCAMS_STDIN)), + dict(dd_input=dict(dd_name=SYSIN_DD, content=idcams_listcat_dataset_cmd)), ], ) for result in results.contacted.values(): assert result.get("ret_code", {}).get("code", -1) == 8 finally: hosts.all.shell(cmd="""drm "ANSIBLE.*" """) + if idcams_dataset: + hosts.all.zos_data_set(name=idcams_dataset, state="absent") def test_data_set_name_special_characters(ansible_zos_module): + idcams_dataset = None try: hosts = ansible_zos_module default_data_set = get_tmp_ds_name(5, 6, symbols=True) hosts.all.zos_data_set(name=default_data_set, type="seq", state="present") + idcams_dataset, idcams_listcat_dataset_cmd = get_temp_idcams_dataset(hosts) + results = hosts.all.zos_mvs_raw( program_name="idcams", auth=True, @@ -862,7 +955,7 @@ def test_data_set_name_special_characters(ansible_zos_module): return_content=dict(type="text"), ), ), - dict(dd_input=dict(dd_name=SYSIN_DD, content=IDCAMS_STDIN)), + dict(dd_input=dict(dd_name=SYSIN_DD, content=idcams_listcat_dataset_cmd)), ], ) for result in results.contacted.values(): @@ -870,6 +963,9 @@ def test_data_set_name_special_characters(ansible_zos_module): assert len(result.get("dd_names", [])) > 0 finally: hosts.all.shell(cmd="""drm "ANSIBLE.*" """) + if idcams_dataset: + hosts.all.zos_data_set(name=idcams_dataset, state="absent") + # ---------------------------------------------------------------------------- # # Input DD Tests # # ---------------------------------------------------------------------------- # @@ -950,13 +1046,17 @@ def test_input_large(ansible_zos_module): def test_input_provided_as_list(ansible_zos_module): + idcams_dataset = None try: hosts = ansible_zos_module default_data_set = get_tmp_ds_name() hosts.all.zos_data_set(name=default_data_set, state="absent") + idcams_dataset, idcams_listcat_dataset_cmd = get_temp_idcams_dataset(hosts) + contents = [] for i in range(10): - contents.append(IDCAMS_STDIN) + contents.append(idcams_listcat_dataset_cmd) + results = hosts.all.zos_mvs_raw( program_name="idcams", auth=True, @@ -986,6 +1086,8 @@ def test_input_provided_as_list(ansible_zos_module): assert len(result.get("dd_names", [{}])[0].get("content")) > 100 finally: hosts.all.zos_data_set(name=default_data_set, state="absent") + if idcams_dataset: + hosts.all.zos_data_set(name=idcams_dataset, state="absent") @pytest.mark.parametrize( @@ -999,10 +1101,13 @@ def test_input_provided_as_list(ansible_zos_module): ], ) def test_input_return_content_types(ansible_zos_module, return_content_type, expected): + idcams_dataset = None try: hosts = ansible_zos_module default_data_set = get_tmp_ds_name() hosts.all.zos_data_set(name=default_data_set, state="absent") + idcams_dataset, idcams_listcat_dataset_cmd = get_temp_idcams_dataset(hosts) + results = hosts.all.zos_mvs_raw( program_name="idcams", auth=True, @@ -1018,7 +1123,7 @@ def test_input_return_content_types(ansible_zos_module, return_content_type, exp { "dd_input":{ "dd_name":SYSIN_DD, - "content":IDCAMS_STDIN, + "content":idcams_listcat_dataset_cmd, "return_content":{ "type":return_content_type }, @@ -1032,6 +1137,8 @@ def test_input_return_content_types(ansible_zos_module, return_content_type, exp assert expected in "\n".join(result.get("dd_names", [{}])[0].get("content")) finally: hosts.all.zos_data_set(name=default_data_set, state="absent") + if idcams_dataset: + hosts.all.zos_data_set(name=idcams_dataset, state="absent") @pytest.mark.parametrize( @@ -1040,7 +1147,7 @@ def test_input_return_content_types(ansible_zos_module, return_content_type, exp ( "iso8859-1", "ibm-1047", - "|\udceeqBFfeF|g\udcefF\udcfdqgB\udcd4\udcd0CBg\udcfdҿ\udcfdqGeFgҿ\udcfd", + "|\udceeqBFfeF|g\udcefF\udcfdqgB\udcd4\udcd0", ), ( "ibm-1047", @@ -1052,10 +1159,13 @@ def test_input_return_content_types(ansible_zos_module, return_content_type, exp def test_input_return_text_content_encodings( ansible_zos_module, src_encoding, response_encoding, expected ): + idcams_dataset = None try: hosts = ansible_zos_module default_data_set = get_tmp_ds_name() hosts.all.zos_data_set(name=default_data_set, state="absent") + idcams_dataset, idcams_listcat_dataset_cmd = get_temp_idcams_dataset(hosts) + results = hosts.all.zos_mvs_raw( program_name="idcams", auth=True, @@ -1071,7 +1181,7 @@ def test_input_return_text_content_encodings( { "dd_input":{ "dd_name":SYSIN_DD, - "content":IDCAMS_STDIN, + "content":idcams_listcat_dataset_cmd, "return_content":{ "type":"text", "src_encoding":src_encoding, @@ -1087,6 +1197,8 @@ def test_input_return_text_content_encodings( assert expected in "\n".join(result.get("dd_names", [{}])[0].get("content")) finally: hosts.all.zos_data_set(name=default_data_set, state="absent") + if idcams_dataset: + hosts.all.zos_data_set(name=idcams_dataset, state="absent") # ---------------------------------------------------------------------------- # @@ -1095,35 +1207,44 @@ def test_input_return_text_content_encodings( def test_failing_path_name(ansible_zos_module): - hosts = ansible_zos_module - results = hosts.all.zos_mvs_raw( - program_name="idcams", - auth=True, - dds=[ - { - "dd_unix":{ - "dd_name":SYSPRINT_DD, - "path":"1dfa3f4rafwer/f2rfsd", + idcams_dataset = None + try: + hosts = ansible_zos_module + idcams_dataset, idcams_listcat_dataset_cmd = get_temp_idcams_dataset(hosts) + results = hosts.all.zos_mvs_raw( + program_name="idcams", + auth=True, + dds=[ + { + "dd_unix":{ + "dd_name":SYSPRINT_DD, + "path":"1dfa3f4rafwer/f2rfsd", + }, }, - }, - { - "dd_input":{ - "dd_name":SYSIN_DD, - "content":IDCAMS_STDIN, - } - }, - ], - ) - for result in results.contacted.values(): - assert result.get("ret_code", {}).get("code", -1) == 8 - assert "ValueError" in result.get("msg", "") + { + "dd_input":{ + "dd_name":SYSIN_DD, + "content":idcams_listcat_dataset_cmd, + } + }, + ], + ) + for result in results.contacted.values(): + assert result.get("ret_code", {}).get("code", -1) == 8 + assert "ValueError" in result.get("msg", "") + finally: + if idcams_dataset: + results = hosts.all.zos_data_set(name=idcams_dataset, state="absent") def test_create_new_file(ansible_zos_module): + idcams_dataset = None try: hosts = ansible_zos_module hosts.all.file(path=DEFAULT_PATH, state="directory") hosts.all.file(path=DEFAULT_PATH_WITH_FILE, state="absent") + idcams_dataset, idcams_listcat_dataset_cmd = get_temp_idcams_dataset(hosts) + results = hosts.all.zos_mvs_raw( program_name="idcams", auth=True, @@ -1137,7 +1258,7 @@ def test_create_new_file(ansible_zos_module): { "dd_input":{ "dd_name":SYSIN_DD, - "content":IDCAMS_STDIN, + "content":idcams_listcat_dataset_cmd, } }, ], @@ -1149,13 +1270,18 @@ def test_create_new_file(ansible_zos_module): assert "IDCAMS SYSTEM" in result.get("stdout", "") finally: hosts.all.file(path=DEFAULT_PATH, state="absent") + if idcams_dataset: + hosts.all.zos_data_set(name=idcams_dataset, state="absent") def test_write_to_existing_file(ansible_zos_module): + idcams_dataset = None try: hosts = ansible_zos_module hosts.all.file(path=DEFAULT_PATH, state="directory") hosts.all.file(path=DEFAULT_PATH_WITH_FILE, state="present") + idcams_dataset, idcams_listcat_dataset_cmd = get_temp_idcams_dataset(hosts) + results = hosts.all.zos_mvs_raw( program_name="idcams", auth=True, @@ -1169,7 +1295,7 @@ def test_write_to_existing_file(ansible_zos_module): { "dd_input":{ "dd_name":SYSIN_DD, - "content":IDCAMS_STDIN, + "content":idcams_listcat_dataset_cmd, } }, ], @@ -1181,16 +1307,21 @@ def test_write_to_existing_file(ansible_zos_module): assert "IDCAMS SYSTEM" in result.get("stdout", "") finally: hosts.all.file(path=DEFAULT_PATH, state="absent") + if idcams_dataset: + hosts.all.zos_data_set(name=idcams_dataset, state="absent") @pytest.mark.parametrize( "normal_disposition,expected", [("keep", True), ("delete", False)] ) def test_file_normal_disposition(ansible_zos_module, normal_disposition, expected): + idcams_dataset = None try: hosts = ansible_zos_module hosts.all.file(path=DEFAULT_PATH, state="directory") hosts.all.file(path=DEFAULT_PATH_WITH_FILE, state="present") + idcams_dataset, idcams_listcat_dataset_cmd = get_temp_idcams_dataset(hosts) + results = hosts.all.zos_mvs_raw( program_name="idcams", auth=True, @@ -1205,7 +1336,7 @@ def test_file_normal_disposition(ansible_zos_module, normal_disposition, expecte { "dd_input":{ "dd_name":SYSIN_DD, - "content":IDCAMS_STDIN, + "content":idcams_listcat_dataset_cmd, } }, ], @@ -1217,13 +1348,18 @@ def test_file_normal_disposition(ansible_zos_module, normal_disposition, expecte assert result.get("stat", {}).get("exists", not expected) is expected finally: hosts.all.file(path=DEFAULT_PATH, state="absent") + if idcams_dataset: + hosts.all.zos_data_set(name=idcams_dataset, state="absent") @pytest.mark.parametrize("mode,expected", [(644, "0644"), (755, "0755")]) def test_file_modes(ansible_zos_module, mode, expected): + idcams_dataset = None try: hosts = ansible_zos_module hosts.all.file(path=DEFAULT_PATH, state="directory") + idcams_dataset, idcams_listcat_dataset_cmd = get_temp_idcams_dataset(hosts) + results = hosts.all.zos_mvs_raw( program_name="idcams", auth=True, @@ -1238,7 +1374,7 @@ def test_file_modes(ansible_zos_module, mode, expected): { "dd_input":{ "dd_name":SYSIN_DD, - "content":IDCAMS_STDIN, + "content":idcams_listcat_dataset_cmd, } }, ], @@ -1250,6 +1386,8 @@ def test_file_modes(ansible_zos_module, mode, expected): assert result.get("stat", {}).get("mode", "") == expected finally: hosts.all.file(path=DEFAULT_PATH, state="absent") + if idcams_dataset: + hosts.all.zos_data_set(name=idcams_dataset, state="absent") @pytest.mark.parametrize( @@ -1261,9 +1399,12 @@ def test_file_modes(ansible_zos_module, mode, expected): ], ) def test_file_path_options(ansible_zos_module, access_group, status_group): + idcams_dataset = None try: hosts = ansible_zos_module hosts.all.file(path=DEFAULT_PATH, state="directory") + idcams_dataset, idcams_listcat_dataset_cmd = get_temp_idcams_dataset(hosts) + results = hosts.all.zos_mvs_raw( program_name="idcams", auth=True, @@ -1279,7 +1420,7 @@ def test_file_path_options(ansible_zos_module, access_group, status_group): { "dd_input":{ "dd_name":SYSIN_DD, - "content":IDCAMS_STDIN, + "content":idcams_listcat_dataset_cmd, } }, ], @@ -1291,6 +1432,8 @@ def test_file_path_options(ansible_zos_module, access_group, status_group): assert "IDCAMS SYSTEM" in result.get("stdout", "") finally: hosts.all.file(path=DEFAULT_PATH, state="absent") + if idcams_dataset: + hosts.all.zos_data_set(name=idcams_dataset, state="absent") @pytest.mark.parametrize( @@ -1298,10 +1441,13 @@ def test_file_path_options(ansible_zos_module, access_group, status_group): [10, 20, 50, 80, 120], ) def test_file_block_size(ansible_zos_module, block_size): + idcams_dataset = None try: hosts = ansible_zos_module hosts.all.file(path=DEFAULT_PATH, state="directory") hosts.all.file(path=DEFAULT_PATH_WITH_FILE, state="absent") + idcams_dataset, idcams_listcat_dataset_cmd = get_temp_idcams_dataset(hosts) + results = hosts.all.zos_mvs_raw( program_name="idcams", auth=True, @@ -1316,7 +1462,7 @@ def test_file_block_size(ansible_zos_module, block_size): { "dd_input":{ "dd_name":SYSIN_DD, - "content":IDCAMS_STDIN, + "content":idcams_listcat_dataset_cmd, } }, ], @@ -1328,6 +1474,8 @@ def test_file_block_size(ansible_zos_module, block_size): assert "IDCAMS SYSTEM" in result.get("stdout", "") finally: hosts.all.file(path=DEFAULT_PATH, state="absent") + if idcams_dataset: + hosts.all.zos_data_set(name=idcams_dataset, state="absent") @pytest.mark.parametrize( @@ -1335,10 +1483,13 @@ def test_file_block_size(ansible_zos_module, block_size): [10, 20, 50, 80, 120], ) def test_file_record_length(ansible_zos_module, record_length): + idcams_dataset = None try: hosts = ansible_zos_module hosts.all.file(path=DEFAULT_PATH, state="directory") hosts.all.file(path=DEFAULT_PATH_WITH_FILE, state="absent") + idcams_dataset, idcams_listcat_dataset_cmd = get_temp_idcams_dataset(hosts) + results = hosts.all.zos_mvs_raw( program_name="idcams", auth=True, @@ -1353,7 +1504,7 @@ def test_file_record_length(ansible_zos_module, record_length): { "dd_input":{ "dd_name":SYSIN_DD, - "content":IDCAMS_STDIN, + "content":idcams_listcat_dataset_cmd, } }, ], @@ -1365,6 +1516,8 @@ def test_file_record_length(ansible_zos_module, record_length): assert "IDCAMS SYSTEM" in result.get("stdout", "") finally: hosts.all.file(path=DEFAULT_PATH, state="absent") + if idcams_dataset: + hosts.all.zos_data_set(name=idcams_dataset, state="absent") @pytest.mark.parametrize( @@ -1372,10 +1525,13 @@ def test_file_record_length(ansible_zos_module, record_length): ["u", "vb", "vba", "fb", "fba"], ) def test_file_record_format(ansible_zos_module, record_format): + idcams_dataset = None try: hosts = ansible_zos_module hosts.all.file(path=DEFAULT_PATH, state="directory") hosts.all.file(path=DEFAULT_PATH_WITH_FILE, state="absent") + idcams_dataset, idcams_listcat_dataset_cmd = get_temp_idcams_dataset(hosts) + results = hosts.all.zos_mvs_raw( program_name="idcams", auth=True, @@ -1390,7 +1546,7 @@ def test_file_record_format(ansible_zos_module, record_format): { "dd_input":{ "dd_name":SYSIN_DD, - "content":IDCAMS_STDIN, + "content":idcams_listcat_dataset_cmd, } }, ], @@ -1402,6 +1558,8 @@ def test_file_record_format(ansible_zos_module, record_format): assert "IDCAMS SYSTEM" in result.get("stdout", "") finally: hosts.all.file(path=DEFAULT_PATH, state="absent") + if idcams_dataset: + hosts.all.zos_data_set(name=idcams_dataset, state="absent") @pytest.mark.parametrize( @@ -1415,10 +1573,13 @@ def test_file_record_format(ansible_zos_module, record_format): ], ) def test_file_return_content(ansible_zos_module, return_content_type, expected): + idcams_dataset = None try: hosts = ansible_zos_module hosts.all.file(path=DEFAULT_PATH, state="directory") hosts.all.file(path=DEFAULT_PATH_WITH_FILE, state="absent") + idcams_dataset, idcams_listcat_dataset_cmd = get_temp_idcams_dataset(hosts) + results = hosts.all.zos_mvs_raw( program_name="idcams", auth=True, @@ -1435,7 +1596,7 @@ def test_file_return_content(ansible_zos_module, return_content_type, expected): { "dd_input":{ "dd_name":SYSIN_DD, - "content":IDCAMS_STDIN, + "content":idcams_listcat_dataset_cmd, } }, ], @@ -1446,6 +1607,8 @@ def test_file_return_content(ansible_zos_module, return_content_type, expected): assert expected in "\n".join(result.get("dd_names")[0].get("content", [])) finally: hosts.all.file(path=DEFAULT_PATH, state="absent") + if idcams_dataset: + hosts.all.zos_data_set(name=idcams_dataset, state="absent") @pytest.mark.parametrize( @@ -1462,10 +1625,13 @@ def test_file_return_content(ansible_zos_module, return_content_type, expected): def test_file_return_text_content_encodings( ansible_zos_module, src_encoding, response_encoding, expected ): + idcams_dataset = None try: hosts = ansible_zos_module hosts.all.file(path=DEFAULT_PATH, state="directory") hosts.all.file(path=DEFAULT_PATH_WITH_FILE, state="absent") + idcams_dataset, idcams_listcat_dataset_cmd = get_temp_idcams_dataset(hosts) + results = hosts.all.zos_mvs_raw( program_name="idcams", auth=True, @@ -1484,7 +1650,7 @@ def test_file_return_text_content_encodings( { "dd_input":{ "dd_name":SYSIN_DD, - "content":IDCAMS_STDIN, + "content":idcams_listcat_dataset_cmd, } }, ], @@ -1495,6 +1661,8 @@ def test_file_return_text_content_encodings( assert expected in "\n".join(result.get("dd_names")[0].get("content", [])) finally: hosts.all.file(path=DEFAULT_PATH, state="absent") + if idcams_dataset: + hosts.all.zos_data_set(name=idcams_dataset, state="absent") # ---------------------------------------------------------------------------- # @@ -1503,10 +1671,13 @@ def test_file_return_text_content_encodings( def test_dummy(ansible_zos_module): + idcams_dataset = None try: hosts = ansible_zos_module hosts.all.file(path=DEFAULT_PATH, state="directory") hosts.all.file(path=DEFAULT_PATH_WITH_FILE, state="absent") + idcams_dataset, idcams_listcat_dataset_cmd = get_temp_idcams_dataset(hosts) + results = hosts.all.zos_mvs_raw( program_name="idcams", auth=True, @@ -1519,7 +1690,7 @@ def test_dummy(ansible_zos_module): { "dd_input":{ "dd_name":SYSIN_DD, - "content":IDCAMS_STDIN, + "content":idcams_listcat_dataset_cmd, } }, ], @@ -1529,6 +1700,8 @@ def test_dummy(ansible_zos_module): assert len(result.get("dd_names", [])) == 0 finally: hosts.all.file(path=DEFAULT_PATH, state="absent") + if idcams_dataset: + hosts.all.zos_data_set(name=idcams_dataset, state="absent") # ---------------------------------------------------------------------------- # @@ -1537,12 +1710,15 @@ def test_dummy(ansible_zos_module): def test_concatenation_with_data_set_dd_and_response(ansible_zos_module): + idcams_dataset = None try: hosts = ansible_zos_module default_data_set = get_tmp_ds_name() default_data_set_2 = get_tmp_ds_name() hosts.all.zos_data_set(name=default_data_set, state="absent") hosts.all.zos_data_set(name=default_data_set_2, state="absent") + idcams_dataset, idcams_listcat_dataset_cmd = get_temp_idcams_dataset(hosts) + results = hosts.all.zos_mvs_raw( program_name="idcams", auth=True, @@ -1574,7 +1750,7 @@ def test_concatenation_with_data_set_dd_and_response(ansible_zos_module): { "dd_input":{ "dd_name":SYSIN_DD, - "content":IDCAMS_STDIN, + "content":idcams_listcat_dataset_cmd, } }, ], @@ -1587,15 +1763,20 @@ def test_concatenation_with_data_set_dd_and_response(ansible_zos_module): finally: hosts.all.zos_data_set(name=default_data_set, state="absent") hosts.all.zos_data_set(name=default_data_set_2, state="absent") + if idcams_dataset: + hosts.all.zos_data_set(name=idcams_dataset, state="absent") def test_concatenation_with_data_set_dd_with_replace_and_backup(ansible_zos_module): + idcams_dataset = None try: hosts = ansible_zos_module default_data_set = get_tmp_ds_name() default_data_set_2 = get_tmp_ds_name() hosts.all.zos_data_set(name=default_data_set, state="present", type="seq") hosts.all.zos_data_set(name=default_data_set_2, state="present", type="seq") + idcams_dataset, idcams_listcat_dataset_cmd = get_temp_idcams_dataset(hosts) + results = hosts.all.zos_mvs_raw( program_name="idcams", auth=True, @@ -1631,7 +1812,7 @@ def test_concatenation_with_data_set_dd_with_replace_and_backup(ansible_zos_modu { "dd_input":{ "dd_name":SYSIN_DD, - "content":IDCAMS_STDIN, + "content":idcams_listcat_dataset_cmd, } }, ], @@ -1658,9 +1839,12 @@ def test_concatenation_with_data_set_dd_with_replace_and_backup(ansible_zos_modu finally: hosts.all.zos_data_set(name=default_data_set, state="absent") hosts.all.zos_data_set(name=default_data_set_2, state="absent") + if idcams_dataset: + hosts.all.zos_data_set(name=idcams_dataset, state="absent") def test_concatenation_with_data_set_member(ansible_zos_module): + idcams_dataset = None try: hosts = ansible_zos_module default_data_set = get_tmp_ds_name() @@ -1668,6 +1852,8 @@ def test_concatenation_with_data_set_member(ansible_zos_module): default_data_set_with_member = default_data_set + '(MEM)' hosts.all.zos_data_set(name=default_data_set, state="present", type="pds") hosts.all.zos_data_set(name=default_data_set_2, state="absent") + idcams_dataset, idcams_listcat_dataset_cmd = get_temp_idcams_dataset(hosts) + results = hosts.all.zos_mvs_raw( program_name="idcams", auth=True, @@ -1697,7 +1883,7 @@ def test_concatenation_with_data_set_member(ansible_zos_module): { "dd_input":{ "dd_name":SYSIN_DD, - "content":IDCAMS_STDIN, + "content":idcams_listcat_dataset_cmd, } }, ], @@ -1715,15 +1901,20 @@ def test_concatenation_with_data_set_member(ansible_zos_module): finally: hosts.all.zos_data_set(name=default_data_set, state="absent") hosts.all.zos_data_set(name=default_data_set_2, state="absent") + if idcams_dataset: + hosts.all.zos_data_set(name=idcams_dataset, state="absent") def test_concatenation_with_unix_dd_and_response_datasets(ansible_zos_module): + idcams_dataset = None try: hosts = ansible_zos_module default_data_set_2 = get_tmp_ds_name() hosts.all.file(path=DEFAULT_PATH, state="directory") hosts.all.file(path=DEFAULT_PATH_WITH_FILE, state="absent") hosts.all.zos_data_set(name=default_data_set_2, state="absent") + idcams_dataset, idcams_listcat_dataset_cmd = get_temp_idcams_dataset(hosts) + results = hosts.all.zos_mvs_raw( program_name="idcams", auth=True, @@ -1753,7 +1944,7 @@ def test_concatenation_with_unix_dd_and_response_datasets(ansible_zos_module): { "dd_input":{ "dd_name":SYSIN_DD, - "content":IDCAMS_STDIN, + "content":idcams_listcat_dataset_cmd, } }, ], @@ -1766,13 +1957,18 @@ def test_concatenation_with_unix_dd_and_response_datasets(ansible_zos_module): finally: hosts.all.file(name=DEFAULT_PATH, state="absent") hosts.all.zos_data_set(name=default_data_set_2, state="absent") + if idcams_dataset: + hosts.all.zos_data_set(name=idcams_dataset, state="absent") def test_concatenation_with_unix_dd_and_response_uss(ansible_zos_module): + idcams_dataset = None try: hosts = ansible_zos_module hosts.all.file(path=DEFAULT_PATH, state="directory") hosts.all.file(path=DEFAULT_PATH_WITH_FILE, state="absent") + idcams_dataset, idcams_listcat_dataset_cmd = get_temp_idcams_dataset(hosts) + results = hosts.all.zos_mvs_raw( program_name="idcams", auth=True, @@ -1803,7 +1999,7 @@ def test_concatenation_with_unix_dd_and_response_uss(ansible_zos_module): { "dd_input":{ "dd_name":SYSIN_DD, - "content":IDCAMS_STDIN, + "content":idcams_listcat_dataset_cmd, } }, ], @@ -1815,41 +2011,49 @@ def test_concatenation_with_unix_dd_and_response_uss(ansible_zos_module): assert "Hello world!" in "\n".join(result.get("dd_names")[1].get("content", [])) finally: hosts.all.file(name=DEFAULT_PATH, state="absent") + if idcams_dataset: + hosts.all.zos_data_set(name=idcams_dataset, state="absent") def test_concatenation_fail_with_unsupported_dd_type(ansible_zos_module): - hosts = ansible_zos_module - results = hosts.all.zos_mvs_raw( - program_name="idcams", - auth=True, - dds=[ - { - "dd_concat":{ - "dd_name":SYSPRINT_DD, - "dds":[ - { - "dd_dummy":{ - "path":DEFAULT_PATH_WITH_FILE, - "return_content":{ - "type":"text" + idcams_dataset = None + try: + hosts = ansible_zos_module + idcams_dataset, idcams_listcat_dataset_cmd = get_temp_idcams_dataset(hosts) + results = hosts.all.zos_mvs_raw( + program_name="idcams", + auth=True, + dds=[ + { + "dd_concat":{ + "dd_name":SYSPRINT_DD, + "dds":[ + { + "dd_dummy":{ + "path":DEFAULT_PATH_WITH_FILE, + "return_content":{ + "type":"text" + }, }, + "dd_concat":{}, }, - "dd_concat":{}, - }, - ], + ], + }, }, - }, - { - "dd_input":{ - "dd_name":SYSIN_DD, - "content":IDCAMS_STDIN, - } - }, - ], - ) - for result in results.contacted.values(): - assert result.get("ret_code", {}).get("code", -1) == -1 - assert "Unsupported parameters" in result.get("msg", "") + { + "dd_input":{ + "dd_name":SYSIN_DD, + "content":idcams_listcat_dataset_cmd, + } + }, + ], + ) + for result in results.contacted.values(): + assert result.get("ret_code", {}).get("code", -1) == -1 + assert "Unsupported parameters" in result.get("msg", "") + finally: + if idcams_dataset: + hosts.all.zos_data_set(name=idcams_dataset, state="absent") @pytest.mark.parametrize( @@ -1889,12 +2093,6 @@ def test_concatenation_fail_with_unsupported_dd_type(ansible_zos_module): ], }, }, - { - "dd_input":{ - "dd_name":SYSIN_DD, - "content":IDCAMS_STDIN, - } - }, ], 2, "Hello world!", @@ -1933,12 +2131,6 @@ def test_concatenation_fail_with_unsupported_dd_type(ansible_zos_module): ], }, }, - { - "dd_input":{ - "dd_name":SYSIN_DD, - "content":IDCAMS_STDIN, - } - }, ], 2, "Hello world!", @@ -1977,12 +2169,6 @@ def test_concatenation_fail_with_unsupported_dd_type(ansible_zos_module): ], }, }, - { - "dd_input":{ - "dd_name":SYSIN_DD, - "content":IDCAMS_STDIN, - } - }, ], 0, "IDCAMS", @@ -1990,12 +2176,26 @@ def test_concatenation_fail_with_unsupported_dd_type(ansible_zos_module): ], ) def test_concatenation_all_dd_types(ansible_zos_module, dds, input_pos, input_content): + idcams_dataset = None try: hosts = ansible_zos_module + default_data_set = "ANSIBLE.USER.PRIVATE.TEST" hosts.all.zos_data_set(name=default_data_set, state="present", type="seq") + hosts.all.file(path=DEFAULT_PATH, state="directory") hosts.all.file(path=DEFAULT_PATH_WITH_FILE, state="absent") + + idcams_dataset, idcams_listcat_dataset_cmd = get_temp_idcams_dataset(hosts) + dds.append( + { + 'dd_input': { + "dd_name": SYSIN_DD, + "content": idcams_listcat_dataset_cmd + } + } + ) + results = hosts.all.zos_mvs_raw(program_name="idcams", auth=True, dds=dds) for result in results.contacted.values(): assert result.get("ret_code", {}).get("code", -1) == 0 @@ -2007,6 +2207,8 @@ def test_concatenation_all_dd_types(ansible_zos_module, dds, input_pos, input_co finally: hosts.all.file(name=DEFAULT_PATH, state="absent") hosts.all.zos_data_set(name=default_data_set, state="absent") + if idcams_dataset: + hosts.all.zos_data_set(name=idcams_dataset, state="absent") # ---------------------------------------------------------------------------- # @@ -2121,29 +2323,35 @@ def test_with_parms(ansible_zos_module): def test_with_multiple_of_same_dd_name(ansible_zos_module): - hosts = ansible_zos_module - results = hosts.all.zos_mvs_raw( - pgm="idcams", - auth=True, - dds=[ - { - "dd_input":{ - "dd_name":SYSIN_DD, - "content":IDCAMS_STDIN - } - }, - { - "dd_input":{ - "dd_name":SYSIN_DD, - "content":IDCAMS_STDIN - } - }, - ], - ) - for result in results.contacted.values(): - assert result.get("ret_code", {}).get("code", -1) == 8 - assert len(result.get("dd_names", [])) == 0 - assert "BGYSC0228E" in result.get("msg", "") + idcams_dataset = None + try: + hosts = ansible_zos_module + idcams_dataset, idcams_listcat_dataset_cmd = get_temp_idcams_dataset(hosts) + results = hosts.all.zos_mvs_raw( + pgm="idcams", + auth=True, + dds=[ + { + "dd_input":{ + "dd_name":SYSIN_DD, + "content":idcams_listcat_dataset_cmd + } + }, + { + "dd_input":{ + "dd_name":SYSIN_DD, + "content":idcams_listcat_dataset_cmd + } + }, + ], + ) + for result in results.contacted.values(): + assert result.get("ret_code", {}).get("code", -1) == 8 + assert len(result.get("dd_names", [])) == 0 + assert "BGYSC0228E" in result.get("msg", "") + finally: + if idcams_dataset: + hosts.all.zos_data_set(name=idcams_dataset, state="absent") # ---------------------------------------------------------------------------- # @@ -2152,27 +2360,34 @@ def test_with_multiple_of_same_dd_name(ansible_zos_module): def test_vio_as_output(ansible_zos_module): - hosts = ansible_zos_module - results = hosts.all.zos_mvs_raw( - program_name="idcams", - auth=True, - dds=[ - { - "dd_vio":{ - "dd_name":SYSPRINT_DD, + idcams_dataset = None + try: + hosts = ansible_zos_module + idcams_dataset, idcams_listcat_dataset_cmd = get_temp_idcams_dataset(hosts) + + results = hosts.all.zos_mvs_raw( + program_name="idcams", + auth=True, + dds=[ + { + "dd_vio":{ + "dd_name":SYSPRINT_DD, + }, }, - }, - { - "dd_input":{ - "dd_name":SYSIN_DD, - "content":IDCAMS_STDIN - } - }, - ], - ) - for result in results.contacted.values(): - assert result.get("ret_code", {}).get("code", 0) == 0 - assert len(result.get("dd_names", [])) == 0 + { + "dd_input":{ + "dd_name":SYSIN_DD, + "content":idcams_listcat_dataset_cmd + } + }, + ], + ) + for result in results.contacted.values(): + assert result.get("ret_code", {}).get("code", 0) == 0 + assert len(result.get("dd_names", [])) == 0 + finally: + if idcams_dataset: + hosts.all.zos_data_set(name=idcams_dataset, state="absent") # ---------------------------------------------------------------------------- # @@ -2181,9 +2396,11 @@ def test_vio_as_output(ansible_zos_module): def test_output_dd(ansible_zos_module): + idcams_dataset = None try: hosts = ansible_zos_module data_set_name = None + idcams_dataset, idcams_listcat_dataset_cmd = get_temp_idcams_dataset(hosts) results = hosts.all.zos_mvs_raw( program_name="idcams", @@ -2200,7 +2417,7 @@ def test_output_dd(ansible_zos_module): { "dd_input":{ "dd_name":SYSIN_DD, - "content":IDCAMS_STDIN + "content":idcams_listcat_dataset_cmd } }, ], @@ -2215,3 +2432,5 @@ def test_output_dd(ansible_zos_module): finally: if data_set_name: hosts.all.zos_data_set(name=data_set_name, state="absent") + if idcams_dataset: + hosts.all.zos_data_set(name=idcams_dataset, state="absent") From c4a6cd665f6c5ebb2396746bf55de15e2cfd5da0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Sun, 11 Aug 2024 19:15:00 -0600 Subject: [PATCH 445/495] [Enabler][1578]backup_restore_portability (#1635) * Set new test * Add restore lower case * Add correct distinct case * Add good import * Fix alt * Change to get random qualifier * Add fragment * Change bad import * Get unique uss files names --- .../1635-backup_restore_portability.yml | 3 + .../modules/test_zos_backup_restore.py | 261 +++++++++++------- 2 files changed, 163 insertions(+), 101 deletions(-) create mode 100644 changelogs/fragments/1635-backup_restore_portability.yml diff --git a/changelogs/fragments/1635-backup_restore_portability.yml b/changelogs/fragments/1635-backup_restore_portability.yml new file mode 100644 index 000000000..dd5bf061a --- /dev/null +++ b/changelogs/fragments/1635-backup_restore_portability.yml @@ -0,0 +1,3 @@ +trivial: + - test_zos_backup_restore - Remove the use of hard coded hlq and files names. + (https://github.com/ansible-collections/ibm_zos_core/pull/1635). \ No newline at end of file diff --git a/tests/functional/modules/test_zos_backup_restore.py b/tests/functional/modules/test_zos_backup_restore.py index c265f146a..fff5bd6aa 100644 --- a/tests/functional/modules/test_zos_backup_restore.py +++ b/tests/functional/modules/test_zos_backup_restore.py @@ -15,20 +15,21 @@ __metaclass__ = type -from ibm_zos_core.tests.helpers.dataset import get_tmp_ds_name +from ibm_zos_core.tests.helpers.dataset import ( + get_tmp_ds_name, + get_random_q, +) import pytest from re import search, IGNORECASE, MULTILINE import string import random +from datetime import datetime + +DATA_SET_CONTENTS = "HELLO WORLD" -DATA_SET_CONTENTS = "HELLO world" -DATA_SET_QUALIFIER = "{0}.PRIVATE.TESTDS" -DATA_SET_QUALIFIER2 = "{0}.PRIVATE.TESTDS2" -DATA_SET_BACKUP_LOCATION = "MY.BACKUP" -UNIX_BACKUP_LOCATION = "/tmp/mybackup.dzp" -NEW_HLQ = "TMPHLQ" -DATA_SET_RESTORE_LOCATION = DATA_SET_QUALIFIER.format(NEW_HLQ) -DATA_SET_RESTORE_LOCATION2 = DATA_SET_QUALIFIER2.format(NEW_HLQ) +def get_unique_uss_file_name(): + unique_str = "n" + datetime.now().strftime("%H:%M:%S").replace("-", "").replace(":", "") + ".dzp" + return "/tmp/{0}".format(unique_str) # ---------------------------------------------------------------------------- # # Helper functions # @@ -73,10 +74,13 @@ def delete_data_set(hosts, data_set_name): def delete_file(hosts, path): hosts.all.file(path=path, state="absent") -def delete_remnants(hosts): +def delete_remnants(hosts, list=None): hosts.all.shell(cmd="drm 'ANSIBLE.*'") hosts.all.shell(cmd="drm 'TEST.*'") hosts.all.shell(cmd="drm 'TMPHLQ.*'") + if list: + for object in list: + hosts.all.shell(cmd="drm '{0}.*'".format(object)) def get_unused_volume_serial(hosts): found = False @@ -183,19 +187,23 @@ def assert_file_does_not_exist(hosts, path): @pytest.mark.parametrize( "backup_name,overwrite,recover", [ - (DATA_SET_BACKUP_LOCATION, False, False), - (DATA_SET_BACKUP_LOCATION, True, True), - (DATA_SET_BACKUP_LOCATION, False, True), - (DATA_SET_BACKUP_LOCATION, True, False), - (UNIX_BACKUP_LOCATION, False, False), - (UNIX_BACKUP_LOCATION, True, True), - (UNIX_BACKUP_LOCATION, False, True), - (UNIX_BACKUP_LOCATION, True, False), + ("DATA_SET", False, False), + ("DATA_SET", True, True), + ("DATA_SET", False, True), + ("DATA_SET", True, False), + ("UNIX", False, False), + ("UNIX", True, True), + ("UNIX", False, True), + ("UNIX", True, False), ], ) def test_backup_of_data_set(ansible_zos_module, backup_name, overwrite, recover): hosts = ansible_zos_module data_set_name = get_tmp_ds_name() + if backup_name == "DATA_SET": + backup_name = get_tmp_ds_name(1,1) + else: + backup_name = get_unique_uss_file_name() try: if not overwrite: delete_data_set_or_file(hosts, backup_name) @@ -221,10 +229,10 @@ def test_backup_of_data_set(ansible_zos_module, backup_name, overwrite, recover) @pytest.mark.parametrize( "backup_name,overwrite", [ - (DATA_SET_BACKUP_LOCATION, False), - (DATA_SET_BACKUP_LOCATION, True), - (UNIX_BACKUP_LOCATION, False), - (UNIX_BACKUP_LOCATION, True), + ("DATA_SET", False), + ("DATA_SET", True), + ("UNIX", False), + ("UNIX", True), ], ) def test_backup_of_data_set_when_backup_dest_exists( @@ -232,6 +240,10 @@ def test_backup_of_data_set_when_backup_dest_exists( ): hosts = ansible_zos_module data_set_name = get_tmp_ds_name() + if backup_name == "DATA_SET": + backup_name = get_tmp_ds_name(1,1) + else: + backup_name = get_unique_uss_file_name() try: create_data_set_or_file_with_contents(hosts, backup_name, DATA_SET_CONTENTS) assert_data_set_or_file_exists(hosts, backup_name) @@ -258,22 +270,28 @@ def test_backup_of_data_set_when_backup_dest_exists( @pytest.mark.parametrize( "backup_name,overwrite,recover", [ - (DATA_SET_BACKUP_LOCATION, False, False), - (DATA_SET_BACKUP_LOCATION, True, True), - (DATA_SET_BACKUP_LOCATION, False, True), - (DATA_SET_BACKUP_LOCATION, True, False), - (UNIX_BACKUP_LOCATION, False, False), - (UNIX_BACKUP_LOCATION, True, True), - (UNIX_BACKUP_LOCATION, False, True), - (UNIX_BACKUP_LOCATION, True, False), + ("DATA_SET", False, False), + ("DATA_SET", True, True), + ("DATA_SET", False, True), + ("DATA_SET", True, False), + ("UNIX", False, False), + ("UNIX", True, True), + ("UNIX", False, True), + ("UNIX", True, False), ], ) def test_backup_and_restore_of_data_set( ansible_zos_module, backup_name, overwrite, recover ): + hlqs = [] hosts = ansible_zos_module data_set_name = get_tmp_ds_name() - new_hlq = NEW_HLQ + new_hlq = "N" + get_random_q(4) + hlqs.append(new_hlq) + if backup_name == "DATA_SET": + backup_name = get_tmp_ds_name(1,1) + else: + backup_name = get_unique_uss_file_name() try: delete_data_set_or_file(hosts, data_set_name) delete_data_set_or_file(hosts, backup_name) @@ -288,7 +306,8 @@ def test_backup_and_restore_of_data_set( recover=recover, ) if not overwrite: - new_hlq = "TEST" + new_hlq = "N" + get_random_q(4) + hlqs.append(new_hlq) assert_module_did_not_fail(results) assert_data_set_or_file_exists(hosts, backup_name) results = hosts.all.zos_backup_restore( @@ -301,29 +320,36 @@ def test_backup_and_restore_of_data_set( finally: delete_data_set_or_file(hosts, data_set_name) delete_data_set_or_file(hosts, backup_name) - delete_remnants(hosts) + delete_remnants(hosts, hlqs) @pytest.mark.parametrize( "backup_name,space,space_type", [ - (DATA_SET_BACKUP_LOCATION, 10, "m"), - (DATA_SET_BACKUP_LOCATION, 10000, "k"), - (DATA_SET_BACKUP_LOCATION, 10, None), - (DATA_SET_BACKUP_LOCATION, 2, "cyl"), - (DATA_SET_BACKUP_LOCATION, 10, "trk"), - (UNIX_BACKUP_LOCATION, 10, "m"), - (UNIX_BACKUP_LOCATION, 10000, "k"), - (UNIX_BACKUP_LOCATION, 10, None), - (UNIX_BACKUP_LOCATION, 2, "cyl"), - (UNIX_BACKUP_LOCATION, 10, "trk"), + ("DATA_SET", 10, "m"), + ("DATA_SET", 10000, "k"), + ("DATA_SET", 10, None), + ("DATA_SET", 2, "cyl"), + ("DATA_SET", 10, "trk"), + ("UNIX", 10, "m"), + ("UNIX", 10000, "k"), + ("UNIX", 10, None), + ("UNIX", 2, "cyl"), + ("UNIX", 10, "trk"), ], ) def test_backup_and_restore_of_data_set_various_space_measurements( ansible_zos_module, backup_name, space, space_type ): + hlqs = [] hosts = ansible_zos_module data_set_name = get_tmp_ds_name() + new_hlq = "N" + get_random_q(4) + hlqs.append(new_hlq) + if backup_name == "DATA_SET": + backup_name = get_tmp_ds_name(1,1) + else: + backup_name = get_unique_uss_file_name() try: delete_data_set_or_file(hosts, data_set_name) delete_data_set_or_file(hosts, backup_name) @@ -345,7 +371,7 @@ def test_backup_and_restore_of_data_set_various_space_measurements( args = dict( operation="restore", backup_name=backup_name, - hlq=NEW_HLQ, + hlq=new_hlq, overwrite=True, space=space, ) @@ -362,17 +388,24 @@ def test_backup_and_restore_of_data_set_various_space_measurements( @pytest.mark.parametrize( "backup_name,overwrite", [ - (DATA_SET_BACKUP_LOCATION, False), - (DATA_SET_BACKUP_LOCATION, True), - (UNIX_BACKUP_LOCATION, False), - (UNIX_BACKUP_LOCATION, True), + ("DATA_SET", False), + ("DATA_SET", True), + ("UNIX", False), + ("UNIX", True), ], ) def test_backup_and_restore_of_data_set_when_restore_location_exists( ansible_zos_module, backup_name, overwrite ): + hlqs = [] hosts = ansible_zos_module data_set_name = get_tmp_ds_name() + new_hlq = "N" + get_random_q(4) + hlqs.append(new_hlq) + if backup_name == "DATA_SET": + backup_name = get_tmp_ds_name(1,1) + else: + backup_name = get_unique_uss_file_name() try: delete_data_set_or_file(hosts, data_set_name) delete_data_set_or_file(hosts, backup_name) @@ -389,13 +422,13 @@ def test_backup_and_restore_of_data_set_when_restore_location_exists( results = hosts.all.zos_backup_restore( operation="restore", backup_name=backup_name, - hlq=NEW_HLQ, + hlq=new_hlq, ) assert_module_did_not_fail(results) results = hosts.all.zos_backup_restore( operation="restore", backup_name=backup_name, - hlq=NEW_HLQ, + hlq=new_hlq, overwrite=overwrite, ) if overwrite: @@ -405,7 +438,7 @@ def test_backup_and_restore_of_data_set_when_restore_location_exists( finally: delete_data_set_or_file(hosts, data_set_name) delete_data_set_or_file(hosts, backup_name) - delete_remnants(hosts) + delete_remnants(hosts, hlqs) def test_backup_and_restore_of_multiple_data_sets(ansible_zos_module): @@ -413,10 +446,11 @@ def test_backup_and_restore_of_multiple_data_sets(ansible_zos_module): data_set_name = get_tmp_ds_name() data_set_name2 = get_tmp_ds_name() data_set_include = [data_set_name, data_set_name2] + data_set_backup_location = get_tmp_ds_name(1, 1) try: delete_data_set_or_file(hosts, data_set_name) delete_data_set_or_file(hosts, data_set_name2) - delete_data_set_or_file(hosts, DATA_SET_BACKUP_LOCATION) + delete_data_set_or_file(hosts, data_set_backup_location) create_sequential_data_set_with_contents( hosts, data_set_name, DATA_SET_CONTENTS ) @@ -426,13 +460,13 @@ def test_backup_and_restore_of_multiple_data_sets(ansible_zos_module): results = hosts.all.zos_backup_restore( operation="backup", data_sets=dict(include=data_set_include), - backup_name=DATA_SET_BACKUP_LOCATION, + backup_name=data_set_backup_location, ) assert_module_did_not_fail(results) - assert_data_set_or_file_exists(hosts, DATA_SET_BACKUP_LOCATION) + assert_data_set_or_file_exists(hosts, data_set_backup_location) results = hosts.all.zos_backup_restore( operation="restore", - backup_name=DATA_SET_BACKUP_LOCATION, + backup_name=data_set_backup_location, overwrite=True, recover=True, ) @@ -440,19 +474,23 @@ def test_backup_and_restore_of_multiple_data_sets(ansible_zos_module): finally: delete_data_set_or_file(hosts, data_set_name) delete_data_set_or_file(hosts, data_set_name2) - delete_data_set_or_file(hosts, DATA_SET_BACKUP_LOCATION) + delete_data_set_or_file(hosts, data_set_backup_location) delete_remnants(hosts) def test_backup_and_restore_of_multiple_data_sets_by_hlq(ansible_zos_module): + hlqs = [] hosts = ansible_zos_module data_set_name = get_tmp_ds_name() data_set_name2 = get_tmp_ds_name() data_sets_hlq = "ANSIBLE.**" + data_set_backup_location = get_tmp_ds_name(1, 1) + new_hlq = "N" + get_random_q(4) + hlqs.append(new_hlq) try: delete_data_set_or_file(hosts, data_set_name) delete_data_set_or_file(hosts, data_set_name2) - delete_data_set_or_file(hosts, DATA_SET_BACKUP_LOCATION) + delete_data_set_or_file(hosts, data_set_backup_location) create_sequential_data_set_with_contents( hosts, data_set_name, DATA_SET_CONTENTS ) @@ -462,35 +500,40 @@ def test_backup_and_restore_of_multiple_data_sets_by_hlq(ansible_zos_module): results = hosts.all.zos_backup_restore( operation="backup", data_sets=dict(include=data_sets_hlq), - backup_name=DATA_SET_BACKUP_LOCATION, + backup_name=data_set_backup_location, ) assert_module_did_not_fail(results) - assert_data_set_or_file_exists(hosts, DATA_SET_BACKUP_LOCATION) + assert_data_set_or_file_exists(hosts, data_set_backup_location) results = hosts.all.zos_backup_restore( operation="restore", - backup_name=DATA_SET_BACKUP_LOCATION, + backup_name=data_set_backup_location, overwrite=True, recover=True, - hlq=NEW_HLQ, + hlq=new_hlq, ) assert_module_did_not_fail(results) - assert_data_set_exists(hosts, DATA_SET_BACKUP_LOCATION) + assert_data_set_exists(hosts, data_set_backup_location) finally: delete_data_set_or_file(hosts, data_set_name) delete_data_set_or_file(hosts, data_set_name2) - delete_data_set_or_file(hosts, DATA_SET_BACKUP_LOCATION) - delete_remnants(hosts) + delete_data_set_or_file(hosts, data_set_backup_location) + delete_remnants(hosts, hlqs) def test_backup_and_restore_exclude_from_pattern(ansible_zos_module): + hlqs = [] hosts = ansible_zos_module data_set_name = get_tmp_ds_name() data_set_name2 = get_tmp_ds_name() + data_set_restore_location2 = get_tmp_ds_name(1, 1) + data_set_backup_location = get_tmp_ds_name(1, 1) + new_hlq = "N" + get_random_q(4) + hlqs.append(new_hlq) try: delete_data_set_or_file(hosts, data_set_name) delete_data_set_or_file(hosts, data_set_name2) - delete_data_set_or_file(hosts, DATA_SET_RESTORE_LOCATION2) - delete_data_set_or_file(hosts, DATA_SET_BACKUP_LOCATION) + delete_data_set_or_file(hosts, data_set_restore_location2) + delete_data_set_or_file(hosts, data_set_backup_location) create_sequential_data_set_with_contents( hosts, data_set_name, DATA_SET_CONTENTS ) @@ -500,63 +543,72 @@ def test_backup_and_restore_exclude_from_pattern(ansible_zos_module): results = hosts.all.zos_backup_restore( operation="backup", data_sets=dict(include="ANSIBLE.**", exclude=data_set_name2), - backup_name=DATA_SET_BACKUP_LOCATION, + backup_name=data_set_backup_location, ) assert_module_did_not_fail(results) - assert_data_set_or_file_exists(hosts, DATA_SET_BACKUP_LOCATION) + assert_data_set_or_file_exists(hosts, data_set_backup_location) results = hosts.all.zos_backup_restore( operation="restore", - backup_name=DATA_SET_BACKUP_LOCATION, + backup_name=data_set_backup_location, overwrite=True, recover=True, - hlq=NEW_HLQ, + hlq=new_hlq, ) assert_module_did_not_fail(results) - assert_data_set_exists(hosts, DATA_SET_BACKUP_LOCATION) - assert_data_set_does_not_exist(hosts, DATA_SET_RESTORE_LOCATION2) + assert_data_set_exists(hosts, data_set_backup_location) + assert_data_set_does_not_exist(hosts, data_set_restore_location2) finally: delete_data_set_or_file(hosts, data_set_name) delete_data_set_or_file(hosts, data_set_name2) - delete_data_set_or_file(hosts, DATA_SET_RESTORE_LOCATION2) - delete_data_set_or_file(hosts, DATA_SET_BACKUP_LOCATION) - delete_remnants(hosts) + delete_data_set_or_file(hosts, data_set_restore_location2) + delete_data_set_or_file(hosts, data_set_backup_location) + delete_remnants(hosts, hlqs) @pytest.mark.parametrize( "backup_name", [ - DATA_SET_BACKUP_LOCATION, - DATA_SET_BACKUP_LOCATION, - UNIX_BACKUP_LOCATION, - UNIX_BACKUP_LOCATION, + "DATA_SET", + "DATA_SET", + "UNIX", + "UNIX", ], ) def test_restore_of_data_set_when_backup_does_not_exist( ansible_zos_module, backup_name ): + hlqs = [] hosts = ansible_zos_module + data_set_restore_location = get_tmp_ds_name(2, 2) + if backup_name == "DATA_SET": + backup_name = get_tmp_ds_name(1,1) + else: + backup_name = get_unique_uss_file_name() + new_hlq = "N" + get_random_q(4) + hlqs.append(new_hlq) try: - delete_data_set_or_file(hosts, DATA_SET_RESTORE_LOCATION) + delete_data_set_or_file(hosts, data_set_restore_location) delete_data_set_or_file(hosts, backup_name) results = hosts.all.zos_backup_restore( operation="restore", backup_name=backup_name, - hlq=NEW_HLQ, + hlq=new_hlq, ) assert_module_failed(results) finally: - delete_data_set_or_file(hosts, DATA_SET_RESTORE_LOCATION) + delete_data_set_or_file(hosts, data_set_restore_location) delete_data_set_or_file(hosts, backup_name) - delete_remnants(hosts) + delete_remnants(hosts, hlqs) + @pytest.mark.parametrize( "backup_name", [ - DATA_SET_BACKUP_LOCATION, - DATA_SET_BACKUP_LOCATION, - UNIX_BACKUP_LOCATION, - UNIX_BACKUP_LOCATION, + "DATA_SET", + "DATA_SET", + "UNIX", + "UNIX", ], ) def test_backup_of_data_set_when_data_set_does_not_exist( @@ -579,12 +631,14 @@ def test_backup_of_data_set_when_data_set_does_not_exist( delete_data_set_or_file(hosts, backup_name) delete_remnants(hosts) + def test_backup_of_data_set_when_volume_does_not_exist(ansible_zos_module): hosts = ansible_zos_module data_set_name = get_tmp_ds_name() + data_set_backup_location = get_tmp_ds_name(1, 1) try: delete_data_set_or_file(hosts, data_set_name) - delete_data_set_or_file(hosts, DATA_SET_BACKUP_LOCATION) + delete_data_set_or_file(hosts, data_set_backup_location) create_sequential_data_set_with_contents( hosts, data_set_name, DATA_SET_CONTENTS ) @@ -593,47 +647,52 @@ def test_backup_of_data_set_when_volume_does_not_exist(ansible_zos_module): data_sets=dict(include=data_set_name), # volume=get_unused_volume_serial(hosts), volume="@@@@", - backup_name=DATA_SET_BACKUP_LOCATION, + backup_name=data_set_backup_location, ) assert_module_failed(results) - assert_data_set_does_not_exist(hosts, DATA_SET_BACKUP_LOCATION) + assert_data_set_does_not_exist(hosts, data_set_backup_location) finally: delete_data_set_or_file(hosts, data_set_name) - delete_data_set_or_file(hosts, DATA_SET_BACKUP_LOCATION) + delete_data_set_or_file(hosts, data_set_backup_location) delete_remnants(hosts) def test_restore_of_data_set_when_volume_does_not_exist(ansible_zos_module): + hlqs = [] hosts = ansible_zos_module data_set_name = get_tmp_ds_name() + data_set_restore_location = get_tmp_ds_name() + data_set_backup_location = get_tmp_ds_name(1, 1) + new_hlq = "N" + get_random_q(4) + hlqs.append(new_hlq) try: delete_data_set_or_file(hosts, data_set_name) - delete_data_set_or_file(hosts, DATA_SET_RESTORE_LOCATION) - delete_data_set_or_file(hosts, DATA_SET_BACKUP_LOCATION) + delete_data_set_or_file(hosts, data_set_restore_location) + delete_data_set_or_file(hosts, data_set_backup_location) create_sequential_data_set_with_contents( hosts, data_set_name, DATA_SET_CONTENTS ) results = hosts.all.zos_backup_restore( operation="backup", data_sets=dict(include=data_set_name), - backup_name=DATA_SET_BACKUP_LOCATION, + backup_name=data_set_backup_location, ) assert_module_did_not_fail(results) - assert_data_set_or_file_exists(hosts, DATA_SET_BACKUP_LOCATION) + assert_data_set_or_file_exists(hosts, data_set_backup_location) results = hosts.all.zos_backup_restore( operation="restore", - backup_name=DATA_SET_BACKUP_LOCATION, - hlq=NEW_HLQ, + backup_name=data_set_backup_location, + hlq=new_hlq, # volume=get_unused_volume_serial(hosts), volume="@@@@", ) assert_module_failed(results) - assert_data_set_does_not_exist(hosts, DATA_SET_RESTORE_LOCATION) + assert_data_set_does_not_exist(hosts, data_set_restore_location) finally: delete_data_set_or_file(hosts, data_set_name) - delete_data_set_or_file(hosts, DATA_SET_RESTORE_LOCATION) - delete_data_set_or_file(hosts, DATA_SET_BACKUP_LOCATION) - delete_remnants(hosts) + delete_data_set_or_file(hosts, data_set_restore_location) + delete_data_set_or_file(hosts, data_set_backup_location) + delete_remnants(hosts, hlqs) # def test_backup_and_restore_of_data_set_from_volume_to_new_volume(ansible_zos_module): From fa19159a560ded8aa31822274f55ed2a88ef115d Mon Sep 17 00:00:00 2001 From: Ketan Kelkar <ktnklkr@gmail.com> Date: Mon, 12 Aug 2024 09:53:18 -0700 Subject: [PATCH 446/495] Fix modules failing with verbosity with ansible-core:2.17 (#1640) * switch default value of ignore_sftp_stderr from False to True in zos_copy and zos_fetch modules Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add changelog fragment Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * restore mistakenly removed portion of docs Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> --- ...40-quick-fix-ansible-core:2.17-verbosity-issue | 7 +++++++ plugins/action/zos_copy.py | 2 +- plugins/action/zos_fetch.py | 2 +- plugins/modules/zos_copy.py | 15 ++++++++------- plugins/modules/zos_fetch.py | 15 ++++++++------- 5 files changed, 25 insertions(+), 16 deletions(-) create mode 100644 changelogs/fragments/1640-quick-fix-ansible-core:2.17-verbosity-issue diff --git a/changelogs/fragments/1640-quick-fix-ansible-core:2.17-verbosity-issue b/changelogs/fragments/1640-quick-fix-ansible-core:2.17-verbosity-issue new file mode 100644 index 000000000..0b8ffe9a6 --- /dev/null +++ b/changelogs/fragments/1640-quick-fix-ansible-core:2.17-verbosity-issue @@ -0,0 +1,7 @@ +bugfixes: + - zos_copy - module would fail when an internal SFTP command wrote output to + stderr. Fix sets default value of existing module option `ignore_sftp_error` to True + (https://github.com/ansible-collections/ibm_zos_core/pull/1640). + - zos_fetch - module would fail when an internal SFTP command wrote output to + stderr. Fix sets default value of existing module option `ignore_sftp_error` to True + (https://github.com/ansible-collections/ibm_zos_core/pull/1640). \ No newline at end of file diff --git a/plugins/action/zos_copy.py b/plugins/action/zos_copy.py index 62bde96bf..8561045e5 100644 --- a/plugins/action/zos_copy.py +++ b/plugins/action/zos_copy.py @@ -61,7 +61,7 @@ def run(self, tmp=None, task_vars=None): force_lock = _process_boolean(task_args.get('force_lock'), default=False) executable = _process_boolean(task_args.get('executable'), default=False) asa_text = _process_boolean(task_args.get('asa_text'), default=False) - ignore_sftp_stderr = _process_boolean(task_args.get("ignore_sftp_stderr"), default=False) + ignore_sftp_stderr = _process_boolean(task_args.get("ignore_sftp_stderr"), default=True) backup_name = task_args.get("backup_name", None) encoding = task_args.get("encoding", None) mode = task_args.get("mode", None) diff --git a/plugins/action/zos_fetch.py b/plugins/action/zos_fetch.py index 192b9ce6f..c3e4ec1ee 100644 --- a/plugins/action/zos_fetch.py +++ b/plugins/action/zos_fetch.py @@ -123,7 +123,7 @@ def run(self, tmp=None, task_vars=None): flat = _process_boolean(self._task.args.get('flat'), default=False) is_binary = _process_boolean(self._task.args.get('is_binary')) ignore_sftp_stderr = _process_boolean( - self._task.args.get("ignore_sftp_stderr"), default=False + self._task.args.get("ignore_sftp_stderr"), default=True ) validate_checksum = _process_boolean( self._task.args.get("validate_checksum"), default=True diff --git a/plugins/modules/zos_copy.py b/plugins/modules/zos_copy.py index 6da0232a2..80a9a034f 100644 --- a/plugins/modules/zos_copy.py +++ b/plugins/modules/zos_copy.py @@ -201,18 +201,19 @@ required: false ignore_sftp_stderr: description: - - During data transfer through SFTP, the module fails if the SFTP command - directs any content to stderr. The user is able to override this - behavior by setting this parameter to C(true). By doing so, the module - would essentially ignore the stderr stream produced by SFTP and continue - execution. + - During data transfer through SFTP, the SFTP command directs content to + stderr. By default, the module essentially ignores the stderr stream + produced by SFTP and continues execution. The user is able to override + this behavior by setting this parameter to C(false). By doing so, any + content written to stderr is considered an error by Ansible and will + have module fail. - When Ansible verbosity is set to greater than 3, either through the command line interface (CLI) using B(-vvvv) or through environment variables such as B(verbosity = 4), then this parameter will automatically be set to C(true). type: bool required: false - default: false + default: true version_added: "1.4.0" is_binary: description: @@ -3822,7 +3823,7 @@ def main(): backup_name=dict(type='str'), local_follow=dict(type='bool', default=True), remote_src=dict(type='bool', default=False), - ignore_sftp_stderr=dict(type='bool', default=False), + ignore_sftp_stderr=dict(type='bool', default=True), validate=dict(type='bool', default=False), volume=dict(type='str', required=False), dest_data_set=dict( diff --git a/plugins/modules/zos_fetch.py b/plugins/modules/zos_fetch.py index 574c5923c..9d22b58b6 100644 --- a/plugins/modules/zos_fetch.py +++ b/plugins/modules/zos_fetch.py @@ -122,18 +122,19 @@ type: str ignore_sftp_stderr: description: - - During data transfer through sftp, the module fails if the sftp command - directs any content to stderr. The user is able to override this - behavior by setting this parameter to C(true). By doing so, the module - would essentially ignore the stderr stream produced by sftp and continue - execution. + - During data transfer through SFTP, the SFTP command directs content to + stderr. By default, the module essentially ignores the stderr stream + produced by SFTP and continues execution. The user is able to override + this behavior by setting this parameter to C(false). By doing so, any + content written to stderr is considered an error by Ansible and will + have module fail. - When Ansible verbosity is set to greater than 3, either through the command line interface (CLI) using B(-vvvv) or through environment variables such as B(verbosity = 4), then this parameter will automatically be set to C(true). type: bool required: false - default: false + default: true notes: - When fetching PDSE and VSAM data sets, temporary storage will be used on the remote z/OS system. After the PDSE or VSAM data set is @@ -821,7 +822,7 @@ def run_module(): use_qualifier=dict(required=False, default=False, type="bool"), validate_checksum=dict(required=False, default=True, type="bool"), encoding=dict(required=False, type="dict"), - ignore_sftp_stderr=dict(type="bool", default=False, required=False), + ignore_sftp_stderr=dict(type="bool", default=True, required=False), tmp_hlq=dict(required=False, type="str", default=None), ) ) From e24407964ee3aa616994bbe2c1b12a720a6677c8 Mon Sep 17 00:00:00 2001 From: Demetri <dimatos@gmail.com> Date: Tue, 13 Aug 2024 09:21:01 -0700 Subject: [PATCH 447/495] Update module with RACF authority doc (#1647) * Update module with RACF authority doc Signed-off-by: ddimatos <dimatos@gmail.com> * Added changelog fragement for the PR Signed-off-by: ddimatos <dimatos@gmail.com> * Update test case f-string Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> --- .../1647-doc-backup-restore-racf-class.yml | 5 + docs/source/modules/zos_backup_restore.rst | 95 ++++++++++++------- plugins/modules/zos_backup_restore.py | 13 +++ .../modules/test_zos_backup_restore.py | 2 +- 4 files changed, 79 insertions(+), 36 deletions(-) create mode 100644 changelogs/fragments/1647-doc-backup-restore-racf-class.yml diff --git a/changelogs/fragments/1647-doc-backup-restore-racf-class.yml b/changelogs/fragments/1647-doc-backup-restore-racf-class.yml new file mode 100644 index 000000000..07c0abcf6 --- /dev/null +++ b/changelogs/fragments/1647-doc-backup-restore-racf-class.yml @@ -0,0 +1,5 @@ +trivial: + - zos_backup_restore - Added supplemental documentation explaining the RACF class + requirement when using module option restore. + (https://github.com/ansible-collections/ibm_zos_core/pull/1647). + diff --git a/docs/source/modules/zos_backup_restore.rst b/docs/source/modules/zos_backup_restore.rst index e8216dd3e..9d6656ac3 100644 --- a/docs/source/modules/zos_backup_restore.rst +++ b/docs/source/modules/zos_backup_restore.rst @@ -47,34 +47,38 @@ data_sets include - When \ :emphasis:`operation=backup`\ , specifies a list of data sets or data set patterns to include in the backup. + When *operation=backup*, specifies a list of data sets or data set patterns to include in the backup. - When \ :emphasis:`operation=restore`\ , specifies a list of data sets or data set patterns to include when restoring from a backup. + When *operation=backup* GDS relative names are supported. - The single asterisk, \ :literal:`\*`\ , is used in place of exactly one qualifier. In addition, it can be used to indicate to DFSMSdss that only part of a qualifier has been specified. + When *operation=restore*, specifies a list of data sets or data set patterns to include when restoring from a backup. - When used with other qualifiers, the double asterisk, \ :literal:`\*\*`\ , indicates either the nonexistence of leading, trailing, or middle qualifiers, or the fact that they play no role in the selection process. + The single asterisk, ``*``, is used in place of exactly one qualifier. In addition, it can be used to indicate to DFSMSdss that only part of a qualifier has been specified. + + When used with other qualifiers, the double asterisk, ``**``, indicates either the nonexistence of leading, trailing, or middle qualifiers, or the fact that they play no role in the selection process. Two asterisks are the maximum permissible in a qualifier. If there are two asterisks in a qualifier, they must be the first and last characters. - A question mark \ :literal:`?`\ or percent sign \ :literal:`%`\ matches a single character. + A question mark ``?`` or percent sign ``%`` matches a single character. | **required**: False | **type**: raw exclude - When \ :emphasis:`operation=backup`\ , specifies a list of data sets or data set patterns to exclude from the backup. + When *operation=backup*, specifies a list of data sets or data set patterns to exclude from the backup. + + When *operation=backup* GDS relative names are supported. - When \ :emphasis:`operation=restore`\ , specifies a list of data sets or data set patterns to exclude when restoring from a backup. + When *operation=restore*, specifies a list of data sets or data set patterns to exclude when restoring from a backup. - The single asterisk, \ :literal:`\*`\ , is used in place of exactly one qualifier. In addition, it can be used to indicate that only part of a qualifier has been specified." + The single asterisk, ``*``, is used in place of exactly one qualifier. In addition, it can be used to indicate that only part of a qualifier has been specified." - When used with other qualifiers, the double asterisk, \ :literal:`\*\*`\ , indicates either the nonexistence of leading, trailing, or middle qualifiers, or the fact that they play no role in the selection process. + When used with other qualifiers, the double asterisk, ``**``, indicates either the nonexistence of leading, trailing, or middle qualifiers, or the fact that they play no role in the selection process. Two asterisks are the maximum permissible in a qualifier. If there are two asterisks in a qualifier, they must be the first and last characters. - A question mark \ :literal:`?`\ or percent sign \ :literal:`%`\ matches a single character. + A question mark ``?`` or percent sign ``%`` matches a single character. | **required**: False | **type**: raw @@ -84,22 +88,22 @@ data_sets volume This applies to both data set restores and volume restores. - When \ :emphasis:`operation=backup`\ and \ :emphasis:`data\_sets`\ are provided, specifies the volume that contains the data sets to backup. + When *operation=backup* and *data_sets* are provided, specifies the volume that contains the data sets to backup. - When \ :emphasis:`operation=restore`\ , specifies the volume the backup should be restored to. + When *operation=restore*, specifies the volume the backup should be restored to. - \ :emphasis:`volume`\ is required when restoring a full volume backup. + *volume* is required when restoring a full volume backup. | **required**: False | **type**: str full_volume - When \ :emphasis:`operation=backup`\ and \ :emphasis:`full\_volume=True`\ , specifies that the entire volume provided to \ :emphasis:`volume`\ should be backed up. + When *operation=backup* and *full_volume=True*, specifies that the entire volume provided to *volume* should be backed up. - When \ :emphasis:`operation=restore`\ and \ :emphasis:`full\_volume=True`\ , specifies that the volume should be restored (default is dataset). + When *operation=restore* and *full_volume=True*, specifies that the volume should be restored (default is dataset). - \ :emphasis:`volume`\ must be provided when \ :emphasis:`full\_volume=True`\ . + *volume* must be provided when *full_volume=True*. | **required**: False | **type**: bool @@ -109,18 +113,20 @@ full_volume temp_volume Specifies a particular volume on which the temporary data sets should be created during the backup and restore process. - When \ :emphasis:`operation=backup`\ and \ :emphasis:`backup\_name`\ is a data set, specifies the volume the backup should be placed in. + When *operation=backup* and *backup_name* is a data set, specifies the volume the backup should be placed in. | **required**: False | **type**: str backup_name - When \ :emphasis:`operation=backup`\ , the destination data set or UNIX file to hold the backup. + When *operation=backup*, the destination data set or UNIX file to hold the backup. + + When *operation=restore*, the destination data set or UNIX file backup to restore. - When \ :emphasis:`operation=restore`\ , the destination data set or UNIX file backup to restore. + There are no enforced conventions for backup names. However, using a common extension like ``.dzp`` for UNIX files and ``.DZP`` for data sets will improve readability. - There are no enforced conventions for backup names. However, using a common extension like \ :literal:`.dzp`\ for UNIX files and \ :literal:`.DZP`\ for data sets will improve readability. + GDS relative names are supported when *operation=restore*. | **required**: True | **type**: str @@ -135,9 +141,9 @@ recover overwrite - When \ :emphasis:`operation=backup`\ , specifies if an existing data set or UNIX file matching \ :emphasis:`backup\_name`\ should be deleted. + When *operation=backup*, specifies if an existing data set or UNIX file matching *backup_name* should be deleted. - When \ :emphasis:`operation=restore`\ , specifies if the module should overwrite existing data sets with matching name on the target device. + When *operation=restore*, specifies if the module should overwrite existing data sets with matching name on the target device. | **required**: False | **type**: bool @@ -145,35 +151,35 @@ overwrite sms_storage_class - When \ :emphasis:`operation=restore`\ , specifies the storage class to use. The storage class will also be used for temporary data sets created during restore process. + When *operation=restore*, specifies the storage class to use. The storage class will also be used for temporary data sets created during restore process. - When \ :emphasis:`operation=backup`\ , specifies the storage class to use for temporary data sets created during backup process. + When *operation=backup*, specifies the storage class to use for temporary data sets created during backup process. - If neither of \ :emphasis:`sms\_storage\_class`\ or \ :emphasis:`sms\_management\_class`\ are specified, the z/OS system's Automatic Class Selection (ACS) routines will be used. + If neither of *sms_storage_class* or *sms_management_class* are specified, the z/OS system's Automatic Class Selection (ACS) routines will be used. | **required**: False | **type**: str sms_management_class - When \ :emphasis:`operation=restore`\ , specifies the management class to use. The management class will also be used for temporary data sets created during restore process. + When *operation=restore*, specifies the management class to use. The management class will also be used for temporary data sets created during restore process. - When \ :emphasis:`operation=backup`\ , specifies the management class to use for temporary data sets created during backup process. + When *operation=backup*, specifies the management class to use for temporary data sets created during backup process. - If neither of \ :emphasis:`sms\_storage\_class`\ or \ :emphasis:`sms\_management\_class`\ are specified, the z/OS system's Automatic Class Selection (ACS) routines will be used. + If neither of *sms_storage_class* or *sms_management_class* are specified, the z/OS system's Automatic Class Selection (ACS) routines will be used. | **required**: False | **type**: str space - If \ :emphasis:`operation=backup`\ , specifies the amount of space to allocate for the backup. Please note that even when backing up to a UNIX file, backup contents will be temporarily held in a data set. + If *operation=backup*, specifies the amount of space to allocate for the backup. Please note that even when backing up to a UNIX file, backup contents will be temporarily held in a data set. - If \ :emphasis:`operation=restore`\ , specifies the amount of space to allocate for data sets temporarily created during the restore process. + If *operation=restore*, specifies the amount of space to allocate for data sets temporarily created during the restore process. - The unit of space used is set using \ :emphasis:`space\_type`\ . + The unit of space used is set using *space_type*. - When \ :emphasis:`full\_volume=True`\ , \ :emphasis:`space`\ defaults to \ :literal:`1`\ , otherwise default is \ :literal:`25`\ + When *full_volume=True*, *space* defaults to ``1``, otherwise default is ``25`` | **required**: False | **type**: int @@ -182,9 +188,9 @@ space space_type The unit of measurement to use when defining data set space. - Valid units of size are \ :literal:`k`\ , \ :literal:`m`\ , \ :literal:`g`\ , \ :literal:`cyl`\ , and \ :literal:`trk`\ . + Valid units of size are ``k``, ``m``, ``g``, ``cyl``, and ``trk``. - When \ :emphasis:`full\_volume=True`\ , \ :emphasis:`space\_type`\ defaults to \ :literal:`g`\ , otherwise default is \ :literal:`m`\ + When *full_volume=True*, *space_type* defaults to ``g``, otherwise default is ``m`` | **required**: False | **type**: str @@ -203,7 +209,7 @@ hlq tmp_hlq Override the default high level qualifier (HLQ) for temporary and backup data sets. - The default HLQ is the Ansible user that executes the module and if that is not available, then the value of \ :literal:`TMPHLQ`\ is used. + The default HLQ is the Ansible user that executes the module and if that is not available, then the value of ``TMPHLQ`` is used. | **required**: False | **type**: str @@ -235,6 +241,15 @@ Examples exclude: user.private.* backup_name: MY.BACKUP.DZP + - name: Backup a list of GDDs to data set my.backup.dzp + zos_backup_restore: + operation: backup + data_sets: + include: + - user.gdg(-1) + - user.gdg(0) + backup_name: my.backup.dzp + - name: Backup all datasets matching the pattern USER.** to UNIX file /tmp/temp_backup.dzp, ignore recoverable errors. zos_backup_restore: operation: backup @@ -332,6 +347,16 @@ Examples +Notes +----- + +.. note:: + It is the playbook author or user's responsibility to ensure they have appropriate authority to the RACF FACILITY resource class. A user is described as the remote user, configured to run either the playbook or playbook tasks, who can also obtain escalated privileges to execute as root or another user. + + When using this module, if the RACF FACILITY class profile **STGADMIN.ADR.DUMP.TOLERATE.ENQF** is active, you must have READ access authority to use the module option *recover=true*. If the RACF FACILITY class checking is not set up, any user can use the module option without access to the class. + + If your system uses a different security product, consult that product's documentation to configure the required security classes. + diff --git a/plugins/modules/zos_backup_restore.py b/plugins/modules/zos_backup_restore.py index c64ed0535..cd3f4b72a 100644 --- a/plugins/modules/zos_backup_restore.py +++ b/plugins/modules/zos_backup_restore.py @@ -197,6 +197,19 @@ that is not available, then the value of C(TMPHLQ) is used. required: false type: str +notes: + - It is the playbook author or user's responsibility to ensure they have + appropriate authority to the RACF FACILITY resource class. A user is + described as the remote user, configured to run either the playbook or + playbook tasks, who can also obtain escalated privileges to execute as + root or another user. + - When using this module, if the RACF FACILITY class + profile B(STGADMIN.ADR.DUMP.TOLERATE.ENQF) is active, you must + have READ access authority to use the module option I(recover=true). + If the RACF FACILITY class checking is not set up, any user can use + the module option without access to the class. + - If your system uses a different security product, consult that product's + documentation to configure the required security classes. """ RETURN = r"""""" diff --git a/tests/functional/modules/test_zos_backup_restore.py b/tests/functional/modules/test_zos_backup_restore.py index fff5bd6aa..aa25110f8 100644 --- a/tests/functional/modules/test_zos_backup_restore.py +++ b/tests/functional/modules/test_zos_backup_restore.py @@ -828,7 +828,7 @@ def test_backup_into_gds(ansible_zos_module, dstype): assert result.get("changed") is True assert result.get("module_stderr") is None ds_to_write = f"{ds_name}(MEM)" if dstype in ['pds', 'pdse'] else ds_name - results = hosts.all.shell(cmd=f"decho 'test line' \"{ds_to_write}\"") + results = hosts.all.shell(cmd=f"decho 'test line' '{ds_to_write}'") for result in results.contacted.values(): assert result.get("changed") is True assert result.get("module_stderr") is None From b358ade7291dd3037fb7d682d8e60daf305e6e4b Mon Sep 17 00:00:00 2001 From: Ketan Kelkar <ktnklkr@gmail.com> Date: Tue, 13 Aug 2024 09:23:01 -0700 Subject: [PATCH 448/495] Updates for "non-utf8 chars" Deprecation Warning (#1634) * update run_command error algorithm - tsocmd Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update run_command error algorithm - mvsraw Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update calls to run_command Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove kwarg in wrapper for run_command Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove extra space char Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * switch from error algo from backslashreplace to replace Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update zos_mvs_raw func tests for new errors algo Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update zos mvs raw func tests for non utf8 char depr warning fixes Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add changelog fragment Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> --- ...1634-updates-for-non-utf8-depr-warning.yml | 25 ++++++++++++++ plugins/module_utils/backup.py | 6 ++-- plugins/module_utils/copy.py | 12 +++---- plugins/module_utils/data_set.py | 34 +++++++++---------- plugins/module_utils/encode.py | 14 ++++---- plugins/module_utils/mvs_cmd.py | 2 +- plugins/module_utils/vtoc.py | 2 +- plugins/module_utils/zos_mvs_raw.py | 4 +-- plugins/modules/zos_archive.py | 2 +- plugins/modules/zos_blockinfile.py | 2 +- plugins/modules/zos_copy.py | 2 +- plugins/modules/zos_fetch.py | 2 +- plugins/modules/zos_find.py | 8 ++--- plugins/modules/zos_lineinfile.py | 2 +- plugins/modules/zos_mount.py | 10 +++--- plugins/modules/zos_mvs_raw.py | 1 + plugins/modules/zos_script.py | 3 +- plugins/modules/zos_tso_command.py | 2 +- plugins/modules/zos_unarchive.py | 2 +- .../modules/test_zos_mvs_raw_func.py | 13 +++---- 20 files changed, 88 insertions(+), 60 deletions(-) create mode 100644 changelogs/fragments/1634-updates-for-non-utf8-depr-warning.yml diff --git a/changelogs/fragments/1634-updates-for-non-utf8-depr-warning.yml b/changelogs/fragments/1634-updates-for-non-utf8-depr-warning.yml new file mode 100644 index 000000000..4cb001049 --- /dev/null +++ b/changelogs/fragments/1634-updates-for-non-utf8-depr-warning.yml @@ -0,0 +1,25 @@ +minor_changes: + - zos_mvs_raw - Un-mappable chars in stdout/stderr streams are now replaced with the replacement character. + (https://github.com/ansible-collections/ibm_zos_core/pull/1634). + - zos_script - Un-mappable chars in stdout/stderr streams are now replaced with the replacement character. + (https://github.com/ansible-collections/ibm_zos_core/pull/1634). + - zos_tso_command - Un-mappable chars in stdout/stderr streams are now replaced with the replacement character. + (https://github.com/ansible-collections/ibm_zos_core/pull/1634). + +trivial: + - zos_archive - Un-mappable chars in stdout/stderr streams are now replaced with the replacement character. + (https://github.com/ansible-collections/ibm_zos_core/pull/1634). + - zos_blockinfile - Un-mappable chars in stdout/stderr streams are now replaced with the replacement character. + (https://github.com/ansible-collections/ibm_zos_core/pull/1634). + - zos_copy - Un-mappable chars in stdout/stderr streams are now replaced with the replacement character. + (https://github.com/ansible-collections/ibm_zos_core/pull/1634). + - zos_fetch - Un-mappable chars in stdout/stderr streams are now replaced with the replacement character. + (https://github.com/ansible-collections/ibm_zos_core/pull/1634). + - zos_find - Un-mappable chars in stdout/stderr streams are now replaced with the replacement character. + (https://github.com/ansible-collections/ibm_zos_core/pull/1634). + - zos_lineinfile - Un-mappable chars in stdout/stderr streams are now replaced with the replacement character. + (https://github.com/ansible-collections/ibm_zos_core/pull/1634). + - zos_mount - Un-mappable chars in stdout/stderr streams are now replaced with the replacement character. + (https://github.com/ansible-collections/ibm_zos_core/pull/1634). + - zos_unarchive - Un-mappable chars in stdout/stderr streams are now replaced with the replacement character. + (https://github.com/ansible-collections/ibm_zos_core/pull/1634). \ No newline at end of file diff --git a/plugins/module_utils/backup.py b/plugins/module_utils/backup.py index b881d6321..8499361b6 100644 --- a/plugins/module_utils/backup.py +++ b/plugins/module_utils/backup.py @@ -201,7 +201,7 @@ def uss_file_backup(path, backup_name=None, compress=False): if backup_name_provided and os.path.isdir(backup_name): backup_name += backup_base bk_cmd = "tar -cpf {0}.tar {1}".format(quote(backup_name), quote(abs_path)) - rc, out, err = module.run_command(bk_cmd) + rc, out, err = module.run_command(bk_cmd, errors='replace') if rc: raise BackupError(err) backup_name += ".tar" @@ -247,7 +247,7 @@ def _copy_ds(ds, bk_ds): ds, bk_ds ) rc, out, err = module.run_command( - "mvscmdauth --pgm=idcams --sysprint=* --sysin=stdin", data=repro_cmd + "mvscmdauth --pgm=idcams --sysprint=* --sysin=stdin", data=repro_cmd, errors='replace' ) if rc != 0 and rc != 12: datasets.delete(bk_ds) @@ -288,7 +288,7 @@ def _allocate_model(ds, model): ds, model ) cmd = "mvscmdauth --pgm=ikjeft01 --systsprt=* --systsin=stdin" - rc, out, err = module.run_command(cmd, data=alloc_cmd) + rc, out, err = module.run_command(cmd, data=alloc_cmd, errors='replace') if rc != 0: raise BackupError( "Unable to allocate data set {0}; stdout: {1}; stderr: {2}".format( diff --git a/plugins/module_utils/copy.py b/plugins/module_utils/copy.py index c42482b4f..499aecbd9 100644 --- a/plugins/module_utils/copy.py +++ b/plugins/module_utils/copy.py @@ -119,7 +119,7 @@ def copy_uss2mvs(src, dest, ds_type, is_binary=False): cp_uss2mvs = "cp -F rec {0} \"//'{1}'\"".format(quote(src), dest) if is_binary: cp_uss2mvs = cp_uss2mvs.replace("rec", "bin", 1) - rc, out, err = module.run_command(cp_uss2mvs) + rc, out, err = module.run_command(cp_uss2mvs, errors='replace') if rc: raise USSCmdExecError(cp_uss2mvs, rc, out, err) return rc, out, err @@ -161,7 +161,7 @@ def copy_ps2uss(src, dest, is_binary=False): cp_ps2uss = "cp -F rec \"//'{0}'\" {1}".format(src, quote(dest)) if is_binary: cp_ps2uss = cp_ps2uss.replace("rec", "bin", 1) - rc, out, err = module.run_command(cp_ps2uss) + rc, out, err = module.run_command(cp_ps2uss, errors='replace') if rc: raise USSCmdExecError(cp_ps2uss, rc, out, err) return rc, out, err @@ -214,7 +214,7 @@ def copy_pds2uss(src, dest, is_binary=False, asa_text=False): elif is_binary: cp_pds2uss = cp_pds2uss.replace("rec", "bin", 1) - rc, out, err = module.run_command(cp_pds2uss) + rc, out, err = module.run_command(cp_pds2uss, errors='replace') if rc: raise USSCmdExecError(cp_pds2uss, rc, out, err) @@ -292,7 +292,7 @@ def copy_uss2uss_binary(src, dest): src = _validate_path(src) dest = _validate_path(dest) cp_uss2uss = "cp -F bin {0} {1}".format(quote(src), quote(dest)) - rc, out, err = module.run_command(cp_uss2uss) + rc, out, err = module.run_command(cp_uss2uss, errors='replace') if rc: raise USSCmdExecError(cp_uss2uss, rc, out, err) return rc, out, err @@ -333,7 +333,7 @@ def copy_mvs2mvs(src, dest, is_binary=False): cp_mvs2mvs = "cp -F rec \"//'{0}'\" \"//'{1}'\"".format(src, dest) if is_binary: cp_mvs2mvs = cp_mvs2mvs.replace("rec", "bin", 1) - rc, out, err = module.run_command(cp_mvs2mvs) + rc, out, err = module.run_command(cp_mvs2mvs, errors='replace') if rc: raise USSCmdExecError(cp_mvs2mvs, rc, out, err) return rc, out, err @@ -368,7 +368,7 @@ def copy_vsam_ps(src, dest): dest = _validate_data_set_name(dest) repro_cmd = REPRO.format(src, dest) cmd = "mvscmdauth --pgm=idcams --sysprint=stdout --sysin=stdin" - rc, out, err = module.run_command(cmd, data=repro_cmd) + rc, out, err = module.run_command(cmd, data=repro_cmd, errors='replace') if rc: raise USSCmdExecError(cmd, rc, out, err) return rc, out, err diff --git a/plugins/module_utils/data_set.py b/plugins/module_utils/data_set.py index 60cf56061..d3d8123c3 100644 --- a/plugins/module_utils/data_set.py +++ b/plugins/module_utils/data_set.py @@ -439,7 +439,7 @@ def data_set_cataloged(name, volumes=None): module = AnsibleModuleHelper(argument_spec={}) stdin = " LISTCAT ENTRIES('{0}')".format(name) rc, stdout, stderr = module.run_command( - "mvscmdauth --pgm=idcams --sysprint=* --sysin=stdin", data=stdin + "mvscmdauth --pgm=idcams --sysprint=* --sysin=stdin", data=stdin, errors='replace' ) # The above 'listcat entries' command to idcams returns: @@ -473,7 +473,7 @@ def data_set_cataloged_volume_list(name): module = AnsibleModuleHelper(argument_spec={}) stdin = " LISTCAT ENTRIES('{0}') ALL".format(name) rc, stdout, stderr = module.run_command( - "mvscmdauth --pgm=idcams --sysprint=* --sysin=stdin", data=stdin + "mvscmdauth --pgm=idcams --sysprint=* --sysin=stdin", data=stdin, errors='replace' ) # The above 'listcat entries all' command to idcams returns: # rc=0 if data set found in catalog @@ -524,7 +524,7 @@ def data_set_member_exists(name): """ module = AnsibleModuleHelper(argument_spec={}) rc, stdout, stderr = module.run_command( - "head \"//'{0}'\"".format(name)) + "head \"//'{0}'\"".format(name), errors='replace') if rc != 0 or (stderr and "EDC5067I" in stderr): return False return True @@ -690,7 +690,7 @@ def _get_listcat_data(name): module = AnsibleModuleHelper(argument_spec={}) stdin = " LISTCAT ENT('{0}') DATA ALL".format(name) rc, stdout, stderr = module.run_command( - "mvscmdauth --pgm=idcams --sysprint=* --sysin=stdin", data=stdin + "mvscmdauth --pgm=idcams --sysprint=* --sysin=stdin", data=stdin, errors='replace' ) if rc != 0: @@ -718,7 +718,7 @@ def is_empty(name, volume=None): return DataSet._pds_empty(name) elif ds_type in DataSet.MVS_SEQ: module = AnsibleModuleHelper(argument_spec={}) - rc, stdout, stderr = module.run_command("head \"//'{0}'\"".format(name)) + rc, stdout, stderr = module.run_command("head \"//'{0}'\"".format(name), errors='replace') return rc == 0 and len(stdout.strip()) == 0 elif ds_type in DataSet.MVS_VSAM: return DataSet._vsam_empty(name) @@ -736,7 +736,7 @@ def _pds_empty(name): """ module = AnsibleModuleHelper(argument_spec={}) ls_cmd = "mls {0}".format(name) - rc, out, err = module.run_command(ls_cmd) + rc, out, err = module.run_command(ls_cmd, errors='replace') # RC 2 for mls means that there aren't any members. return rc == 2 @@ -759,7 +759,7 @@ def _vsam_empty(name): rc, out, err = module.run_command( "mvscmdauth --pgm=idcams --sysprint=* --sysin=stdin --mydset={0}".format( name), - data=empty_cmd, + data=empty_cmd, errors='replace' ) if rc == 4 or "VSAM OPEN RETURN CODE IS 160" in out: return True @@ -1178,7 +1178,7 @@ def create_member(name): raise DatasetNotFoundError(name) tmp_file = tempfile.NamedTemporaryFile(delete=True) rc, stdout, stderr = module.run_command( - "cp {0} \"//'{1}'\"".format(tmp_file.name, name) + "cp {0} \"//'{1}'\"".format(tmp_file.name, name), errors='replace' ) if rc != 0: raise DatasetMemberCreateError(name, rc) @@ -1228,7 +1228,7 @@ def _catalog_non_vsam(name, volumes): name.upper(), volumes) rc, stdout, stderr = module.run_command( - "mvscmdauth --pgm=iehprogm --sysprint=* --sysin=stdin", data=iehprogm_input + "mvscmdauth --pgm=iehprogm --sysprint=* --sysin=stdin", data=iehprogm_input, errors='replace' ) if rc != 0 or "NORMAL END OF TASK RETURNED" not in stdout: raise DatasetCatalogError(name, volumes, rc) @@ -1277,7 +1277,7 @@ def _catalog_vsam(name, volumes): ) command_rc, stdout, stderr = module.run_command( - "mvscmdauth --pgm=idcams --sysprint=* --sysin=stdin", data=command) + "mvscmdauth --pgm=idcams --sysprint=* --sysin=stdin", data=command, errors='replace') if command_rc == 0: success = True @@ -1292,7 +1292,7 @@ def _catalog_vsam(name, volumes): ) command_rc, stdout, stderr = module.run_command( - "mvscmdauth --pgm=idcams --sysprint=* --sysin=stdin", data=command) + "mvscmdauth --pgm=idcams --sysprint=* --sysin=stdin", data=command, errors='replace') if command_rc == 0: success = True @@ -1336,7 +1336,7 @@ def _uncatalog_non_vsam(name): DataSet.write(temp_name, iehprogm_input) rc, stdout, stderr = module.run_command( "mvscmdauth --pgm=iehprogm --sysprint=* --sysin={0}".format( - temp_name) + temp_name), errors='replace' ) if rc != 0 or "NORMAL END OF TASK RETURNED" not in stdout: raise DatasetUncatalogError(name, rc) @@ -1359,7 +1359,7 @@ def _uncatalog_vsam(name): idcams_input = DataSet._VSAM_UNCATALOG_COMMAND.format(name) rc, stdout, stderr = module.run_command( - "mvscmdauth --pgm=idcams --sysprint=* --sysin=stdin", data=idcams_input + "mvscmdauth --pgm=idcams --sysprint=* --sysin=stdin", data=idcams_input, errors='replace' ) if rc != 0: @@ -1419,7 +1419,7 @@ def _is_vsam_from_listcat(name): module = AnsibleModuleHelper(argument_spec={}) stdin = " LISTCAT ENTRIES('{0}')".format(name.upper()) rc, stdout, stderr = module.run_command( - "mvscmdauth --pgm=idcams --sysprint=* --sysin=stdin", data=stdin + "mvscmdauth --pgm=idcams --sysprint=* --sysin=stdin", data=stdin, errors='replace' ) if re.search(r"^0CLUSTER[ ]+-+[ ]+" + name + r"[ ]*$", stdout, re.MULTILINE): return True @@ -1592,7 +1592,7 @@ def format_zfs(name): """ module = AnsibleModuleHelper(argument_spec={}) rc, stdout, stderr = module.run_command( - "zfsadm format -aggregate {0}".format(name) + "zfsadm format -aggregate {0}".format(name), errors='replace' ) if rc != 0: raise DatasetFormatError( @@ -1615,7 +1615,7 @@ def write(name, contents): with open(temp.name, "w") as f: f.write(contents) rc, stdout, stderr = module.run_command( - "cp -O u {0} \"//'{1}'\"".format(temp.name, name) + "cp -O u {0} \"//'{1}'\"".format(temp.name, name), errors='replace' ) if rc != 0: raise DatasetWriteError(name, rc) @@ -1740,7 +1740,7 @@ def member_exists(self, member): """ if self.ds_type() == "PO": rc, out, err = self.module.run_command( - "head \"//'{0}({1})'\"".format(self.data_set, member) + "head \"//'{0}({1})'\"".format(self.data_set, member), errors='replace' ) if rc == 0 and not re.findall(r"EDC5067I", err): return True diff --git a/plugins/module_utils/encode.py b/plugins/module_utils/encode.py index 895cfb785..606a2a189 100644 --- a/plugins/module_utils/encode.py +++ b/plugins/module_utils/encode.py @@ -61,7 +61,7 @@ def get_default_system_charset(): system_charset = locale.getdefaultlocale()[1] if system_charset is None: module = AnsibleModuleHelper(argument_spec={}) - rc, out, err = module.run_command("locale -c charmap") + rc, out, err = module.run_command("locale -c charmap", errors='replace') if rc != 0 or not out or err: if system.is_zos(): system_charset = Defaults.DEFAULT_EBCDIC_USS_CHARSET @@ -188,7 +188,7 @@ def listdsi_data_set(self, ds): space_u = 1024 listcat_cmd = " LISTCAT ENT('{0}') ALL".format(ds) cmd = "mvscmdauth --pgm=ikjeft01 --systsprt=stdout --systsin=stdin" - rc, out, err = self.module.run_command(cmd, data=listcat_cmd) + rc, out, err = self.module.run_command(cmd, data=listcat_cmd, errors='replace') if rc: raise EncodeError(err) if out: @@ -279,7 +279,7 @@ def get_codeset(self): """ code_set = None iconv_list_cmd = ["iconv", "-l"] - rc, out, err = self.module.run_command(iconv_list_cmd) + rc, out, err = self.module.run_command(iconv_list_cmd, errors='replace') if rc: raise EncodeError(err) if out: @@ -314,7 +314,7 @@ def string_convert_encoding(self, src, from_encoding, to_encoding): iconv_cmd = "printf {0} | iconv -f {1} -t {2}".format( quote(src), quote(from_encoding), quote(to_encoding) ) - rc, out, err = self.module.run_command(iconv_cmd, use_unsafe_shell=True) + rc, out, err = self.module.run_command(iconv_cmd, use_unsafe_shell=True, errors='replace') if rc: raise EncodeError(err) return out @@ -359,7 +359,7 @@ def uss_convert_encoding(self, src, dest, from_code, to_code): quote(from_code), quote(to_code), quote(src), quote(temp_fi) ) try: - rc, out, err = self.module.run_command(iconv_cmd, use_unsafe_shell=True) + rc, out, err = self.module.run_command(iconv_cmd, use_unsafe_shell=True, errors='replace') if rc: raise EncodeError(err) if dest == temp_fi: @@ -576,7 +576,7 @@ def uss_tag_encoding(self, file_path, tag): is_dir = os.path.isdir(file_path) tag_cmd = "chtag -{0}c {1} {2}".format("R" if is_dir else "t", tag, file_path) - rc, out, err = self.module.run_command(tag_cmd) + rc, out, err = self.module.run_command(tag_cmd, errors='replace') if rc != 0: raise TaggingError(file_path, tag, rc, out, err) @@ -600,7 +600,7 @@ def uss_file_tag(self, file_path): try: tag_cmd = "ls -T {0}".format(file_path) - rc, stdout, stderr = self.module.run_command(tag_cmd) + rc, stdout, stderr = self.module.run_command(tag_cmd, errors='replace') if rc != 0: return None diff --git a/plugins/module_utils/mvs_cmd.py b/plugins/module_utils/mvs_cmd.py index b24bdaf5b..560184477 100644 --- a/plugins/module_utils/mvs_cmd.py +++ b/plugins/module_utils/mvs_cmd.py @@ -301,4 +301,4 @@ def _run_mvs_command(pgm, cmd, dd=None, authorized=False): for k, v in dd.items(): mvscmd += " --{0}={1}".format(k, v) - return module.run_command(mvscmd, data=cmd) + return module.run_command(mvscmd, data=cmd, errors='replace') diff --git a/plugins/module_utils/vtoc.py b/plugins/module_utils/vtoc.py index 12cd25656..309d73c1e 100644 --- a/plugins/module_utils/vtoc.py +++ b/plugins/module_utils/vtoc.py @@ -121,7 +121,7 @@ def _iehlist(dd, stdin): response = None rc, stdout, stderr = module.run_command( "mvscmd --pgm=iehlist --sysprint=* --dd={0} --sysin=stdin ".format(dd), - data=stdin, + data=stdin, errors='replace' ) if rc == 0: response = stdout diff --git a/plugins/module_utils/zos_mvs_raw.py b/plugins/module_utils/zos_mvs_raw.py index d6251a69d..74e67ddd8 100644 --- a/plugins/module_utils/zos_mvs_raw.py +++ b/plugins/module_utils/zos_mvs_raw.py @@ -59,7 +59,7 @@ def execute(pgm, dds, parm="", debug=False, verbose=False, tmp_hlq=None): "--tmphlq={0}".format(tmp_hlq.upper()) if tmp_hlq else "", MVSCmd._build_command(pgm, dds, parm), ) - rc, out, err = module.run_command(command) + rc, out, err = module.run_command(command, errors='replace') if rc == 0 and verbose: out = err return MVSCmdResponse(rc, out, err) @@ -91,7 +91,7 @@ def execute_authorized(pgm, dds, parm="", debug=False, verbose=False, tmp_hlq=No "--tmphlq={0}".format(tmp_hlq.upper()) if tmp_hlq else "", MVSCmd._build_command(pgm, dds, parm), ) - rc, out, err = module.run_command(command) + rc, out, err = module.run_command(command, errors='replace') if rc == 0 and verbose: out = err return MVSCmdResponse(rc, out, err) diff --git a/plugins/modules/zos_archive.py b/plugins/modules/zos_archive.py index 08e2111a9..b9c825902 100644 --- a/plugins/modules/zos_archive.py +++ b/plugins/modules/zos_archive.py @@ -1219,7 +1219,7 @@ def _get_checksums(self, src): The SHA256 hash of the contents of input file. """ sha256_cmd = "sha256 \"//'{0}'\"".format(src) - rc, out, err = self.module.run_command(sha256_cmd) + rc, out, err = self.module.run_command(sha256_cmd, errors='replace') checksums = out.split("= ") if len(checksums) > 0: return checksums[1] diff --git a/plugins/modules/zos_blockinfile.py b/plugins/modules/zos_blockinfile.py index 8c1485152..a5fd05f45 100644 --- a/plugins/modules/zos_blockinfile.py +++ b/plugins/modules/zos_blockinfile.py @@ -583,7 +583,7 @@ def execute_dmod(src, block, marker, force, encoding, state, module, ins_bef=Non else: cmd = """dmod -b {0} {1} {2} {3}""".format(force, encoding, marker, src) - rc, stdout, stderr = module.run_command(cmd) + rc, stdout, stderr = module.run_command(cmd, errors='replace') cmd = clean_command(cmd) return rc, cmd diff --git a/plugins/modules/zos_copy.py b/plugins/modules/zos_copy.py index 80a9a034f..3c61e40c7 100644 --- a/plugins/modules/zos_copy.py +++ b/plugins/modules/zos_copy.py @@ -1042,7 +1042,7 @@ def run_command(self, cmd, **kwargs): tuple(int, str, str) A tuple of return code, stdout and stderr. """ - return self.module.run_command(cmd, **kwargs) + return self.module.run_command(cmd, errors='replace', **kwargs) def copy_to_seq( self, diff --git a/plugins/modules/zos_fetch.py b/plugins/modules/zos_fetch.py index 9d22b58b6..92f1086fd 100644 --- a/plugins/modules/zos_fetch.py +++ b/plugins/modules/zos_fetch.py @@ -350,7 +350,7 @@ def _run_command(self, cmd, **kwargs): tuple(int,str,str) Return code, standard output and standard error. """ - return self.module.run_command(cmd, **kwargs) + return self.module.run_command(cmd, errors='replace', **kwargs) def _get_vsam_size(self, vsam): """Invoke IDCAMS LISTCAT command to get the record length and space used. diff --git a/plugins/modules/zos_find.py b/plugins/modules/zos_find.py index de272bfd0..4bea0539d 100644 --- a/plugins/modules/zos_find.py +++ b/plugins/modules/zos_find.py @@ -420,7 +420,7 @@ def data_set_filter(module, pds_paths, patterns): if result[1] == "PO": if pds_paths: mls_rc, mls_out, mls_err = module.run_command( - "mls '{0}(*)'".format(result[0]) + "mls '{0}(*)'".format(result[0]), errors='replace' ) if mls_rc == 2: filtered_data_sets["pds"][result[0]] = {} @@ -876,7 +876,7 @@ def _dgrep_wrapper( dgrep_cmd += " -C{0}".format(context) dgrep_cmd += " {0} {1}".format(quote(content), quote(data_set_pattern)) - return AnsibleModuleHelper(argument_spec={}).run_command(dgrep_cmd) + return AnsibleModuleHelper(argument_spec={}).run_command(dgrep_cmd, errors='replace') def _dls_wrapper( @@ -933,7 +933,7 @@ def _dls_wrapper( dls_cmd += " -j" dls_cmd += " {0}".format(quote(data_set_pattern)) - return AnsibleModuleHelper(argument_spec={}).run_command(dls_cmd) + return AnsibleModuleHelper(argument_spec={}).run_command(dls_cmd, errors='replace') def _vls_wrapper(pattern, details=False, verbose=False): @@ -960,7 +960,7 @@ def _vls_wrapper(pattern, details=False, verbose=False): vls_cmd += " -v" vls_cmd += " {0}".format(quote(pattern)) - return AnsibleModuleHelper(argument_spec={}).run_command(vls_cmd) + return AnsibleModuleHelper(argument_spec={}).run_command(vls_cmd, errors='replace') def _match_resource_type(type1, type2): diff --git a/plugins/modules/zos_lineinfile.py b/plugins/modules/zos_lineinfile.py index 38fb5d116..d3aa3b6b6 100644 --- a/plugins/modules/zos_lineinfile.py +++ b/plugins/modules/zos_lineinfile.py @@ -483,7 +483,7 @@ def execute_dsed(src, state, encoding, module, line=False, first_match=False, fo cmd = "dsed {0}{1}{2}{3}".format(force, backrefs, encoding, options) - rc, stdout, stderr = module.run_command(cmd) + rc, stdout, stderr = module.run_command(cmd, errors='replace') cmd = clean_command_output(cmd) return rc, cmd, stdout diff --git a/plugins/modules/zos_mount.py b/plugins/modules/zos_mount.py index cabf94b0f..85f4638aa 100644 --- a/plugins/modules/zos_mount.py +++ b/plugins/modules/zos_mount.py @@ -846,7 +846,7 @@ def run_module(module, arg_def): # Need to see if mountpoint is in use for idempotence currently_mounted = False - rc, stdout, stderr = module.run_command("df", use_unsafe_shell=False) + rc, stdout, stderr = module.run_command("df", use_unsafe_shell=False, errors='replace') if rc != 0: module.fail_json( @@ -1003,7 +1003,7 @@ def run_module(module, arg_def): # ) fullumcmd = "tsocmd " + fullumcmd (rc, stdout, stderr) = module.run_command( - fullumcmd, use_unsafe_shell=False + fullumcmd, use_unsafe_shell=False, errors='replace' ) currently_mounted = False except Exception as err: @@ -1022,7 +1022,7 @@ def run_module(module, arg_def): # ) fullcmd = "tsocmd " + fullcmd (rc, stdout, stderr) = module.run_command( - fullcmd, use_unsafe_shell=False + fullcmd, use_unsafe_shell=False, errors='replace' ) except Exception as err: msg = "Exception occurrend when running mount: {0}".format(str(err)) @@ -1048,7 +1048,7 @@ def run_module(module, arg_def): copy_ps2uss(data_store, tmp_file_filename, False) module.run_command( - "chtag -tc ISO8859-1 " + tmp_file_filename, use_unsafe_shell=False + "chtag -tc ISO8859-1 " + tmp_file_filename, use_unsafe_shell=False, errors='replace' ) with open(tmp_file_filename, "r") as fh: @@ -1074,7 +1074,7 @@ def run_module(module, arg_def): fh.close() # pre-clear to prevent caching behavior on the copy-back module.run_command( - "mrm " + data_store, use_unsafe_shell=False + "mrm " + data_store, use_unsafe_shell=False, errors='replace' ) copy_uss2mvs(tmp_file_filename, data_store, "", True) diff --git a/plugins/modules/zos_mvs_raw.py b/plugins/modules/zos_mvs_raw.py index b382baf25..e3c8d4c6d 100644 --- a/plugins/modules/zos_mvs_raw.py +++ b/plugins/modules/zos_mvs_raw.py @@ -3114,6 +3114,7 @@ def get_content(formatted_name, binary=False, from_encoding=None, to_encoding=No "cat {0}{1}".format(formatted_name, conversion_command), use_unsafe_shell=True, environ_update=ENCODING_ENVIRONMENT_VARS, + errors='replace' ) if rc: return "" diff --git a/plugins/modules/zos_script.py b/plugins/modules/zos_script.py index b07853617..a9da395c4 100644 --- a/plugins/modules/zos_script.py +++ b/plugins/modules/zos_script.py @@ -372,7 +372,8 @@ def run_module(): cmd_str = cmd_str.strip() script_rc, stdout, stderr = module.run_command( cmd_str, - cwd=chdir + cwd=chdir, + errors='replace' ) result = dict( diff --git a/plugins/modules/zos_tso_command.py b/plugins/modules/zos_tso_command.py index 017e88cf1..51bc2377a 100644 --- a/plugins/modules/zos_tso_command.py +++ b/plugins/modules/zos_tso_command.py @@ -196,7 +196,7 @@ def copy_rexx_and_run_commands(script, commands, module, max_rc): f.write(script) chmod(tmp_file.name, S_IEXEC | S_IREAD | S_IWRITE) for command in commands: - rc, stdout, stderr = module.run_command([tmp_file.name, command]) + rc, stdout, stderr = module.run_command([tmp_file.name, command], errors='replace') command_results = {} command_results["command"] = command command_results["rc"] = rc diff --git a/plugins/modules/zos_unarchive.py b/plugins/modules/zos_unarchive.py index 43312f449..258d9972b 100644 --- a/plugins/modules/zos_unarchive.py +++ b/plugins/modules/zos_unarchive.py @@ -831,7 +831,7 @@ def extract_src(self): def _list_content(self, source): restore_cmd = " RESTORE INDD(ARCHIVE) DS(INCL(**)) " cmd = " mvscmdauth --pgm=ADRDSSU --archive={0},old --args='TYPRUN=NORUN' --sysin=stdin --sysprint=*".format(source) - rc, out, err = self.module.run_command(cmd, data=restore_cmd) + rc, out, err = self.module.run_command(cmd, data=restore_cmd, errors='replace') self._get_restored_datasets(out) def list_archive_content(self): diff --git a/tests/functional/modules/test_zos_mvs_raw_func.py b/tests/functional/modules/test_zos_mvs_raw_func.py index 00dd56e31..230367175 100644 --- a/tests/functional/modules/test_zos_mvs_raw_func.py +++ b/tests/functional/modules/test_zos_mvs_raw_func.py @@ -563,7 +563,7 @@ def test_record_formats(ansible_zos_module, record_format, volumes_on_systems): ("text", "IDCAMS SYSTEM"), ( "base64", - "\udcc9\udcc4\udcc3\udcc1\udcd4\udce2@@\udce2\udce8\udce2\udce3\udcc5", + "������@@������", ), ], ) @@ -620,7 +620,7 @@ def test_return_content_type(ansible_zos_module, return_content_type, expected, @pytest.mark.parametrize( "src_encoding,response_encoding,expected", [ - ("iso8859-1", "ibm-1047", "qcfe\udcebB||BTBFg\udceb|Bg\udcfdGqfgB"), + ("iso8859-1", "ibm-1047", "qcfe�B||BTBFg�|Bg�GqfgB||"), ( "ibm-1047", "iso8859-1", @@ -1096,7 +1096,7 @@ def test_input_provided_as_list(ansible_zos_module): ("text", "LISTCAT ENTRIES"), ( "base64", - "@\udcd3\udcc9\udce2\udce3\udcc3\udcc1\udce3@\udcc5\udcd5\udce3\udcd9\udcc9\udcc5", + "@�������@�������", ), ], ) @@ -1147,7 +1147,8 @@ def test_input_return_content_types(ansible_zos_module, return_content_type, exp ( "iso8859-1", "ibm-1047", - "|\udceeqBFfeF|g\udcefF\udcfdqgB\udcd4\udcd0", + "|�qBFfeF|g�F�qgB��", + ), ( "ibm-1047", @@ -1568,7 +1569,7 @@ def test_file_record_format(ansible_zos_module, record_format): ("text", "IDCAMS SYSTEM"), ( "base64", - "@\udcd3\udcc9\udce2\udce3\udcc3\udcc1\udce3@\udcc5\udcd5\udce3\udcd9\udcc9\udcc5", + "�������@@������@��������@", ), ], ) @@ -1614,7 +1615,7 @@ def test_file_return_content(ansible_zos_module, return_content_type, expected): @pytest.mark.parametrize( "src_encoding,response_encoding,expected", [ - ("iso8859-1", "ibm-1047", "qcfe\udcebB||BTBFg\udceb|Bg\udcfdGqfgB"), + ("iso8859-1", "ibm-1047", "qcfe�B||BTBFg�|Bg�GqfgB|"), ( "ibm-1047", "iso8859-1", From e1a3efb962c6f9803e743e92bc7b90d57d493479 Mon Sep 17 00:00:00 2001 From: Ivan Moreno <iamorenosoto@gmail.com> Date: Tue, 13 Aug 2024 11:07:11 -0600 Subject: [PATCH 449/495] [Enabler] [zos_apf] Change temporary HLQ used in tests (#1654) * Change temporary HLQ used * Add changelog fragment --- .../1654-zos_apf_tests_change_temphlq.yml | 4 +++ tests/functional/modules/test_zos_apf_func.py | 27 ++++++++++--------- 2 files changed, 19 insertions(+), 12 deletions(-) create mode 100644 changelogs/fragments/1654-zos_apf_tests_change_temphlq.yml diff --git a/changelogs/fragments/1654-zos_apf_tests_change_temphlq.yml b/changelogs/fragments/1654-zos_apf_tests_change_temphlq.yml new file mode 100644 index 000000000..c36a77176 --- /dev/null +++ b/changelogs/fragments/1654-zos_apf_tests_change_temphlq.yml @@ -0,0 +1,4 @@ +trivial: + - test_zos_apf_func - Change temp HLQ used in the test suite for a + standardized one. + (https://github.com/ansible-collections/ibm_zos_core/pull/1654). \ No newline at end of file diff --git a/tests/functional/modules/test_zos_apf_func.py b/tests/functional/modules/test_zos_apf_func.py index 4bb0e9041..918a4d36c 100644 --- a/tests/functional/modules/test_zos_apf_func.py +++ b/tests/functional/modules/test_zos_apf_func.py @@ -41,6 +41,9 @@ DEL_EXPECTED = """/*BEGINAPFLIST*/ /*ENDAPFLIST*/""" +TEST_HLQ = "ANSIBLE" + + def clean_test_env(hosts, test_info): cmd_str = f"drm '{test_info['library']}' " hosts.all.shell(cmd=cmd_str) @@ -69,7 +72,7 @@ def test_add_del(ansible_zos_module, volumes_with_vvds): vol = result.get("stdout") test_info['volume'] = vol if test_info.get('persistent'): - cmd_str = "mvstmp APFTEST.PRST" + cmd_str = f"mvstmp {TEST_HLQ}.PRST" results = hosts.all.shell(cmd=cmd_str) for result in results.contacted.values(): prstds = result.get("stdout") @@ -115,7 +118,7 @@ def test_add_del_with_tmp_hlq_option(ansible_zos_module, volumes_with_vvds): vol = result.get("stdout") test_info['volume'] = vol if test_info.get('persistent'): - cmd_str = "mvstmp APFTEST.PRST" + cmd_str = f"mvstmp {TEST_HLQ}.PRST" results = hosts.all.shell(cmd=cmd_str) for result in results.contacted.values(): prstds = result.get("stdout") @@ -157,7 +160,7 @@ def test_add_del_volume(ansible_zos_module, volumes_with_vvds): vol = result.get("stdout") test_info['volume'] = vol if test_info.get('persistent'): - cmd_str = "mvstmp APFTEST.PRST" + cmd_str = f"mvstmp {TEST_HLQ}.PRST" results = hosts.all.shell(cmd=cmd_str) for result in results.contacted.values(): prstds = result.get("stdout") @@ -230,7 +233,7 @@ def test_add_del_volume_persist(ansible_zos_module, volumes_with_vvds): vol = result.get("stdout") test_info['volume'] = vol if test_info.get('persistent'): - cmd_str = "mvstmp APFTEST.PRST" + cmd_str = f"mvstmp {TEST_HLQ}.PRST" results = hosts.all.shell(cmd=cmd_str) for result in results.contacted.values(): prstds = result.get("stdout") @@ -380,7 +383,7 @@ def test_operation_list_with_filter(ansible_zos_module, volumes_with_vvds): vol = result.get("stdout") test_info['volume'] = vol if test_info.get('persistent'): - cmd_str = "mvstmp APFTEST.PRST" + cmd_str = f"mvstmp {TEST_HLQ}.PRST" results = hosts.all.shell(cmd=cmd_str) for result in results.contacted.values(): prstds = result.get("stdout") @@ -429,7 +432,7 @@ def test_add_already_present(ansible_zos_module, volumes_with_vvds): vol = result.get("stdout") test_info['volume'] = vol if test_info.get('persistent'): - cmd_str = "mvstmp APFTEST.PRST" + cmd_str = f"mvstmp {TEST_HLQ}.PRST" results = hosts.all.shell(cmd=cmd_str) for result in results.contacted.values(): prstds = result.get("stdout") @@ -470,7 +473,7 @@ def test_del_not_present(ansible_zos_module, volumes_with_vvds): vol = result.get("stdout") test_info['volume'] = vol if test_info.get('persistent'): - cmd_str = "mvstmp APFTEST.PRST" + cmd_str = f"mvstmp {TEST_HLQ}.PRST" results = hosts.all.shell(cmd=cmd_str) for result in results.contacted.values(): prstds = result.get("stdout") @@ -494,7 +497,7 @@ def test_add_not_found(ansible_zos_module): "state":"present", "force_dynamic":True } - test_info['library'] = 'APFTEST.FOO.BAR' + test_info['library'] = f'{TEST_HLQ}.FOO.BAR' results = hosts.all.zos_apf(**test_info) for result in results.contacted.values(): # Return code 16 if ZOAU < 1.2.0 and RC is 8 if ZOAU >= 1.2.0 @@ -523,7 +526,7 @@ def test_add_with_wrong_volume(ansible_zos_module, volumes_with_vvds): vol = result.get("stdout") test_info['volume'] = vol if test_info.get('persistent'): - cmd_str = "mvstmp APFTEST.PRST" + cmd_str = f"mvstmp {TEST_HLQ}.PRST" results = hosts.all.shell(cmd=cmd_str) for result in results.contacted.values(): prstds = result.get("stdout") @@ -565,7 +568,7 @@ def test_persist_invalid_ds_format(ansible_zos_module, volumes_with_vvds): vol = result.get("stdout") test_info['volume'] = vol if test_info.get('persistent'): - cmd_str = "mvstmp APFTEST.PRST" + cmd_str = f"mvstmp {TEST_HLQ}.PRST" results = hosts.all.shell(cmd=cmd_str) for result in results.contacted.values(): prstds = result.get("stdout") @@ -608,7 +611,7 @@ def test_persist_invalid_marker(ansible_zos_module, volumes_with_vvds): vol = result.get("stdout") test_info['volume'] = vol if test_info.get('persistent'): - cmd_str = "mvstmp APFTEST.PRST" + cmd_str = f"mvstmp {TEST_HLQ}.PRST" results = hosts.all.shell(cmd=cmd_str) for result in results.contacted.values(): prstds = result.get("stdout") @@ -649,7 +652,7 @@ def test_persist_invalid_marker_len(ansible_zos_module, volumes_with_vvds): vol = result.get("stdout") test_info['volume'] = vol if test_info.get('persistent'): - cmd_str = "mvstmp APFTEST.PRST" + cmd_str = f"mvstmp {TEST_HLQ}.PRST" results = hosts.all.shell(cmd=cmd_str) for result in results.contacted.values(): prstds = result.get("stdout") From f42d1b917a4d80a8b681f29f8bc4fab27ad4d315 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Tue, 13 Aug 2024 13:01:51 -0600 Subject: [PATCH 450/495] [Enabler][1595]Ensure_portability_zos_encode (#1642) * Validate encode not mesh * Fix replace * Fix test names * Fix bad reference * Fix positional * Fix creation * Add fragment * Fix encode unique names of files * Delete all hard coded folders names * Push random --- .../1642-Ensure_portability_zos_encode.yml | 4 + .../modules/test_zos_encode_func.py | 707 +++++++++--------- 2 files changed, 373 insertions(+), 338 deletions(-) create mode 100644 changelogs/fragments/1642-Ensure_portability_zos_encode.yml diff --git a/changelogs/fragments/1642-Ensure_portability_zos_encode.yml b/changelogs/fragments/1642-Ensure_portability_zos_encode.yml new file mode 100644 index 000000000..d1dd95b6e --- /dev/null +++ b/changelogs/fragments/1642-Ensure_portability_zos_encode.yml @@ -0,0 +1,4 @@ +trivial: + - test_zos_encode_func.py - Remove the use of hard coded dataset + and file names. + (https://github.com/ansible-collections/ibm_zos_core/pull/1642). \ No newline at end of file diff --git a/tests/functional/modules/test_zos_encode_func.py b/tests/functional/modules/test_zos_encode_func.py index 9e7d40041..85f977660 100644 --- a/tests/functional/modules/test_zos_encode_func.py +++ b/tests/functional/modules/test_zos_encode_func.py @@ -14,28 +14,22 @@ from __future__ import absolute_import, division, print_function from os import path from shellescape import quote +from datetime import datetime # pylint: disable-next=import-error from ibm_zos_core.tests.helpers.dataset import get_tmp_ds_name import pytest +import string +import random import re __metaclass__ = type -SHELL_EXECUTABLE = "/bin/sh" -USS_FILE = "/tmp/encode_data" USS_NONE_FILE = "/tmp/none" -USS_DEST_FILE = "/tmp/converted_data" -USS_PATH = "/tmp/src" -USS_DEST_PATH = "/tmp/dest" -MVS_PS = "encode.ps" -MVS_NONE_PS = "encode.none.ps" -MVS_PDS = "encode.pds" -MVS_PDS_MEMBER = "encode.pds(test)" -MVS_VS = "encode.test.vs" +SHELL_EXECUTABLE = "/bin/sh" FROM_ENCODING = "IBM-1047" INVALID_ENCODING = "EBCDIC" TO_ENCODING = "ISO8859-1" -TEMP_JCL_PATH = "/tmp/jcl" + TEST_DATA = """0001 This is for encode conversion testing_____________________________________ 0002 This is for encode conversion testing_____________________________________ 0003 This is for encode conversion testing_____________________________________ @@ -44,8 +38,7 @@ 0006 This is for encode conversion testing_____________________________________ """ TEST_DATA_RECORD_LENGTH = 80 -TEST_FILE_TEXT = "HELLO world" -BACKUP_DATA_SET = "USER.PRIVATE.BACK" +TEST_FILE_TEXT = "HELLO WORLD" KSDS_CREATE_JCL = """//CREKSDS JOB (T043JM,JM00,1,0,0,0),'CREATE KSDS',CLASS=R, // MSGCLASS=X,MSGLEVEL=1,NOTIFY=OMVSADM @@ -85,6 +78,12 @@ 00000003A record """ + +def get_unique_uss_file_name(): + unique_str = "EN" + datetime.now().strftime("%H:%M:%S").replace("-", "").replace(":", "") + "CODE" + random.choice(string.ascii_letters) + return "/tmp/{0}".format(unique_str) + + def create_vsam_data_set(hosts, name, ds_type, add_data=False, key_length=None, key_offset=None): """Creates a new VSAM on the system. @@ -120,10 +119,11 @@ def create_vsam_data_set(hosts, name, ds_type, add_data=False, key_length=None, def test_uss_encoding_conversion_with_invalid_encoding(ansible_zos_module): hosts = ansible_zos_module + uss_file = get_unique_uss_file_name() try: - hosts.all.copy(content=TEST_DATA, dest=USS_FILE) + hosts.all.copy(content=TEST_DATA, dest=uss_file) results = hosts.all.zos_encode( - src=USS_FILE, + src=uss_file, encoding={ "from": INVALID_ENCODING, "to": TO_ENCODING, @@ -134,14 +134,15 @@ def test_uss_encoding_conversion_with_invalid_encoding(ansible_zos_module): assert result.get("backup_name") is None assert result.get("changed") is False finally: - hosts.all.file(path=USS_FILE, state="absent") + hosts.all.file(path=uss_file, state="absent") def test_uss_encoding_conversion_with_the_same_encoding(ansible_zos_module): hosts = ansible_zos_module - hosts.all.copy(content=TEST_DATA, dest=USS_FILE) + uss_file = get_unique_uss_file_name() + hosts.all.copy(content=TEST_DATA, dest=uss_file) results = hosts.all.zos_encode( - src=USS_FILE, + src=uss_file, encoding={ "from": FROM_ENCODING, "to": FROM_ENCODING, @@ -151,40 +152,42 @@ def test_uss_encoding_conversion_with_the_same_encoding(ansible_zos_module): assert result.get("msg") is not None assert result.get("backup_name") is None assert result.get("changed") is False - hosts.all.file(path=USS_FILE, state="absent") + hosts.all.file(path=uss_file, state="absent") def test_uss_encoding_conversion_without_dest(ansible_zos_module): + uss_file = get_unique_uss_file_name() try: hosts = ansible_zos_module - hosts.all.copy(content=TEST_DATA, dest=USS_FILE) + hosts.all.copy(content=TEST_DATA, dest=uss_file) results = hosts.all.zos_encode( - src=USS_FILE, + src=uss_file, encoding={ "from": FROM_ENCODING, "to": TO_ENCODING, }, ) for result in results.contacted.values(): - assert result.get("src") == USS_FILE - assert result.get("dest") == USS_FILE + assert result.get("src") == uss_file + assert result.get("dest") == uss_file assert result.get("backup_name") is None assert result.get("changed") is True - tag_results = hosts.all.shell(cmd=f"ls -T {USS_FILE}") + tag_results = hosts.all.shell(cmd=f"ls -T {uss_file}") for result in tag_results.contacted.values(): assert TO_ENCODING in result.get("stdout") finally: - hosts.all.file(path=USS_FILE, state="absent") + hosts.all.file(path=uss_file, state="absent") def test_uss_encoding_conversion_when_dest_not_exists_01(ansible_zos_module): + uss_file = get_unique_uss_file_name() try: hosts = ansible_zos_module - hosts.all.copy(content=TEST_DATA, dest=USS_FILE) + hosts.all.copy(content=TEST_DATA, dest=uss_file) hosts.all.file(path=USS_NONE_FILE, state="absent") results = hosts.all.zos_encode( - src=USS_FILE, + src=uss_file, dest=USS_NONE_FILE, encoding={ "from": FROM_ENCODING, @@ -192,7 +195,7 @@ def test_uss_encoding_conversion_when_dest_not_exists_01(ansible_zos_module): }, ) for result in results.contacted.values(): - assert result.get("src") == USS_FILE + assert result.get("src") == uss_file assert result.get("dest") == USS_NONE_FILE assert result.get("backup_name") is None assert result.get("changed") is True @@ -201,98 +204,104 @@ def test_uss_encoding_conversion_when_dest_not_exists_01(ansible_zos_module): for result in tag_results.contacted.values(): assert TO_ENCODING in result.get("stdout") finally: - hosts.all.file(path=USS_FILE, state="absent") + hosts.all.file(path=uss_file, state="absent") hosts.all.file(path=USS_NONE_FILE, state="absent") def test_uss_encoding_conversion_when_dest_not_exists_02(ansible_zos_module): hosts = ansible_zos_module - MVS_PS = get_tmp_ds_name() - MVS_NONE_PS = get_tmp_ds_name() - hosts.all.zos_data_set(name=MVS_PS, state="absent") - hosts.all.zos_data_set(name=MVS_PS, state="present", type="seq") - hosts.all.zos_data_set(name=MVS_NONE_PS, state="absent") + mvs_ps = get_tmp_ds_name() + mvs_none_ps = get_tmp_ds_name() + hosts.all.zos_data_set(name=mvs_ps, state="absent") + hosts.all.zos_data_set(name=mvs_ps, state="present", type="seq") + hosts.all.zos_data_set(name=mvs_none_ps, state="absent") results = hosts.all.zos_encode( - src=MVS_PS, - dest=MVS_NONE_PS, + src=mvs_ps, + dest=mvs_none_ps, encoding={ "from": FROM_ENCODING, "to": TO_ENCODING, }, ) for result in results.contacted.values(): - assert result.get("src") == MVS_PS - assert result.get("dest") == MVS_NONE_PS + assert result.get("src") == mvs_ps + assert result.get("dest") == mvs_none_ps assert result.get("backup_name") is None assert result.get("changed") is False - hosts.all.zos_data_set(name=MVS_PS, state="absent") - hosts.all.zos_data_set(name=MVS_NONE_PS, state="absent") + hosts.all.zos_data_set(name=mvs_ps, state="absent") + hosts.all.zos_data_set(name=mvs_none_ps, state="absent") def test_uss_encoding_conversion_uss_file_to_uss_file(ansible_zos_module): + uss_file = get_unique_uss_file_name() + uss_dest_file = get_unique_uss_file_name() try: hosts = ansible_zos_module - hosts.all.copy(content=TEST_DATA, dest=USS_FILE) - hosts.all.copy(content="test", dest=USS_DEST_FILE) + hosts.all.copy(content=TEST_DATA, dest=uss_file) + hosts.all.copy(content="test", dest=uss_dest_file) results = hosts.all.zos_encode( - src=USS_FILE, - dest=USS_DEST_FILE, + src=uss_file, + dest=uss_dest_file, encoding={ "from": TO_ENCODING, "to": FROM_ENCODING, }, ) for result in results.contacted.values(): - assert result.get("src") == USS_FILE - assert result.get("dest") == USS_DEST_FILE + assert result.get("src") == uss_file + assert result.get("dest") == uss_dest_file assert result.get("backup_name") is None assert result.get("changed") is True - tag_results = hosts.all.shell(cmd=f"ls -T {USS_DEST_FILE}") + tag_results = hosts.all.shell(cmd=f"ls -T {uss_dest_file}") for result in tag_results.contacted.values(): assert FROM_ENCODING in result.get("stdout") finally: - hosts.all.file(path=USS_FILE, state="absent") - hosts.all.file(path=USS_DEST_FILE, state="absent") + hosts.all.file(path=uss_file, state="absent") + hosts.all.file(path=uss_dest_file, state="absent") def test_uss_encoding_conversion_uss_file_to_uss_path(ansible_zos_module): + uss_file = get_unique_uss_file_name() + uss_dest_path = get_unique_uss_file_name() try: hosts = ansible_zos_module - hosts.all.file(path=USS_DEST_PATH, state="directory") - hosts.all.copy(content=TEST_DATA, dest=USS_FILE) + hosts.all.file(path=uss_dest_path, state="directory") + hosts.all.copy(content=TEST_DATA, dest=uss_file) results = hosts.all.zos_encode( - src=USS_FILE, - dest=USS_DEST_PATH, + src=uss_file, + dest=uss_dest_path, encoding={ "from": TO_ENCODING, "to": FROM_ENCODING, }, ) for result in results.contacted.values(): - assert result.get("src") == USS_FILE - assert result.get("dest") == USS_DEST_PATH + assert result.get("src") == uss_file + assert result.get("dest") == uss_dest_path assert result.get("backup_name") is None assert result.get("changed") is True - tag_results = hosts.all.shell(cmd=f"ls -T {USS_DEST_PATH}/{path.basename(USS_FILE)}") + tag_results = hosts.all.shell(cmd=f"ls -T {uss_dest_path}/{path.basename(uss_file)}") for result in tag_results.contacted.values(): assert FROM_ENCODING in result.get("stdout") finally: - hosts.all.file(path=USS_FILE, state="absent") - hosts.all.file(path=USS_DEST_PATH, state="absent") + hosts.all.file(path=uss_file, state="absent") + hosts.all.file(path=uss_dest_path, state="absent") def test_uss_encoding_conversion_uss_path_to_uss_path(ansible_zos_module): try: hosts = ansible_zos_module - hosts.all.file(path=USS_PATH, state="directory") - hosts.all.copy(content=TEST_DATA, dest=USS_PATH + "/encode1") - hosts.all.copy(content=TEST_DATA, dest=USS_PATH + "/encode2") - hosts.all.file(path=USS_DEST_PATH, state="directory") + uss_path = get_unique_uss_file_name() + uss_dest_path = get_unique_uss_file_name() + hosts.all.file(path=uss_path, state="directory") + hosts.all.copy(content=TEST_DATA, dest=uss_path + "/encode1") + hosts.all.copy(content=TEST_DATA, dest=uss_path + "/encode2") + hosts.all.file(path=uss_dest_path, state="directory") results = hosts.all.zos_encode( - src=USS_PATH, - dest=USS_DEST_PATH, + src=uss_path, + dest=uss_dest_path, encoding={ "from": TO_ENCODING, "to": FROM_ENCODING, @@ -300,56 +309,58 @@ def test_uss_encoding_conversion_uss_path_to_uss_path(ansible_zos_module): backup=True, ) for result in results.contacted.values(): - assert result.get("src") == USS_PATH - assert result.get("dest") == USS_DEST_PATH + assert result.get("src") == uss_path + assert result.get("dest") == uss_dest_path assert result.get("backup_name") is not None assert result.get("changed") is True - tag_results = hosts.all.shell(cmd=f"ls -T {USS_DEST_PATH}") + tag_results = hosts.all.shell(cmd=f"ls -T {uss_dest_path}") for result in tag_results.contacted.values(): assert FROM_ENCODING in result.get("stdout") assert TO_ENCODING not in result.get("stdout") assert "untagged" not in result.get("stdout") finally: - hosts.all.file(path=USS_PATH, state="absent") - hosts.all.file(path=USS_DEST_PATH, state="absent") + hosts.all.file(path=uss_path, state="absent") + hosts.all.file(path=uss_dest_path, state="absent") hosts.all.file(path=result.get("backup_name"), state="absent") def test_uss_encoding_conversion_uss_file_to_mvs_ps(ansible_zos_module): + uss_file = get_unique_uss_file_name() try: hosts = ansible_zos_module - MVS_PS = get_tmp_ds_name() - hosts.all.copy(content=TEST_DATA, dest=USS_FILE) - hosts.all.zos_data_set(name=MVS_PS, state="present", type="seq") + mvs_ps = get_tmp_ds_name() + hosts.all.copy(content=TEST_DATA, dest=uss_file) + hosts.all.zos_data_set(name=mvs_ps, state="present", type="seq") results = hosts.all.zos_encode( - src=USS_FILE, - dest=MVS_PS, + src=uss_file, + dest=mvs_ps, encoding={ "from": TO_ENCODING, "to": FROM_ENCODING, }, ) for result in results.contacted.values(): - assert result.get("src") == USS_FILE - assert result.get("dest") == MVS_PS + assert result.get("src") == uss_file + assert result.get("dest") == mvs_ps assert result.get("backup_name") is None assert result.get("changed") is True finally: - hosts.all.file(path=USS_FILE, state="absent") - hosts.all.zos_data_set(name=MVS_PS, state="absent") + hosts.all.file(path=uss_file, state="absent") + hosts.all.zos_data_set(name=mvs_ps, state="absent") def test_uss_encoding_conversion_mvs_ps_to_uss_file(ansible_zos_module): + uss_dest_file = get_unique_uss_file_name() try: hosts = ansible_zos_module - MVS_PS = get_tmp_ds_name() - hosts.all.zos_data_set(name=MVS_PS, state="present", type="seq") - hosts.all.copy(content=TEST_DATA, dest=MVS_PS) - hosts.all.copy(content="test", dest=USS_DEST_FILE) + mvs_ps = get_tmp_ds_name() + hosts.all.zos_data_set(name=mvs_ps, state="present", type="seq") + hosts.all.copy(content=TEST_DATA, dest=mvs_ps) + hosts.all.copy(content="test", dest=uss_dest_file) results = hosts.all.zos_encode( - src=MVS_PS, - dest=USS_DEST_FILE, + src=mvs_ps, + dest=uss_dest_file, encoding={ "from": FROM_ENCODING, "to": TO_ENCODING, @@ -357,105 +368,105 @@ def test_uss_encoding_conversion_mvs_ps_to_uss_file(ansible_zos_module): backup=True, ) for result in results.contacted.values(): - assert result.get("src") == MVS_PS - assert result.get("dest") == USS_DEST_FILE + assert result.get("src") == mvs_ps + assert result.get("dest") == uss_dest_file assert result.get("backup_name") is not None assert result.get("changed") is True - tag_results = hosts.all.shell(cmd=f"ls -T {USS_DEST_FILE}") + tag_results = hosts.all.shell(cmd=f"ls -T {uss_dest_file}") for result in tag_results.contacted.values(): assert TO_ENCODING in result.get("stdout") finally: - hosts.all.file(path=USS_DEST_FILE, state="absent") + hosts.all.file(path=uss_dest_file, state="absent") hosts.all.file(path=result.get("backup_name"), state="absent") - hosts.all.zos_data_set(name=MVS_PS, state="absent") + hosts.all.zos_data_set(name=mvs_ps, state="absent") def test_uss_encoding_conversion_uss_file_to_mvs_pds(ansible_zos_module): + uss_file = get_unique_uss_file_name() try: hosts = ansible_zos_module - MVS_PDS = get_tmp_ds_name() - hosts.all.copy(content=TEST_DATA, dest=USS_FILE) - hosts.all.zos_data_set( - name=MVS_PDS, - state="present", - type="pds", - record_length=TEST_DATA_RECORD_LENGTH + mvs_ps = get_tmp_ds_name() + results = hosts.all.copy(content=TEST_DATA, dest=uss_file) + hosts.all.shell( + cmd="dtouch -tpds -l {1} {0}".format(mvs_ps, TEST_DATA_RECORD_LENGTH), ) results = hosts.all.zos_encode( - src=USS_FILE, - dest=MVS_PDS, + src=uss_file, + dest=mvs_ps, encoding={ "from": TO_ENCODING, "to": FROM_ENCODING, }, ) for result in results.contacted.values(): - assert result.get("src") == USS_FILE - assert result.get("dest") == MVS_PDS + assert result.get("src") == uss_file + assert result.get("dest") == mvs_ps assert result.get("backup_name") is None assert result.get("changed") is True finally: - hosts.all.file(path=USS_FILE, state="absent") - hosts.all.zos_data_set(name=MVS_PDS, state="absent") + hosts.all.file(path=uss_file, state="absent") + hosts.all.zos_data_set(name=mvs_ps, state="absent") def test_uss_encoding_conversion_uss_file_to_mvs_pds_member(ansible_zos_module): + uss_file = get_unique_uss_file_name() try: hosts = ansible_zos_module - MVS_PDS = get_tmp_ds_name() - MVS_PDS_MEMBER = MVS_PDS + '(MEM)' - hosts.all.copy(content=TEST_DATA, dest=USS_FILE) + mvs_ps = get_tmp_ds_name() + mvs_pds_member = mvs_ps + '(MEM)' + hosts.all.copy(content=TEST_DATA, dest=uss_file) hosts.all.zos_data_set( - name=MVS_PDS, + name=mvs_ps, state="present", type="pds", record_length=TEST_DATA_RECORD_LENGTH ) results = hosts.all.zos_data_set( - name=MVS_PDS_MEMBER, type="member", state="present" + name=mvs_pds_member, type="member", state="present" ) for result in results.contacted.values(): # documentation will return changed=False if ds exists and replace=False.. # assert result.get("changed") is True assert result.get("module_stderr") is None results = hosts.all.zos_encode( - src=USS_FILE, - dest=MVS_PDS_MEMBER, + src=uss_file, + dest=mvs_pds_member, encoding={ "from": TO_ENCODING, "to": FROM_ENCODING, }, ) for result in results.contacted.values(): - assert result.get("src") == USS_FILE - assert result.get("dest") == MVS_PDS_MEMBER + assert result.get("src") == uss_file + assert result.get("dest") == mvs_pds_member assert result.get("backup_name") is None assert result.get("changed") is True finally: - hosts.all.file(path=USS_FILE, state="absent") - hosts.all.zos_data_set(name=MVS_PDS, state="absent") + hosts.all.file(path=uss_file, state="absent") + hosts.all.zos_data_set(name=mvs_ps, state="absent") def test_uss_encoding_conversion_mvs_pds_member_to_uss_file(ansible_zos_module): + uss_dest_file = get_unique_uss_file_name() try: hosts = ansible_zos_module - MVS_PDS = get_tmp_ds_name() - MVS_PDS_MEMBER = MVS_PDS + '(MEM)' + mvs_ps = get_tmp_ds_name() + mvs_pds_member = mvs_ps + '(MEM)' hosts.all.zos_data_set( - name=MVS_PDS, + name=mvs_ps, state="present", type="pds", record_length=TEST_DATA_RECORD_LENGTH ) hosts.all.zos_data_set( - name=MVS_PDS_MEMBER, type="member", state="present" + name=mvs_pds_member, type="member", state="present" ) - hosts.all.copy(content=TEST_DATA, dest=MVS_PDS_MEMBER) - hosts.all.copy(content="test", dest=USS_DEST_FILE) + hosts.all.copy(content=TEST_DATA, dest=mvs_pds_member) + hosts.all.copy(content="test", dest=uss_dest_file) results = hosts.all.zos_encode( - src=MVS_PDS_MEMBER, - dest=USS_DEST_FILE, + src=mvs_pds_member, + dest=uss_dest_file, encoding={ "from": FROM_ENCODING, "to": TO_ENCODING, @@ -463,50 +474,52 @@ def test_uss_encoding_conversion_mvs_pds_member_to_uss_file(ansible_zos_module): backup=True, ) for result in results.contacted.values(): - assert result.get("src") == MVS_PDS_MEMBER - assert result.get("dest") == USS_DEST_FILE + assert result.get("src") == mvs_pds_member + assert result.get("dest") == uss_dest_file assert result.get("backup_name") is not None assert result.get("changed") is True - tag_results = hosts.all.shell(cmd=f"ls -T {USS_DEST_FILE}") + tag_results = hosts.all.shell(cmd=f"ls -T {uss_dest_file}") for result in tag_results.contacted.values(): assert TO_ENCODING in result.get("stdout") finally: - hosts.all.file(path=USS_DEST_FILE, state="absent") + hosts.all.file(path=uss_dest_file, state="absent") hosts.all.file(path=result.get("backup_name"), state="absent") - hosts.all.zos_data_set(name=MVS_PDS, state="absent") + hosts.all.zos_data_set(name=mvs_ps, state="absent") def test_uss_encoding_conversion_uss_path_to_mvs_pds(ansible_zos_module): try: hosts = ansible_zos_module - MVS_PDS = get_tmp_ds_name() - hosts.all.file(path=USS_PATH, state="directory") - hosts.all.copy(content=TEST_DATA, dest=USS_PATH + "/encode1") - hosts.all.copy(content=TEST_DATA, dest=USS_PATH + "/encode2") + mvs_ps = get_tmp_ds_name() + uss_path = get_unique_uss_file_name() + uss_dest_path = get_unique_uss_file_name() + hosts.all.file(path=uss_path, state="directory") + hosts.all.copy(content=TEST_DATA, dest=uss_path + "/encode1") + hosts.all.copy(content=TEST_DATA, dest=uss_path + "/encode2") hosts.all.zos_data_set( - name=MVS_PDS, + name=mvs_ps, state="present", type="pds", record_length=TEST_DATA_RECORD_LENGTH ) results = hosts.all.zos_encode( - src=USS_PATH, - dest=MVS_PDS, + src=uss_path, + dest=mvs_ps, encoding={ "from": TO_ENCODING, "to": FROM_ENCODING, }, ) for result in results.contacted.values(): - assert result.get("src") == USS_PATH - assert result.get("dest") == MVS_PDS + assert result.get("src") == uss_path + assert result.get("dest") == mvs_ps assert result.get("backup_name") is None assert result.get("changed") is True - hosts.all.file(path=USS_DEST_PATH, state="directory") + hosts.all.file(path=uss_dest_path, state="directory") results = hosts.all.zos_encode( - src=MVS_PDS, - dest=USS_DEST_PATH, + src=mvs_ps, + dest=uss_dest_path, encoding={ "from": TO_ENCODING, "to": FROM_ENCODING, @@ -514,59 +527,62 @@ def test_uss_encoding_conversion_uss_path_to_mvs_pds(ansible_zos_module): ) for result in results.contacted.values(): - assert result.get("src") == MVS_PDS - assert result.get("dest") == USS_DEST_PATH + assert result.get("src") == mvs_ps + assert result.get("dest") == uss_dest_path assert result.get("backup_name") is None assert result.get("changed") is True - tag_results = hosts.all.shell(cmd=f"ls -T {USS_DEST_PATH}") + tag_results = hosts.all.shell(cmd=f"ls -T {uss_dest_path}") for result in tag_results.contacted.values(): assert FROM_ENCODING in result.get("stdout") assert "untagged" not in result.get("stdout") finally: - hosts.all.file(path=USS_PATH, state="absent") - hosts.all.zos_data_set(name=MVS_PDS, state="absent") - hosts.all.file(path=USS_DEST_PATH, state="absent") + hosts.all.file(path=uss_path, state="absent") + hosts.all.zos_data_set(name=mvs_ps, state="absent") + hosts.all.file(path=uss_dest_path, state="absent") def test_uss_encoding_conversion_mvs_ps_to_mvs_pds_member(ansible_zos_module): hosts = ansible_zos_module - MVS_PDS = get_tmp_ds_name() - MVS_PDS_MEMBER = MVS_PDS + '(MEM)' - MVS_PS = get_tmp_ds_name() - hosts.all.zos_data_set(name=MVS_PS, state="present", type="seq") - hosts.all.shell(cmd=f"cp {quote(TEST_DATA)} \"//'{MVS_PS}'\" ") - hosts.all.zos_data_set(name=MVS_PDS, state="present", type="pds") + mvs_ps = get_tmp_ds_name() + mvs_pds = get_tmp_ds_name() + mvs_pds_member = mvs_pds + '(MEM)' + hosts.all.zos_data_set(name=mvs_ps, state="present", type="seq") + hosts.all.shell(cmd=f"cp {quote(TEST_DATA)} \"//'{mvs_ps}'\" ") + hosts.all.zos_data_set(name=mvs_pds, state="present", type="pds") hosts.all.zos_data_set( - name=MVS_PDS_MEMBER, type="member", state="present" + name=mvs_pds_member, type="member", state="present" ) results = hosts.all.zos_encode( - src=MVS_PS, - dest=MVS_PDS_MEMBER, + src=mvs_ps, + dest=mvs_pds_member, encoding={ "from": FROM_ENCODING, "to": TO_ENCODING, }, ) for result in results.contacted.values(): - assert result.get("src") == MVS_PS - assert result.get("dest") == MVS_PDS_MEMBER + assert result.get("src") == mvs_ps + assert result.get("dest") == mvs_pds_member assert result.get("backup_name") is None assert result.get("changed") is True - hosts.all.zos_data_set(name=MVS_PS, state="absent") - hosts.all.zos_data_set(name=MVS_PDS, state="absent") + hosts.all.zos_data_set(name=mvs_ps, state="absent") + hosts.all.zos_data_set(name=mvs_ps, state="absent") + def test_uss_encoding_conversion_uss_file_to_mvs_vsam(ansible_zos_module): + uss_file = get_unique_uss_file_name() + temp_jcl_path = get_unique_uss_file_name() try: hosts = ansible_zos_module - MVS_VS = get_tmp_ds_name(3) - hosts.all.copy(content=TEST_DATA, dest=USS_FILE) - hosts.all.file(path=TEMP_JCL_PATH, state="directory") + mvs_vs = get_tmp_ds_name(3) + hosts.all.copy(content=TEST_DATA, dest=uss_file) + hosts.all.file(path=temp_jcl_path, state="directory") hosts.all.shell( - cmd=f"echo {quote(KSDS_CREATE_JCL.format(MVS_VS))} > {TEMP_JCL_PATH}/SAMPLE" + cmd=f"echo {quote(KSDS_CREATE_JCL.format(mvs_vs))} > {temp_jcl_path}/SAMPLE" ) results = hosts.all.zos_job_submit( - src=f"{TEMP_JCL_PATH}/SAMPLE", location="uss", wait_time_s=30 + src=f"{temp_jcl_path}/SAMPLE", location="uss", wait_time_s=30 ) for result in results.contacted.values(): @@ -574,34 +590,35 @@ def test_uss_encoding_conversion_uss_file_to_mvs_vsam(ansible_zos_module): assert result.get("jobs")[0].get("ret_code").get("code") == 0 assert result.get("changed") is True results = hosts.all.zos_encode( - src=USS_FILE, - dest=MVS_VS, + src=uss_file, + dest=mvs_vs, encoding={ "from": TO_ENCODING, "to": FROM_ENCODING, }, ) for result in results.contacted.values(): - assert result.get("src") == USS_FILE - assert result.get("dest") == MVS_VS + assert result.get("src") == uss_file + assert result.get("dest") == mvs_vs assert result.get("backup_name") is None assert result.get("changed") is True finally: - hosts.all.file(path=TEMP_JCL_PATH, state="absent") - hosts.all.file(path=USS_FILE, state="absent") - hosts.all.zos_data_set(name=MVS_VS, state="absent") + hosts.all.file(path=temp_jcl_path, state="absent") + hosts.all.file(path=uss_file, state="absent") + hosts.all.zos_data_set(name=mvs_vs, state="absent") def test_uss_encoding_conversion_mvs_vsam_to_uss_file(ansible_zos_module): + uss_dest_file = get_unique_uss_file_name() try: hosts = ansible_zos_module mlq_size = 3 - MVS_VS = get_tmp_ds_name(mlq_size) - create_vsam_data_set(hosts, MVS_VS, "ksds", add_data=True, key_length=12, key_offset=0) - hosts.all.file(path=USS_DEST_FILE, state="touch") + mvs_vs = get_tmp_ds_name(mlq_size) + create_vsam_data_set(hosts, mvs_vs, "ksds", add_data=True, key_length=12, key_offset=0) + hosts.all.file(path=uss_dest_file, state="touch") results = hosts.all.zos_encode( - src=MVS_VS, - dest=USS_DEST_FILE, + src=mvs_vs, + dest=uss_dest_file, encoding={ "from": FROM_ENCODING, "to": TO_ENCODING, @@ -609,94 +626,95 @@ def test_uss_encoding_conversion_mvs_vsam_to_uss_file(ansible_zos_module): backup=True, ) for result in results.contacted.values(): - assert result.get("src") == MVS_VS - assert result.get("dest") == USS_DEST_FILE + assert result.get("src") == mvs_vs + assert result.get("dest") == uss_dest_file assert result.get("backup_name") is not None assert result.get("changed") is True - tag_results = hosts.all.shell(cmd=f"ls -T {USS_DEST_FILE}") + tag_results = hosts.all.shell(cmd=f"ls -T {uss_dest_file}") for result in tag_results.contacted.values(): assert TO_ENCODING in result.get("stdout") finally: - hosts.all.file(path=USS_DEST_FILE, state="absent") + hosts.all.file(path=uss_dest_file, state="absent") hosts.all.file(path=result.get("backup_name"), state="absent") - hosts.all.zos_data_set(name=MVS_VS, state="absent") + hosts.all.zos_data_set(name=mvs_vs, state="absent") def test_uss_encoding_conversion_mvs_vsam_to_mvs_ps(ansible_zos_module): hosts = ansible_zos_module - MVS_PS = get_tmp_ds_name() - MVS_VS = get_tmp_ds_name() - create_vsam_data_set(hosts, MVS_VS, "ksds", add_data=True, key_length=12, key_offset=0) - hosts.all.zos_data_set(name=MVS_PS, state="absent") + mvs_ps = get_tmp_ds_name() + mvs_vs = get_tmp_ds_name() + create_vsam_data_set(hosts, mvs_vs, "ksds", add_data=True, key_length=12, key_offset=0) + hosts.all.zos_data_set(name=mvs_ps, state="absent") hosts.all.zos_data_set( - name=MVS_PS, + name=mvs_ps, state="present", type="seq", record_length=TEST_DATA_RECORD_LENGTH ) results = hosts.all.zos_encode( - src=MVS_VS, - dest=MVS_PS, + src=mvs_vs, + dest=mvs_ps, encoding={ "from": FROM_ENCODING, "to": TO_ENCODING, }, ) for result in results.contacted.values(): - assert result.get("src") == MVS_VS - assert result.get("dest") == MVS_PS + assert result.get("src") == mvs_vs + assert result.get("dest") == mvs_ps assert result.get("backup_name") is None assert result.get("changed") is True - hosts.all.zos_data_set(name=MVS_VS, state="absent") - hosts.all.zos_data_set(name=MVS_PS, state="absent") + hosts.all.zos_data_set(name=mvs_vs, state="absent") + hosts.all.zos_data_set(name=mvs_ps, state="absent") def test_uss_encoding_conversion_mvs_vsam_to_mvs_pds_member(ansible_zos_module): hosts = ansible_zos_module - MVS_VS = get_tmp_ds_name() - MVS_PDS = get_tmp_ds_name() - create_vsam_data_set(hosts, MVS_VS, "ksds", add_data=True, key_length=12, key_offset=0) - MVS_PDS_MEMBER = MVS_PDS + '(MEM)' + mvs_vs = get_tmp_ds_name() + mvs_ps = get_tmp_ds_name() + create_vsam_data_set(hosts, mvs_vs, "ksds", add_data=True, key_length=12, key_offset=0) + mvs_pds_member = mvs_ps + '(MEM)' hosts.all.zos_data_set( - name=MVS_PDS, + name=mvs_ps, state="present", type="pds", record_length=TEST_DATA_RECORD_LENGTH ) hosts.all.zos_data_set( - name=MVS_PDS_MEMBER, type="member", state="present" + name=mvs_pds_member, type="member", state="present" ) results = hosts.all.zos_encode( - src=MVS_VS, - dest=MVS_PDS_MEMBER, + src=mvs_vs, + dest=mvs_pds_member, encoding={ "from": FROM_ENCODING, "to": TO_ENCODING, }, ) - hosts.all.zos_data_set(name=MVS_PDS, state="absent") + hosts.all.zos_data_set(name=mvs_ps, state="absent") for result in results.contacted.values(): - assert result.get("src") == MVS_VS - assert result.get("dest") == MVS_PDS_MEMBER + assert result.get("src") == mvs_vs + assert result.get("dest") == mvs_pds_member assert result.get("backup_name") is None assert result.get("changed") is True - hosts.all.zos_data_set(name=MVS_VS, state="absent") - hosts.all.zos_data_set(name=MVS_PDS, state="absent") + hosts.all.zos_data_set(name=mvs_vs, state="absent") + hosts.all.zos_data_set(name=mvs_ps, state="absent") def test_uss_encoding_conversion_mvs_ps_to_mvs_vsam(ansible_zos_module): try: hosts = ansible_zos_module - MVS_VS = get_tmp_ds_name(3) - MVS_PS = get_tmp_ds_name() - hosts.all.zos_data_set(name=MVS_PS, state="present", type="seq") - hosts.all.file(path=TEMP_JCL_PATH, state="directory") + mvs_vs = get_tmp_ds_name(3) + mvs_ps = get_tmp_ds_name() + temp_jcl_path = get_unique_uss_file_name() + hosts.all.zos_data_set(name=mvs_ps, state="present", type="seq") + hosts.all.file(path=temp_jcl_path, state="directory") hosts.all.shell( - cmd=f"echo {quote(KSDS_CREATE_JCL.format(MVS_VS))} > {TEMP_JCL_PATH}/SAMPLE" + cmd=f"echo {quote(KSDS_CREATE_JCL.format(mvs_vs))} > {temp_jcl_path}/SAMPLE" ) results = hosts.all.zos_job_submit( - src=f"{TEMP_JCL_PATH}/SAMPLE", location="uss", wait_time_s=30 + src=f"{temp_jcl_path}/SAMPLE", location="uss", wait_time_s=30 ) for result in results.contacted.values(): assert result.get("jobs") is not None @@ -705,22 +723,22 @@ def test_uss_encoding_conversion_mvs_ps_to_mvs_vsam(ansible_zos_module): assert result.get("changed") is True #hosts.all.zos_copy(content=TEST_DATA, dest=MVS_PS) results = hosts.all.zos_encode( - src=MVS_PS, - dest=MVS_VS, + src=mvs_ps, + dest=mvs_vs, encoding={ "from": TO_ENCODING, "to": FROM_ENCODING, }, ) for result in results.contacted.values(): - assert result.get("src") == MVS_PS - assert result.get("dest") == MVS_VS + assert result.get("src") == mvs_ps + assert result.get("dest") == mvs_vs assert result.get("backup_name") is None assert result.get("changed") is True finally: - hosts.all.file(path=TEMP_JCL_PATH, state="absent") - hosts.all.zos_data_set(name=MVS_PS, state="absent") - hosts.all.zos_data_set(name=MVS_VS, state="absent") + hosts.all.file(path=temp_jcl_path, state="absent") + hosts.all.zos_data_set(name=mvs_ps, state="absent") + hosts.all.zos_data_set(name=mvs_vs, state="absent") def test_uss_encoding_conversion_src_with_special_chars(ansible_zos_module): @@ -752,43 +770,47 @@ def test_uss_encoding_conversion_src_with_special_chars(ansible_zos_module): def test_pds_backup(ansible_zos_module): try: hosts = ansible_zos_module - MVS_PDS = get_tmp_ds_name() - hosts.all.zos_data_set(name=BACKUP_DATA_SET, state="absent") - hosts.all.zos_data_set(name=MVS_PDS, state="absent") - hosts.all.zos_data_set(name=MVS_PDS, state="present", type="pds") - hosts.all.shell(cmd=f"echo '{TEST_FILE_TEXT}' > {TEMP_JCL_PATH}") - hosts.all.shell(cmd=f"cp {TEMP_JCL_PATH} \"//'{MVS_PDS}(SAMPLE)'\"") + mvs_ps = get_tmp_ds_name() + backup_data_set = get_tmp_ds_name() + temp_jcl_path = get_unique_uss_file_name() + hosts.all.zos_data_set(name=backup_data_set, state="absent") + hosts.all.zos_data_set(name=mvs_ps, state="absent") + hosts.all.zos_data_set(name=mvs_ps, state="present", type="pds") + hosts.all.shell(cmd=f"echo '{TEST_FILE_TEXT}' > {temp_jcl_path}") + hosts.all.shell(cmd=f"cp {temp_jcl_path} \"//'{mvs_ps}(SAMPLE)'\"") hosts.all.zos_encode( - src=MVS_PDS, + src=mvs_ps, encoding={ "from": TO_ENCODING, "to": FROM_ENCODING, }, backup=True, - backup_name=BACKUP_DATA_SET, + backup_name=backup_data_set, ) - contents = hosts.all.shell(cmd=f"cat \"//'{BACKUP_DATA_SET}(SAMPLE)'\"") + contents = hosts.all.shell(cmd=f"cat \"//'{backup_data_set}(SAMPLE)'\"") for content in contents.contacted.values(): # pprint(content) assert TEST_FILE_TEXT in content.get("stdout") finally: - hosts.all.file(path=TEMP_JCL_PATH, state="absent") - hosts.all.zos_data_set(name=MVS_PDS, state="absent") - hosts.all.zos_data_set(name=BACKUP_DATA_SET, state="absent") + hosts.all.file(path=temp_jcl_path, state="absent") + hosts.all.zos_data_set(name=mvs_ps, state="absent") + hosts.all.zos_data_set(name=backup_data_set, state="absent") def test_pds_backup_with_tmp_hlq_option(ansible_zos_module): try: hosts = ansible_zos_module - MVS_PDS = get_tmp_ds_name() + mvs_ps = get_tmp_ds_name() + backup_data_set = get_tmp_ds_name() + temp_jcl_path = get_unique_uss_file_name() tmphlq = "TMPHLQ" - hosts.all.zos_data_set(name=BACKUP_DATA_SET, state="absent") - hosts.all.zos_data_set(name=MVS_PDS, state="absent") - hosts.all.zos_data_set(name=MVS_PDS, state="present", type="pds") - hosts.all.shell(cmd=f"echo '{TEST_FILE_TEXT}' > {TEMP_JCL_PATH}") - hosts.all.shell(cmd=f"cp {TEMP_JCL_PATH} \"//'{MVS_PDS}(SAMPLE)'\"") + hosts.all.zos_data_set(name=backup_data_set, state="absent") + hosts.all.zos_data_set(name=mvs_ps, state="absent") + hosts.all.zos_data_set(name=mvs_ps, state="present", type="pds") + hosts.all.shell(cmd=f"echo '{TEST_FILE_TEXT}' > {temp_jcl_path}") + hosts.all.shell(cmd=f"cp {temp_jcl_path} \"//'{mvs_ps}(SAMPLE)'\"") encode_res = hosts.all.zos_encode( - src=MVS_PDS, + src=mvs_ps, encoding={ "from": TO_ENCODING, "to": FROM_ENCODING, @@ -799,135 +821,141 @@ def test_pds_backup_with_tmp_hlq_option(ansible_zos_module): for enc_res in encode_res.contacted.values(): assert enc_res.get("backup_name")[:6] == tmphlq contents = hosts.all.shell(cmd="cat \"//'{0}(SAMPLE)'\"".format(enc_res.get("backup_name"))) - hosts.all.file(path=TEMP_JCL_PATH, state="absent") - hosts.all.zos_data_set(name=MVS_PDS, state="absent") - hosts.all.zos_data_set(name=BACKUP_DATA_SET, state="absent") + hosts.all.file(path=temp_jcl_path, state="absent") + hosts.all.zos_data_set(name=mvs_ps, state="absent") + hosts.all.zos_data_set(name=backup_data_set, state="absent") for content in contents.contacted.values(): # pprint(content) assert TEST_FILE_TEXT in content.get("stdout") finally: - hosts.all.file(path=TEMP_JCL_PATH, state="absent") - hosts.all.zos_data_set(name=MVS_PDS, state="absent") - hosts.all.zos_data_set(name=BACKUP_DATA_SET, state="absent") + hosts.all.file(path=temp_jcl_path, state="absent") + hosts.all.zos_data_set(name=mvs_ps, state="absent") + hosts.all.zos_data_set(name=backup_data_set, state="absent") def test_ps_backup(ansible_zos_module): try: hosts = ansible_zos_module - MVS_PS = get_tmp_ds_name() - hosts.all.zos_data_set(name=BACKUP_DATA_SET, state="absent") - hosts.all.zos_data_set(name=MVS_PS, state="absent") - hosts.all.zos_data_set(name=MVS_PS, state="present", type="seq") - hosts.all.shell(cmd=f"echo '{TEST_FILE_TEXT}' > {TEMP_JCL_PATH}") - hosts.all.shell(cmd=f"cp {TEMP_JCL_PATH} \"//'{MVS_PS}'\"") + mvs_ps = get_tmp_ds_name() + backup_data_set = get_tmp_ds_name() + temp_jcl_path = get_unique_uss_file_name() + hosts.all.zos_data_set(name=backup_data_set, state="absent") + hosts.all.zos_data_set(name=mvs_ps, state="absent") + hosts.all.zos_data_set(name=mvs_ps, state="present", type="seq") + hosts.all.shell(cmd=f"echo '{TEST_FILE_TEXT}' > {temp_jcl_path}") + hosts.all.shell(cmd=f"cp {temp_jcl_path} \"//'{mvs_ps}'\"") hosts.all.zos_encode( - src=MVS_PS, + src=mvs_ps, encoding={ "from": TO_ENCODING, "to": FROM_ENCODING, }, backup=True, - backup_name=BACKUP_DATA_SET, + backup_name=backup_data_set, ) - contents = hosts.all.shell(cmd=f"cat \"//'{BACKUP_DATA_SET}'\"") + contents = hosts.all.shell(cmd=f"cat \"//'{backup_data_set}'\"") for content in contents.contacted.values(): assert TEST_FILE_TEXT in content.get("stdout") finally: - hosts.all.file(path=TEMP_JCL_PATH, state="absent") - hosts.all.zos_data_set(name=MVS_PS, state="absent") - hosts.all.zos_data_set(name=BACKUP_DATA_SET, state="absent") + hosts.all.file(path=temp_jcl_path, state="absent") + hosts.all.zos_data_set(name=mvs_ps, state="absent") + hosts.all.zos_data_set(name=backup_data_set, state="absent") def test_vsam_backup(ansible_zos_module): try: hosts = ansible_zos_module - MVS_VS = get_tmp_ds_name() - MVS_PS = get_tmp_ds_name() - hosts.all.zos_data_set(name=BACKUP_DATA_SET, state="absent") - hosts.all.zos_data_set(name=MVS_VS, state="absent") - hosts.all.zos_data_set(name=MVS_PS, state="absent") + backup_data_set = get_tmp_ds_name() + mvs_vs = get_tmp_ds_name() + mvs_ps = get_tmp_ds_name() + temp_jcl_path = get_unique_uss_file_name() + hosts.all.zos_data_set(name=backup_data_set, state="absent") + hosts.all.zos_data_set(name=mvs_vs, state="absent") + hosts.all.zos_data_set(name=mvs_ps, state="absent") hosts.all.zos_data_set( - name=MVS_PS, state="present", record_length=TEST_DATA_RECORD_LENGTH, type="seq" + name=mvs_ps, state="present", record_length=TEST_DATA_RECORD_LENGTH, type="seq" ) - hosts.all.file(path=TEMP_JCL_PATH, state="directory") + hosts.all.file(path=temp_jcl_path, state="directory") hosts.all.shell( - cmd=f"echo {quote(KSDS_CREATE_JCL.format(MVS_VS))} > {TEMP_JCL_PATH}/SAMPLE" + cmd=f"echo {quote(KSDS_CREATE_JCL.format(mvs_vs))} > {temp_jcl_path}/SAMPLE" ) hosts.all.zos_job_submit( - src=f"{TEMP_JCL_PATH}/SAMPLE", location="uss", wait_time_s=30 + src=f"{temp_jcl_path}/SAMPLE", location="uss", wait_time_s=30 ) - hosts.all.file(path=TEMP_JCL_PATH, state="absent") + hosts.all.file(path=temp_jcl_path, state="absent") # submit JCL to populate KSDS - hosts.all.file(path=TEMP_JCL_PATH, state="directory") + hosts.all.file(path=temp_jcl_path, state="directory") hosts.all.shell( - cmd=f"echo {quote(KSDS_REPRO_JCL.format(MVS_VS.upper()))} > {TEMP_JCL_PATH}/SAMPLE" + cmd=f"echo {quote(KSDS_REPRO_JCL.format(mvs_vs.upper()))} > {temp_jcl_path}/SAMPLE" ) hosts.all.zos_job_submit( - src=f"{TEMP_JCL_PATH}/SAMPLE", location="uss", wait_time_s=30 + src=f"{temp_jcl_path}/SAMPLE", location="uss", wait_time_s=30 ) hosts.all.zos_encode( - src=MVS_VS, - dest=MVS_PS, + src=mvs_vs, + dest=mvs_ps, encoding={ "from": FROM_ENCODING, "to": TO_ENCODING, }, ) hosts.all.zos_encode( - src=MVS_VS, + src=mvs_vs, encoding={ "from": FROM_ENCODING, "to": TO_ENCODING, }, backup=True, - backup_name=BACKUP_DATA_SET, + backup_name=backup_data_set, ) hosts.all.zos_data_set( - name=MVS_PS, state="present", record_length=TEST_DATA_RECORD_LENGTH, type="seq" + name=mvs_ps, state="present", record_length=TEST_DATA_RECORD_LENGTH, type="seq" ) hosts.all.zos_encode( - src=BACKUP_DATA_SET, - dest=MVS_PS, + src=backup_data_set, + dest=mvs_ps, encoding={ "from": FROM_ENCODING, "to": TO_ENCODING, }, ) finally: - hosts.all.zos_data_set(name=MVS_PS, state="absent") - hosts.all.zos_data_set(name=MVS_VS, state="absent") - hosts.all.zos_data_set(name=BACKUP_DATA_SET, state="absent") - hosts.all.file(path=TEMP_JCL_PATH, state="absent") + hosts.all.zos_data_set(name=mvs_ps, state="absent") + hosts.all.zos_data_set(name=mvs_vs, state="absent") + hosts.all.zos_data_set(name=backup_data_set, state="absent") + hosts.all.file(path=temp_jcl_path, state="absent") def test_uss_backup_entire_folder_to_default_backup_location(ansible_zos_module): try: hosts = ansible_zos_module - MVS_PDS = get_tmp_ds_name() - hosts.all.zos_data_set(name=BACKUP_DATA_SET, state="absent") + mvs_ps = get_tmp_ds_name() + backup_data_set = get_tmp_ds_name() + temp_jcl_path = get_unique_uss_file_name() + hosts.all.zos_data_set(name=backup_data_set, state="absent") # create and fill PDS - hosts.all.zos_data_set(name=MVS_PDS, state="absent") - hosts.all.zos_data_set(name=MVS_PDS, state="present", type="pds") - hosts.all.shell(cmd=f"echo '{TEST_FILE_TEXT}' > {TEMP_JCL_PATH}") - hosts.all.shell(cmd=f"cp {TEMP_JCL_PATH} \"//'{MVS_PDS}(SAMPLE)'\"") - hosts.all.shell(cmd=f"cp {TEMP_JCL_PATH} \"//'{MVS_PDS}(SAMPLE2)'\"") - hosts.all.shell(cmd=f"cp {TEMP_JCL_PATH} \"//'{MVS_PDS}(SAMPLE3)'\"") + hosts.all.zos_data_set(name=mvs_ps, state="absent") + hosts.all.zos_data_set(name=mvs_ps, state="present", type="pds") + hosts.all.shell(cmd=f"echo '{TEST_FILE_TEXT}' > {temp_jcl_path}") + hosts.all.shell(cmd=f"cp {temp_jcl_path} \"//'{mvs_ps}(SAMPLE)'\"") + hosts.all.shell(cmd=f"cp {temp_jcl_path} \"//'{mvs_ps}(SAMPLE2)'\"") + hosts.all.shell(cmd=f"cp {temp_jcl_path} \"//'{mvs_ps}(SAMPLE3)'\"") # create and fill directory - hosts.all.file(path=TEMP_JCL_PATH + "2", state="absent") - hosts.all.file(path=TEMP_JCL_PATH + "2", state="directory") + hosts.all.file(path=temp_jcl_path + "2", state="absent") + hosts.all.file(path=temp_jcl_path + "2", state="directory") hosts.all.shell( - cmd=f"echo '{TEST_FILE_TEXT}' > {TEMP_JCL_PATH}2/file1" + cmd=f"echo '{TEST_FILE_TEXT}' > {temp_jcl_path}2/file1" ) hosts.all.shell( - cmd=f"echo '{TEST_FILE_TEXT}' > {TEMP_JCL_PATH}2/file2" + cmd=f"echo '{TEST_FILE_TEXT}' > {temp_jcl_path}2/file2" ) hosts.all.shell( - cmd=f"echo '{TEST_FILE_TEXT}' > {TEMP_JCL_PATH}2/file3" + cmd=f"echo '{TEST_FILE_TEXT}' > {temp_jcl_path}2/file3" ) results = hosts.all.zos_encode( - src=MVS_PDS, - dest=TEMP_JCL_PATH + "2", + src=mvs_ps, + dest=temp_jcl_path + "2", encoding={ "from": TO_ENCODING, "to": FROM_ENCODING, @@ -957,11 +985,11 @@ def test_uss_backup_entire_folder_to_default_backup_location(ansible_zos_module) and content1 == TEST_FILE_TEXT ) finally: - hosts.all.file(path=TEMP_JCL_PATH, state="absent") - hosts.all.file(path=TEMP_JCL_PATH + "2", state="absent") + hosts.all.file(path=temp_jcl_path, state="absent") + hosts.all.file(path=temp_jcl_path + "2", state="absent") hosts.all.file(path=backup_name, state="absent") - hosts.all.zos_data_set(name=MVS_PDS, state="absent") - hosts.all.zos_data_set(name=BACKUP_DATA_SET, state="absent") + hosts.all.zos_data_set(name=mvs_ps, state="absent") + hosts.all.zos_data_set(name=backup_data_set, state="absent") def test_uss_backup_entire_folder_to_default_backup_location_compressed( @@ -969,30 +997,32 @@ def test_uss_backup_entire_folder_to_default_backup_location_compressed( ): try: hosts = ansible_zos_module - MVS_PDS = get_tmp_ds_name() - hosts.all.zos_data_set(name=BACKUP_DATA_SET, state="absent") + mvs_ps = get_tmp_ds_name() + backup_data_set = get_tmp_ds_name() + temp_jcl_path = get_unique_uss_file_name() + hosts.all.zos_data_set(name=backup_data_set, state="absent") # create and fill PDS - hosts.all.zos_data_set(name=MVS_PDS, state="absent") - hosts.all.zos_data_set(name=MVS_PDS, state="present", type="pds") - hosts.all.shell(cmd=f"echo '{TEST_FILE_TEXT}' > {TEMP_JCL_PATH}") - hosts.all.shell(cmd=f"cp {TEMP_JCL_PATH} \"//'{MVS_PDS}(SAMPLE)'\"") - hosts.all.shell(cmd=f"cp {TEMP_JCL_PATH} \"//'{MVS_PDS}(SAMPLE2)'\"") - hosts.all.shell(cmd=f"cp {TEMP_JCL_PATH} \"//'{MVS_PDS}(SAMPLE3)'\"") + hosts.all.zos_data_set(name=mvs_ps, state="absent") + hosts.all.zos_data_set(name=mvs_ps, state="present", type="pds") + hosts.all.shell(cmd=f"echo '{TEST_FILE_TEXT}' > {temp_jcl_path}") + hosts.all.shell(cmd=f"cp {temp_jcl_path} \"//'{mvs_ps}(SAMPLE)'\"") + hosts.all.shell(cmd=f"cp {temp_jcl_path} \"//'{mvs_ps}(SAMPLE2)'\"") + hosts.all.shell(cmd=f"cp {temp_jcl_path} \"//'{mvs_ps}(SAMPLE3)'\"") # create and fill directory - hosts.all.file(path=TEMP_JCL_PATH + "2", state="absent") - hosts.all.file(path=TEMP_JCL_PATH + "2", state="directory") + hosts.all.file(path=temp_jcl_path + "2", state="absent") + hosts.all.file(path=temp_jcl_path + "2", state="directory") hosts.all.shell( - cmd=f"echo '{TEST_FILE_TEXT}' > {TEMP_JCL_PATH}2/file1" + cmd=f"echo '{TEST_FILE_TEXT}' > {temp_jcl_path}2/file1" ) hosts.all.shell( - cmd=f"echo '{TEST_FILE_TEXT}' > {TEMP_JCL_PATH}2/file2" + cmd=f"echo '{TEST_FILE_TEXT}' > {temp_jcl_path}2/file2" ) hosts.all.shell( - cmd=f"echo '{TEST_FILE_TEXT}' > {TEMP_JCL_PATH}2/file3" + cmd=f"echo '{TEST_FILE_TEXT}' > {temp_jcl_path}2/file3" ) results = hosts.all.zos_encode( - src=MVS_PDS, - dest=TEMP_JCL_PATH + "2", + src=mvs_ps, + dest=temp_jcl_path + "2", encoding={ "from": TO_ENCODING, "to": FROM_ENCODING, @@ -1008,52 +1038,52 @@ def test_uss_backup_entire_folder_to_default_backup_location_compressed( for result in results.contacted.values(): assert backup_name in result.get("stdout") finally: - hosts.all.zos_data_set(name=MVS_PDS, state="absent") - hosts.all.file(path=TEMP_JCL_PATH, state="absent") - hosts.all.file(path=TEMP_JCL_PATH + "2", state="absent") + hosts.all.zos_data_set(name=mvs_ps, state="absent") + hosts.all.file(path=temp_jcl_path, state="absent") + hosts.all.file(path=temp_jcl_path + "2", state="absent") hosts.all.file(path=backup_name, state="absent") def test_return_backup_name_on_module_success_and_failure(ansible_zos_module): try: hosts = ansible_zos_module - MVS_PS = get_tmp_ds_name() - hosts.all.zos_data_set(name=MVS_PS, state="absent") - hosts.all.zos_data_set(name=BACKUP_DATA_SET, state="absent") - hosts.all.zos_data_set(name=MVS_PS, state="present", type="seq") - hosts.all.shell(cmd=f"decho \"{TEST_FILE_TEXT}\" \"{MVS_PS}\"") + mvs_ps = get_tmp_ds_name() + backup_data_set = get_tmp_ds_name() + hosts.all.zos_data_set(name=mvs_ps, state="absent") + hosts.all.zos_data_set(name=backup_data_set, state="absent") + hosts.all.zos_data_set(name=mvs_ps, state="present", type="seq") + hosts.all.shell(cmd=f"decho \"{TEST_FILE_TEXT}\" \"{mvs_ps}\"") enc_ds = hosts.all.zos_encode( - src=MVS_PS, + src=mvs_ps, encoding={ "from": FROM_ENCODING, "to": TO_ENCODING, }, backup=True, - backup_name=BACKUP_DATA_SET, + backup_name=backup_data_set, ) for content in enc_ds.contacted.values(): assert content.get("backup_name") is not None - assert content.get("backup_name") == BACKUP_DATA_SET + assert content.get("backup_name") == backup_data_set - hosts.all.zos_data_set(name=BACKUP_DATA_SET, state="absent") + hosts.all.zos_data_set(name=backup_data_set, state="absent") enc_ds = hosts.all.zos_encode( - src=MVS_PS, + src=mvs_ps, encoding={ "from": INVALID_ENCODING, "to": TO_ENCODING, }, backup=True, - backup_name=BACKUP_DATA_SET, + backup_name=backup_data_set, ) for content in enc_ds.contacted.values(): assert content.get("msg") is not None assert content.get("backup_name") is not None - assert content.get("backup_name") == BACKUP_DATA_SET + assert content.get("backup_name") == backup_data_set finally: - hosts.all.file(path=TEMP_JCL_PATH, state="absent") - hosts.all.zos_data_set(name=MVS_PS, state="absent") - hosts.all.zos_data_set(name=BACKUP_DATA_SET, state="absent") + hosts.all.zos_data_set(name=mvs_ps, state="absent") + hosts.all.zos_data_set(name=backup_data_set, state="absent") @pytest.mark.parametrize("generation", ["-1", "+1"]) @@ -1111,6 +1141,7 @@ def test_gdg_encoding_conversion_invalid_gdg(ansible_zos_module): def test_encoding_conversion_gds_to_uss_file(ansible_zos_module): + uss_dest_file = get_unique_uss_file_name() try: hosts = ansible_zos_module ds_name = get_tmp_ds_name() @@ -1123,7 +1154,7 @@ def test_encoding_conversion_gds_to_uss_file(ansible_zos_module): results = hosts.all.zos_encode( src=gds_name, - dest=USS_DEST_FILE, + dest=uss_dest_file, encoding={ "from": FROM_ENCODING, "to": TO_ENCODING, @@ -1138,14 +1169,14 @@ def test_encoding_conversion_gds_to_uss_file(ansible_zos_module): assert ds_name in src assert re.fullmatch(gds_pattern, src.split(".")[-1]) - assert result.get("dest") == USS_DEST_FILE + assert result.get("dest") == uss_dest_file assert result.get("changed") is True - tag_results = hosts.all.shell(cmd="ls -T {0}".format(USS_DEST_FILE)) + tag_results = hosts.all.shell(cmd="ls -T {0}".format(uss_dest_file)) for result in tag_results.contacted.values(): assert TO_ENCODING in result.get("stdout") finally: - hosts.all.file(path=USS_DEST_FILE, state="absent") + hosts.all.file(path=uss_dest_file, state="absent") hosts.all.shell(cmd=f"""drm "{ds_name}(0)" """) hosts.all.shell(cmd=f"drm {ds_name}") @@ -1191,12 +1222,12 @@ def test_encoding_conversion_gds_no_dest(ansible_zos_module): assert int(result.get("stdout")) > 0 finally: - hosts.all.file(path=USS_FILE, state="absent") hosts.all.shell(cmd=f"""drm "{gds_name}" """) hosts.all.shell(cmd=f"drm {ds_name}") def test_encoding_conversion_uss_file_to_gds(ansible_zos_module): + uss_file = get_unique_uss_file_name() try: hosts = ansible_zos_module ds_name = get_tmp_ds_name() @@ -1205,10 +1236,10 @@ def test_encoding_conversion_uss_file_to_gds(ansible_zos_module): hosts.all.shell(cmd=f"dtouch -tGDG -L3 {ds_name}") hosts.all.shell(cmd=f"""dtouch -tseq "{ds_name}(+1)" """) - hosts.all.shell(cmd=f"echo \"{TEST_DATA}\" > {USS_FILE}") + hosts.all.shell(cmd=f"echo \"{TEST_DATA}\" > {uss_file}") results = hosts.all.zos_encode( - src=USS_FILE, + src=uss_file, dest=gds_name, encoding={ "from": FROM_ENCODING, @@ -1229,7 +1260,7 @@ def test_encoding_conversion_uss_file_to_gds(ansible_zos_module): assert ds_name in dest assert re.fullmatch(gds_pattern, dest.split(".")[-1]) - assert result.get("src") == USS_FILE + assert result.get("src") == uss_file assert result.get("changed") is True for result in dest_existence_check.contacted.values(): @@ -1237,7 +1268,7 @@ def test_encoding_conversion_uss_file_to_gds(ansible_zos_module): assert int(result.get("stdout")) > 0 finally: - hosts.all.file(path=USS_FILE, state="absent") + hosts.all.file(path=uss_file, state="absent") hosts.all.shell(cmd=f"""drm "{gds_name}" """) hosts.all.shell(cmd=f"drm {ds_name}") From 63836bc7b9073b4545a9b9373898264479b3612f Mon Sep 17 00:00:00 2001 From: Ivan Moreno <iamorenosoto@gmail.com> Date: Wed, 14 Aug 2024 12:41:27 -0600 Subject: [PATCH 451/495] [Enabler] [zos_find] Test suite portability changes (#1656) * Change datasets used in age test * Change HLQ for datasets used * Add changelog fragment * Remove hardcoded volumes * Remove default volume --- .../fragments/1656-zos_find_portability.yml | 3 + .../functional/modules/test_zos_find_func.py | 86 +++++++++++-------- 2 files changed, 54 insertions(+), 35 deletions(-) create mode 100644 changelogs/fragments/1656-zos_find_portability.yml diff --git a/changelogs/fragments/1656-zos_find_portability.yml b/changelogs/fragments/1656-zos_find_portability.yml new file mode 100644 index 000000000..ca32a4580 --- /dev/null +++ b/changelogs/fragments/1656-zos_find_portability.yml @@ -0,0 +1,3 @@ +trivial: + - test_zos_find_func.py - Remove the use of hard coded dataset names. + (https://github.com/ansible-collections/ibm_zos_core/pull/1656). \ No newline at end of file diff --git a/tests/functional/modules/test_zos_find_func.py b/tests/functional/modules/test_zos_find_func.py index 5f0227878..7c3041eea 100644 --- a/tests/functional/modules/test_zos_find_func.py +++ b/tests/functional/modules/test_zos_find_func.py @@ -21,28 +21,29 @@ import pytest +# hlq used across the test suite. +TEST_SUITE_HLQ = "ANSIBLE" + SEQ_NAMES = [ - "TEST.FIND.SEQ.FUNCTEST.FIRST", - "TEST.FIND.SEQ.FUNCTEST.SECOND", - "TEST.FIND.SEQ.FUNCTEST.THIRD" + f"{TEST_SUITE_HLQ}.FIND.SEQ.FUNCTEST.FIRST", + f"{TEST_SUITE_HLQ}.FIND.SEQ.FUNCTEST.SECOND", + f"{TEST_SUITE_HLQ}.FIND.SEQ.FUNCTEST.THIRD" ] PDS_NAMES = [ - "TEST.FIND.PDS.FUNCTEST.FIRST", - "TEST.FIND.PDS.FUNCTEST.SECOND", - "TEST.FIND.PDS.FUNCTEST.THIRD" + f"{TEST_SUITE_HLQ}.FIND.PDS.FUNCTEST.FIRST", + f"{TEST_SUITE_HLQ}.FIND.PDS.FUNCTEST.SECOND", + f"{TEST_SUITE_HLQ}.FIND.PDS.FUNCTEST.THIRD" ] VSAM_NAMES = [ - "TEST.FIND.VSAM.FUNCTEST.FIRST" + f"{TEST_SUITE_HLQ}.FIND.VSAM.FUNCTEST.FIRST" ] DATASET_TYPES = ['seq', 'pds', 'pdse'] -# hlq used across the test suite. -TEST_SUITE_HLQ = "ANSIBLE" -def create_vsam_ksds(ds_name, ansible_zos_module, volume="000000"): +def create_vsam_ksds(ds_name, ansible_zos_module, volume): hosts = ansible_zos_module alloc_cmd = f""" DEFINE CLUSTER (NAME({ds_name}) - INDEXED - @@ -141,7 +142,7 @@ def test_find_sequential_data_sets_containing_single_string(ansible_zos_module): hosts.all.shell(cmd=f"decho '{search_string}' \"{ds}\" ") find_res = hosts.all.zos_find( - patterns=['TEST.FIND.SEQ.*.*'], + patterns=[f'{TEST_SUITE_HLQ}.FIND.SEQ.*.*'], contains=search_string ) for val in find_res.contacted.values(): @@ -164,7 +165,7 @@ def test_find_sequential_data_sets_containing_single_string(ansible_zos_module): def test_find_sequential_data_sets_multiple_patterns(ansible_zos_module): hosts = ansible_zos_module search_string = "dummy string" - new_ds = "TEST.FIND.SEQ.FUNCTEST.FOURTH" + new_ds = f"{TEST_SUITE_HLQ}.FIND.SEQ.FUNCTEST.FOURTH" try: hosts.all.zos_data_set( batch=[ @@ -181,7 +182,7 @@ def test_find_sequential_data_sets_multiple_patterns(ansible_zos_module): hosts.all.shell(cmd=f"decho '{search_string}' \"{ds}\" ") find_res = hosts.all.zos_find( - patterns=['TEST.FIND.SEQ.*.*', 'TEST.INVALID.*'], + patterns=[f'{TEST_SUITE_HLQ}.FIND.SEQ.*.*', f'{TEST_SUITE_HLQ}.INVALID.*'], contains=search_string ) for val in find_res.contacted.values(): @@ -231,7 +232,7 @@ def test_find_pds_members_containing_string(ansible_zos_module): result = hosts.all.shell(cmd=f"decho '{search_string}' \"{ds}(MEMBER)\" ") find_res = hosts.all.zos_find( - pds_paths=['TEST.FIND.PDS.FUNCTEST.*'], + pds_paths=[f'{TEST_SUITE_HLQ}.FIND.PDS.FUNCTEST.*'], contains=search_string, patterns=['.*'] ) @@ -265,7 +266,7 @@ def test_exclude_data_sets_from_matched_list(ansible_zos_module): ] ) find_res = hosts.all.zos_find( - patterns=['TEST.FIND.SEQ.*.*'], + patterns=[f'{TEST_SUITE_HLQ}.FIND.SEQ.*.*'], excludes=['.*THIRD$'] ) for val in find_res.contacted.values(): @@ -312,7 +313,9 @@ def test_exclude_members_from_matched_list(ansible_zos_module): ] ) find_res = hosts.all.zos_find( - pds_paths=['TEST.FIND.PDS.FUNCTEST.*'], excludes=['.*FILE$'], patterns=['.*'] + pds_paths=[f'{TEST_SUITE_HLQ}.FIND.PDS.FUNCTEST.*'], + excludes=['.*FILE$'], + patterns=['.*'] ) for val in find_res.contacted.values(): assert len(val.get('data_sets')) == 3 @@ -332,7 +335,7 @@ def test_exclude_members_from_matched_list(ansible_zos_module): def test_find_data_sets_older_than_age(ansible_zos_module): hosts = ansible_zos_module find_res = hosts.all.zos_find( - patterns=['IMSTESTL.IMS01.RESTART', "IMSTESTL.IMS01.LGMSGL".lower()], + patterns=['SYS1.PARMLIB', "SYS1.PROCLIB".lower()], age='2d' ) for val in find_res.contacted.values(): @@ -343,12 +346,12 @@ def test_find_data_sets_older_than_age(ansible_zos_module): @pytest.mark.parametrize("ds_type", DATASET_TYPES) def test_find_data_sets_larger_than_size(ansible_zos_module, ds_type): hosts = ansible_zos_module - TEST_PS1 = 'TEST.PS.ONE' - TEST_PS2 = 'TEST.PS.TWO' + TEST_PS1 = f'{TEST_SUITE_HLQ}.PS.ONE' + TEST_PS2 = f'{TEST_SUITE_HLQ}.PS.TWO' try: res = hosts.all.zos_data_set(name=TEST_PS1, state="present", space_primary="1", space_type="m", type=ds_type) res = hosts.all.zos_data_set(name=TEST_PS2, state="present", space_primary="1", space_type="m", type=ds_type) - find_res = hosts.all.zos_find(patterns=['TEST.PS.*'], size="1k") + find_res = hosts.all.zos_find(patterns=[f'{TEST_SUITE_HLQ}.PS.*'], size="1k") for val in find_res.contacted.values(): assert len(val.get('data_sets')) == 2 assert val.get('matched') == 2 @@ -359,10 +362,10 @@ def test_find_data_sets_larger_than_size(ansible_zos_module, ds_type): def test_find_data_sets_smaller_than_size(ansible_zos_module): hosts = ansible_zos_module - TEST_PS = 'USER.FIND.TEST' + TEST_PS = f'{TEST_SUITE_HLQ}.FIND.TEST' try: hosts.all.zos_data_set(name=TEST_PS, state="present", type="seq", space_primary="1", space_type="k") - find_res = hosts.all.zos_find(patterns=['USER.FIND.*'], size='-1m') + find_res = hosts.all.zos_find(patterns=[f'{TEST_SUITE_HLQ}.FIND.*'], size='-1m') for val in find_res.contacted.values(): assert len(val.get('data_sets')) == 1 assert val.get('matched') == 1 @@ -370,11 +373,12 @@ def test_find_data_sets_smaller_than_size(ansible_zos_module): hosts.all.zos_data_set(name=TEST_PS, state="absent") -def test_find_data_sets_in_volume(ansible_zos_module): +def test_find_data_sets_in_volume(ansible_zos_module, volumes_on_systems): try: hosts = ansible_zos_module - data_set_name = "TEST.FIND.SEQ" - volume = "000000" + data_set_name = f"{TEST_SUITE_HLQ}.FIND.SEQ" + volumes = Volume_Handler(volumes_on_systems) + volume = volumes.get_available_vol() # Create temp data set hosts.all.zos_data_set(name=data_set_name, type="seq", state="present", volumes=[volume]) find_res = hosts.all.zos_find( @@ -388,13 +392,18 @@ def test_find_data_sets_in_volume(ansible_zos_module): -def test_find_vsam_pattern(ansible_zos_module): +def test_find_vsam_pattern(ansible_zos_module, volumes_on_systems): hosts = ansible_zos_module try: + volumes = Volume_Handler(volumes_on_systems) + for vsam in VSAM_NAMES: - create_vsam_ksds(vsam, hosts) + volume = volumes.get_available_vol() + create_vsam_ksds(vsam, hosts, volume) + find_res = hosts.all.zos_find( - patterns=['TEST.FIND.VSAM.FUNCTEST.*'], resource_type='cluster' + patterns=[f'{TEST_SUITE_HLQ}.FIND.VSAM.FUNCTEST.*'], + resource_type='cluster' ) for val in find_res.contacted.values(): assert len(val.get('data_sets')) == 1 @@ -415,13 +424,15 @@ def test_find_vsam_in_volume(ansible_zos_module, volumes_on_systems): volumes = Volume_Handler(volumes_on_systems) volume_1 = volumes.get_available_vol() volume_2 = volumes.get_available_vol() - alternate_vsam = "TEST.FIND.VSAM.SECOND" + alternate_vsam = f"{TEST_SUITE_HLQ}.FIND.VSAM.SECOND" try: for vsam in VSAM_NAMES: - create_vsam_ksds(vsam, hosts, volume=volume_1) - create_vsam_ksds(alternate_vsam, hosts, volume=volume_2) + create_vsam_ksds(vsam, hosts, volume_1) + create_vsam_ksds(alternate_vsam, hosts, volume_2) find_res = hosts.all.zos_find( - patterns=['TEST.FIND.VSAM.*.*'], volumes=[volume_1], resource_type='cluster' + patterns=[f'{TEST_SUITE_HLQ}.FIND.VSAM.*.*'], + volumes=[volume_1], + resource_type='cluster' ) for val in find_res.contacted.values(): assert len(val.get('data_sets')) == 1 @@ -454,7 +465,7 @@ def test_find_invalid_size_indicator_fails(ansible_zos_module): def test_find_non_existent_data_sets(ansible_zos_module): hosts = ansible_zos_module - find_res = hosts.all.zos_find(patterns=['TEST.FIND.NONE.*.*']) + find_res = hosts.all.zos_find(patterns=[f'{TEST_SUITE_HLQ}.FIND.NONE.*.*']) for val in find_res.contacted.values(): assert len(val.get('data_sets')) == 0 assert val.get('matched') == 0 @@ -462,7 +473,10 @@ def test_find_non_existent_data_sets(ansible_zos_module): def test_find_non_existent_data_set_members(ansible_zos_module): hosts = ansible_zos_module - find_res = hosts.all.zos_find(pds_paths=['TEST.NONE.PDS.*'], patterns=['.*']) + find_res = hosts.all.zos_find( + pds_paths=[f'{TEST_SUITE_HLQ}.NONE.PDS.*'], + patterns=['.*'] + ) for val in find_res.contacted.values(): assert len(val.get('data_sets')) == 0 assert val.get('matched') == 0 @@ -497,7 +511,9 @@ def test_find_mixed_members_from_pds_paths(ansible_zos_module): ] ) find_res = hosts.all.zos_find( - pds_paths=['TEST.NONE.PDS.*','TEST.FIND.PDS.FUNCTEST.*'], excludes=['.*FILE$'], patterns=['.*'] + pds_paths=[f'{TEST_SUITE_HLQ}.NONE.PDS.*',f'{TEST_SUITE_HLQ}.FIND.PDS.FUNCTEST.*'], + excludes=['.*FILE$'], + patterns=['.*'] ) for val in find_res.contacted.values(): assert len(val.get('data_sets')) == 3 From d22ea12203fd968f8b560b3f379bbb6251d0282f Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Fri, 16 Aug 2024 13:49:23 -0600 Subject: [PATCH 452/495] Merge v1.11.0-beta.1 release into dev. (#1655) * Solved merge conflicts * Updated release notes * Fixed trailing parenthesis * Updated zos_mvs_raw merge conflicts * Updated test_zos_mvs_raw git merge from dev branch * Removed unused import * Updated RST files * Fixed pep8 issue * Updated backslashes for samples * Fixed backslashes in docs * Updated RSTs * Updated newline sequences * Removed extra file --- CHANGELOG.rst | 45 ++ README.md | 13 +- changelogs/.plugin-cache.yaml | 2 +- changelogs/changelog.yaml | 125 ++++++ ...summary.yml => v1.11.0-beta.1_summary.yml} | 2 +- docs/source/modules/zos_apf.rst | 68 +-- docs/source/modules/zos_archive.rst | 121 +++--- docs/source/modules/zos_blockinfile.rst | 70 ++-- docs/source/modules/zos_copy.rst | 212 +++++----- docs/source/modules/zos_data_set.rst | 190 ++++----- docs/source/modules/zos_encode.rst | 36 +- docs/source/modules/zos_fetch.rst | 20 +- docs/source/modules/zos_find.rst | 87 +++- docs/source/modules/zos_gather_facts.rst | 14 +- docs/source/modules/zos_job_output.rst | 16 +- docs/source/modules/zos_job_query.rst | 20 +- docs/source/modules/zos_job_submit.rst | 64 +-- docs/source/modules/zos_lineinfile.rst | 86 ++-- docs/source/modules/zos_mount.rst | 86 ++-- docs/source/modules/zos_mvs_raw.rst | 393 ++++++++++-------- docs/source/modules/zos_operator.rst | 4 +- .../modules/zos_operator_action_query.rst | 20 +- docs/source/modules/zos_ping.rst | 8 +- docs/source/modules/zos_script.rst | 32 +- docs/source/modules/zos_tso_command.rst | 4 +- docs/source/modules/zos_unarchive.rst | 71 ++-- docs/source/modules/zos_volume_init.rst | 34 +- docs/source/release_notes.rst | 130 +++++- .../source/resources/releases_maintenance.rst | 5 + galaxy.yml | 2 +- meta/ibm_zos_core_meta.yml | 4 +- plugins/action/zos_fetch.py | 2 +- plugins/action/zos_script.py | 2 +- plugins/doc_fragments/template.py | 2 +- plugins/doc_fragments/template.py-e | 120 ++++++ plugins/module_utils/backup.py | 5 +- plugins/module_utils/data_set.py | 74 ++-- plugins/module_utils/vtoc.py | 2 +- plugins/modules/zos_apf.py | 26 +- plugins/modules/zos_archive.py | 4 +- plugins/modules/zos_blockinfile.py | 4 +- plugins/modules/zos_encode.py | 2 +- plugins/modules/zos_find.py | 1 - plugins/modules/zos_job_submit.py | 4 +- plugins/modules/zos_lineinfile.py | 4 +- plugins/modules/zos_mvs_raw.py | 4 +- plugins/modules/zos_unarchive.py | 6 +- .../modules/test_module_security.py | 2 +- tests/functional/modules/test_zos_apf_func.py | 5 - .../modules/test_zos_backup_restore.py | 2 +- .../modules/test_zos_blockinfile_func.py | 50 ++- .../functional/modules/test_zos_copy_func.py | 93 +++++ 52 files changed, 1556 insertions(+), 842 deletions(-) rename changelogs/fragments/{v1.10.0_summary.yml => v1.11.0-beta.1_summary.yml} (92%) create mode 100644 plugins/doc_fragments/template.py-e diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 9efc1ea61..d23ceb7ed 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -4,6 +4,51 @@ ibm.ibm\_zos\_core Release Notes .. contents:: Topics +v1.11.0-beta.1 +============== + +Release Summary +--------------- + +Release Date: '2024-08-05' +This changelog describes all changes made to the modules and plugins included +in this collection. The release date is the date the changelog is created. +For additional details such as required dependencies and availability review +the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__ + +Minor Changes +------------- + +- zos_apf - Change input to auto-escape 'library' names containing symbols (https://github.com/ansible-collections/ibm_zos_core/pull/1493). +- zos_archive - Added support for GDG and GDS relative name notation to archive data sets. Added support for data set names with special characters like $, /#, /- and @. (https://github.com/ansible-collections/ibm_zos_core/pull/1511). +- zos_backup_restore - Added support for GDS relative name notation to include or exclude data sets when operation is backup. Added support for data set names with special characters like $, /#, and @. (https://github.com/ansible-collections/ibm_zos_core/pull/1527). +- zos_blockinfile - Added support for GDG and GDS relative name notation to use a data set. And backup in new generations. Added support for data set names with special characters like $, /#, /- and @. (https://github.com/ansible-collections/ibm_zos_core/pull/1516). +- zos_copy - add support for copying generation data sets (GDS) and generation data groups (GDG), as well as using a GDS for backup. (https://github.com/ansible-collections/ibm_zos_core/pull/1564). +- zos_data_set - Added support for GDG and GDS relative name notation to create, delete, catalog and uncatalog a data set. Added support for data set names with special characters like $, /#, /- and @. (https://github.com/ansible-collections/ibm_zos_core/pull/1504). +- zos_encode - add support for encoding generation data sets (GDS), as well as using a GDS for backup. (https://github.com/ansible-collections/ibm_zos_core/pull/1531). +- zos_fetch - add support for fetching generation data groups and generation data sets. (https://github.com/ansible-collections/ibm_zos_core/pull/1519) +- zos_find - added support for GDG/GDS and special characters (https://github.com/ansible-collections/ibm_zos_core/pull/1518). +- zos_job_submit - Improved the copy to remote mechanic to avoid using deepcopy that could result in failure for some systems. (https://github.com/ansible-collections/ibm_zos_core/pull/1561). +- zos_job_submit - add support for generation data groups and generation data sets as sources for jobs. (https://github.com/ansible-collections/ibm_zos_core/pull/1497) +- zos_lineinfile - Added support for GDG and GDS relative name notation to use a data set. And backup in new generations. Added support for data set names with special characters like $, /#, /- and @. (https://github.com/ansible-collections/ibm_zos_core/pull/1516). +- zos_mount - Added support for data set names with special characters ($, /#, /- and @). This is for both src and backup data set names. (https://github.com/ansible-collections/ibm_zos_core/pull/1631). +- zos_tso_command - Added support for GDG and GDS relative name notation to use a data set name. Added support for data set names with special characters like $, /#, /- and @. (https://github.com/ansible-collections/ibm_zos_core/pull/1563). +- zos_mvs_raw - Added support for GDG and GDS relative name notation to use a data set. Added support for data set names with special characters like $, /#, /- and @. (https://github.com/ansible-collections/ibm_zos_core/pull/1525). +- zos_mvs_raw - Added support for GDG and GDS relative positive name notation to use a data set. (https://github.com/ansible-collections/ibm_zos_core/pull/1541). +- zos_mvs_raw - Redesign the wrappers of dd clases to use properly the arguments. (https://github.com/ansible-collections/ibm_zos_core/pull/1470). +- zos_script - Improved the copy to remote mechanic to avoid using deepcopy that could result in failure for some systems. (https://github.com/ansible-collections/ibm_zos_core/pull/1561). +- zos_unarchive - Added support for data set names with special characters like $, /#, /- and @. (https://github.com/ansible-collections/ibm_zos_core/pull/1511). +- zos_unarchive - Improved the copy to remote mechanic to avoid using deepcopy that could result in failure for some systems. (https://github.com/ansible-collections/ibm_zos_core/pull/1561). + +Bugfixes +-------- + +- module_util/data_set.py - DataSet.data_set_cataloged function previously only returned True or False, but failed to account for exceptions which occurred during the LISTCAT. The fix now raises an MVSCmdExecError if the return code from LISTCAT is too high. (https://github.com/ansible-collections/ibm_zos_core/pull/1535). +- zos_copy - a regression in version 1.4.0 made the module stop automatically computing member names when copying a single file into a PDS/E. Fix now lets a user copy a single file into a PDS/E without adding a member in the dest option. (https://github.com/ansible-collections/ibm_zos_core/pull/1570). +- zos_copy - module would use opercmd to check if a non existent destination data set is locked. Fix now only checks if the destination is already present. (https://github.com/ansible-collections/ibm_zos_core/pull/1623). +- zos_job_submit - Was not propagating any error types UnicodeDecodeError, JSONDecodeError, TypeError, KeyError when encountered, now the error message shares the type error. (https://github.com/ansible-collections/ibm_zos_core/pull/1560). +- zos_mvs_raw - DD_output first character from each line was missing. Change now includes the first character of each line. (https://github.com/ansible-collections/ibm_zos_core/pull/1543). + v1.10.0 ======= diff --git a/README.md b/README.md index 629ce15b4..e0d274bad 100644 --- a/README.md +++ b/README.md @@ -36,7 +36,7 @@ To upgrade the collection to the latest available version, run the following com ansible-galaxy collection install ibm.ibm_zos_core --upgrade ``` -<br/>You can also install a specific version of the collection, for example, if you need to downgrade for some reason. Use the following syntax to install version 1.0.0: +<br/>You can also install a specific version of the collection, for example, if you need to install a different version. Use the following syntax to install version 1.0.0: ```sh ansible-galaxy collection install ibm.ibm_zos_core:1.0.0 @@ -123,7 +123,7 @@ environment_vars: ## Testing -All releases, will meet the following test criteria. +All releases will meet the following test criteria. * 100% success for [Functional](https://github.com/ansible-collections/ibm_zos_core/tree/dev/tests/functional) tests. * 100% success for [Unit](https://github.com/ansible-collections/ibm_zos_core/tree/dev/tests/unit) tests. @@ -134,9 +134,9 @@ All releases, will meet the following test criteria. <br/>This release of the collection was tested with following dependencies. * ansible-core v2.15.x -* Python 3.9.x +* Python 3.11.x * IBM Open Enterprise SDK for Python 3.11.x -* IBM Z Open Automation Utilities (ZOAU) 1.3.0.x +* IBM Z Open Automation Utilities (ZOAU) 1.3.1.x * z/OS V2R5 This release introduces case sensitivity for option values and includes a porting guide in the [release notes](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html) to assist with which option values will need to be updated. @@ -177,9 +177,10 @@ For Galaxy and GitHub users, to see the supported ansible-core versions, review | Version | Status | Release notes | Changelogs | |----------|----------------|---------------|------------| -| 1.11.x | In development | unreleased | unreleased | +| 1.12.x | In development | unreleased | unreleased | +| 1.11.x | In preview | [Release notes](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html#version-1-11-0-beta.1) | [Changelogs](https://github.com/ansible-collections/ibm_zos_core/blob/v1.11.0-beta.1/CHANGELOG.rst) | | 1.10.x | Current | [Release notes](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html#version-1-10-0) | [Changelogs](https://github.com/ansible-collections/ibm_zos_core/blob/v1.10.0/CHANGELOG.rst) | -| 1.9.x | Released | [Release notes](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html#version-1-9-0) | [Changelogs](https://github.com/ansible-collections/ibm_zos_core/blob/v1.9.0/CHANGELOG.rst) | +| 1.9.x | Released | [Release notes](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html#version-1-9-2) | [Changelogs](https://github.com/ansible-collections/ibm_zos_core/blob/v1.9.2/CHANGELOG.rst) | | 1.8.x | Released | [Release notes](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html#version-1-8-0) | [Changelogs](https://github.com/ansible-collections/ibm_zos_core/blob/v1.8.0/CHANGELOG.rst) | | 1.7.x | Released | [Release notes](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html#version-1-7-0) | [Changelogs](https://github.com/ansible-collections/ibm_zos_core/blob/v1.7.0/CHANGELOG.rst) | | 1.6.x | Released | [Release notes](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html#version-1-6-0) | [Changelogs](https://github.com/ansible-collections/ibm_zos_core/blob/v1.6.0/CHANGELOG.rst) | diff --git a/changelogs/.plugin-cache.yaml b/changelogs/.plugin-cache.yaml index e5bd167b7..dcc631cd0 100644 --- a/changelogs/.plugin-cache.yaml +++ b/changelogs/.plugin-cache.yaml @@ -135,4 +135,4 @@ plugins: strategy: {} test: {} vars: {} -version: 1.10.0-beta.1 +version: 1.11.0-beta.1 diff --git a/changelogs/changelog.yaml b/changelogs/changelog.yaml index 4d9648079..3c48425d7 100644 --- a/changelogs/changelog.yaml +++ b/changelogs/changelog.yaml @@ -259,6 +259,131 @@ releases: - 992-fix-sanity4to6.yml - v1.10.0-beta.1_summary.yml release_date: '2024-05-08' + 1.11.0-beta.1: + changes: + bugfixes: + - module_util/data_set.py - DataSet.data_set_cataloged function previously only + returned True or False, but failed to account for exceptions which occurred + during the LISTCAT. The fix now raises an MVSCmdExecError if the return code + from LISTCAT is too high. (https://github.com/ansible-collections/ibm_zos_core/pull/1535). + - zos_copy - a regression in version 1.4.0 made the module stop automatically + computing member names when copying a single file into a PDS/E. Fix now lets + a user copy a single file into a PDS/E without adding a member in the dest + option. (https://github.com/ansible-collections/ibm_zos_core/pull/1570). + - zos_copy - module would use opercmd to check if a non existent destination + data set is locked. Fix now only checks if the destination is already present. + (https://github.com/ansible-collections/ibm_zos_core/pull/1623). + - zos_job_submit - Was not propagating any error types UnicodeDecodeError, JSONDecodeError, + TypeError, KeyError when encountered, now the error message shares the type + error. (https://github.com/ansible-collections/ibm_zos_core/pull/1560). + - zos_mvs_raw - DD_output first character from each line was missing. Change + now includes the first character of each line. (https://github.com/ansible-collections/ibm_zos_core/pull/1543). + minor_changes: + - zos_apf - Change input to auto-escape 'library' names containing symbols (https://github.com/ansible-collections/ibm_zos_core/pull/1493). + - zos_archive - Added support for GDG and GDS relative name notation to archive + data sets. Added support for data set names with special characters like $, + /#, /- and @. (https://github.com/ansible-collections/ibm_zos_core/pull/1511). + - zos_backup_restore - Added support for GDS relative name notation to include or + exclude data sets when operation is backup. Added support for data set names + with special characters like $, /#, and @. (https://github.com/ansible-collections/ibm_zos_core/pull/1527). + - zos_blockinfile - Added support for GDG and GDS relative name notation to + use a data set. And backup in new generations. Added support for data set + names with special characters like $, /#, /- and @. (https://github.com/ansible-collections/ibm_zos_core/pull/1516). + - zos_copy - add support for copying generation data sets (GDS) and generation + data groups (GDG), as well as using a GDS for backup. (https://github.com/ansible-collections/ibm_zos_core/pull/1564). + - zos_data_set - Added support for GDG and GDS relative name notation to create, + delete, catalog and uncatalog a data set. Added support for data set names + with special characters like $, /#, /- and @. (https://github.com/ansible-collections/ibm_zos_core/pull/1504). + - zos_encode - add support for encoding generation data sets (GDS), as well + as using a GDS for backup. (https://github.com/ansible-collections/ibm_zos_core/pull/1531). + - zos_fetch - add support for fetching generation data groups and generation + data sets. (https://github.com/ansible-collections/ibm_zos_core/pull/1519) + - zos_find - added support for GDG/GDS and special characters (https://github.com/ansible-collections/ibm_zos_core/pull/1518). + - zos_job_submit - Improved the copy to remote mechanic to avoid using deepcopy + that could result in failure for some systems. (https://github.com/ansible-collections/ibm_zos_core/pull/1561). + - zos_job_submit - add support for generation data groups and generation data + sets as sources for jobs. (https://github.com/ansible-collections/ibm_zos_core/pull/1497) + - zos_lineinfile - Added support for GDG and GDS relative name notation to use + a data set. And backup in new generations. Added support for data set names + with special characters like $, /#, /- and @. (https://github.com/ansible-collections/ibm_zos_core/pull/1516). + - zos_mount - Added support for data set names with special characters ($, /#, + /- and @). This is for both src and backup data set names. (https://github.com/ansible-collections/ibm_zos_core/pull/1631). + - zos_mvs_raw - Added support for GDG and GDS relative name notation to use + a data set. Added support for data set names with special characters like + $, /#, /- and @. (https://github.com/ansible-collections/ibm_zos_core/pull/1525). + - zos_mvs_raw - Added support for GDG and GDS relative positive name notation + to use a data set. (https://github.com/ansible-collections/ibm_zos_core/pull/1541). + - zos_mvs_raw - Redesign the wrappers of dd clases to use properly the arguments. + (https://github.com/ansible-collections/ibm_zos_core/pull/1470). + - zos_tso_command - Added support for GDG and GDS relative name notation to use + a data set name. Added support for data set names with special characters + like $, /#, /- and @. (https://github.com/ansible-collections/ibm_zos_core/pull/1563). + - zos_script - Improved the copy to remote mechanic to avoid using deepcopy + that could result in failure for some systems. (https://github.com/ansible-collections/ibm_zos_core/pull/1561). + - zos_unarchive - Added support for data set names with special characters like + $, /#, /- and @. (https://github.com/ansible-collections/ibm_zos_core/pull/1511). + - zos_unarchive - Improved the copy to remote mechanic to avoid using deepcopy + that could result in failure for some systems. (https://github.com/ansible-collections/ibm_zos_core/pull/1561). + release_summary: 'Release Date: ''2024-08-05'' + + This changelog describes all changes made to the modules and plugins included + + in this collection. The release date is the date the changelog is created. + + For additional details such as required dependencies and availability review + + the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__' + fragments: + - 1170-enhancememt-make-pipeline-217-compatible.yml + - 1323-Update_docstring-dd_statement.yml + - 1334-update-docstring-mcs_cmd.yml + - 1335-update-docstring-template.yml + - 1337-update-docstring-vtoc.yml + - 1338-update-docstring-zoau_version_checker.yml + - 1342-update-docstring-zos_backup_restore.yml + - 1343-update-docstring-zos_blockinline.yml + - 1344-update-docstring-zos_copy.yml + - 1361-update-docstring-zos_operator.yml + - 1362-update-docstring-file.yml + - 1363-update-docstring-system.yml + - 1374-enhancement-zos-find-gdg-gds-special-chars.yml + - 1380-enhancement-add-sybols-zos_apf.yml + - 1384-update-docstring-backup.yml + - 1385-update-docstring-better_arg_parser.yml + - 1386-gdg-symbols-support.yml + - 1387-update-docstring-copy.yml + - 1415-Update_docstring-zos_archive.yml + - 1470-redesign_mvs_raw.yml + - 1484-update-ac-tool-ansible-lint.yml + - 1488-zos_copy-refactor-force.yml + - 1495-default-values-data-set-class.yml + - 1496-fix-gds-resolve.yml + - 1497-gdg-support-zos-job-submit.yml + - 1504-zos_data_set-gdg-support.yml + - 1507-zos_operator-docs.yml + - 1511-zos_archive_unarchive-gdg-support.yml + - 1512-bugfix-zos_job_submit-error-type.yml + - 1515-gdg_batch_creation.yml + - 1516-lineinfile_blockinfile_gdgsgds_and_special_character_support.yml + - 1519-zos_fetch-gdg-support.yml + - 1525-mvs_raw_support_gdg_gds_special_character.yml + - 1527-zos_backup-gdg.yml + - 1531-zos_encode_gdg_support.yml + - 1535-raise-error-in-module-util-data_set-function-data_set_cataloged.yml + - 1541-output_mvs_raw_gds_positive_was_false_positive.yml + - 1543-mvs_raw_fix_verbose_and_first_character.yml + - 1550-lower_case_idcams_utility.yml + - 1552-readme-support-updates.yml + - 1553-Console_parallel.yml + - 1561-remove_deep_copy.yml + - 1563-zos_tso_command-gdg-support.yml + - 1564-zos_copy_gdg_support.yml + - 1565-remove-deprecated-pipes-library.yml + - 1570-compute-member-name-zos_copy.yml + - 1623-zos_copy-avoid-opercmd.yml + - 1631-enabler-zos_mount-special-character-support.yml + - v1.11.0-beta.1_summary.yml + release_date: '2024-08-05' 1.2.1: changes: bugfixes: diff --git a/changelogs/fragments/v1.10.0_summary.yml b/changelogs/fragments/v1.11.0-beta.1_summary.yml similarity index 92% rename from changelogs/fragments/v1.10.0_summary.yml rename to changelogs/fragments/v1.11.0-beta.1_summary.yml index 129c40746..5c1d60f94 100644 --- a/changelogs/fragments/v1.10.0_summary.yml +++ b/changelogs/fragments/v1.11.0-beta.1_summary.yml @@ -1,5 +1,5 @@ release_summary: | - Release Date: '2024-06-11' + Release Date: '2024-08-05' This changelog describes all changes made to the modules and plugins included in this collection. The release date is the date the changelog is created. For additional details such as required dependencies and availability review diff --git a/docs/source/modules/zos_apf.rst b/docs/source/modules/zos_apf.rst index 265d3fff5..a94fdc95e 100644 --- a/docs/source/modules/zos_apf.rst +++ b/docs/source/modules/zos_apf.rst @@ -37,7 +37,7 @@ library state - Ensure that the library is added \ :literal:`state=present`\ or removed \ :literal:`state=absent`\ . + Ensure that the library is added ``state=present`` or removed ``state=absent``. The APF list format has to be "DYNAMIC". @@ -58,24 +58,24 @@ force_dynamic volume - The identifier for the volume containing the library specified in the \ :literal:`library`\ parameter. The values must be one the following. + The identifier for the volume containing the library specified in the ``library`` parameter. The values must be one the following. 1. The volume serial number. - 2. Six asterisks \ :literal:`\*\*\*\*\*\*`\ , indicating that the system must use the volume serial number of the current system residence (SYSRES) volume. + 2. Six asterisks ``******``, indicating that the system must use the volume serial number of the current system residence (SYSRES) volume. - 3. \*MCAT\*, indicating that the system must use the volume serial number of the volume containing the master catalog. + 3. *MCAT*, indicating that the system must use the volume serial number of the volume containing the master catalog. - If \ :literal:`volume`\ is not specified, \ :literal:`library`\ has to be cataloged. + If ``volume`` is not specified, ``library`` has to be cataloged. | **required**: False | **type**: str sms - Indicates that the library specified in the \ :literal:`library`\ parameter is managed by the storage management subsystem (SMS), and therefore no volume is associated with the library. + Indicates that the library specified in the ``library`` parameter is managed by the storage management subsystem (SMS), and therefore no volume is associated with the library. - If \ :literal:`sms=True`\ , \ :literal:`volume`\ value will be ignored. + If ``sms=True``, ``volume`` value will be ignored. | **required**: False | **type**: bool @@ -83,13 +83,13 @@ sms operation - Change APF list format to "DYNAMIC" \ :literal:`operation=set\_dynamic`\ or "STATIC" \ :literal:`operation=set\_static`\ + Change APF list format to "DYNAMIC" ``operation=set_dynamic`` or "STATIC" ``operation=set_static`` - Display APF list current format \ :literal:`operation=check\_format`\ + Display APF list current format ``operation=check_format`` - Display APF list entries when \ :literal:`operation=list`\ \ :literal:`library`\ , \ :literal:`volume`\ and \ :literal:`sms`\ will be used as filters. + Display APF list entries when ``operation=list`` ``library``, ``volume`` and ``sms`` will be used as filters. - If \ :literal:`operation`\ is not set, add or remove operation will be ignored. + If ``operation`` is not set, add or remove operation will be ignored. | **required**: False | **type**: str @@ -99,23 +99,23 @@ operation tmp_hlq Override the default high level qualifier (HLQ) for temporary and backup datasets. - The default HLQ is the Ansible user used to execute the module and if that is not available, then the value \ :literal:`TMPHLQ`\ is used. + The default HLQ is the Ansible user used to execute the module and if that is not available, then the value ``TMPHLQ`` is used. | **required**: False | **type**: str persistent - Add/remove persistent entries to or from \ :emphasis:`data\_set\_name`\ + Add/remove persistent entries to or from *data_set_name* - \ :literal:`library`\ will not be persisted or removed if \ :literal:`persistent=None`\ + ``library`` will not be persisted or removed if ``persistent=None`` | **required**: False | **type**: dict data_set_name - The data set name used for persisting or removing a \ :literal:`library`\ from the APF list. + The data set name used for persisting or removing a ``library`` from the APF list. | **required**: True | **type**: str @@ -124,13 +124,13 @@ persistent marker The marker line template. - \ :literal:`{mark}`\ will be replaced with "BEGIN" and "END". + ``{mark}`` will be replaced with "BEGIN" and "END". - Using a custom marker without the \ :literal:`{mark}`\ variable may result in the block being repeatedly inserted on subsequent playbook runs. + Using a custom marker without the ``{mark}`` variable may result in the block being repeatedly inserted on subsequent playbook runs. - \ :literal:`{mark}`\ length may not exceed 72 characters. + ``{mark}`` length may not exceed 72 characters. - The timestamp (\<timestamp\>) used in the default marker follows the '+%Y%m%d-%H%M%S' date format + The timestamp (<timestamp>) used in the default marker follows the '+%Y%m%d-%H%M%S' date format | **required**: False | **type**: str @@ -138,9 +138,9 @@ persistent backup - Creates a backup file or backup data set for \ :emphasis:`data\_set\_name`\ , including the timestamp information to ensure that you retrieve the original APF list defined in \ :emphasis:`data\_set\_name`\ ". + Creates a backup file or backup data set for *data_set_name*, including the timestamp information to ensure that you retrieve the original APF list defined in *data_set_name*". - \ :emphasis:`backup\_name`\ can be used to specify a backup file name if \ :emphasis:`backup=true`\ . + *backup_name* can be used to specify a backup file name if *backup=true*. The backup file name will be return on either success or failure of module execution such that data can be retrieved. @@ -152,11 +152,11 @@ persistent backup_name Specify the USS file name or data set name for the destination backup. - If the source \ :emphasis:`data\_set\_name`\ is a USS file or path, the backup\_name name must be a file or path name, and the USS file or path must be an absolute path name. + If the source *data_set_name* is a USS file or path, the backup_name name must be a file or path name, and the USS file or path must be an absolute path name. - If the source is an MVS data set, the backup\_name must be an MVS data set name. + If the source is an MVS data set, the backup_name must be an MVS data set name. - If the backup\_name is not provided, the default backup\_name will be used. If the source is a USS file or path, the name of the backup file will be the source file or path name appended with a timestamp. For example, \ :literal:`/path/file\_name.2020-04-23-08-32-29-bak.tar`\ . + If the backup_name is not provided, the default backup_name will be used. If the source is a USS file or path, the name of the backup file will be the source file or path name appended with a timestamp. For example, ``/path/file_name.2020-04-23-08-32-29-bak.tar``. If the source is an MVS data set, it will be a data set with a random name generated by calling the ZOAU API. The MVS backup data set recovery can be done by renaming it. @@ -168,9 +168,9 @@ persistent batch A list of dictionaries for adding or removing libraries. - This is mutually exclusive with \ :literal:`library`\ , \ :literal:`volume`\ , \ :literal:`sms`\ + This is mutually exclusive with ``library``, ``volume``, ``sms`` - Can be used with \ :literal:`persistent`\ + Can be used with ``persistent`` | **required**: False | **type**: list @@ -185,24 +185,24 @@ batch volume - The identifier for the volume containing the library specified on the \ :literal:`library`\ parameter. The values must be one of the following. + The identifier for the volume containing the library specified on the ``library`` parameter. The values must be one of the following. 1. The volume serial number - 2. Six asterisks \ :literal:`\*\*\*\*\*\*`\ , indicating that the system must use the volume serial number of the current system residence (SYSRES) volume. + 2. Six asterisks ``******``, indicating that the system must use the volume serial number of the current system residence (SYSRES) volume. - 3. \*MCAT\*, indicating that the system must use the volume serial number of the volume containing the master catalog. + 3. *MCAT*, indicating that the system must use the volume serial number of the volume containing the master catalog. - If \ :literal:`volume`\ is not specified, \ :literal:`library`\ has to be cataloged. + If ``volume`` is not specified, ``library`` has to be cataloged. | **required**: False | **type**: str sms - Indicates that the library specified in the \ :literal:`library`\ parameter is managed by the storage management subsystem (SMS), and therefore no volume is associated with the library. + Indicates that the library specified in the ``library`` parameter is managed by the storage management subsystem (SMS), and therefore no volume is associated with the library. - If true \ :literal:`volume`\ will be ignored. + If true ``volume`` will be ignored. | **required**: False | **type**: bool @@ -283,9 +283,9 @@ Return Values stdout The stdout from ZOAU command apfadm. Output varies based on the type of operation. - state\> stdout of the executed operator command (opercmd), "SETPROG" from ZOAU command apfadm + state> stdout of the executed operator command (opercmd), "SETPROG" from ZOAU command apfadm - operation\> stdout of operation options list\> Returns a list of dictionaries of APF list entries [{'vol': 'PP0L6P', 'ds': 'DFH.V5R3M0.CICS.SDFHAUTH'}, {'vol': 'PP0L6P', 'ds': 'DFH.V5R3M0.CICS.SDFJAUTH'}, ...] set\_dynamic\> Set to DYNAMIC set\_static\> Set to STATIC check\_format\> DYNAMIC or STATIC + operation> stdout of operation options list> Returns a list of dictionaries of APF list entries [{'vol': 'PP0L6P', 'ds': 'DFH.V5R3M0.CICS.SDFHAUTH'}, {'vol': 'PP0L6P', 'ds': 'DFH.V5R3M0.CICS.SDFJAUTH'}, ...] set_dynamic> Set to DYNAMIC set_static> Set to STATIC check_format> DYNAMIC or STATIC | **returned**: always | **type**: str diff --git a/docs/source/modules/zos_archive.rst b/docs/source/modules/zos_archive.rst index b900fdcdb..bca1c5e82 100644 --- a/docs/source/modules/zos_archive.rst +++ b/docs/source/modules/zos_archive.rst @@ -20,7 +20,7 @@ Synopsis - Sources for archiving must be on the remote z/OS system. - Supported sources are USS (UNIX System Services) or z/OS data sets. - The archive remains on the remote z/OS system. -- For supported archive formats, see option \ :literal:`format`\ . +- For supported archive formats, see option ``format``. @@ -35,7 +35,9 @@ src USS file paths should be absolute paths. - MVS data sets supported types are: \ :literal:`SEQ`\ , \ :literal:`PDS`\ , \ :literal:`PDSE`\ . + GDS relative notation is supported. + + MVS data sets supported types are: ``SEQ``, ``PDS``, ``PDSE``. VSAMs are not supported. @@ -68,7 +70,7 @@ format terse_pack - Compression option for use with the terse format, \ :emphasis:`name=terse`\ . + Compression option for use with the terse format, *name=terse*. Pack will compress records in a data set so that the output results in lossless data compression. @@ -88,14 +90,14 @@ format If the data set provided exists, the data set must have the following attributes: LRECL=255, BLKSIZE=3120, and RECFM=VB - When providing the \ :emphasis:`xmit\_log\_data\_set`\ name, ensure there is adequate space. + When providing the *xmit_log_data_set* name, ensure there is adequate space. | **required**: False | **type**: str use_adrdssu - If set to true, the \ :literal:`zos\_archive`\ module will use Data Facility Storage Management Subsystem data set services (DFSMSdss) program ADRDSSU to compress data sets into a portable format before using \ :literal:`xmit`\ or \ :literal:`terse`\ . + If set to true, the ``zos_archive`` module will use Data Facility Storage Management Subsystem data set services (DFSMSdss) program ADRDSSU to compress data sets into a portable format before using ``xmit`` or ``terse``. | **required**: False | **type**: bool @@ -107,30 +109,30 @@ format dest The remote absolute path or data set where the archive should be created. - \ :emphasis:`dest`\ can be a USS file or MVS data set name. + *dest* can be a USS file or MVS data set name. - If \ :emphasis:`dest`\ has missing parent directories, they will be created. + If *dest* has missing parent directories, they will be created. - If \ :emphasis:`dest`\ is a nonexistent USS file, it will be created. + If *dest* is a nonexistent USS file, it will be created. - If \ :emphasis:`dest`\ is an existing file or data set and \ :emphasis:`force=true`\ , the existing \ :emphasis:`dest`\ will be deleted and recreated with attributes defined in the \ :emphasis:`dest\_data\_set`\ option or computed by the module. + If *dest* is an existing file or data set and *force=true*, the existing *dest* will be deleted and recreated with attributes defined in the *dest_data_set* option or computed by the module. - If \ :emphasis:`dest`\ is an existing file or data set and \ :emphasis:`force=false`\ or not specified, the module exits with a note to the user. + If *dest* is an existing file or data set and *force=false* or not specified, the module exits with a note to the user. - Destination data set attributes can be set using \ :emphasis:`dest\_data\_set`\ . + Destination data set attributes can be set using *dest_data_set*. - Destination data set space will be calculated based on space of source data sets provided and/or found by expanding the pattern name. Calculating space can impact module performance. Specifying space attributes in the \ :emphasis:`dest\_data\_set`\ option will improve performance. + Destination data set space will be calculated based on space of source data sets provided and/or found by expanding the pattern name. Calculating space can impact module performance. Specifying space attributes in the *dest_data_set* option will improve performance. | **required**: True | **type**: str exclude - Remote absolute path, glob, or list of paths, globs or data set name patterns for the file, files or data sets to exclude from src list and glob expansion. + Remote absolute path, glob, or list of paths, globs, data set name patterns or generation data sets (GDSs) in relative notation for the file, files or data sets to exclude from src list and glob expansion. - Patterns (wildcards) can contain one of the following, \`?\`, \`\*\`. + Patterns (wildcards) can contain one of the following, `?`, `*`. - \* matches everything. + * matches everything. ? matches any single character. @@ -144,7 +146,7 @@ group When left unspecified, it uses the current group of the current use unless you are root, in which case it can preserve the previous ownership. - This option is only applicable if \ :literal:`dest`\ is USS, otherwise ignored. + This option is only applicable if ``dest`` is USS, otherwise ignored. | **required**: False | **type**: str @@ -153,13 +155,13 @@ group mode The permission of the destination archive file. - If \ :literal:`dest`\ is USS, this will act as Unix file mode, otherwise ignored. + If ``dest`` is USS, this will act as Unix file mode, otherwise ignored. - It should be noted that modes are octal numbers. The user must either add a leading zero so that Ansible's YAML parser knows it is an octal number (like \ :literal:`0644`\ or \ :literal:`01777`\ )or quote it (like \ :literal:`'644'`\ or \ :literal:`'1777'`\ ) so Ansible receives a string and can do its own conversion from string into number. Giving Ansible a number without following one of these rules will end up with a decimal number which will have unexpected results. + It should be noted that modes are octal numbers. The user must either add a leading zero so that Ansible's YAML parser knows it is an octal number (like ``0644`` or ``01777``)or quote it (like ``'644'`` or ``'1777'``) so Ansible receives a string and can do its own conversion from string into number. Giving Ansible a number without following one of these rules will end up with a decimal number which will have unexpected results. The mode may also be specified as a symbolic mode (for example, 'u+rwx' or 'u=rw,g=r,o=r') or a special string 'preserve'. - \ :emphasis:`mode=preserve`\ means that the file will be given the same permissions as the src file. + *mode=preserve* means that the file will be given the same permissions as the src file. | **required**: False | **type**: str @@ -170,14 +172,14 @@ owner When left unspecified, it uses the current user unless you are root, in which case it can preserve the previous ownership. - This option is only applicable if \ :literal:`dest`\ is USS, otherwise ignored. + This option is only applicable if ``dest`` is USS, otherwise ignored. | **required**: False | **type**: str remove - Remove any added source files , trees or data sets after module \ `zos\_archive <./zos_archive.html>`__\ adds them to the archive. Source files, trees and data sets are identified with option \ :emphasis:`src`\ . + Remove any added source files , trees or data sets after module `zos_archive <./zos_archive.html>`_ adds them to the archive. Source files, trees and data sets are identified with option *src*. | **required**: False | **type**: bool @@ -185,7 +187,7 @@ remove dest_data_set - Data set attributes to customize a \ :literal:`dest`\ data set to be archived into. + Data set attributes to customize a ``dest`` data set to be archived into. | **required**: False | **type**: dict @@ -208,18 +210,18 @@ dest_data_set space_primary - If the destination \ :emphasis:`dest`\ data set does not exist , this sets the primary space allocated for the data set. + If the destination *dest* data set does not exist , this sets the primary space allocated for the data set. - The unit of space used is set using \ :emphasis:`space\_type`\ . + The unit of space used is set using *space_type*. | **required**: False | **type**: int space_secondary - If the destination \ :emphasis:`dest`\ data set does not exist , this sets the secondary space allocated for the data set. + If the destination *dest* data set does not exist , this sets the secondary space allocated for the data set. - The unit of space used is set using \ :emphasis:`space\_type`\ . + The unit of space used is set using *space_type*. | **required**: False | **type**: int @@ -228,7 +230,7 @@ dest_data_set space_type If the destination data set does not exist, this sets the unit of measurement to use when defining primary and secondary space. - Valid units of size are \ :literal:`k`\ , \ :literal:`m`\ , \ :literal:`g`\ , \ :literal:`cyl`\ , and \ :literal:`trk`\ . + Valid units of size are ``k``, ``m``, ``g``, ``cyl``, and ``trk``. | **required**: False | **type**: str @@ -236,7 +238,7 @@ dest_data_set record_format - If the destination data set does not exist, this sets the format of the data set. (e.g \ :literal:`FB`\ ) + If the destination data set does not exist, this sets the format of the data set. (e.g ``FB``) Choices are case-sensitive. @@ -313,18 +315,18 @@ dest_data_set tmp_hlq Override the default high level qualifier (HLQ) for temporary data sets. - The default HLQ is the Ansible user used to execute the module and if that is not available, then the environment variable value \ :literal:`TMPHLQ`\ is used. + The default HLQ is the Ansible user used to execute the module and if that is not available, then the environment variable value ``TMPHLQ`` is used. | **required**: False | **type**: str force - If set to \ :literal:`true`\ and the remote file or data set \ :literal:`dest`\ will be deleted. Otherwise it will be created with the \ :literal:`dest\_data\_set`\ attributes or default values if \ :literal:`dest\_data\_set`\ is not specified. + If set to ``true`` and the remote file or data set ``dest`` will be deleted. Otherwise it will be created with the ``dest_data_set`` attributes or default values if ``dest_data_set`` is not specified. - If set to \ :literal:`false`\ , the file or data set will only be copied if the destination does not exist. + If set to ``false``, the file or data set will only be copied if the destination does not exist. - If set to \ :literal:`false`\ and destination exists, the module exits with a note to the user. + If set to ``false`` and destination exists, the module exits with a note to the user. | **required**: False | **type**: bool @@ -348,7 +350,7 @@ Examples name: tar # Archive multiple files - - name: Compress list of files into a zip + - name: Archive list of files into a zip zos_archive: src: - /tmp/archive/foo.txt @@ -358,7 +360,7 @@ Examples name: zip # Archive one data set into terse - - name: Compress data set into a terse + - name: Archive data set into a terse zos_archive: src: "USER.ARCHIVE.TEST" dest: "USER.ARCHIVE.RESULT.TRS" @@ -366,7 +368,7 @@ Examples name: terse # Use terse with different options - - name: Compress data set into a terse, specify pack algorithm and use adrdssu + - name: Archive data set into a terse, specify pack algorithm and use adrdssu zos_archive: src: "USER.ARCHIVE.TEST" dest: "USER.ARCHIVE.RESULT.TRS" @@ -377,7 +379,7 @@ Examples use_adrdssu: true # Use a pattern to store - - name: Compress data set pattern using xmit + - name: Archive data set pattern using xmit zos_archive: src: "USER.ARCHIVE.*" exclude_sources: "USER.ARCHIVE.EXCLUDE.*" @@ -385,6 +387,27 @@ Examples format: name: xmit + - name: Archive multiple GDSs into a terse + zos_archive: + src: + - "USER.GDG(0)" + - "USER.GDG(-1)" + - "USER.GDG(-2)" + dest: "USER.ARCHIVE.RESULT.TRS" + format: + name: terse + format_options: + use_adrdssu: true + + - name: Archive multiple data sets into a new GDS + zos_archive: + src: "USER.ARCHIVE.*" + dest: "USER.GDG(+1)" + format: + name: terse + format_options: + use_adrdssu: true + @@ -392,11 +415,11 @@ Notes ----- .. note:: - This module does not perform a send or transmit operation to a remote node. If you want to transport the archive you can use zos\_fetch to retrieve to the controller and then zos\_copy or zos\_unarchive for copying to a remote or send to the remote and then unpack the archive respectively. + This module does not perform a send or transmit operation to a remote node. If you want to transport the archive you can use zos_fetch to retrieve to the controller and then zos_copy or zos_unarchive for copying to a remote or send to the remote and then unpack the archive respectively. - When packing and using \ :literal:`use\_adrdssu`\ flag the module will take up to two times the space indicated in \ :literal:`dest\_data\_set`\ . + When packing and using ``use_adrdssu`` flag the module will take up to two times the space indicated in ``dest_data_set``. - tar, zip, bz2 and pax are archived using python \ :literal:`tarfile`\ library which uses the latest version available for each format, for compatibility when opening from system make sure to use the latest available version for the intended format. + tar, zip, bz2 and pax are archived using python ``tarfile`` library which uses the latest version available for each format, for compatibility when opening from system make sure to use the latest available version for the intended format. @@ -416,27 +439,27 @@ Return Values state - The state of the input \ :literal:`src`\ . + The state of the input ``src``. - \ :literal:`absent`\ when the source files or data sets were removed. + ``absent`` when the source files or data sets were removed. - \ :literal:`present`\ when the source files or data sets were not removed. + ``present`` when the source files or data sets were not removed. - \ :literal:`incomplete`\ when \ :literal:`remove`\ was true and the source files or data sets were not removed. + ``incomplete`` when ``remove`` was true and the source files or data sets were not removed. | **returned**: always | **type**: str dest_state - The state of the \ :emphasis:`dest`\ file or data set. + The state of the *dest* file or data set. - \ :literal:`absent`\ when the file does not exist. + ``absent`` when the file does not exist. - \ :literal:`archive`\ when the file is an archive. + ``archive`` when the file is an archive. - \ :literal:`compress`\ when the file is compressed, but not an archive. + ``compress`` when the file is compressed, but not an archive. - \ :literal:`incomplete`\ when the file is an archive, but some files under \ :emphasis:`src`\ were not found. + ``incomplete`` when the file is an archive, but some files under *src* were not found. | **returned**: success | **type**: str @@ -454,7 +477,7 @@ archived | **type**: list arcroot - If \ :literal:`src`\ is a list of USS files, this returns the top most parent folder of the list of files, otherwise is empty. + If ``src`` is a list of USS files, this returns the top most parent folder of the list of files, otherwise is empty. | **returned**: always | **type**: str diff --git a/docs/source/modules/zos_blockinfile.rst b/docs/source/modules/zos_blockinfile.rst index 8cd6f756c..fdd98d0f8 100644 --- a/docs/source/modules/zos_blockinfile.rst +++ b/docs/source/modules/zos_blockinfile.rst @@ -33,14 +33,16 @@ src The USS file must be an absolute pathname. + Generation data set (GDS) relative name of generation already created. ``e.g. SOME.CREATION(-1).`` + | **required**: True | **type**: str state - Whether the block should be inserted or replaced using \ :emphasis:`state=present`\ . + Whether the block should be inserted or replaced using *state=present*. - Whether the block should be removed using \ :emphasis:`state=absent`\ . + Whether the block should be removed using *state=absent*. | **required**: False | **type**: str @@ -51,9 +53,9 @@ state marker The marker line template. - \ :literal:`{mark}`\ will be replaced with the values \ :literal:`in marker\_begin`\ (default="BEGIN") and \ :literal:`marker\_end`\ (default="END"). + ``{mark}`` will be replaced with the values ``in marker_begin`` (default="BEGIN") and ``marker_end`` (default="END"). - Using a custom marker without the \ :literal:`{mark}`\ variable may result in the block being repeatedly inserted on subsequent playbook runs. + Using a custom marker without the ``{mark}`` variable may result in the block being repeatedly inserted on subsequent playbook runs. | **required**: False | **type**: str @@ -63,7 +65,7 @@ marker block The text to insert inside the marker lines. - Multi-line can be separated by '\\n'. + Multi-line can be separated by '\n'. Any double-quotation marks will be removed. @@ -74,11 +76,11 @@ block insertafter If specified, the block will be inserted after the last match of the specified regular expression. - A special value \ :literal:`EOF`\ for inserting a block at the end of the file is available. + A special value ``EOF`` for inserting a block at the end of the file is available. - If a specified regular expression has no matches, \ :literal:`EOF`\ will be used instead. + If a specified regular expression has no matches, ``EOF`` will be used instead. - Choices are EOF or '\*regex\*'. + Choices are EOF or '*regex*'. Default is EOF. @@ -89,18 +91,18 @@ insertafter insertbefore If specified, the block will be inserted before the last match of specified regular expression. - A special value \ :literal:`BOF`\ for inserting the block at the beginning of the file is available. + A special value ``BOF`` for inserting the block at the beginning of the file is available. If a specified regular expression has no matches, the block will be inserted at the end of the file. - Choices are BOF or '\*regex\*'. + Choices are BOF or '*regex*'. | **required**: False | **type**: str marker_begin - This will be inserted at \ :literal:`{mark}`\ in the opening ansible block marker. + This will be inserted at ``{mark}`` in the opening ansible block marker. | **required**: False | **type**: str @@ -108,7 +110,7 @@ marker_begin marker_end - This will be inserted at \ :literal:`{mark}`\ in the closing ansible block marker. + This will be inserted at ``{mark}`` in the closing ansible block marker. | **required**: False | **type**: str @@ -116,12 +118,14 @@ marker_end backup - Specifies whether a backup of destination should be created before editing the source \ :emphasis:`src`\ . + Specifies whether a backup of destination should be created before editing the source *src*. - When set to \ :literal:`true`\ , the module creates a backup file or data set. + When set to ``true``, the module creates a backup file or data set. The backup file name will be returned on either success or failure of module execution such that data can be retrieved. + Use generation data set (GDS) relative positive name. ``e.g. SOME.CREATION(+1``) + | **required**: False | **type**: bool | **default**: False @@ -130,15 +134,15 @@ backup backup_name Specify the USS file name or data set name for the destination backup. - If the source \ :emphasis:`src`\ is a USS file or path, the backup\_name name must be a file or path name, and the USS file or path must be an absolute path name. + If the source *src* is a USS file or path, the backup_name name must be a file or path name, and the USS file or path must be an absolute path name. - If the source is an MVS data set, the backup\_name name must be an MVS data set name, and the dataset must not be preallocated. + If the source is an MVS data set, the backup_name name must be an MVS data set name, and the dataset must not be preallocated. - If the backup\_name is not provided, the default backup\_name name will be used. If the source is a USS file or path, the name of the backup file will be the source file or path name appended with a timestamp, e.g. \ :literal:`/path/file\_name.2020-04-23-08-32-29-bak.tar`\ . + If the backup_name is not provided, the default backup_name name will be used. If the source is a USS file or path, the name of the backup file will be the source file or path name appended with a timestamp, e.g. ``/path/file_name.2020-04-23-08-32-29-bak.tar``. If the source is an MVS data set, it will be a data set with a random name generated by calling the ZOAU API. The MVS backup data set recovery can be done by renaming it. - If \ :emphasis:`src`\ is a data set member and backup\_name is not provided, the data set member will be backed up to the same partitioned data set with a randomly generated member name. + If *src* is a data set member and backup_name is not provided, the data set member will be backed up to the same partitioned data set with a randomly generated member name. | **required**: False | **type**: str @@ -147,14 +151,14 @@ backup_name tmp_hlq Override the default high level qualifier (HLQ) for temporary and backup datasets. - The default HLQ is the Ansible user used to execute the module and if that is not available, then the value \ :literal:`TMPHLQ`\ is used. + The default HLQ is the Ansible user used to execute the module and if that is not available, then the value ``TMPHLQ`` is used. | **required**: False | **type**: str encoding - The character set of the source \ :emphasis:`src`\ . \ `zos\_blockinfile <./zos_blockinfile.html>`__\ requires it to be provided with correct encoding to read the content of a USS file or data set. If this parameter is not provided, this module assumes that USS file or data set is encoded in IBM-1047. + The character set of the source *src*. `zos_blockinfile <./zos_blockinfile.html>`_ requires it to be provided with correct encoding to read the content of a USS file or data set. If this parameter is not provided, this module assumes that USS file or data set is encoded in IBM-1047. Supported character sets rely on the charset conversion utility (iconv) version; the most common character sets are supported. @@ -168,7 +172,7 @@ force This is helpful when a data set is being used in a long running process such as a started task and you are wanting to update or read. - The \ :literal:`force`\ option enables sharing of data sets through the disposition \ :emphasis:`DISP=SHR`\ . + The ``force`` option enables sharing of data sets through the disposition *DISP=SHR*. | **required**: False | **type**: bool @@ -281,6 +285,20 @@ Examples marker_end: "End Ansible Block Insertion 2" block: "{{ CONTENT }}" + - name: Add a block to a gds + zos_blockinfile: + src: TEST.SOME.CREATION(0) + insertafter: EOF + block: "{{ CONTENT }}" + + - name: Add a block to dataset and backup in a new generation of gds + zos_blockinfile: + src: SOME.CREATION.TEST + insertbefore: BOF + backup: true + backup_name: CREATION.GDS(+1) + block: "{{ CONTENT }}" + @@ -290,13 +308,13 @@ Notes .. note:: It is the playbook author or user's responsibility to avoid files that should not be encoded, such as binary files. A user is described as the remote user, configured either for the playbook or playbook tasks, who can also obtain escalated privileges to execute as root or another user. - All data sets are always assumed to be cataloged. If an uncataloged data set needs to be encoded, it should be cataloged first. The \ `zos\_data\_set <./zos_data_set.html>`__\ module can be used to catalog uncataloged data sets. + All data sets are always assumed to be cataloged. If an uncataloged data set needs to be encoded, it should be cataloged first. The `zos_data_set <./zos_data_set.html>`_ module can be used to catalog uncataloged data sets. - For supported character sets used to encode data, refer to the \ `documentation <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/resources/character_set.html>`__\ . + For supported character sets used to encode data, refer to the `documentation <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/resources/character_set.html>`_. - When using \`\`with\_\*\`\` loops be aware that if you do not set a unique mark the block will be overwritten on each iteration. + When using ``with_*`` loops be aware that if you do not set a unique mark the block will be overwritten on each iteration. - When more then one block should be handled in a file you must change the \ :emphasis:`marker`\ per task. + When more then one block should be handled in a file you must change the *marker* per task. @@ -315,7 +333,7 @@ Return Values changed - Indicates if the source was modified. Value of 1 represents \`true\`, otherwise \`false\`. + Indicates if the source was modified. Value of 1 represents `true`, otherwise `false`. | **returned**: success | **type**: bool diff --git a/docs/source/modules/zos_copy.rst b/docs/source/modules/zos_copy.rst index 69639e39a..8e8cb42bf 100644 --- a/docs/source/modules/zos_copy.rst +++ b/docs/source/modules/zos_copy.rst @@ -16,7 +16,7 @@ zos_copy -- Copy data to z/OS Synopsis -------- -- The \ `zos\_copy <./zos_copy.html>`__\ module copies a file or data set from a local or a remote machine to a location on the remote machine. +- The `zos_copy <./zos_copy.html>`_ module copies a file or data set from a local or a remote machine to a location on the remote machine. @@ -27,17 +27,17 @@ Parameters asa_text - If set to \ :literal:`true`\ , indicates that either \ :literal:`src`\ or \ :literal:`dest`\ or both contain ASA control characters. + If set to ``true``, indicates that either ``src`` or ``dest`` or both contain ASA control characters. - When \ :literal:`src`\ is a USS file and \ :literal:`dest`\ is a data set, the copy will preserve ASA control characters in the destination. + When ``src`` is a USS file and ``dest`` is a data set, the copy will preserve ASA control characters in the destination. - When \ :literal:`src`\ is a data set containing ASA control characters and \ :literal:`dest`\ is a USS file, the copy will put all control characters as plain text in the destination. + When ``src`` is a data set containing ASA control characters and ``dest`` is a USS file, the copy will put all control characters as plain text in the destination. - If \ :literal:`dest`\ is a non-existent data set, it will be created with record format Fixed Block with ANSI format (FBA). + If ``dest`` is a non-existent data set, it will be created with record format Fixed Block with ANSI format (FBA). - If neither \ :literal:`src`\ or \ :literal:`dest`\ have record format Fixed Block with ANSI format (FBA) or Variable Block with ANSI format (VBA), the module will fail. + If neither ``src`` or ``dest`` have record format Fixed Block with ANSI format (FBA) or Variable Block with ANSI format (VBA), the module will fail. - This option is only valid for text files. If \ :literal:`is\_binary`\ is \ :literal:`true`\ or \ :literal:`executable`\ is \ :literal:`true`\ as well, the module will fail. + This option is only valid for text files. If ``is_binary`` is ``true`` or ``executable`` is ``true`` as well, the module will fail. | **required**: False | **type**: bool @@ -47,7 +47,7 @@ asa_text backup Specifies whether a backup of the destination should be created before copying data. - When set to \ :literal:`true`\ , the module creates a backup file or data set. + When set to ``true``, the module creates a backup file or data set. The backup file name will be returned on either success or failure of module execution such that data can be retrieved. @@ -59,26 +59,26 @@ backup backup_name Specify a unique USS file name or data set name for the destination backup. - If the destination \ :literal:`dest`\ is a USS file or path, the \ :literal:`backup\_name`\ must be an absolute path name. + If the destination ``dest`` is a USS file or path, the ``backup_name`` must be an absolute path name. - If the destination is an MVS data set name, the \ :literal:`backup\_name`\ provided must meet data set naming conventions of one or more qualifiers, each from one to eight characters long, that are delimited by periods. + If the destination is an MVS data set name, the ``backup_name`` provided must meet data set naming conventions of one or more qualifiers, each from one to eight characters long, that are delimited by periods. - If the \ :literal:`backup\_name`\ is not provided, the default \ :literal:`backup\_name`\ will be used. If the \ :literal:`dest`\ is a USS file or USS path, the name of the backup file will be the destination file or path name appended with a timestamp, e.g. \ :literal:`/path/file\_name.2020-04-23-08-32-29-bak.tar`\ . If the \ :literal:`dest`\ is an MVS data set, it will be a data set with a randomly generated name. + If the ``backup_name`` is not provided, the default ``backup_name`` will be used. If the ``dest`` is a USS file or USS path, the name of the backup file will be the destination file or path name appended with a timestamp, e.g. ``/path/file_name.2020-04-23-08-32-29-bak.tar``. If the ``dest`` is an MVS data set, it will be a data set with a randomly generated name. - If \ :literal:`dest`\ is a data set member and \ :literal:`backup\_name`\ is not provided, the data set member will be backed up to the same partitioned data set with a randomly generated member name. + If ``dest`` is a data set member and ``backup_name`` is not provided, the data set member will be backed up to the same partitioned data set with a randomly generated member name. - If \ :emphasis:`backup\_name`\ is a generation data set (GDS), it must be a relative positive name (for example, \ :literal:`HLQ.USER.GDG(+1)`\ ). + If *backup_name* is a generation data set (GDS), it must be a relative positive name (for example, V(HLQ.USER.GDG(+1\))). | **required**: False | **type**: str content - When used instead of \ :literal:`src`\ , sets the contents of a file or data set directly to the specified value. + When used instead of ``src``, sets the contents of a file or data set directly to the specified value. - Works only when \ :literal:`dest`\ is a USS file, sequential data set, or a partitioned data set member. + Works only when ``dest`` is a USS file, sequential data set, or a partitioned data set member. - If \ :literal:`dest`\ is a directory, then content will be copied to \ :literal:`/path/to/dest/inline\_copy`\ . + If ``dest`` is a directory, then content will be copied to ``/path/to/dest/inline_copy``. | **required**: False | **type**: str @@ -87,31 +87,33 @@ content dest The remote absolute path or data set where the content should be copied to. - \ :literal:`dest`\ can be a USS file, directory or MVS data set name. + ``dest`` can be a USS file, directory or MVS data set name. - If \ :literal:`dest`\ has missing parent directories, they will be created. + If ``dest`` has missing parent directories, they will be created. - If \ :literal:`dest`\ is a nonexistent USS file, it will be created. + If ``dest`` is a nonexistent USS file, it will be created. - If \ :literal:`dest`\ is a new USS file or replacement, the file will be appropriately tagged with either the system's default locale or the encoding option defined. If the USS file is a replacement, the user must have write authority to the file either through ownership, group or other permissions, else the module will fail. + If ``dest`` is a new USS file or replacement, the file will be appropriately tagged with either the system's default locale or the encoding option defined. If the USS file is a replacement, the user must have write authority to the file either through ownership, group or other permissions, else the module will fail. - If \ :literal:`dest`\ is a nonexistent data set, it will be created following the process outlined here and in the \ :literal:`volume`\ option. + If ``dest`` is a nonexistent data set, it will be created following the process outlined here and in the ``volume`` option. - If \ :literal:`dest`\ is a nonexistent data set, the attributes assigned will depend on the type of \ :literal:`src`\ . If \ :literal:`src`\ is a USS file, \ :literal:`dest`\ will have a Fixed Block (FB) record format and the remaining attributes will be computed. If \ :emphasis:`is\_binary=true`\ , \ :literal:`dest`\ will have a Fixed Block (FB) record format with a record length of 80, block size of 32760, and the remaining attributes will be computed. If \ :emphasis:`executable=true`\ ,\ :literal:`dest`\ will have an Undefined (U) record format with a record length of 0, block size of 32760, and the remaining attributes will be computed. + If ``dest`` is a nonexistent data set, the attributes assigned will depend on the type of ``src``. If ``src`` is a USS file, ``dest`` will have a Fixed Block (FB) record format and the remaining attributes will be computed. If *is_binary=true*, ``dest`` will have a Fixed Block (FB) record format with a record length of 80, block size of 32760, and the remaining attributes will be computed. If *executable=true*,``dest`` will have an Undefined (U) record format with a record length of 0, block size of 32760, and the remaining attributes will be computed. - When \ :literal:`dest`\ is a data set, precedence rules apply. If \ :literal:`dest\_data\_set`\ is set, this will take precedence over an existing data set. If \ :literal:`dest`\ is an empty data set, the empty data set will be written with the expectation its attributes satisfy the copy. Lastly, if no precendent rule has been exercised, \ :literal:`dest`\ will be created with the same attributes of \ :literal:`src`\ . + If ``src`` is a file and ``dest`` a partitioned data set, ``dest`` does not need to include a member in its value, the module can automatically compute the resulting member name from ``src``. - When the \ :literal:`dest`\ is an existing VSAM (KSDS) or VSAM (ESDS), then source can be an ESDS, a KSDS or an RRDS. The VSAM (KSDS) or VSAM (ESDS) \ :literal:`dest`\ will be deleted and recreated following the process outlined in the \ :literal:`volume`\ option. + When ``dest`` is a data set, precedence rules apply. If ``dest_data_set`` is set, this will take precedence over an existing data set. If ``dest`` is an empty data set, the empty data set will be written with the expectation its attributes satisfy the copy. Lastly, if no precendent rule has been exercised, ``dest`` will be created with the same attributes of ``src``. - When the \ :literal:`dest`\ is an existing VSAM (RRDS), then the source must be an RRDS. The VSAM (RRDS) will be deleted and recreated following the process outlined in the \ :literal:`volume`\ option. + When the ``dest`` is an existing VSAM (KSDS) or VSAM (ESDS), then source can be an ESDS, a KSDS or an RRDS. The VSAM (KSDS) or VSAM (ESDS) ``dest`` will be deleted and recreated following the process outlined in the ``volume`` option. - When \ :literal:`dest`\ is and existing VSAM (LDS), then source must be an LDS. The VSAM (LDS) will be deleted and recreated following the process outlined in the \ :literal:`volume`\ option. + When the ``dest`` is an existing VSAM (RRDS), then the source must be an RRDS. The VSAM (RRDS) will be deleted and recreated following the process outlined in the ``volume`` option. - \ :literal:`dest`\ can be a previously allocated generation data set (GDS) or a new GDS. + When ``dest`` is and existing VSAM (LDS), then source must be an LDS. The VSAM (LDS) will be deleted and recreated following the process outlined in the ``volume`` option. - When \ :literal:`dest`\ is a generation data group (GDG), \ :literal:`src`\ must be a GDG too. The copy will allocate successive new generations in \ :literal:`dest`\ , the module will verify it has enough available generations before starting the copy operations. + ``dest`` can be a previously allocated generation data set (GDS) or a new GDS. - When \ :literal:`dest`\ is a data set, you can override storage management rules by specifying \ :literal:`volume`\ if the storage class being used has GUARANTEED\_SPACE=YES specified, otherwise, the allocation will fail. See \ :literal:`volume`\ for more volume related processes. + When ``dest`` is a generation data group (GDG), ``src`` must be a GDG too. The copy will allocate successive new generations in ``dest``, the module will verify it has enough available generations before starting the copy operations. + + When ``dest`` is a data set, you can override storage management rules by specifying ``volume`` if the storage class being used has GUARANTEED_SPACE=YES specified, otherwise, the allocation will fail. See ``volume`` for more volume related processes. | **required**: True | **type**: str @@ -120,9 +122,9 @@ dest encoding Specifies which encodings the destination file or data set should be converted from and to. - If \ :literal:`encoding`\ is not provided, the module determines which local and remote charsets to convert the data from and to. Note that this is only done for text data and not binary data. + If ``encoding`` is not provided, the module determines which local and remote charsets to convert the data from and to. Note that this is only done for text data and not binary data. - Only valid if \ :literal:`is\_binary`\ is false. + Only valid if ``is_binary`` is false. | **required**: False | **type**: dict @@ -146,22 +148,22 @@ encoding tmp_hlq Override the default high level qualifier (HLQ) for temporary and backup datasets. - The default HLQ is the Ansible user used to execute the module and if that is not available, then the value \ :literal:`TMPHLQ`\ is used. + The default HLQ is the Ansible user used to execute the module and if that is not available, then the value ``TMPHLQ`` is used. | **required**: False | **type**: str force - If set to \ :literal:`true`\ and the remote file or data set \ :literal:`dest`\ is empty, the \ :literal:`dest`\ will be reused. + If set to ``true`` and the remote file or data set ``dest`` is empty, the ``dest`` will be reused. - If set to \ :literal:`true`\ and the remote file or data set \ :literal:`dest`\ is NOT empty, the \ :literal:`dest`\ will be deleted and recreated with the \ :literal:`src`\ data set attributes, otherwise it will be recreated with the \ :literal:`dest`\ data set attributes. + If set to ``true`` and the remote file or data set ``dest`` is NOT empty, the ``dest`` will be deleted and recreated with the ``src`` data set attributes, otherwise it will be recreated with the ``dest`` data set attributes. - To backup data before any deletion, see parameters \ :literal:`backup`\ and \ :literal:`backup\_name`\ . + To backup data before any deletion, see parameters ``backup`` and ``backup_name``. - If set to \ :literal:`false`\ , the file or data set will only be copied if the destination does not exist. + If set to ``false``, the file or data set will only be copied if the destination does not exist. - If set to \ :literal:`false`\ and destination exists, the module exits with a note to the user. + If set to ``false`` and destination exists, the module exits with a note to the user. | **required**: False | **type**: bool @@ -169,11 +171,11 @@ force force_lock - By default, when \ :literal:`dest`\ is a MVS data set and is being used by another process with DISP=SHR or DISP=OLD the module will fail. Use \ :literal:`force\_lock`\ to bypass this check and continue with copy. + By default, when ``dest`` is a MVS data set and is being used by another process with DISP=SHR or DISP=OLD the module will fail. Use ``force_lock`` to bypass this check and continue with copy. - If set to \ :literal:`true`\ and destination is a MVS data set opened by another process then zos\_copy will try to copy using DISP=SHR. + If set to ``true`` and destination is a MVS data set opened by another process then zos_copy will try to copy using DISP=SHR. - Using \ :literal:`force\_lock`\ uses operations that are subject to race conditions and can lead to data loss, use with caution. + Using ``force_lock`` uses operations that are subject to race conditions and can lead to data loss, use with caution. If a data set member has aliases, and is not a program object, copying that member to a dataset that is in use will result in the aliases not being preserved in the target dataset. When this scenario occurs the module will fail. @@ -183,21 +185,21 @@ force_lock ignore_sftp_stderr - During data transfer through SFTP, the module fails if the SFTP command directs any content to stderr. The user is able to override this behavior by setting this parameter to \ :literal:`true`\ . By doing so, the module would essentially ignore the stderr stream produced by SFTP and continue execution. + During data transfer through SFTP, the SFTP command directs content to stderr. By default, the module essentially ignores the stderr stream produced by SFTP and continues execution. The user is able to override this behavior by setting this parameter to ``false``. By doing so, any content written to stderr is considered an error by Ansible and will have module fail. - When Ansible verbosity is set to greater than 3, either through the command line interface (CLI) using \ :strong:`-vvvv`\ or through environment variables such as \ :strong:`verbosity = 4`\ , then this parameter will automatically be set to \ :literal:`true`\ . + When Ansible verbosity is set to greater than 3, either through the command line interface (CLI) using **-vvvv** or through environment variables such as **verbosity = 4**, then this parameter will automatically be set to ``true``. | **required**: False | **type**: bool - | **default**: False + | **default**: True is_binary - If set to \ :literal:`true`\ , indicates that the file or data set to be copied is a binary file or data set. + If set to ``true``, indicates that the file or data set to be copied is a binary file or data set. - When \ :emphasis:`is\_binary=true`\ , no encoding conversion is applied to the content, all content transferred retains the original state. + When *is_binary=true*, no encoding conversion is applied to the content, all content transferred retains the original state. - Use \ :emphasis:`is\_binary=true`\ when copying a Database Request Module (DBRM) to retain the original state of the serialized SQL statements of a program. + Use *is_binary=true* when copying a Database Request Module (DBRM) to retain the original state of the serialized SQL statements of a program. | **required**: False | **type**: bool @@ -205,15 +207,15 @@ is_binary executable - If set to \ :literal:`true`\ , indicates that the file or library to be copied is an executable. + If set to ``true``, indicates that the file or library to be copied is an executable. - If the \ :literal:`src`\ executable has an alias, the alias information is also copied. If the \ :literal:`dest`\ is Unix, the alias is not visible in Unix, even though the information is there and will be visible if copied to a library. + If the ``src`` executable has an alias, the alias information is also copied. If the ``dest`` is Unix, the alias is not visible in Unix, even though the information is there and will be visible if copied to a library. - If \ :emphasis:`executable=true`\ , and \ :literal:`dest`\ is a data set, it must be a PDS or PDSE (library). + If *executable=true*, and ``dest`` is a data set, it must be a PDS or PDSE (library). - If \ :literal:`dest`\ is a nonexistent data set, the library attributes assigned will be Undefined (U) record format with a record length of 0, block size of 32760 and the remaining attributes will be computed. + If ``dest`` is a nonexistent data set, the library attributes assigned will be Undefined (U) record format with a record length of 0, block size of 32760 and the remaining attributes will be computed. - If \ :literal:`dest`\ is a file, execute permission for the user will be added to the file (\`\`u+x\`\`). + If ``dest`` is a file, execute permission for the user will be added to the file (``u+x``). | **required**: False | **type**: bool @@ -221,9 +223,9 @@ executable aliases - If set to \ :literal:`true`\ , indicates that any aliases found in the source (USS file, USS dir, PDS/E library or member) are to be preserved during the copy operation. + If set to ``true``, indicates that any aliases found in the source (USS file, USS dir, PDS/E library or member) are to be preserved during the copy operation. - Aliases are implicitly preserved when libraries are copied over to USS destinations. That is, when \ :literal:`executable=True`\ and \ :literal:`dest`\ is a USS file or directory, this option will be ignored. + Aliases are implicitly preserved when libraries are copied over to USS destinations. That is, when ``executable=True`` and ``dest`` is a USS file or directory, this option will be ignored. Copying of aliases for text-based data sets from USS sources or to USS destinations is not currently supported. @@ -245,7 +247,7 @@ group When left unspecified, it uses the current group of the current user unless you are root, in which case it can preserve the previous ownership. - This option is only applicable if \ :literal:`dest`\ is USS, otherwise ignored. + This option is only applicable if ``dest`` is USS, otherwise ignored. | **required**: False | **type**: str @@ -254,13 +256,13 @@ group mode The permission of the destination file or directory. - If \ :literal:`dest`\ is USS, this will act as Unix file mode, otherwise ignored. + If ``dest`` is USS, this will act as Unix file mode, otherwise ignored. - It should be noted that modes are octal numbers. The user must either add a leading zero so that Ansible's YAML parser knows it is an octal number (like \ :literal:`0644`\ or \ :literal:`01777`\ )or quote it (like \ :literal:`'644'`\ or \ :literal:`'1777'`\ ) so Ansible receives a string and can do its own conversion from string into number. Giving Ansible a number without following one of these rules will end up with a decimal number which will have unexpected results. + It should be noted that modes are octal numbers. The user must either add a leading zero so that Ansible's YAML parser knows it is an octal number (like ``0644`` or ``01777``)or quote it (like ``'644'`` or ``'1777'``) so Ansible receives a string and can do its own conversion from string into number. Giving Ansible a number without following one of these rules will end up with a decimal number which will have unexpected results. - The mode may also be specified as a symbolic mode (for example, \`\`u+rwx\`\` or \`\`u=rw,g=r,o=r\`\`) or a special string \`preserve\`. + The mode may also be specified as a symbolic mode (for example, ``u+rwx`` or ``u=rw,g=r,o=r``) or a special string `preserve`. - \ :emphasis:`mode=preserve`\ means that the file will be given the same permissions as the source file. + *mode=preserve* means that the file will be given the same permissions as the source file. | **required**: False | **type**: str @@ -271,16 +273,16 @@ owner When left unspecified, it uses the current user unless you are root, in which case it can preserve the previous ownership. - This option is only applicable if \ :literal:`dest`\ is USS, otherwise ignored. + This option is only applicable if ``dest`` is USS, otherwise ignored. | **required**: False | **type**: str remote_src - If set to \ :literal:`false`\ , the module searches for \ :literal:`src`\ at the local machine. + If set to ``false``, the module searches for ``src`` at the local machine. - If set to \ :literal:`true`\ , the module goes to the remote/target machine for \ :literal:`src`\ . + If set to ``true``, the module goes to the remote/target machine for ``src``. | **required**: False | **type**: bool @@ -290,27 +292,27 @@ remote_src src Path to a file/directory or name of a data set to copy to remote z/OS system. - If \ :literal:`remote\_src`\ is true, then \ :literal:`src`\ must be the path to a Unix System Services (USS) file, name of a data set, or data set member. + If ``remote_src`` is true, then ``src`` must be the path to a Unix System Services (USS) file, name of a data set, or data set member. - If \ :literal:`src`\ is a local path or a USS path, it can be absolute or relative. + If ``src`` is a local path or a USS path, it can be absolute or relative. - If \ :literal:`src`\ is a directory, \ :literal:`dest`\ must be a partitioned data set or a USS directory. + If ``src`` is a directory, ``dest`` must be a partitioned data set or a USS directory. - If \ :literal:`src`\ is a file and \ :literal:`dest`\ ends with "/" or is a directory, the file is copied to the directory with the same filename as \ :literal:`src`\ . + If ``src`` is a file and ``dest`` ends with "/" or is a directory, the file is copied to the directory with the same filename as ``src``. - If \ :literal:`src`\ is a directory and ends with "/", the contents of it will be copied into the root of \ :literal:`dest`\ . If it doesn't end with "/", the directory itself will be copied. + If ``src`` is a directory and ends with "/", the contents of it will be copied into the root of ``dest``. If it doesn't end with "/", the directory itself will be copied. - If \ :literal:`src`\ is a directory or a file, file names will be truncated and/or modified to ensure a valid name for a data set or member. + If ``src`` is a directory or a file, file names will be truncated and/or modified to ensure a valid name for a data set or member. - If \ :literal:`src`\ is a VSAM data set, \ :literal:`dest`\ must also be a VSAM. + If ``src`` is a VSAM data set, ``dest`` must also be a VSAM. - If \ :literal:`src`\ is a generation data set (GDS), it must be a previously allocated one. + If ``src`` is a generation data set (GDS), it must be a previously allocated one. - If \ :literal:`src`\ is a generation data group (GDG), \ :literal:`dest`\ can be another GDG or a USS directory. + If ``src`` is a generation data group (GDG), ``dest`` can be another GDG or a USS directory. Wildcards can be used to copy multiple PDS/PDSE members to another PDS/PDSE. - Required unless using \ :literal:`content`\ . + Required unless using ``content``. | **required**: False | **type**: str @@ -327,24 +329,24 @@ validate volume - If \ :literal:`dest`\ does not exist, specify which volume \ :literal:`dest`\ should be allocated to. + If ``dest`` does not exist, specify which volume ``dest`` should be allocated to. Only valid when the destination is an MVS data set. The volume must already be present on the device. - If no volume is specified, storage management rules will be used to determine the volume where \ :literal:`dest`\ will be allocated. + If no volume is specified, storage management rules will be used to determine the volume where ``dest`` will be allocated. - If the storage administrator has specified a system default unit name and you do not set a \ :literal:`volume`\ name for non-system-managed data sets, then the system uses the volumes associated with the default unit name. Check with your storage administrator to determine whether a default unit name has been specified. + If the storage administrator has specified a system default unit name and you do not set a ``volume`` name for non-system-managed data sets, then the system uses the volumes associated with the default unit name. Check with your storage administrator to determine whether a default unit name has been specified. | **required**: False | **type**: str dest_data_set - Data set attributes to customize a \ :literal:`dest`\ data set to be copied into. + Data set attributes to customize a ``dest`` data set to be copied into. - Some attributes only apply when \ :literal:`dest`\ is a generation data group (GDG). + Some attributes only apply when ``dest`` is a generation data group (GDG). | **required**: False | **type**: dict @@ -359,18 +361,18 @@ dest_data_set space_primary - If the destination \ :emphasis:`dest`\ data set does not exist , this sets the primary space allocated for the data set. + If the destination *dest* data set does not exist , this sets the primary space allocated for the data set. - The unit of space used is set using \ :emphasis:`space\_type`\ . + The unit of space used is set using *space_type*. | **required**: False | **type**: int space_secondary - If the destination \ :emphasis:`dest`\ data set does not exist , this sets the secondary space allocated for the data set. + If the destination *dest* data set does not exist , this sets the secondary space allocated for the data set. - The unit of space used is set using \ :emphasis:`space\_type`\ . + The unit of space used is set using *space_type*. | **required**: False | **type**: int @@ -379,7 +381,7 @@ dest_data_set space_type If the destination data set does not exist, this sets the unit of measurement to use when defining primary and secondary space. - Valid units of size are \ :literal:`k`\ , \ :literal:`m`\ , \ :literal:`g`\ , \ :literal:`cyl`\ , and \ :literal:`trk`\ . + Valid units of size are ``k``, ``m``, ``g``, ``cyl``, and ``trk``. | **required**: False | **type**: str @@ -387,7 +389,7 @@ dest_data_set record_format - If the destination data set does not exist, this sets the format of the data set. (e.g \ :literal:`fb`\ ) + If the destination data set does not exist, this sets the format of the data set. (e.g ``fb``) Choices are case-sensitive. @@ -424,9 +426,9 @@ dest_data_set key_offset The key offset to use when creating a KSDS data set. - \ :emphasis:`key\_offset`\ is required when \ :emphasis:`type=ksds`\ . + *key_offset* is required when *type=ksds*. - \ :emphasis:`key\_offset`\ should only be provided when \ :emphasis:`type=ksds`\ + *key_offset* should only be provided when *type=ksds* | **required**: False | **type**: int @@ -435,9 +437,9 @@ dest_data_set key_length The key length to use when creating a KSDS data set. - \ :emphasis:`key\_length`\ is required when \ :emphasis:`type=ksds`\ . + *key_length* is required when *type=ksds*. - \ :emphasis:`key\_length`\ should only be provided when \ :emphasis:`type=ksds`\ + *key_length* should only be provided when *type=ksds* | **required**: False | **type**: int @@ -483,18 +485,18 @@ dest_data_set limit - Sets the \ :emphasis:`limit`\ attribute for a GDG. + Sets the *limit* attribute for a GDG. Specifies the maximum number, from 1 to 255(up to 999 if extended), of generations that can be associated with the GDG being defined. - \ :emphasis:`limit`\ is required when \ :emphasis:`type=gdg`\ . + *limit* is required when *type=gdg*. | **required**: False | **type**: int empty - Sets the \ :emphasis:`empty`\ attribute for a GDG. + Sets the *empty* attribute for a GDG. If false, removes only the oldest GDS entry when a new GDS is created that causes GDG limit to be exceeded. @@ -505,7 +507,7 @@ dest_data_set scratch - Sets the \ :emphasis:`scratch`\ attribute for a GDG. + Sets the *scratch* attribute for a GDG. Specifies what action is to be taken for a generation data set located on disk volumes when the data set is uncataloged from the GDG base as a result of EMPTY/NOEMPTY processing. @@ -514,16 +516,16 @@ dest_data_set purge - Sets the \ :emphasis:`purge`\ attribute for a GDG. + Sets the *purge* attribute for a GDG. - Specifies whether to override expiration dates when a generation data set (GDS) is rolled off and the \ :literal:`scratch`\ option is set. + Specifies whether to override expiration dates when a generation data set (GDS) is rolled off and the ``scratch`` option is set. | **required**: False | **type**: bool extended - Sets the \ :emphasis:`extended`\ attribute for a GDG. + Sets the *extended* attribute for a GDG. If false, allow up to 255 generation data sets (GDSs) to be associated with the GDG. @@ -534,7 +536,7 @@ dest_data_set fifo - Sets the \ :emphasis:`fifo`\ attribute for a GDG. + Sets the *fifo* attribute for a GDG. If false, the order is the newest GDS defined to the oldest GDS. This is the default value. @@ -546,13 +548,13 @@ dest_data_set use_template - Whether the module should treat \ :literal:`src`\ as a Jinja2 template and render it before continuing with the rest of the module. + Whether the module should treat ``src`` as a Jinja2 template and render it before continuing with the rest of the module. - Only valid when \ :literal:`src`\ is a local file or directory. + Only valid when ``src`` is a local file or directory. - All variables defined in inventory files, vars files and the playbook will be passed to the template engine, as well as \ `Ansible special variables <https://docs.ansible.com/ansible/latest/reference_appendices/special_variables.html#special-variables>`__\ , such as \ :literal:`playbook\_dir`\ , \ :literal:`ansible\_version`\ , etc. + All variables defined in inventory files, vars files and the playbook will be passed to the template engine, as well as `Ansible special variables <https://docs.ansible.com/ansible/latest/reference_appendices/special_variables.html#special-variables>`_, such as ``playbook_dir``, ``ansible_version``, etc. - If variables defined in different scopes share the same name, Ansible will apply variable precedence to them. You can see the complete precedence order \ `in Ansible's documentation <https://docs.ansible.com/ansible/latest/playbook_guide/playbooks_variables.html#understanding-variable-precedence>`__\ + If variables defined in different scopes share the same name, Ansible will apply variable precedence to them. You can see the complete precedence order `in Ansible's documentation <https://docs.ansible.com/ansible/latest/playbook_guide/playbooks_variables.html#understanding-variable-precedence>`_ | **required**: False | **type**: bool @@ -562,9 +564,9 @@ use_template template_parameters Options to set the way Jinja2 will process templates. - Jinja2 already sets defaults for the markers it uses, you can find more information at its \ `official documentation <https://jinja.palletsprojects.com/en/latest/templates/>`__\ . + Jinja2 already sets defaults for the markers it uses, you can find more information at its `official documentation <https://jinja.palletsprojects.com/en/latest/templates/>`_. - These options are ignored unless \ :literal:`use\_template`\ is true. + These options are ignored unless ``use_template`` is true. | **required**: False | **type**: dict @@ -643,7 +645,7 @@ template_parameters trim_blocks Whether Jinja2 should remove the first newline after a block is removed. - Setting this option to \ :literal:`False`\ will result in newlines being added to the rendered template. This could create invalid code when working with JCL templates or empty records in destination data sets. + Setting this option to ``False`` will result in newlines being added to the rendered template. This could create invalid code when working with JCL templates or empty records in destination data sets. | **required**: False | **type**: bool @@ -890,17 +892,17 @@ Notes .. note:: Destination data sets are assumed to be in catalog. When trying to copy to an uncataloged data set, the module assumes that the data set does not exist and will create it. - Destination will be backed up if either \ :literal:`backup`\ is \ :literal:`true`\ or \ :literal:`backup\_name`\ is provided. If \ :literal:`backup`\ is \ :literal:`false`\ but \ :literal:`backup\_name`\ is provided, task will fail. + Destination will be backed up if either ``backup`` is ``true`` or ``backup_name`` is provided. If ``backup`` is ``false`` but ``backup_name`` is provided, task will fail. When copying local files or directories, temporary storage will be used on the remote z/OS system. The size of the temporary storage will correspond to the size of the file or directory being copied. Temporary files will always be deleted, regardless of success or failure of the copy task. VSAM data sets can only be copied to other VSAM data sets. - For supported character sets used to encode data, refer to the \ `documentation <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/resources/character_set.html>`__\ . + For supported character sets used to encode data, refer to the `documentation <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/resources/character_set.html>`_. This module uses SFTP (Secure File Transfer Protocol) for the underlying transfer protocol; SCP (secure copy protocol) and Co:Z SFTP are not supported. In the case of Co:z SFTP, you can exempt the Ansible user id on z/OS from using Co:Z thus falling back to using standard SFTP. If the module detects SCP, it will temporarily use SFTP for transfers, if not available, the module will fail. - Beginning in version 1.8.x, zos\_copy will no longer attempt to correct a copy of a data type member into a PDSE that contains program objects. You can control this behavior using module option \ :literal:`executable`\ that will signify an executable is being copied into a PDSE with other executables. Mixing data type members with program objects will result in a (FSUM8976,./zos\_copy.html) error. + Beginning in version 1.8.x, zos_copy will no longer attempt to correct a copy of a data type member into a PDSE that contains program objects. You can control this behavior using module option ``executable`` that will signify an executable is being copied into a PDSE with other executables. Mixing data type members with program objects will result in a (FSUM8976,./zos_copy.html) error. It is the playbook author or user's responsibility to ensure they have appropriate authority to the RACF FACILITY resource class. A user is described as the remote user, configured either for the playbook or playbook tasks, who can also obtain escalated privileges to execute as root or another user. @@ -1011,7 +1013,7 @@ destination_attributes checksum - SHA256 checksum of the file after running zos\_copy. + SHA256 checksum of the file after running zos_copy. | **returned**: When ``validate=true`` and if ``dest`` is USS | **type**: str diff --git a/docs/source/modules/zos_data_set.rst b/docs/source/modules/zos_data_set.rst index caed66ba9..7a56cfe84 100644 --- a/docs/source/modules/zos_data_set.rst +++ b/docs/source/modules/zos_data_set.rst @@ -28,11 +28,11 @@ Parameters name - The name of the data set being managed. (e.g \ :literal:`USER.TEST`\ ) + The name of the data set being managed. (e.g ``USER.TEST``) - If \ :emphasis:`name`\ is not provided, a randomized data set name will be generated with the HLQ matching the module-runners username. + If *name* is not provided, a randomized data set name will be generated with the HLQ matching the module-runners username. - Required if \ :emphasis:`type=member`\ or \ :emphasis:`state!=present`\ and not using \ :emphasis:`batch`\ . + Required if *type=member* or *state!=present* and not using *batch*. | **required**: False | **type**: str @@ -41,52 +41,52 @@ name state The final state desired for specified data set. - If \ :emphasis:`state=absent`\ and the data set does not exist on the managed node, no action taken, module completes successfully with \ :emphasis:`changed=False`\ . + If *state=absent* and the data set does not exist on the managed node, no action taken, module completes successfully with *changed=False*. - If \ :emphasis:`state=absent`\ and the data set does exist on the managed node, remove the data set, module completes successfully with \ :emphasis:`changed=True`\ . + If *state=absent* and the data set does exist on the managed node, remove the data set, module completes successfully with *changed=True*. - If \ :emphasis:`state=absent`\ and \ :emphasis:`type=member`\ and \ :emphasis:`force=True`\ , the data set will be opened with \ :emphasis:`DISP=SHR`\ such that the entire data set can be accessed by other processes while the specified member is deleted. + If *state=absent* and *type=member* and *force=True*, the data set will be opened with *DISP=SHR* such that the entire data set can be accessed by other processes while the specified member is deleted. - If \ :emphasis:`state=absent`\ and \ :emphasis:`volumes`\ is provided, and the data set is not found in the catalog, the module attempts to perform catalog using supplied \ :emphasis:`name`\ and \ :emphasis:`volumes`\ . If the attempt to catalog the data set catalog is successful, then the data set is removed. Module completes successfully with \ :emphasis:`changed=True`\ . + If *state=absent* and *volumes* is provided, and the data set is not found in the catalog, the module attempts to perform catalog using supplied *name* and *volumes*. If the attempt to catalog the data set catalog is successful, then the data set is removed. Module completes successfully with *changed=True*. - If \ :emphasis:`state=absent`\ and \ :emphasis:`volumes`\ is provided, and the data set is not found in the catalog, the module attempts to perform catalog using supplied \ :emphasis:`name`\ and \ :emphasis:`volumes`\ . If the attempt to catalog the data set catalog fails, then no action is taken. Module completes successfully with \ :emphasis:`changed=False`\ . + If *state=absent* and *volumes* is provided, and the data set is not found in the catalog, the module attempts to perform catalog using supplied *name* and *volumes*. If the attempt to catalog the data set catalog fails, then no action is taken. Module completes successfully with *changed=False*. - If \ :emphasis:`state=absent`\ and \ :emphasis:`volumes`\ is provided, and the data set is found in the catalog, the module compares the catalog volume attributes to the provided \ :emphasis:`volumes`\ . If the volume attributes are different, the cataloged data set will be uncataloged temporarily while the requested data set be deleted is cataloged. The module will catalog the original data set on completion, if the attempts to catalog fail, no action is taken. Module completes successfully with \ :emphasis:`changed=False`\ . + If *state=absent* and *volumes* is provided, and the data set is found in the catalog, the module compares the catalog volume attributes to the provided *volumes*. If the volume attributes are different, the cataloged data set will be uncataloged temporarily while the requested data set be deleted is cataloged. The module will catalog the original data set on completion, if the attempts to catalog fail, no action is taken. Module completes successfully with *changed=False*. - If \ :emphasis:`state=absent`\ and \ :emphasis:`type=gdg`\ and the GDG base has active generations the module will complete successfully with \ :emphasis:`changed=False`\ . To remove it option \ :emphasis:`force`\ needs to be used. If the GDG base does not have active generations the module will complete successfully with \ :emphasis:`changed=True`\ . + If *state=absent* and *type=gdg* and the GDG base has active generations the module will complete successfully with *changed=False*. To remove it option *force* needs to be used. If the GDG base does not have active generations the module will complete successfully with *changed=True*. - If \ :emphasis:`state=present`\ and the data set does not exist on the managed node, create and catalog the data set, module completes successfully with \ :emphasis:`changed=True`\ . + If *state=present* and the data set does not exist on the managed node, create and catalog the data set, module completes successfully with *changed=True*. - If \ :emphasis:`state=present`\ and \ :emphasis:`replace=True`\ and the data set is present on the managed node the existing data set is deleted, and a new data set is created and cataloged with the desired attributes, module completes successfully with \ :emphasis:`changed=True`\ . + If *state=present* and *replace=True* and the data set is present on the managed node the existing data set is deleted, and a new data set is created and cataloged with the desired attributes, module completes successfully with *changed=True*. - If \ :emphasis:`state=present`\ and \ :emphasis:`replace=False`\ and the data set is present on the managed node, no action taken, module completes successfully with \ :emphasis:`changed=False`\ . + If *state=present* and *replace=False* and the data set is present on the managed node, no action taken, module completes successfully with *changed=False*. - If \ :emphasis:`state=present`\ and \ :emphasis:`type=member`\ and the member does not exist in the data set, create a member formatted to store data, module completes successfully with \ :emphasis:`changed=True`\ . Note, a PDSE does not allow a mixture of formats such that there is executables (program objects) and data. The member created is formatted to store data, not an executable. + If *state=present* and *type=member* and the member does not exist in the data set, create a member formatted to store data, module completes successfully with *changed=True*. Note, a PDSE does not allow a mixture of formats such that there is executables (program objects) and data. The member created is formatted to store data, not an executable. - If \ :emphasis:`state=cataloged`\ and \ :emphasis:`volumes`\ is provided and the data set is already cataloged, no action taken, module completes successfully with \ :emphasis:`changed=False`\ . + If *state=cataloged* and *volumes* is provided and the data set is already cataloged, no action taken, module completes successfully with *changed=False*. - If \ :emphasis:`state=cataloged`\ and \ :emphasis:`volumes`\ is provided and the data set is not cataloged, module attempts to perform catalog using supplied \ :emphasis:`name`\ and \ :emphasis:`volumes`\ . If the attempt to catalog the data set catalog is successful, module completes successfully with \ :emphasis:`changed=True`\ . + If *state=cataloged* and *volumes* is provided and the data set is not cataloged, module attempts to perform catalog using supplied *name* and *volumes*. If the attempt to catalog the data set catalog is successful, module completes successfully with *changed=True*. - If \ :emphasis:`state=cataloged`\ and \ :emphasis:`volumes`\ is provided and the data set is not cataloged, module attempts to perform catalog using supplied \ :emphasis:`name`\ and \ :emphasis:`volumes`\ . If the attempt to catalog the data set catalog fails, returns failure with \ :emphasis:`changed=False`\ . + If *state=cataloged* and *volumes* is provided and the data set is not cataloged, module attempts to perform catalog using supplied *name* and *volumes*. If the attempt to catalog the data set catalog fails, returns failure with *changed=False*. - If \ :emphasis:`state=uncataloged`\ and the data set is not found, no action taken, module completes successfully with \ :emphasis:`changed=False`\ . + If *state=uncataloged* and the data set is not found, no action taken, module completes successfully with *changed=False*. - If \ :emphasis:`state=uncataloged`\ and the data set is found, the data set is uncataloged, module completes successfully with \ :emphasis:`changed=True`\ . + If *state=uncataloged* and the data set is found, the data set is uncataloged, module completes successfully with *changed=True*. | **required**: False @@ -96,9 +96,9 @@ state type - The data set type to be used when creating a data set. (e.g \ :literal:`pdse`\ ). + The data set type to be used when creating a data set. (e.g ``pdse``). - \ :literal:`member`\ expects to be used with an existing partitioned data set. + ``member`` expects to be used with an existing partitioned data set. Choices are case-sensitive. @@ -111,7 +111,7 @@ type space_primary The amount of primary space to allocate for the dataset. - The unit of space used is set using \ :emphasis:`space\_type`\ . + The unit of space used is set using *space_type*. | **required**: False | **type**: int @@ -121,7 +121,7 @@ space_primary space_secondary The amount of secondary space to allocate for the dataset. - The unit of space used is set using \ :emphasis:`space\_type`\ . + The unit of space used is set using *space_type*. | **required**: False | **type**: int @@ -131,7 +131,7 @@ space_secondary space_type The unit of measurement to use when defining primary and secondary space. - Valid units of size are \ :literal:`k`\ , \ :literal:`m`\ , \ :literal:`g`\ , \ :literal:`cyl`\ , and \ :literal:`trk`\ . + Valid units of size are ``k``, ``m``, ``g``, ``cyl``, and ``trk``. | **required**: False | **type**: str @@ -140,11 +140,11 @@ space_type record_format - The format of the data set. (e.g \ :literal:`FB`\ ) + The format of the data set. (e.g ``FB``) Choices are case-sensitive. - When \ :emphasis:`type=ksds`\ , \ :emphasis:`type=esds`\ , \ :emphasis:`type=rrds`\ , \ :emphasis:`type=lds`\ or \ :emphasis:`type=zfs`\ then \ :emphasis:`record\_format=None`\ , these types do not have a default \ :emphasis:`record\_format`\ . + When *type=ksds*, *type=esds*, *type=rrds*, *type=lds* or *type=zfs* then *record_format=None*, these types do not have a default *record_format*. | **required**: False | **type**: str @@ -219,9 +219,9 @@ directory_blocks key_offset The key offset to use when creating a KSDS data set. - \ :emphasis:`key\_offset`\ is required when \ :emphasis:`type=ksds`\ . + *key_offset* is required when *type=ksds*. - \ :emphasis:`key\_offset`\ should only be provided when \ :emphasis:`type=ksds`\ + *key_offset* should only be provided when *type=ksds* | **required**: False | **type**: int @@ -230,16 +230,16 @@ key_offset key_length The key length to use when creating a KSDS data set. - \ :emphasis:`key\_length`\ is required when \ :emphasis:`type=ksds`\ . + *key_length* is required when *type=ksds*. - \ :emphasis:`key\_length`\ should only be provided when \ :emphasis:`type=ksds`\ + *key_length* should only be provided when *type=ksds* | **required**: False | **type**: int empty - Sets the \ :emphasis:`empty`\ attribute for Generation Data Groups. + Sets the *empty* attribute for Generation Data Groups. If false, removes only the oldest GDS entry when a new GDS is created that causes GDG limit to be exceeded. @@ -252,7 +252,7 @@ empty extended - Sets the \ :emphasis:`extended`\ attribute for Generation Data Groups. + Sets the *extended* attribute for Generation Data Groups. If false, allow up to 255 generation data sets (GDSs) to be associated with the GDG. @@ -265,7 +265,7 @@ extended fifo - Sets the \ :emphasis:`fifo`\ attribute for Generation Data Groups. + Sets the *fifo* attribute for Generation Data Groups. If false, the order is the newest GDS defined to the oldest GDS. This is the default value. @@ -278,27 +278,27 @@ fifo limit - Sets the \ :emphasis:`limit`\ attribute for Generation Data Groups. + Sets the *limit* attribute for Generation Data Groups. Specifies the maximum number, from 1 to 255(up to 999 if extended), of GDS that can be associated with the GDG being defined. - \ :emphasis:`limit`\ is required when \ :emphasis:`type=gdg`\ . + *limit* is required when *type=gdg*. | **required**: False | **type**: int purge - Sets the \ :emphasis:`purge`\ attribute for Generation Data Groups. + Sets the *purge* attribute for Generation Data Groups. - Specifies whether to override expiration dates when a generation data set (GDS) is rolled off and the \ :literal:`scratch`\ option is set. + Specifies whether to override expiration dates when a generation data set (GDS) is rolled off and the ``scratch`` option is set. | **required**: False | **type**: bool scratch - Sets the \ :emphasis:`scratch`\ attribute for Generation Data Groups. + Sets the *scratch* attribute for Generation Data Groups. Specifies what action is to be taken for a generation data set located on disk volumes when the data set is uncataloged from the GDG base as a result of EMPTY/NOEMPTY processing. @@ -307,19 +307,19 @@ scratch volumes - If cataloging a data set, \ :emphasis:`volumes`\ specifies the name of the volume(s) where the data set is located. + If cataloging a data set, *volumes* specifies the name of the volume(s) where the data set is located. - If creating a data set, \ :emphasis:`volumes`\ specifies the volume(s) where the data set should be created. + If creating a data set, *volumes* specifies the volume(s) where the data set should be created. - If \ :emphasis:`volumes`\ is provided when \ :emphasis:`state=present`\ , and the data set is not found in the catalog, \ `zos\_data\_set <./zos_data_set.html>`__\ will check the volume table of contents to see if the data set exists. If the data set does exist, it will be cataloged. + If *volumes* is provided when *state=present*, and the data set is not found in the catalog, `zos_data_set <./zos_data_set.html>`_ will check the volume table of contents to see if the data set exists. If the data set does exist, it will be cataloged. - If \ :emphasis:`volumes`\ is provided when \ :emphasis:`state=absent`\ and the data set is not found in the catalog, \ `zos\_data\_set <./zos_data_set.html>`__\ will check the volume table of contents to see if the data set exists. If the data set does exist, it will be cataloged and promptly removed from the system. + If *volumes* is provided when *state=absent* and the data set is not found in the catalog, `zos_data_set <./zos_data_set.html>`_ will check the volume table of contents to see if the data set exists. If the data set does exist, it will be cataloged and promptly removed from the system. - \ :emphasis:`volumes`\ is required when \ :emphasis:`state=cataloged`\ . + *volumes* is required when *state=cataloged*. Accepts a string when using a single volume and a list of strings when using multiple. @@ -328,12 +328,12 @@ volumes replace - When \ :emphasis:`replace=True`\ , and \ :emphasis:`state=present`\ , existing data set matching \ :emphasis:`name`\ will be replaced. + When *replace=True*, and *state=present*, existing data set matching *name* will be replaced. Replacement is performed by deleting the existing data set and creating a new data set with the same name and desired attributes. Since the existing data set will be deleted prior to creating the new data set, no data set will exist if creation of the new data set fails. - If \ :emphasis:`replace=True`\ , all data in the original data set will be lost. + If *replace=True*, all data in the original data set will be lost. | **required**: False | **type**: bool @@ -343,7 +343,7 @@ replace tmp_hlq Override the default high level qualifier (HLQ) for temporary and backup datasets. - The default HLQ is the Ansible user used to execute the module and if that is not available, then the value \ :literal:`TMPHLQ`\ is used. + The default HLQ is the Ansible user used to execute the module and if that is not available, then the value ``TMPHLQ`` is used. | **required**: False | **type**: str @@ -354,11 +354,11 @@ force This is helpful when a data set is being used in a long running process such as a started task and you are wanting to delete a member. - The \ :emphasis:`force=True`\ option enables sharing of data sets through the disposition \ :emphasis:`DISP=SHR`\ . + The *force=True* option enables sharing of data sets through the disposition *DISP=SHR*. - The \ :emphasis:`force=True`\ only applies to data set members when \ :emphasis:`state=absent`\ and \ :emphasis:`type=member`\ and when removing a GDG base with active generations. + The *force=True* only applies to data set members when *state=absent* and *type=member* and when removing a GDG base with active generations. - If \ :emphasis:`force=True`\ , \ :emphasis:`type=gdg`\ and \ :emphasis:`state=absent`\ it will force remove a GDG base with active generations. + If *force=True*, *type=gdg* and *state=absent* it will force remove a GDG base with active generations. | **required**: False | **type**: bool @@ -374,11 +374,11 @@ batch name - The name of the data set being managed. (e.g \ :literal:`USER.TEST`\ ) + The name of the data set being managed. (e.g ``USER.TEST``) - If \ :emphasis:`name`\ is not provided, a randomized data set name will be generated with the HLQ matching the module-runners username. + If *name* is not provided, a randomized data set name will be generated with the HLQ matching the module-runners username. - Required if \ :emphasis:`type=member`\ or \ :emphasis:`state!=present`\ + Required if *type=member* or *state!=present* | **required**: False | **type**: str @@ -387,49 +387,49 @@ batch state The final state desired for specified data set. - If \ :emphasis:`state=absent`\ and the data set does not exist on the managed node, no action taken, module completes successfully with \ :emphasis:`changed=False`\ . + If *state=absent* and the data set does not exist on the managed node, no action taken, module completes successfully with *changed=False*. - If \ :emphasis:`state=absent`\ and the data set does exist on the managed node, remove the data set, module completes successfully with \ :emphasis:`changed=True`\ . + If *state=absent* and the data set does exist on the managed node, remove the data set, module completes successfully with *changed=True*. - If \ :emphasis:`state=absent`\ and \ :emphasis:`type=member`\ and \ :emphasis:`force=True`\ , the data set will be opened with \ :emphasis:`DISP=SHR`\ such that the entire data set can be accessed by other processes while the specified member is deleted. + If *state=absent* and *type=member* and *force=True*, the data set will be opened with *DISP=SHR* such that the entire data set can be accessed by other processes while the specified member is deleted. - If \ :emphasis:`state=absent`\ and \ :emphasis:`volumes`\ is provided, and the data set is not found in the catalog, the module attempts to perform catalog using supplied \ :emphasis:`name`\ and \ :emphasis:`volumes`\ . If the attempt to catalog the data set catalog is successful, then the data set is removed. Module completes successfully with \ :emphasis:`changed=True`\ . + If *state=absent* and *volumes* is provided, and the data set is not found in the catalog, the module attempts to perform catalog using supplied *name* and *volumes*. If the attempt to catalog the data set catalog is successful, then the data set is removed. Module completes successfully with *changed=True*. - If \ :emphasis:`state=absent`\ and \ :emphasis:`volumes`\ is provided, and the data set is not found in the catalog, the module attempts to perform catalog using supplied \ :emphasis:`name`\ and \ :emphasis:`volumes`\ . If the attempt to catalog the data set catalog fails, then no action is taken. Module completes successfully with \ :emphasis:`changed=False`\ . + If *state=absent* and *volumes* is provided, and the data set is not found in the catalog, the module attempts to perform catalog using supplied *name* and *volumes*. If the attempt to catalog the data set catalog fails, then no action is taken. Module completes successfully with *changed=False*. - If \ :emphasis:`state=absent`\ and \ :emphasis:`volumes`\ is provided, and the data set is found in the catalog, the module compares the catalog volume attributes to the provided \ :emphasis:`volumes`\ . If they volume attributes are different, the cataloged data set will be uncataloged temporarily while the requested data set be deleted is cataloged. The module will catalog the original data set on completion, if the attempts to catalog fail, no action is taken. Module completes successfully with \ :emphasis:`changed=False`\ . + If *state=absent* and *volumes* is provided, and the data set is found in the catalog, the module compares the catalog volume attributes to the provided *volumes*. If they volume attributes are different, the cataloged data set will be uncataloged temporarily while the requested data set be deleted is cataloged. The module will catalog the original data set on completion, if the attempts to catalog fail, no action is taken. Module completes successfully with *changed=False*. - If \ :emphasis:`state=present`\ and the data set does not exist on the managed node, create and catalog the data set, module completes successfully with \ :emphasis:`changed=True`\ . + If *state=present* and the data set does not exist on the managed node, create and catalog the data set, module completes successfully with *changed=True*. - If \ :emphasis:`state=present`\ and \ :emphasis:`replace=True`\ and the data set is present on the managed node the existing data set is deleted, and a new data set is created and cataloged with the desired attributes, module completes successfully with \ :emphasis:`changed=True`\ . + If *state=present* and *replace=True* and the data set is present on the managed node the existing data set is deleted, and a new data set is created and cataloged with the desired attributes, module completes successfully with *changed=True*. - If \ :emphasis:`state=present`\ and \ :emphasis:`replace=False`\ and the data set is present on the managed node, no action taken, module completes successfully with \ :emphasis:`changed=False`\ . + If *state=present* and *replace=False* and the data set is present on the managed node, no action taken, module completes successfully with *changed=False*. - If \ :emphasis:`state=present`\ and \ :emphasis:`type=member`\ and the member does not exist in the data set, create a member formatted to store data, module completes successfully with \ :emphasis:`changed=True`\ . Note, a PDSE does not allow a mixture of formats such that there is executables (program objects) and data. The member created is formatted to store data, not an executable. + If *state=present* and *type=member* and the member does not exist in the data set, create a member formatted to store data, module completes successfully with *changed=True*. Note, a PDSE does not allow a mixture of formats such that there is executables (program objects) and data. The member created is formatted to store data, not an executable. - If \ :emphasis:`state=cataloged`\ and \ :emphasis:`volumes`\ is provided and the data set is already cataloged, no action taken, module completes successfully with \ :emphasis:`changed=False`\ . + If *state=cataloged* and *volumes* is provided and the data set is already cataloged, no action taken, module completes successfully with *changed=False*. - If \ :emphasis:`state=cataloged`\ and \ :emphasis:`volumes`\ is provided and the data set is not cataloged, module attempts to perform catalog using supplied \ :emphasis:`name`\ and \ :emphasis:`volumes`\ . If the attempt to catalog the data set catalog is successful, module completes successfully with \ :emphasis:`changed=True`\ . + If *state=cataloged* and *volumes* is provided and the data set is not cataloged, module attempts to perform catalog using supplied *name* and *volumes*. If the attempt to catalog the data set catalog is successful, module completes successfully with *changed=True*. - If \ :emphasis:`state=cataloged`\ and \ :emphasis:`volumes`\ is provided and the data set is not cataloged, module attempts to perform catalog using supplied \ :emphasis:`name`\ and \ :emphasis:`volumes`\ . If the attempt to catalog the data set catalog fails, returns failure with \ :emphasis:`changed=False`\ . + If *state=cataloged* and *volumes* is provided and the data set is not cataloged, module attempts to perform catalog using supplied *name* and *volumes*. If the attempt to catalog the data set catalog fails, returns failure with *changed=False*. - If \ :emphasis:`state=uncataloged`\ and the data set is not found, no action taken, module completes successfully with \ :emphasis:`changed=False`\ . + If *state=uncataloged* and the data set is not found, no action taken, module completes successfully with *changed=False*. - If \ :emphasis:`state=uncataloged`\ and the data set is found, the data set is uncataloged, module completes successfully with \ :emphasis:`changed=True`\ . + If *state=uncataloged* and the data set is found, the data set is uncataloged, module completes successfully with *changed=True*. | **required**: False @@ -439,9 +439,9 @@ batch type - The data set type to be used when creating a data set. (e.g \ :literal:`pdse`\ ) + The data set type to be used when creating a data set. (e.g ``pdse``) - \ :literal:`member`\ expects to be used with an existing partitioned data set. + ``member`` expects to be used with an existing partitioned data set. Choices are case-sensitive. @@ -454,7 +454,7 @@ batch space_primary The amount of primary space to allocate for the dataset. - The unit of space used is set using \ :emphasis:`space\_type`\ . + The unit of space used is set using *space_type*. | **required**: False | **type**: int @@ -464,7 +464,7 @@ batch space_secondary The amount of secondary space to allocate for the dataset. - The unit of space used is set using \ :emphasis:`space\_type`\ . + The unit of space used is set using *space_type*. | **required**: False | **type**: int @@ -474,7 +474,7 @@ batch space_type The unit of measurement to use when defining primary and secondary space. - Valid units of size are \ :literal:`k`\ , \ :literal:`m`\ , \ :literal:`g`\ , \ :literal:`cyl`\ , and \ :literal:`trk`\ . + Valid units of size are ``k``, ``m``, ``g``, ``cyl``, and ``trk``. | **required**: False | **type**: str @@ -483,11 +483,11 @@ batch record_format - The format of the data set. (e.g \ :literal:`FB`\ ) + The format of the data set. (e.g ``FB``) Choices are case-sensitive. - When \ :emphasis:`type=ksds`\ , \ :emphasis:`type=esds`\ , \ :emphasis:`type=rrds`\ , \ :emphasis:`type=lds`\ or \ :emphasis:`type=zfs`\ then \ :emphasis:`record\_format=None`\ , these types do not have a default \ :emphasis:`record\_format`\ . + When *type=ksds*, *type=esds*, *type=rrds*, *type=lds* or *type=zfs* then *record_format=None*, these types do not have a default *record_format*. | **required**: False | **type**: str @@ -562,9 +562,9 @@ batch key_offset The key offset to use when creating a KSDS data set. - \ :emphasis:`key\_offset`\ is required when \ :emphasis:`type=ksds`\ . + *key_offset* is required when *type=ksds*. - \ :emphasis:`key\_offset`\ should only be provided when \ :emphasis:`type=ksds`\ + *key_offset* should only be provided when *type=ksds* | **required**: False | **type**: int @@ -573,16 +573,16 @@ batch key_length The key length to use when creating a KSDS data set. - \ :emphasis:`key\_length`\ is required when \ :emphasis:`type=ksds`\ . + *key_length* is required when *type=ksds*. - \ :emphasis:`key\_length`\ should only be provided when \ :emphasis:`type=ksds`\ + *key_length* should only be provided when *type=ksds* | **required**: False | **type**: int empty - Sets the \ :emphasis:`empty`\ attribute for Generation Data Groups. + Sets the *empty* attribute for Generation Data Groups. If false, removes only the oldest GDS entry when a new GDS is created that causes GDG limit to be exceeded. @@ -595,7 +595,7 @@ batch extended - Sets the \ :emphasis:`extended`\ attribute for Generation Data Groups. + Sets the *extended* attribute for Generation Data Groups. If false, allow up to 255 generation data sets (GDSs) to be associated with the GDG. @@ -608,7 +608,7 @@ batch fifo - Sets the \ :emphasis:`fifo`\ attribute for Generation Data Groups. + Sets the *fifo* attribute for Generation Data Groups. If false, the order is the newest GDS defined to the oldest GDS. This is the default value. @@ -621,27 +621,27 @@ batch limit - Sets the \ :emphasis:`limit`\ attribute for Generation Data Groups. + Sets the *limit* attribute for Generation Data Groups. Specifies the maximum number, from 1 to 255(up to 999 if extended), of GDS that can be associated with the GDG being defined. - \ :emphasis:`limit`\ is required when \ :emphasis:`type=gdg`\ . + *limit* is required when *type=gdg*. | **required**: False | **type**: int purge - Sets the \ :emphasis:`purge`\ attribute for Generation Data Groups. + Sets the *purge* attribute for Generation Data Groups. - Specifies whether to override expiration dates when a generation data set (GDS) is rolled off and the \ :literal:`scratch`\ option is set. + Specifies whether to override expiration dates when a generation data set (GDS) is rolled off and the ``scratch`` option is set. | **required**: False | **type**: bool scratch - Sets the \ :emphasis:`scratch`\ attribute for Generation Data Groups. + Sets the *scratch* attribute for Generation Data Groups. Specifies what action is to be taken for a generation data set located on disk volumes when the data set is uncataloged from the GDG base as a result of EMPTY/NOEMPTY processing. @@ -650,19 +650,19 @@ batch volumes - If cataloging a data set, \ :emphasis:`volumes`\ specifies the name of the volume(s) where the data set is located. + If cataloging a data set, *volumes* specifies the name of the volume(s) where the data set is located. - If creating a data set, \ :emphasis:`volumes`\ specifies the volume(s) where the data set should be created. + If creating a data set, *volumes* specifies the volume(s) where the data set should be created. - If \ :emphasis:`volumes`\ is provided when \ :emphasis:`state=present`\ , and the data set is not found in the catalog, \ `zos\_data\_set <./zos_data_set.html>`__\ will check the volume table of contents to see if the data set exists. If the data set does exist, it will be cataloged. + If *volumes* is provided when *state=present*, and the data set is not found in the catalog, `zos_data_set <./zos_data_set.html>`_ will check the volume table of contents to see if the data set exists. If the data set does exist, it will be cataloged. - If \ :emphasis:`volumes`\ is provided when \ :emphasis:`state=absent`\ and the data set is not found in the catalog, \ `zos\_data\_set <./zos_data_set.html>`__\ will check the volume table of contents to see if the data set exists. If the data set does exist, it will be cataloged and promptly removed from the system. + If *volumes* is provided when *state=absent* and the data set is not found in the catalog, `zos_data_set <./zos_data_set.html>`_ will check the volume table of contents to see if the data set exists. If the data set does exist, it will be cataloged and promptly removed from the system. - \ :emphasis:`volumes`\ is required when \ :emphasis:`state=cataloged`\ . + *volumes* is required when *state=cataloged*. Accepts a string when using a single volume and a list of strings when using multiple. @@ -671,12 +671,12 @@ batch replace - When \ :emphasis:`replace=True`\ , and \ :emphasis:`state=present`\ , existing data set matching \ :emphasis:`name`\ will be replaced. + When *replace=True*, and *state=present*, existing data set matching *name* will be replaced. Replacement is performed by deleting the existing data set and creating a new data set with the same name and desired attributes. Since the existing data set will be deleted prior to creating the new data set, no data set will exist if creation of the new data set fails. - If \ :emphasis:`replace=True`\ , all data in the original data set will be lost. + If *replace=True*, all data in the original data set will be lost. | **required**: False | **type**: bool @@ -688,9 +688,9 @@ batch This is helpful when a data set is being used in a long running process such as a started task and you are wanting to delete a member. - The \ :emphasis:`force=True`\ option enables sharing of data sets through the disposition \ :emphasis:`DISP=SHR`\ . + The *force=True* option enables sharing of data sets through the disposition *DISP=SHR*. - The \ :emphasis:`force=True`\ only applies to data set members when \ :emphasis:`state=absent`\ and \ :emphasis:`type=member`\ . + The *force=True* only applies to data set members when *state=absent* and *type=member*. | **required**: False | **type**: bool diff --git a/docs/source/modules/zos_encode.rst b/docs/source/modules/zos_encode.rst index 51bcca12d..860a150bf 100644 --- a/docs/source/modules/zos_encode.rst +++ b/docs/source/modules/zos_encode.rst @@ -37,7 +37,7 @@ encoding from - The character set of the source \ :emphasis:`src`\ . + The character set of the source *src*. | **required**: False | **type**: str @@ -45,7 +45,7 @@ encoding to - The destination \ :emphasis:`dest`\ character set for the output to be written as. + The destination *dest* character set for the output to be written as. | **required**: False | **type**: str @@ -58,7 +58,7 @@ src The USS path or file must be an absolute pathname. - If \ :emphasis:`src`\ is a USS directory, all files will be encoded. + If *src* is a USS directory, all files will be encoded. Encoding a whole generation data group (GDG) is not supported. @@ -69,24 +69,24 @@ src dest The location where the converted characters are output. - The destination \ :emphasis:`dest`\ can be a UNIX System Services (USS) file or path, PS (sequential data set), PDS, PDSE, member of a PDS or PDSE, a generation data set (GDS) or KSDS (VSAM data set). + The destination *dest* can be a UNIX System Services (USS) file or path, PS (sequential data set), PDS, PDSE, member of a PDS or PDSE, a generation data set (GDS) or KSDS (VSAM data set). - If the length of the PDSE member name used in \ :emphasis:`dest`\ is greater than 8 characters, the member name will be truncated when written out. + If the length of the PDSE member name used in *dest* is greater than 8 characters, the member name will be truncated when written out. - If \ :emphasis:`dest`\ is not specified, the \ :emphasis:`src`\ will be used as the destination and will overwrite the \ :emphasis:`src`\ with the character set in the option \ :emphasis:`to\_encoding`\ . + If *dest* is not specified, the *src* will be used as the destination and will overwrite the *src* with the character set in the option *to_encoding*. The USS file or path must be an absolute pathname. - If \ :emphasis:`dest`\ is a data set, it must be already allocated. + If *dest* is a data set, it must be already allocated. | **required**: False | **type**: str backup - Creates a backup file or backup data set for \ :emphasis:`dest`\ , including the timestamp information to ensure that you retrieve the original file. + Creates a backup file or backup data set for *dest*, including the timestamp information to ensure that you retrieve the original file. - \ :emphasis:`backup\_name`\ can be used to specify a backup file name if \ :emphasis:`backup=true`\ . + *backup_name* can be used to specify a backup file name if *backup=true*. | **required**: False | **type**: bool @@ -96,15 +96,15 @@ backup backup_name Specify the USS file name or data set name for the dest backup. - If dest is a USS file or path, \ :emphasis:`backup\_name`\ must be a file or path name, and the USS path or file must be an absolute pathname. + If dest is a USS file or path, *backup_name* must be a file or path name, and the USS path or file must be an absolute pathname. - If dest is an MVS data set, the \ :emphasis:`backup\_name`\ must be an MVS data set name. + If dest is an MVS data set, the *backup_name* must be an MVS data set name. - If \ :emphasis:`backup\_name`\ is not provided, the default backup name will be used. The default backup name for a USS file or path will be the destination file or path name appended with a timestamp, e.g. /path/file\_name.2020-04-23-08-32-29-bak.tar. If dest is an MVS data set, the default backup name will be a random name generated by IBM Z Open Automation Utilities. + If *backup_name* is not provided, the default backup name will be used. The default backup name for a USS file or path will be the destination file or path name appended with a timestamp, e.g. /path/file_name.2020-04-23-08-32-29-bak.tar. If dest is an MVS data set, the default backup name will be a random name generated by IBM Z Open Automation Utilities. - \ :literal:`backup\_name`\ will be returned on either success or failure of module execution such that data can be retrieved. + ``backup_name`` will be returned on either success or failure of module execution such that data can be retrieved. - If \ :emphasis:`backup\_name`\ is a generation data set (GDS), it must be a relative positive name (for example, \ :literal:`HLQ.USER.GDG(+1)`\ ). + If *backup_name* is a generation data set (GDS), it must be a relative positive name (for example, V(HLQ.USER.GDG(+1\))). | **required**: False | **type**: str @@ -113,7 +113,7 @@ backup_name backup_compress Determines if backups to USS files or paths should be compressed. - \ :emphasis:`backup\_compress`\ is only used when \ :emphasis:`backup=true`\ . + *backup_compress* is only used when *backup=true*. | **required**: False | **type**: bool @@ -123,7 +123,7 @@ backup_compress tmp_hlq Override the default high level qualifier (HLQ) for temporary and backup datasets. - The default HLQ is the Ansible user used to execute the module and if that is not available, then the value \ :literal:`TMPHLQ`\ is used. + The default HLQ is the Ansible user used to execute the module and if that is not available, then the value ``TMPHLQ`` is used. | **required**: False | **type**: str @@ -288,7 +288,7 @@ Notes All data sets are always assumed to be cataloged. If an uncataloged data set needs to be encoded, it should be cataloged first. - For supported character sets used to encode data, refer to the \ `documentation <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/resources/character_set.html>`__\ . + For supported character sets used to encode data, refer to the `documentation <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/resources/character_set.html>`_. @@ -301,7 +301,7 @@ Return Values src - The location of the input characters identified in option \ :emphasis:`src`\ . + The location of the input characters identified in option *src*. | **returned**: always | **type**: str diff --git a/docs/source/modules/zos_fetch.rst b/docs/source/modules/zos_fetch.rst index 23d58c864..e3f0df325 100644 --- a/docs/source/modules/zos_fetch.rst +++ b/docs/source/modules/zos_fetch.rst @@ -20,7 +20,7 @@ Synopsis - When fetching a sequential data set, the destination file name will be the same as the data set name. - When fetching a PDS or PDSE, the destination will be a directory with the same name as the PDS or PDSE. - When fetching a PDS/PDSE member, destination will be a file. -- Files that already exist at \ :literal:`dest`\ will be overwritten if they are different than \ :literal:`src`\ . +- Files that already exist at ``dest`` will be overwritten if they are different than ``src``. - When fetching a GDS, the relative name will be resolved to its absolute one. - When fetching a generation data group, the destination will be a directory with the same name as the GDG. @@ -98,7 +98,7 @@ encoding from - The character set of the source \ :emphasis:`src`\ . + The character set of the source *src*. Supported character sets rely on the charset conversion utility (iconv) version; the most common character sets are supported. @@ -107,7 +107,7 @@ encoding to - The destination \ :emphasis:`dest`\ character set for the output to be written as. + The destination *dest* character set for the output to be written as. Supported character sets rely on the charset conversion utility (iconv) version; the most common character sets are supported. @@ -119,20 +119,20 @@ encoding tmp_hlq Override the default high level qualifier (HLQ) for temporary and backup datasets. - The default HLQ is the Ansible user used to execute the module and if that is not available, then the value \ :literal:`TMPHLQ`\ is used. + The default HLQ is the Ansible user used to execute the module and if that is not available, then the value ``TMPHLQ`` is used. | **required**: False | **type**: str ignore_sftp_stderr - During data transfer through sftp, the module fails if the sftp command directs any content to stderr. The user is able to override this behavior by setting this parameter to \ :literal:`true`\ . By doing so, the module would essentially ignore the stderr stream produced by sftp and continue execution. + During data transfer through SFTP, the SFTP command directs content to stderr. By default, the module essentially ignores the stderr stream produced by SFTP and continues execution. The user is able to override this behavior by setting this parameter to ``false``. By doing so, any content written to stderr is considered an error by Ansible and will have module fail. - When Ansible verbosity is set to greater than 3, either through the command line interface (CLI) using \ :strong:`-vvvv`\ or through environment variables such as \ :strong:`verbosity = 4`\ , then this parameter will automatically be set to \ :literal:`true`\ . + When Ansible verbosity is set to greater than 3, either through the command line interface (CLI) using **-vvvv** or through environment variables such as **verbosity = 4**, then this parameter will automatically be set to ``true``. | **required**: False | **type**: bool - | **default**: False + | **default**: True @@ -216,13 +216,13 @@ Notes .. note:: When fetching PDSE and VSAM data sets, temporary storage will be used on the remote z/OS system. After the PDSE or VSAM data set is successfully transferred, the temporary storage will be deleted. The size of the temporary storage will correspond to the size of PDSE or VSAM data set being fetched. If module execution fails, the temporary storage will be deleted. - To ensure optimal performance, data integrity checks for PDS, PDSE, and members of PDS or PDSE are done through the transfer methods used. As a result, the module response will not include the \ :literal:`checksum`\ parameter. + To ensure optimal performance, data integrity checks for PDS, PDSE, and members of PDS or PDSE are done through the transfer methods used. As a result, the module response will not include the ``checksum`` parameter. All data sets are always assumed to be cataloged. If an uncataloged data set needs to be fetched, it should be cataloged first. Fetching HFS or ZFS type data sets is currently not supported. - For supported character sets used to encode data, refer to the \ `documentation <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/resources/character_set.html>`__\ . + For supported character sets used to encode data, refer to the `documentation <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/resources/character_set.html>`_. This module uses SFTP (Secure File Transfer Protocol) for the underlying transfer protocol; SCP (secure copy protocol) and Co:Z SFTP are not supported. In the case of Co:z SFTP, you can exempt the Ansible user id on z/OS from using Co:Z thus falling back to using standard SFTP. If the module detects SCP, it will temporarily use SFTP for transfers, if not available, the module will fail. @@ -283,7 +283,7 @@ data_set_type | **sample**: PDSE note - Notice of module failure when \ :literal:`fail\_on\_missing`\ is false. + Notice of module failure when ``fail_on_missing`` is false. | **returned**: failure and fail_on_missing=false | **type**: str diff --git a/docs/source/modules/zos_find.rst b/docs/source/modules/zos_find.rst index 83082b5c0..5c23a28a7 100644 --- a/docs/source/modules/zos_find.rst +++ b/docs/source/modules/zos_find.rst @@ -18,7 +18,7 @@ Synopsis -------- - Return a list of data sets based on specific criteria. - Multiple criteria can be added (AND'd) together. -- The \ :literal:`zos\_find`\ module can only find MVS data sets. Use the \ `find <https://docs.ansible.com/ansible/latest/modules/find_module.html>`__\ module to find USS files. +- The ``zos_find`` module can only find MVS data sets. Use the `find <https://docs.ansible.com/ansible/latest/modules/find_module.html>`_ module to find USS files. @@ -44,9 +44,9 @@ age age_stamp Choose the age property against which to compare age. - \ :literal:`creation\_date`\ is the date the data set was created and \ :literal:`ref\_date`\ is the date the data set was last referenced. + ``creation_date`` is the date the data set was created and ``ref_date`` is the date the data set was last referenced. - \ :literal:`ref\_date`\ is only applicable to sequential and partitioned data sets. + ``ref_date`` is only applicable to sequential and partitioned data sets. | **required**: False | **type**: str @@ -80,7 +80,7 @@ patterns This parameter expects a list, which can be either comma separated or YAML. - If \ :literal:`pds\_patterns`\ is provided, \ :literal:`patterns`\ must be member patterns. + If ``pds_patterns`` is provided, ``patterns`` must be member patterns. When searching for members within a PDS/PDSE, pattern can be a regular expression. @@ -107,7 +107,7 @@ pds_patterns Required when searching for data set members. - Valid only for \ :literal:`nonvsam`\ resource types. Otherwise ignored. + Valid only for ``nonvsam`` resource types. Otherwise ignored. | **required**: False | **type**: list @@ -117,14 +117,16 @@ pds_patterns resource_type The type of resource to search. - \ :literal:`nonvsam`\ refers to one of SEQ, LIBRARY (PDSE), PDS, LARGE, BASIC, EXTREQ, or EXTPREF. + ``nonvsam`` refers to one of SEQ, LIBRARY (PDSE), PDS, LARGE, BASIC, EXTREQ, or EXTPREF. - \ :literal:`cluster`\ refers to a VSAM cluster. The \ :literal:`data`\ and \ :literal:`index`\ are the data and index components of a VSAM cluster. + ``cluster`` refers to a VSAM cluster. The ``data`` and ``index`` are the data and index components of a VSAM cluster. + + ``gdg`` refers to Generation Data Groups. The module searches based on the GDG base name. | **required**: False | **type**: str | **default**: nonvsam - | **choices**: nonvsam, cluster, data, index + | **choices**: nonvsam, cluster, data, index, gdg volume @@ -135,6 +137,60 @@ volume | **elements**: str +empty + A GDG attribute, only valid when ``resource_type=gdg``. + + If provided, will search for data sets with *empty* attribute set as provided. + + | **required**: False + | **type**: bool + + +extended + A GDG attribute, only valid when ``resource_type=gdg``. + + If provided, will search for data sets with *extended* attribute set as provided. + + | **required**: False + | **type**: bool + + +fifo + A GDG attribute, only valid when ``resource_type=gdg``. + + If provided, will search for data sets with *fifo* attribute set as provided. + + | **required**: False + | **type**: bool + + +limit + A GDG attribute, only valid when ``resource_type=gdg``. + + If provided, will search for data sets with *limit* attribute set as provided. + + | **required**: False + | **type**: int + + +purge + A GDG attribute, only valid when ``resource_type=gdg``. + + If provided, will search for data sets with *purge* attribute set as provided. + + | **required**: False + | **type**: bool + + +scratch + A GDG attribute, only valid when ``resource_type=gdg``. + + If provided, will search for data sets with *scratch* attribute set as provided. + + | **required**: False + | **type**: bool + + Examples @@ -185,6 +241,15 @@ Examples - USER.* resource_type: cluster + - name: Find all Generation Data Groups starting with the word 'USER' and specific GDG attributes. + zos_find: + patterns: + - USER.* + resource_type: gdg + limit: 30 + scratch: true + purge: true + @@ -192,11 +257,11 @@ Notes ----- .. note:: - Only cataloged data sets will be searched. If an uncataloged data set needs to be searched, it should be cataloged first. The \ `zos\_data\_set <./zos_data_set.html>`__\ module can be used to catalog uncataloged data sets. + Only cataloged data sets will be searched. If an uncataloged data set needs to be searched, it should be cataloged first. The `zos_data_set <./zos_data_set.html>`_ module can be used to catalog uncataloged data sets. - The \ `zos\_find <./zos_find.html>`__\ module currently does not support wildcards for high level qualifiers. For example, \ :literal:`SOME.\*.DATA.SET`\ is a valid pattern, but \ :literal:`\*.DATA.SET`\ is not. + The `zos_find <./zos_find.html>`_ module currently does not support wildcards for high level qualifiers. For example, ``SOME.*.DATA.SET`` is a valid pattern, but ``*.DATA.SET`` is not. - If a data set pattern is specified as \ :literal:`USER.\*`\ , the matching data sets will have two name segments such as \ :literal:`USER.ABC`\ , \ :literal:`USER.XYZ`\ etc. If a wildcard is specified as \ :literal:`USER.\*.ABC`\ , the matching data sets will have three name segments such as \ :literal:`USER.XYZ.ABC`\ , \ :literal:`USER.TEST.ABC`\ etc. + If a data set pattern is specified as ``USER.*``, the matching data sets will have two name segments such as ``USER.ABC``, ``USER.XYZ`` etc. If a wildcard is specified as ``USER.*.ABC``, the matching data sets will have three name segments such as ``USER.XYZ.ABC``, ``USER.TEST.ABC`` etc. The time taken to execute the module is proportional to the number of data sets present on the system and how large the data sets are. diff --git a/docs/source/modules/zos_gather_facts.rst b/docs/source/modules/zos_gather_facts.rst index 02a56fd23..0247ffd96 100644 --- a/docs/source/modules/zos_gather_facts.rst +++ b/docs/source/modules/zos_gather_facts.rst @@ -17,8 +17,8 @@ zos_gather_facts -- Gather z/OS system facts. Synopsis -------- - Retrieve variables from target z/OS systems. -- Variables are added to the \ :emphasis:`ansible\_facts`\ dictionary, available to playbooks. -- Apply filters on the \ :emphasis:`gather\_subset`\ list to reduce the variables that are added to the \ :emphasis:`ansible\_facts`\ dictionary. +- Variables are added to the *ansible_facts* dictionary, available to playbooks. +- Apply filters on the *gather_subset* list to reduce the variables that are added to the *ansible_facts* dictionary. - Note, the module will fail fast if any unsupported options are provided. This is done to raise awareness of a failure in an automation setting. @@ -32,7 +32,7 @@ Parameters gather_subset If specified, it will collect facts that come under the specified subset (eg. ipl will return ipl facts). Specifying subsets is recommended to reduce time in gathering facts when the facts needed are in a specific subset. - The following subsets are available \ :literal:`ipl`\ , \ :literal:`cpu`\ , \ :literal:`sys`\ , and \ :literal:`iodf`\ . Depending on the version of ZOAU, additional subsets may be available. + The following subsets are available ``ipl``, ``cpu``, ``sys``, and ``iodf``. Depending on the version of ZOAU, additional subsets may be available. | **required**: False | **type**: list @@ -41,13 +41,13 @@ gather_subset filter - Filter out facts from the \ :emphasis:`ansible\_facts`\ dictionary. + Filter out facts from the *ansible_facts* dictionary. - Uses shell-style \ `fnmatch <https://docs.python.org/3/library/fnmatch.html>`__\ pattern matching to filter out the collected facts. + Uses shell-style `fnmatch <https://docs.python.org/3/library/fnmatch.html>`_ pattern matching to filter out the collected facts. - An empty list means 'no filter', same as providing '\*'. + An empty list means 'no filter', same as providing '*'. - Filtering is performed after the facts are gathered such that no compute is saved when filtering. Filtering only reduces the number of variables that are added to the \ :emphasis:`ansible\_facts`\ dictionary. To restrict the facts that are collected, refer to the \ :emphasis:`gather\_subset`\ parameter. + Filtering is performed after the facts are gathered such that no compute is saved when filtering. Filtering only reduces the number of variables that are added to the *ansible_facts* dictionary. To restrict the facts that are collected, refer to the *gather_subset* parameter. | **required**: False | **type**: list diff --git a/docs/source/modules/zos_job_output.rst b/docs/source/modules/zos_job_output.rst index 59e37aeb9..efea6ea2a 100644 --- a/docs/source/modules/zos_job_output.rst +++ b/docs/source/modules/zos_job_output.rst @@ -18,9 +18,9 @@ Synopsis -------- - Display the z/OS job output for a given criteria (Job id/Job name/owner) with/without a data definition name as a filter. - At least provide a job id/job name/owner. -- The job id can be specific such as "STC02560", or one that uses a pattern such as "STC\*" or "\*". -- The job name can be specific such as "TCPIP", or one that uses a pattern such as "TCP\*" or "\*". -- The owner can be specific such as "IBMUSER", or one that uses a pattern like "\*". +- The job id can be specific such as "STC02560", or one that uses a pattern such as "STC*" or "*". +- The job name can be specific such as "TCPIP", or one that uses a pattern such as "TCP*" or "*". +- The owner can be specific such as "IBMUSER", or one that uses a pattern like "*". - If there is no ddname, or if ddname="?", output of all the ddnames under the given job will be displayed. @@ -32,21 +32,21 @@ Parameters job_id - The z/OS job ID of the job containing the spool file. (e.g "STC02560", "STC\*") + The z/OS job ID of the job containing the spool file. (e.g "STC02560", "STC*") | **required**: False | **type**: str job_name - The name of the batch job. (e.g "TCPIP", "C\*") + The name of the batch job. (e.g "TCPIP", "C*") | **required**: False | **type**: str owner - The owner who ran the job. (e.g "IBMUSER", "\*") + The owner who ran the job. (e.g "IBMUSER", "*") | **required**: False | **type**: str @@ -97,7 +97,7 @@ Return Values jobs - The output information for a list of jobs matching specified criteria. If no job status is found, this will return ret\_code dictionary with parameter msg\_txt = The job could not be found. + The output information for a list of jobs matching specified criteria. If no job status is found, this will return ret_code dictionary with parameter msg_txt = The job could not be found. | **returned**: success | **type**: list @@ -416,7 +416,7 @@ jobs | **sample**: CC 0000 msg_code - Return code extracted from the \`msg\` so that it can be evaluated. For example, ABEND(S0C4) would yield "S0C4". + Return code extracted from the `msg` so that it can be evaluated. For example, ABEND(S0C4) would yield "S0C4". | **type**: str | **sample**: S0C4 diff --git a/docs/source/modules/zos_job_query.rst b/docs/source/modules/zos_job_query.rst index e4da71341..ea320dfc3 100644 --- a/docs/source/modules/zos_job_query.rst +++ b/docs/source/modules/zos_job_query.rst @@ -17,8 +17,8 @@ zos_job_query -- Query job status Synopsis -------- - List z/OS job(s) and the current status of the job(s). -- Uses job\_name to filter the jobs by the job name. -- Uses job\_id to filter the jobs by the job identifier. +- Uses job_name to filter the jobs by the job name. +- Uses job_id to filter the jobs by the job identifier. - Uses owner to filter the jobs by the job owner. - Uses system to filter the jobs by system where the job is running (or ran) on. @@ -35,9 +35,9 @@ job_name A job name can be up to 8 characters long. - The \ :emphasis:`job\_name`\ can contain include multiple wildcards. + The *job_name* can contain include multiple wildcards. - The asterisk (\`\*\`) wildcard will match zero or more specified characters. + The asterisk (`*`) wildcard will match zero or more specified characters. | **required**: False | **type**: str @@ -56,13 +56,13 @@ owner job_id The job id that has been assigned to the job. - A job id must begin with \`STC\`, \`JOB\`, \`TSU\` and are followed by up to 5 digits. + A job id must begin with `STC`, `JOB`, `TSU` and are followed by up to 5 digits. - When a job id is greater than 99,999, the job id format will begin with \`S\`, \`J\`, \`T\` and are followed by 7 digits. + When a job id is greater than 99,999, the job id format will begin with `S`, `J`, `T` and are followed by 7 digits. - The \ :emphasis:`job\_id`\ can contain include multiple wildcards. + The *job_id* can contain include multiple wildcards. - The asterisk (\`\*\`) wildcard will match zero or more specified characters. + The asterisk (`*`) wildcard will match zero or more specified characters. | **required**: False | **type**: str @@ -122,7 +122,7 @@ changed | **type**: bool jobs - The output information for a list of jobs matching specified criteria. If no job status is found, this will return ret\_code dictionary with parameter msg\_txt = The job could not be found. + The output information for a list of jobs matching specified criteria. If no job status is found, this will return ret_code dictionary with parameter msg_txt = The job could not be found. | **returned**: success | **type**: list @@ -211,7 +211,7 @@ jobs | **sample**: CC 0000 msg_code - Return code extracted from the \`msg\` so that it can be evaluated. For example, ABEND(S0C4) would yield "S0C4". + Return code extracted from the `msg` so that it can be evaluated. For example, ABEND(S0C4) would yield "S0C4". | **type**: str | **sample**: S0C4 diff --git a/docs/source/modules/zos_job_submit.rst b/docs/source/modules/zos_job_submit.rst index bec95cb54..573b4f4bd 100644 --- a/docs/source/modules/zos_job_submit.rst +++ b/docs/source/modules/zos_job_submit.rst @@ -31,11 +31,11 @@ Parameters src The source file or data set containing the JCL to submit. - It could be a physical sequential data set, a partitioned data set qualified by a member or a path (e.g. \ :literal:`USER.TEST`\ , \ :literal:`USER.JCL(TEST)`\ ), or a generation data set from a generation data group (for example, \ :literal:`USER.TEST.GDG(-2)`\ ). + It could be a physical sequential data set, a partitioned data set qualified by a member or a path (e.g. ``USER.TEST``, ``USER.JCL(TEST)``), or a generation data set from a generation data group (for example, ``USER.TEST.GDG(-2)``). - Or a USS file. (e.g \ :literal:`/u/tester/demo/sample.jcl`\ ) + Or a USS file. (e.g ``/u/tester/demo/sample.jcl``) - Or a LOCAL file in ansible control node. (e.g \ :literal:`/User/tester/ansible-playbook/sample.jcl`\ ) + Or a LOCAL file in ansible control node. (e.g ``/User/tester/ansible-playbook/sample.jcl``) When using a generation data set, only already created generations are valid. If either the relative name is positive, or negative but not found, the module will fail. @@ -44,13 +44,13 @@ src location - The JCL location. Supported choices are \ :literal:`data\_set`\ , \ :literal:`uss`\ or \ :literal:`local`\ . + The JCL location. Supported choices are ``data_set``, ``uss`` or ``local``. - \ :literal:`data\_set`\ can be a PDS, PDSE, sequential data set, or a generation data set. + ``data_set`` can be a PDS, PDSE, sequential data set, or a generation data set. - \ :literal:`uss`\ means the JCL location is located in UNIX System Services (USS). + ``uss`` means the JCL location is located in UNIX System Services (USS). - \ :literal:`local`\ means locally to the Ansible control node. + ``local`` means locally to the Ansible control node. | **required**: False | **type**: str @@ -59,9 +59,9 @@ location wait_time_s - Option \ :emphasis:`wait\_time\_s`\ is the total time that module \ `zos\_job\_submit <./zos_job_submit.html>`__\ will wait for a submitted job to complete. The time begins when the module is executed on the managed node. + Option *wait_time_s* is the total time that module `zos_job_submit <./zos_job_submit.html>`_ will wait for a submitted job to complete. The time begins when the module is executed on the managed node. - \ :emphasis:`wait\_time\_s`\ is measured in seconds and must be a value greater than 0 and less than 86400. + *wait_time_s* is measured in seconds and must be a value greater than 0 and less than 86400. | **required**: False | **type**: int @@ -88,9 +88,9 @@ return_output volume The volume serial (VOLSER) is where the data set resides. The option is required only when the data set is not cataloged on the system. - When configured, the \ `zos\_job\_submit <./zos_job_submit.html>`__\ will try to catalog the data set for the volume serial. If it is not able to, the module will fail. + When configured, the `zos_job_submit <./zos_job_submit.html>`_ will try to catalog the data set for the volume serial. If it is not able to, the module will fail. - Ignored for \ :emphasis:`location=uss`\ and \ :emphasis:`location=local`\ . + Ignored for *location=uss* and *location=local*. | **required**: False | **type**: str @@ -99,7 +99,7 @@ volume encoding Specifies which encoding the local JCL file should be converted from and to, before submitting the job. - This option is only supported for when \ :emphasis:`location=local`\ . + This option is only supported for when *location=local*. If this parameter is not provided, and the z/OS systems default encoding can not be identified, the JCL file will be converted from UTF-8 to IBM-1047 by default, otherwise the module will detect the z/OS system encoding. @@ -131,13 +131,13 @@ encoding use_template - Whether the module should treat \ :literal:`src`\ as a Jinja2 template and render it before continuing with the rest of the module. + Whether the module should treat ``src`` as a Jinja2 template and render it before continuing with the rest of the module. - Only valid when \ :literal:`src`\ is a local file or directory. + Only valid when ``src`` is a local file or directory. - All variables defined in inventory files, vars files and the playbook will be passed to the template engine, as well as \ `Ansible special variables <https://docs.ansible.com/ansible/latest/reference_appendices/special_variables.html#special-variables>`__\ , such as \ :literal:`playbook\_dir`\ , \ :literal:`ansible\_version`\ , etc. + All variables defined in inventory files, vars files and the playbook will be passed to the template engine, as well as `Ansible special variables <https://docs.ansible.com/ansible/latest/reference_appendices/special_variables.html#special-variables>`_, such as ``playbook_dir``, ``ansible_version``, etc. - If variables defined in different scopes share the same name, Ansible will apply variable precedence to them. You can see the complete precedence order \ `in Ansible's documentation <https://docs.ansible.com/ansible/latest/playbook_guide/playbooks_variables.html#understanding-variable-precedence>`__\ + If variables defined in different scopes share the same name, Ansible will apply variable precedence to them. You can see the complete precedence order `in Ansible's documentation <https://docs.ansible.com/ansible/latest/playbook_guide/playbooks_variables.html#understanding-variable-precedence>`_ | **required**: False | **type**: bool @@ -147,9 +147,9 @@ use_template template_parameters Options to set the way Jinja2 will process templates. - Jinja2 already sets defaults for the markers it uses, you can find more information at its \ `official documentation <https://jinja.palletsprojects.com/en/latest/templates/>`__\ . + Jinja2 already sets defaults for the markers it uses, you can find more information at its `official documentation <https://jinja.palletsprojects.com/en/latest/templates/>`_. - These options are ignored unless \ :literal:`use\_template`\ is true. + These options are ignored unless ``use_template`` is true. | **required**: False | **type**: dict @@ -228,7 +228,7 @@ template_parameters trim_blocks Whether Jinja2 should remove the first newline after a block is removed. - Setting this option to \ :literal:`False`\ will result in newlines being added to the rendered template. This could create invalid code when working with JCL templates or empty records in destination data sets. + Setting this option to ``False`` will result in newlines being added to the rendered template. This could create invalid code when working with JCL templates or empty records in destination data sets. | **required**: False | **type**: bool @@ -330,9 +330,9 @@ Notes ----- .. note:: - For supported character sets used to encode data, refer to the \ `documentation <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/resources/character_set.html>`__\ . + For supported character sets used to encode data, refer to the `documentation <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/resources/character_set.html>`_. - This module uses \ `zos\_copy <./zos_copy.html>`__\ to copy local scripts to the remote machine which uses SFTP (Secure File Transfer Protocol) for the underlying transfer protocol; SCP (secure copy protocol) and Co:Z SFTP are not supported. In the case of Co:z SFTP, you can exempt the Ansible user id on z/OS from using Co:Z thus falling back to using standard SFTP. If the module detects SCP, it will temporarily use SFTP for transfers, if not available, the module will fail. + This module uses `zos_copy <./zos_copy.html>`_ to copy local scripts to the remote machine which uses SFTP (Secure File Transfer Protocol) for the underlying transfer protocol; SCP (secure copy protocol) and Co:Z SFTP are not supported. In the case of Co:z SFTP, you can exempt the Ansible user id on z/OS from using Co:Z thus falling back to using standard SFTP. If the module detects SCP, it will temporarily use SFTP for transfers, if not available, the module will fail. @@ -345,7 +345,7 @@ Return Values jobs - List of jobs output. If no job status is found, this will return an empty ret\_code with msg\_txt explanation. + List of jobs output. If no job status is found, this will return an empty ret_code with msg_txt explanation. | **returned**: success | **type**: list @@ -692,25 +692,25 @@ jobs msg Job status resulting from the job submission. - Job status \`ABEND\` indicates the job ended abnormally. + Job status `ABEND` indicates the job ended abnormally. - Job status \`AC\` indicates the job is active, often a started task or job taking long. + Job status `AC` indicates the job is active, often a started task or job taking long. - Job status \`CAB\` indicates a converter abend. + Job status `CAB` indicates a converter abend. - Job status \`CANCELED\` indicates the job was canceled. + Job status `CANCELED` indicates the job was canceled. - Job status \`CNV\` indicates a converter error. + Job status `CNV` indicates a converter error. - Job status \`FLU\` indicates the job was flushed. + Job status `FLU` indicates the job was flushed. - Job status \`JCLERR\` or \`JCL ERROR\` indicates the JCL has an error. + Job status `JCLERR` or `JCL ERROR` indicates the JCL has an error. - Job status \`SEC\` or \`SEC ERROR\` indicates the job as encountered a security error. + Job status `SEC` or `SEC ERROR` indicates the job as encountered a security error. - Job status \`SYS\` indicates a system failure. + Job status `SYS` indicates a system failure. - Job status \`?\` indicates status can not be determined. + Job status `?` indicates status can not be determined. Jobs where status can not be determined will result in None (NULL). diff --git a/docs/source/modules/zos_lineinfile.rst b/docs/source/modules/zos_lineinfile.rst index e8d0b0eb2..1db6545c5 100644 --- a/docs/source/modules/zos_lineinfile.rst +++ b/docs/source/modules/zos_lineinfile.rst @@ -33,6 +33,8 @@ src The USS file must be an absolute pathname. + Generation data set (GDS) relative name of generation already created. ``e.g. SOME.CREATION(-1).`` + | **required**: True | **type**: str @@ -40,13 +42,13 @@ src regexp The regular expression to look for in every line of the USS file or data set. - For \ :literal:`state=present`\ , the pattern to replace if found. Only the last line found will be replaced. + For ``state=present``, the pattern to replace if found. Only the last line found will be replaced. - For \ :literal:`state=absent`\ , the pattern of the line(s) to remove. + For ``state=absent``, the pattern of the line(s) to remove. - If the regular expression is not matched, the line will be added to the USS file or data set in keeping with \ :literal:`insertbefore`\ or \ :literal:`insertafter`\ settings. + If the regular expression is not matched, the line will be added to the USS file or data set in keeping with ``insertbefore`` or ``insertafter`` settings. - When modifying a line the regexp should typically match both the initial state of the line as well as its state after replacement by \ :literal:`line`\ to ensure idempotence. + When modifying a line the regexp should typically match both the initial state of the line as well as its state after replacement by ``line`` to ensure idempotence. | **required**: False | **type**: str @@ -64,22 +66,22 @@ state line The line to insert/replace into the USS file or data set. - Required for \ :literal:`state=present`\ . + Required for ``state=present``. - If \ :literal:`backrefs`\ is set, may contain backreferences that will get expanded with the \ :literal:`regexp`\ capture groups if the regexp matches. + If ``backrefs`` is set, may contain backreferences that will get expanded with the ``regexp`` capture groups if the regexp matches. | **required**: False | **type**: str backrefs - Used with \ :literal:`state=present`\ . + Used with ``state=present``. - If set, \ :literal:`line`\ can contain backreferences (both positional and named) that will get populated if the \ :literal:`regexp`\ matches. + If set, ``line`` can contain backreferences (both positional and named) that will get populated if the ``regexp`` matches. - This parameter changes the operation of the module slightly; \ :literal:`insertbefore`\ and \ :literal:`insertafter`\ will be ignored, and if the \ :literal:`regexp`\ does not match anywhere in the USS file or data set, the USS file or data set will be left unchanged. + This parameter changes the operation of the module slightly; ``insertbefore`` and ``insertafter`` will be ignored, and if the ``regexp`` does not match anywhere in the USS file or data set, the USS file or data set will be left unchanged. - If the \ :literal:`regexp`\ does match, the last matching line will be replaced by the expanded line parameter. + If the ``regexp`` does match, the last matching line will be replaced by the expanded line parameter. | **required**: False | **type**: bool @@ -87,23 +89,23 @@ backrefs insertafter - Used with \ :literal:`state=present`\ . + Used with ``state=present``. If specified, the line will be inserted after the last match of specified regular expression. If the first match is required, use(firstmatch=yes). - A special value is available; \ :literal:`EOF`\ for inserting the line at the end of the USS file or data set. + A special value is available; ``EOF`` for inserting the line at the end of the USS file or data set. If the specified regular expression has no matches, EOF will be used instead. - If \ :literal:`insertbefore`\ is set, default value \ :literal:`EOF`\ will be ignored. + If ``insertbefore`` is set, default value ``EOF`` will be ignored. - If regular expressions are passed to both \ :literal:`regexp`\ and \ :literal:`insertafter`\ , \ :literal:`insertafter`\ is only honored if no match for \ :literal:`regexp`\ is found. + If regular expressions are passed to both ``regexp`` and ``insertafter``, ``insertafter`` is only honored if no match for ``regexp`` is found. - May not be used with \ :literal:`backrefs`\ or \ :literal:`insertbefore`\ . + May not be used with ``backrefs`` or ``insertbefore``. - Choices are EOF or '\*regex\*' + Choices are EOF or '*regex*' Default is EOF @@ -112,33 +114,35 @@ insertafter insertbefore - Used with \ :literal:`state=present`\ . + Used with ``state=present``. If specified, the line will be inserted before the last match of specified regular expression. - If the first match is required, use \ :literal:`firstmatch=yes`\ . + If the first match is required, use ``firstmatch=yes``. - A value is available; \ :literal:`BOF`\ for inserting the line at the beginning of the USS file or data set. + A value is available; ``BOF`` for inserting the line at the beginning of the USS file or data set. If the specified regular expression has no matches, the line will be inserted at the end of the USS file or data set. - If regular expressions are passed to both \ :literal:`regexp`\ and \ :literal:`insertbefore`\ , \ :literal:`insertbefore`\ is only honored if no match for \ :literal:`regexp`\ is found. + If regular expressions are passed to both ``regexp`` and ``insertbefore``, ``insertbefore`` is only honored if no match for ``regexp`` is found. - May not be used with \ :literal:`backrefs`\ or \ :literal:`insertafter`\ . + May not be used with ``backrefs`` or ``insertafter``. - Choices are BOF or '\*regex\*' + Choices are BOF or '*regex*' | **required**: False | **type**: str backup - Creates a backup file or backup data set for \ :emphasis:`src`\ , including the timestamp information to ensure that you retrieve the original file. + Creates a backup file or backup data set for *src*, including the timestamp information to ensure that you retrieve the original file. - \ :emphasis:`backup\_name`\ can be used to specify a backup file name if \ :emphasis:`backup=true`\ . + *backup_name* can be used to specify a backup file name if *backup=true*. The backup file name will be return on either success or failure of module execution such that data can be retrieved. + Use generation data set (GDS) relative positive name SOME.CREATION(+1) + | **required**: False | **type**: bool | **default**: False @@ -147,11 +151,11 @@ backup backup_name Specify the USS file name or data set name for the destination backup. - If the source \ :emphasis:`src`\ is a USS file or path, the backup\_name must be a file or path name, and the USS file or path must be an absolute path name. + If the source *src* is a USS file or path, the backup_name must be a file or path name, and the USS file or path must be an absolute path name. - If the source is an MVS data set, the backup\_name must be an MVS data set name. + If the source is an MVS data set, the backup_name must be an MVS data set name. - If the backup\_name is not provided, the default backup\_name will be used. If the source is a USS file or path, the name of the backup file will be the source file or path name appended with a timestamp, e.g. \ :literal:`/path/file\_name.2020-04-23-08-32-29-bak.tar`\ . + If the backup_name is not provided, the default backup_name will be used. If the source is a USS file or path, the name of the backup file will be the source file or path name appended with a timestamp, e.g. ``/path/file_name.2020-04-23-08-32-29-bak.tar``. If the source is an MVS data set, it will be a data set with a random name generated by calling the ZOAU API. The MVS backup data set recovery can be done by renaming it. @@ -162,16 +166,16 @@ backup_name tmp_hlq Override the default high level qualifier (HLQ) for temporary and backup datasets. - The default HLQ is the Ansible user used to execute the module and if that is not available, then the value \ :literal:`TMPHLQ`\ is used. + The default HLQ is the Ansible user used to execute the module and if that is not available, then the value ``TMPHLQ`` is used. | **required**: False | **type**: str firstmatch - Used with \ :literal:`insertafter`\ or \ :literal:`insertbefore`\ . + Used with ``insertafter`` or ``insertbefore``. - If set, \ :literal:`insertafter`\ and \ :literal:`insertbefore`\ will work with the first line that matches the given regular expression. + If set, ``insertafter`` and ``insertbefore`` will work with the first line that matches the given regular expression. | **required**: False | **type**: bool @@ -179,7 +183,7 @@ firstmatch encoding - The character set of the source \ :emphasis:`src`\ . \ `zos\_lineinfile <./zos_lineinfile.html>`__\ requires to be provided with correct encoding to read the content of USS file or data set. If this parameter is not provided, this module assumes that USS file or data set is encoded in IBM-1047. + The character set of the source *src*. `zos_lineinfile <./zos_lineinfile.html>`_ requires to be provided with correct encoding to read the content of USS file or data set. If this parameter is not provided, this module assumes that USS file or data set is encoded in IBM-1047. Supported character sets rely on the charset conversion utility (iconv) version; the most common character sets are supported. @@ -193,7 +197,7 @@ force This is helpful when a data set is being used in a long running process such as a started task and you are wanting to update or read. - The \ :literal:`force`\ option enables sharing of data sets through the disposition \ :emphasis:`DISP=SHR`\ . + The ``force`` option enables sharing of data sets through the disposition *DISP=SHR*. | **required**: False | **type**: bool @@ -248,6 +252,20 @@ Examples line: 'Should be a working test now' force: true + - name: Add a line to a gds + zos_lineinfile: + src: SOME.CREATION(-2) + insertafter: EOF + line: 'Should be a working test now' + + - name: Add a line to dataset and backup in a new generation of gds + zos_lineinfile: + src: SOME.CREATION.TEST + insertafter: EOF + backup: true + backup_name: CREATION.GDS(+1) + line: 'Should be a working test now' + @@ -259,7 +277,7 @@ Notes All data sets are always assumed to be cataloged. If an uncataloged data set needs to be encoded, it should be cataloged first. - For supported character sets used to encode data, refer to the \ `documentation <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/resources/character_set.html>`__\ . + For supported character sets used to encode data, refer to the `documentation <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/resources/character_set.html>`_. @@ -272,7 +290,7 @@ Return Values changed - Indicates if the source was modified. Value of 1 represents \`true\`, otherwise \`false\`. + Indicates if the source was modified. Value of 1 represents `true`, otherwise `false`. | **returned**: success | **type**: bool diff --git a/docs/source/modules/zos_mount.rst b/docs/source/modules/zos_mount.rst index 5bd283453..3b30be909 100644 --- a/docs/source/modules/zos_mount.rst +++ b/docs/source/modules/zos_mount.rst @@ -16,9 +16,9 @@ zos_mount -- Mount a z/OS file system. Synopsis -------- -- The module \ `zos\_mount <./zos_mount.html>`__\ can manage mount operations for a z/OS UNIX System Services (USS) file system data set. -- The \ :emphasis:`src`\ data set must be unique and a Fully Qualified Name (FQN). -- The \ :emphasis:`path`\ will be created if needed. +- The module `zos_mount <./zos_mount.html>`_ can manage mount operations for a z/OS UNIX System Services (USS) file system data set. +- The *src* data set must be unique and a Fully Qualified Name (FQN). +- The *path* will be created if needed. @@ -31,7 +31,7 @@ Parameters path The absolute path name onto which the file system is to be mounted. - The \ :emphasis:`path`\ is case sensitive and must be less than or equal 1023 characters long. + The *path* is case sensitive and must be less than or equal 1023 characters long. | **required**: True | **type**: str @@ -40,9 +40,9 @@ path src The name of the file system to be added to the file system hierarchy. - The file system \ :emphasis:`src`\ must be a data set of type \ :emphasis:`fs\_type`\ . + The file system *src* must be a data set of type *fs_type*. - The file system \ :emphasis:`src`\ data set must be cataloged. + The file system *src* data set must be cataloged. | **required**: True | **type**: str @@ -53,7 +53,7 @@ fs_type The physical file systems data set format to perform the logical mount. - The \ :emphasis:`fs\_type`\ is required to be lowercase. + The *fs_type* is required to be lowercase. | **required**: True | **type**: str @@ -63,25 +63,25 @@ fs_type state The desired status of the described mount (choice). - If \ :emphasis:`state=mounted`\ and \ :emphasis:`src`\ are not in use, the module will add the file system entry to the parmlib member \ :emphasis:`persistent/data\_store`\ if not present. The \ :emphasis:`path`\ will be updated, the device will be mounted and the module will complete successfully with \ :emphasis:`changed=True`\ . + If *state=mounted* and *src* are not in use, the module will add the file system entry to the parmlib member *persistent/data_store* if not present. The *path* will be updated, the device will be mounted and the module will complete successfully with *changed=True*. - If \ :emphasis:`state=mounted`\ and \ :emphasis:`src`\ are in use, the module will add the file system entry to the parmlib member \ :emphasis:`persistent/data\_store`\ if not present. The \ :emphasis:`path`\ will not be updated, the device will not be mounted and the module will complete successfully with \ :emphasis:`changed=False`\ . + If *state=mounted* and *src* are in use, the module will add the file system entry to the parmlib member *persistent/data_store* if not present. The *path* will not be updated, the device will not be mounted and the module will complete successfully with *changed=False*. - If \ :emphasis:`state=unmounted`\ and \ :emphasis:`src`\ are in use, the module will \ :strong:`not`\ add the file system entry to the parmlib member \ :emphasis:`persistent/data\_store`\ . The device will be unmounted and the module will complete successfully with \ :emphasis:`changed=True`\ . + If *state=unmounted* and *src* are in use, the module will **not** add the file system entry to the parmlib member *persistent/data_store*. The device will be unmounted and the module will complete successfully with *changed=True*. - If \ :emphasis:`state=unmounted`\ and \ :emphasis:`src`\ are not in use, the module will \ :strong:`not`\ add the file system entry to parmlib member \ :emphasis:`persistent/data\_store`\ .The device will remain unchanged and the module will complete with \ :emphasis:`changed=False`\ . + If *state=unmounted* and *src* are not in use, the module will **not** add the file system entry to parmlib member *persistent/data_store*.The device will remain unchanged and the module will complete with *changed=False*. - If \ :emphasis:`state=present`\ , the module will add the file system entry to the provided parmlib member \ :emphasis:`persistent/data\_store`\ if not present. The module will complete successfully with \ :emphasis:`changed=True`\ . + If *state=present*, the module will add the file system entry to the provided parmlib member *persistent/data_store* if not present. The module will complete successfully with *changed=True*. - If \ :emphasis:`state=absent`\ , the module will remove the file system entry to the provided parmlib member \ :emphasis:`persistent/data\_store`\ if present. The module will complete successfully with \ :emphasis:`changed=True`\ . + If *state=absent*, the module will remove the file system entry to the provided parmlib member *persistent/data_store* if present. The module will complete successfully with *changed=True*. - If \ :emphasis:`state=remounted`\ , the module will \ :strong:`not`\ add the file system entry to parmlib member \ :emphasis:`persistent/data\_store`\ . The device will be unmounted and mounted, the module will complete successfully with \ :emphasis:`changed=True`\ . + If *state=remounted*, the module will **not** add the file system entry to parmlib member *persistent/data_store*. The device will be unmounted and mounted, the module will complete successfully with *changed=True*. | **required**: False @@ -91,7 +91,7 @@ state persistent - Add or remove mount command entries to provided \ :emphasis:`data\_store`\ + Add or remove mount command entries to provided *data_store* | **required**: False | **type**: dict @@ -105,9 +105,9 @@ persistent backup - Creates a backup file or backup data set for \ :emphasis:`data\_store`\ , including the timestamp information to ensure that you retrieve the original parameters defined in \ :emphasis:`data\_store`\ . + Creates a backup file or backup data set for *data_store*, including the timestamp information to ensure that you retrieve the original parameters defined in *data_store*. - \ :emphasis:`backup\_name`\ can be used to specify a backup file name if \ :emphasis:`backup=true`\ . + *backup_name* can be used to specify a backup file name if *backup=true*. The backup file name will be returned on either success or failure of module execution such that data can be retrieved. @@ -119,11 +119,11 @@ persistent backup_name Specify the USS file name or data set name for the destination backup. - If the source \ :emphasis:`data\_store`\ is a USS file or path, the \ :emphasis:`backup\_name`\ name can be relative or absolute for file or path name. + If the source *data_store* is a USS file or path, the *backup_name* name can be relative or absolute for file or path name. - If the source is an MVS data set, the backup\_name must be an MVS data set name. + If the source is an MVS data set, the backup_name must be an MVS data set name. - If the backup\_name is not provided, the default \ :emphasis:`backup\_name`\ will be used. If the source is a USS file or path, the name of the backup file will be the source file or path name appended with a timestamp. For example, \ :literal:`/path/file\_name.2020-04-23-08-32-29-bak.tar`\ . + If the backup_name is not provided, the default *backup_name* will be used. If the source is a USS file or path, the name of the backup file will be the source file or path name appended with a timestamp. For example, ``/path/file_name.2020-04-23-08-32-29-bak.tar``. If the source is an MVS data set, it will be a data set with a random name generated by calling the ZOAU API. The MVS backup data set recovery can be done by renaming it. @@ -132,9 +132,9 @@ persistent comment - If provided, this is used as a comment that surrounds the command in the \ :emphasis:`persistent/data\_store`\ + If provided, this is used as a comment that surrounds the command in the *persistent/data_store* - Comments are used to encapsulate the \ :emphasis:`persistent/data\_store`\ entry such that they can easily be understood and located. + Comments are used to encapsulate the *persistent/data_store* entry such that they can easily be understood and located. | **required**: False | **type**: list @@ -145,7 +145,7 @@ persistent unmount_opts Describes how the unmount will be performed. - For more on coded character set identifiers, review the IBM documentation topic \ :strong:`UNMOUNT - Remove a file system from the file hierarchy`\ . + For more on coded character set identifiers, review the IBM documentation topic **UNMOUNT - Remove a file system from the file hierarchy**. | **required**: False | **type**: str @@ -156,13 +156,13 @@ unmount_opts mount_opts Options available to the mount. - If \ :emphasis:`mount\_opts=ro`\ on a mounted/remount, mount is performed read-only. + If *mount_opts=ro* on a mounted/remount, mount is performed read-only. - If \ :emphasis:`mount\_opts=same`\ and (unmount\_opts=remount), mount is opened in the same mode as previously opened. + If *mount_opts=same* and (unmount_opts=remount), mount is opened in the same mode as previously opened. - If \ :emphasis:`mount\_opts=nowait`\ , mount is performed asynchronously. + If *mount_opts=nowait*, mount is performed asynchronously. - If \ :emphasis:`mount\_opts=nosecurity`\ , security checks are not enforced for files in this file system. + If *mount_opts=nosecurity*, security checks are not enforced for files in this file system. | **required**: False | **type**: str @@ -184,11 +184,11 @@ tag_untagged When the file system is unmounted, the tags are lost. - If \ :emphasis:`tag\_untagged=notext`\ none of the untagged files in the file system are automatically converted during file reading and writing. + If *tag_untagged=notext* none of the untagged files in the file system are automatically converted during file reading and writing. - If \ :emphasis:`tag\_untagged=text`\ each untagged file is implicitly marked as containing pure text data that can be converted. + If *tag_untagged=text* each untagged file is implicitly marked as containing pure text data that can be converted. - If this flag is used, use of tag\_ccsid is encouraged. + If this flag is used, use of tag_ccsid is encouraged. | **required**: False | **type**: str @@ -198,13 +198,13 @@ tag_untagged tag_ccsid Identifies the coded character set identifier (ccsid) to be implicitly set for the untagged file. - For more on coded character set identifiers, review the IBM documentation topic \ :strong:`Coded Character Sets`\ . + For more on coded character set identifiers, review the IBM documentation topic **Coded Character Sets**. Specified as a decimal value from 0 to 65535. However, when TEXT is specified, the value must be between 0 and 65535. The value is not checked as being valid and the corresponding code page is not checked as being installed. - Required when \ :emphasis:`tag\_untagged=TEXT`\ . + Required when *tag_untagged=TEXT*. | **required**: False | **type**: int @@ -214,10 +214,10 @@ allow_uid Specifies whether the SETUID and SETGID mode bits on an executable in this file system are considered. Also determines whether the APF extended attribute or the Program Control extended attribute is honored. - If \ :emphasis:`allow\_uid=True`\ the SETUID and SETGID mode bits are considered when a program in this file system is run. SETUID is the default. + If *allow_uid=True* the SETUID and SETGID mode bits are considered when a program in this file system is run. SETUID is the default. - If \ :emphasis:`allow\_uid=False`\ the SETUID and SETGID mode bits are ignored when a program in this file system is run. The program runs as though the SETUID and SETGID mode bits were not set. Also, if you specify the NOSETUID option on MOUNT, the APF extended attribute and the Program Control Bit values are ignored. + If *allow_uid=False* the SETUID and SETGID mode bits are ignored when a program in this file system is run. The program runs as though the SETUID and SETGID mode bits were not set. Also, if you specify the NOSETUID option on MOUNT, the APF extended attribute and the Program Control Bit values are ignored. | **required**: False @@ -226,10 +226,10 @@ allow_uid sysname - For systems participating in shared file system, \ :emphasis:`sysname`\ specifies the particular system on which a mount should be performed. This system will then become the owner of the file system mounted. This system must be IPLed with SYSPLEX(YES). + For systems participating in shared file system, *sysname* specifies the particular system on which a mount should be performed. This system will then become the owner of the file system mounted. This system must be IPLed with SYSPLEX(YES). - \ :emphasis:`sysname`\ is the name of a system participating in shared file system. The name must be 1-8 characters long; the valid characters are A-Z, 0-9, $, @, and #. + *sysname* is the name of a system participating in shared file system. The name must be 1-8 characters long; the valid characters are A-Z, 0-9, $, @, and #. | **required**: False @@ -240,13 +240,13 @@ automove These parameters apply only in a sysplex where systems are exploiting the shared file system capability. They specify what happens to the ownership of a file system when a shutdown, PFS termination, dead system takeover, or file system move occurs. The default setting is AUTOMOVE where the file system will be randomly moved to another system (no system list used). - \ :emphasis:`automove=automove`\ indicates that ownership of the file system can be automatically moved to another system participating in a shared file system. + *automove=automove* indicates that ownership of the file system can be automatically moved to another system participating in a shared file system. - \ :emphasis:`automove=noautomove`\ prevents movement of the file system's ownership in some situations. + *automove=noautomove* prevents movement of the file system's ownership in some situations. - \ :emphasis:`automove=unmount`\ allows the file system to be unmounted in some situations. + *automove=unmount* allows the file system to be unmounted in some situations. | **required**: False @@ -275,7 +275,7 @@ automove_list tmp_hlq Override the default high level qualifier (HLQ) for temporary and backup datasets. - The default HLQ is the Ansible user used to execute the module and if that is not available, then the value \ :literal:`TMPHLQ`\ is used. + The default HLQ is the Ansible user used to execute the module and if that is not available, then the value ``TMPHLQ`` is used. | **required**: False | **type**: str @@ -388,7 +388,7 @@ Notes If an uncataloged data set needs to be fetched, it should be cataloged first. - Uncataloged data sets can be cataloged using the \ `zos\_data\_set <./zos_data_set.html>`__\ module. + Uncataloged data sets can be cataloged using the `zos_data_set <./zos_data_set.html>`_ module. @@ -466,7 +466,7 @@ persistent | **sample**: SYS1.FILESYS(PRMAABAK) comment - The text that was used in markers around the \ :emphasis:`Persistent/data\_store`\ entry. + The text that was used in markers around the *Persistent/data_store* entry. | **returned**: always | **type**: list @@ -528,7 +528,7 @@ allow_uid true sysname - \ :emphasis:`sysname`\ specifies the particular system on which a mount should be performed. + *sysname* specifies the particular system on which a mount should be performed. | **returned**: if Non-None | **type**: str diff --git a/docs/source/modules/zos_mvs_raw.rst b/docs/source/modules/zos_mvs_raw.rst index f48418264..817951fe3 100644 --- a/docs/source/modules/zos_mvs_raw.rst +++ b/docs/source/modules/zos_mvs_raw.rst @@ -45,9 +45,9 @@ parm auth Determines whether this program should run with authorized privileges. - If \ :emphasis:`auth=true`\ , the program runs as APF authorized. + If *auth=true*, the program runs as APF authorized. - If \ :emphasis:`auth=false`\ , the program runs as unauthorized. + If *auth=false*, the program runs as unauthorized. | **required**: False | **type**: bool @@ -57,7 +57,7 @@ auth verbose Determines if verbose output should be returned from the underlying utility used by this module. - When \ :emphasis:`verbose=true`\ verbose output is returned on module failure. + When *verbose=true* verbose output is returned on module failure. | **required**: False | **type**: bool @@ -67,19 +67,19 @@ verbose dds The input data source. - \ :emphasis:`dds`\ supports 6 types of sources + *dds* supports 6 types of sources - 1. \ :emphasis:`dd\_data\_set`\ for data set files. + 1. *dd_data_set* for data set files. - 2. \ :emphasis:`dd\_unix`\ for UNIX files. + 2. *dd_unix* for UNIX files. - 3. \ :emphasis:`dd\_input`\ for in-stream data set. + 3. *dd_input* for in-stream data set. - 4. \ :emphasis:`dd\_dummy`\ for no content input. + 4. *dd_dummy* for no content input. - 5. \ :emphasis:`dd\_concat`\ for a data set concatenation. + 5. *dd_concat* for a data set concatenation. - 6. \ :emphasis:`dds`\ supports any combination of source types. + 6. *dds* supports any combination of source types. | **required**: False | **type**: list @@ -89,7 +89,7 @@ dds dd_data_set Specify a data set. - \ :emphasis:`dd\_data\_set`\ can reference an existing data set or be used to define a new data set to be created during execution. + *dd_data_set* can reference an existing data set or be used to define a new data set to be created during execution. | **required**: False | **type**: dict @@ -105,12 +105,16 @@ dds data_set_name The data set name. + A data set name can be a GDS relative name. + + When using GDS relative name and it is a positive generation, *disposition=new* must be used. + | **required**: False | **type**: str type - The data set type. Only required when \ :emphasis:`disposition=new`\ . + The data set type. Only required when *disposition=new*. Maps to DSNTYPE on z/OS. @@ -120,7 +124,7 @@ dds disposition - \ :emphasis:`disposition`\ indicates the status of a data set. + *disposition* indicates the status of a data set. Defaults to shr. @@ -130,7 +134,7 @@ dds disposition_normal - \ :emphasis:`disposition\_normal`\ indicates what to do with the data set after a normal termination of the program. + *disposition_normal* indicates what to do with the data set after a normal termination of the program. | **required**: False | **type**: str @@ -138,7 +142,7 @@ dds disposition_abnormal - \ :emphasis:`disposition\_abnormal`\ indicates what to do with the data set after an abnormal termination of the program. + *disposition_abnormal* indicates what to do with the data set after an abnormal termination of the program. | **required**: False | **type**: str @@ -146,15 +150,15 @@ dds reuse - Determines if a data set should be reused if \ :emphasis:`disposition=new`\ and if a data set with a matching name already exists. + Determines if a data set should be reused if *disposition=new* and if a data set with a matching name already exists. - If \ :emphasis:`reuse=true`\ , \ :emphasis:`disposition`\ will be automatically switched to \ :literal:`SHR`\ . + If *reuse=true*, *disposition* will be automatically switched to ``SHR``. - If \ :emphasis:`reuse=false`\ , and a data set with a matching name already exists, allocation will fail. + If *reuse=false*, and a data set with a matching name already exists, allocation will fail. - Mutually exclusive with \ :emphasis:`replace`\ . + Mutually exclusive with *replace*. - \ :emphasis:`reuse`\ is only considered when \ :emphasis:`disposition=new`\ + *reuse* is only considered when *disposition=new* | **required**: False | **type**: bool @@ -162,17 +166,17 @@ dds replace - Determines if a data set should be replaced if \ :emphasis:`disposition=new`\ and a data set with a matching name already exists. + Determines if a data set should be replaced if *disposition=new* and a data set with a matching name already exists. - If \ :emphasis:`replace=true`\ , the original data set will be deleted, and a new data set created. + If *replace=true*, the original data set will be deleted, and a new data set created. - If \ :emphasis:`replace=false`\ , and a data set with a matching name already exists, allocation will fail. + If *replace=false*, and a data set with a matching name already exists, allocation will fail. - Mutually exclusive with \ :emphasis:`reuse`\ . + Mutually exclusive with *reuse*. - \ :emphasis:`replace`\ is only considered when \ :emphasis:`disposition=new`\ + *replace* is only considered when *disposition=new* - \ :emphasis:`replace`\ will result in loss of all data in the original data set unless \ :emphasis:`backup`\ is specified. + *replace* will result in loss of all data in the original data set unless *backup* is specified. | **required**: False | **type**: bool @@ -180,9 +184,9 @@ dds backup - Determines if a backup should be made of an existing data set when \ :emphasis:`disposition=new`\ , \ :emphasis:`replace=true`\ , and a data set with the desired name is found. + Determines if a backup should be made of an existing data set when *disposition=new*, *replace=true*, and a data set with the desired name is found. - \ :emphasis:`backup`\ is only used when \ :emphasis:`replace=true`\ . + *backup* is only used when *replace=true*. | **required**: False | **type**: bool @@ -190,7 +194,7 @@ dds space_type - The unit of measurement to use when allocating space for a new data set using \ :emphasis:`space\_primary`\ and \ :emphasis:`space\_secondary`\ . + The unit of measurement to use when allocating space for a new data set using *space_primary* and *space_secondary*. | **required**: False | **type**: str @@ -200,9 +204,9 @@ dds space_primary The primary amount of space to allocate for a new data set. - The value provided to \ :emphasis:`space\_type`\ is used as the unit of space for the allocation. + The value provided to *space_type* is used as the unit of space for the allocation. - Not applicable when \ :emphasis:`space\_type=blklgth`\ or \ :emphasis:`space\_type=reclgth`\ . + Not applicable when *space_type=blklgth* or *space_type=reclgth*. | **required**: False | **type**: int @@ -211,9 +215,9 @@ dds space_secondary When primary allocation of space is filled, secondary space will be allocated with the provided size as needed. - The value provided to \ :emphasis:`space\_type`\ is used as the unit of space for the allocation. + The value provided to *space_type* is used as the unit of space for the allocation. - Not applicable when \ :emphasis:`space\_type=blklgth`\ or \ :emphasis:`space\_type=reclgth`\ . + Not applicable when *space_type=blklgth* or *space_type=reclgth*. | **required**: False | **type**: int @@ -231,7 +235,7 @@ dds sms_management_class The desired management class for a new SMS-managed data set. - \ :emphasis:`sms\_management\_class`\ is ignored if specified for an existing data set. + *sms_management_class* is ignored if specified for an existing data set. All values must be between 1-8 alpha-numeric characters. @@ -242,7 +246,7 @@ dds sms_storage_class The desired storage class for a new SMS-managed data set. - \ :emphasis:`sms\_storage\_class`\ is ignored if specified for an existing data set. + *sms_storage_class* is ignored if specified for an existing data set. All values must be between 1-8 alpha-numeric characters. @@ -253,7 +257,7 @@ dds sms_data_class The desired data class for a new SMS-managed data set. - \ :emphasis:`sms\_data\_class`\ is ignored if specified for an existing data set. + *sms_data_class* is ignored if specified for an existing data set. All values must be between 1-8 alpha-numeric characters. @@ -264,7 +268,7 @@ dds block_size The maximum length of a block in bytes. - Default is dependent on \ :emphasis:`record\_format`\ + Default is dependent on *record_format* | **required**: False | **type**: int @@ -280,9 +284,9 @@ dds key_label The label for the encryption key used by the system to encrypt the data set. - \ :emphasis:`key\_label`\ is the public name of a protected encryption key in the ICSF key repository. + *key_label* is the public name of a protected encryption key in the ICSF key repository. - \ :emphasis:`key\_label`\ should only be provided when creating an extended format data set. + *key_label* should only be provided when creating an extended format data set. Maps to DSKEYLBL on z/OS. @@ -304,7 +308,7 @@ dds Key label must have a private key associated with it. - \ :emphasis:`label`\ can be a maximum of 64 characters. + *label* can be a maximum of 64 characters. Maps to KEYLAB1 on z/OS. @@ -313,9 +317,9 @@ dds encoding - How the label for the key encrypting key specified by \ :emphasis:`label`\ is encoded by the Encryption Key Manager. + How the label for the key encrypting key specified by *label* is encoded by the Encryption Key Manager. - \ :emphasis:`encoding`\ can either be set to \ :literal:`l`\ for label encoding, or \ :literal:`h`\ for hash encoding. + *encoding* can either be set to ``l`` for label encoding, or ``h`` for hash encoding. Maps to KEYCD1 on z/OS. @@ -339,7 +343,7 @@ dds Key label must have a private key associated with it. - \ :emphasis:`label`\ can be a maximum of 64 characters. + *label* can be a maximum of 64 characters. Maps to KEYLAB2 on z/OS. @@ -348,9 +352,9 @@ dds encoding - How the label for the key encrypting key specified by \ :emphasis:`label`\ is encoded by the Encryption Key Manager. + How the label for the key encrypting key specified by *label* is encoded by the Encryption Key Manager. - \ :emphasis:`encoding`\ can either be set to \ :literal:`l`\ for label encoding, or \ :literal:`h`\ for hash encoding. + *encoding* can either be set to ``l`` for label encoding, or ``h`` for hash encoding. Maps to KEYCD2 on z/OS. @@ -363,7 +367,7 @@ dds key_length The length of the keys used in a new data set. - If using SMS, setting \ :emphasis:`key\_length`\ overrides the key length defined in the SMS data class of the data set. + If using SMS, setting *key_length* overrides the key length defined in the SMS data class of the data set. Valid values are (0-255 non-vsam), (1-255 vsam). @@ -376,14 +380,14 @@ dds The first byte of a logical record is position 0. - Provide \ :emphasis:`key\_offset`\ only for VSAM key-sequenced data sets. + Provide *key_offset* only for VSAM key-sequenced data sets. | **required**: False | **type**: int record_length - The logical record length. (e.g \ :literal:`80`\ ). + The logical record length. (e.g ``80``). For variable data sets, the length must include the 4-byte prefix area. @@ -417,11 +421,11 @@ dds type The type of the content to be returned. - \ :literal:`text`\ means return content in encoding specified by \ :emphasis:`response\_encoding`\ . + ``text`` means return content in encoding specified by *response_encoding*. - \ :emphasis:`src\_encoding`\ and \ :emphasis:`response\_encoding`\ are only used when \ :emphasis:`type=text`\ . + *src_encoding* and *response_encoding* are only used when *type=text*. - \ :literal:`base64`\ means return content in binary mode. + ``base64`` means return content in binary mode. | **required**: True | **type**: str @@ -463,7 +467,7 @@ dds path The path to an existing UNIX file. - Or provide the path to an new created UNIX file when \ :emphasis:`status\_group=OCREAT`\ . + Or provide the path to an new created UNIX file when *status_group=OCREAT*. The provided path must be absolute. @@ -488,7 +492,7 @@ dds mode - The file access attributes when the UNIX file is created specified in \ :emphasis:`path`\ . + The file access attributes when the UNIX file is created specified in *path*. Specify the mode as an octal number similarly to chmod. @@ -499,47 +503,47 @@ dds status_group - The status for the UNIX file specified in \ :emphasis:`path`\ . + The status for the UNIX file specified in *path*. - If you do not specify a value for the \ :emphasis:`status\_group`\ parameter, the module assumes that the pathname exists, searches for it, and fails the module if the pathname does not exist. + If you do not specify a value for the *status_group* parameter, the module assumes that the pathname exists, searches for it, and fails the module if the pathname does not exist. Maps to PATHOPTS status group file options on z/OS. You can specify up to 6 choices. - \ :emphasis:`oappend`\ sets the file offset to the end of the file before each write, so that data is written at the end of the file. + *oappend* sets the file offset to the end of the file before each write, so that data is written at the end of the file. - \ :emphasis:`ocreat`\ specifies that if the file does not exist, the system is to create it. If a directory specified in the pathname does not exist, a new directory and a new file are not created. If the file already exists and \ :emphasis:`oexcl`\ was not specified, the system allows the program to use the existing file. If the file already exists and \ :emphasis:`oexcl`\ was specified, the system fails the allocation and the job step. + *ocreat* specifies that if the file does not exist, the system is to create it. If a directory specified in the pathname does not exist, a new directory and a new file are not created. If the file already exists and *oexcl* was not specified, the system allows the program to use the existing file. If the file already exists and *oexcl* was specified, the system fails the allocation and the job step. - \ :emphasis:`oexcl`\ specifies that if the file does not exist, the system is to create it. If the file already exists, the system fails the allocation and the job step. The system ignores \ :emphasis:`oexcl`\ if \ :emphasis:`ocreat`\ is not also specified. + *oexcl* specifies that if the file does not exist, the system is to create it. If the file already exists, the system fails the allocation and the job step. The system ignores *oexcl* if *ocreat* is not also specified. - \ :emphasis:`onoctty`\ specifies that if the PATH parameter identifies a terminal device, opening of the file does not make the terminal device the controlling terminal for the process. + *onoctty* specifies that if the PATH parameter identifies a terminal device, opening of the file does not make the terminal device the controlling terminal for the process. - \ :emphasis:`ononblock`\ specifies the following, depending on the type of file + *ononblock* specifies the following, depending on the type of file For a FIFO special file - 1. With \ :emphasis:`ononblock`\ specified and \ :emphasis:`ordonly`\ access, an open function for reading-only returns without delay. + 1. With *ononblock* specified and *ordonly* access, an open function for reading-only returns without delay. - 2. With \ :emphasis:`ononblock`\ not specified and \ :emphasis:`ordonly`\ access, an open function for reading-only blocks (waits) until a process opens the file for writing. + 2. With *ononblock* not specified and *ordonly* access, an open function for reading-only blocks (waits) until a process opens the file for writing. - 3. With \ :emphasis:`ononblock`\ specified and \ :emphasis:`owronly`\ access, an open function for writing-only returns an error if no process currently has the file open for reading. + 3. With *ononblock* specified and *owronly* access, an open function for writing-only returns an error if no process currently has the file open for reading. - 4. With \ :emphasis:`ononblock`\ not specified and \ :emphasis:`owronly`\ access, an open function for writing-only blocks (waits) until a process opens the file for reading. + 4. With *ononblock* not specified and *owronly* access, an open function for writing-only blocks (waits) until a process opens the file for reading. 5. For a character special file that supports nonblocking open - 6. If \ :emphasis:`ononblock`\ is specified, an open function returns without blocking (waiting) until the device is ready or available. Device response depends on the type of device. + 6. If *ononblock* is specified, an open function returns without blocking (waiting) until the device is ready or available. Device response depends on the type of device. - 7. If \ :emphasis:`ononblock`\ is not specified, an open function blocks (waits) until the device is ready or available. + 7. If *ononblock* is not specified, an open function blocks (waits) until the device is ready or available. - \ :emphasis:`ononblock`\ has no effect on other file types. + *ononblock* has no effect on other file types. - \ :emphasis:`osync`\ specifies that the system is to move data from buffer storage to permanent storage before returning control from a callable service that performs a write. + *osync* specifies that the system is to move data from buffer storage to permanent storage before returning control from a callable service that performs a write. - \ :emphasis:`otrunc`\ specifies that the system is to truncate the file length to zero if all the following are true: the file specified exists, the file is a regular file, and the file successfully opened with \ :emphasis:`ordwr`\ or \ :emphasis:`owronly`\ . + *otrunc* specifies that the system is to truncate the file length to zero if all the following are true: the file specified exists, the file is a regular file, and the file successfully opened with *ordwr* or *owronly*. - When \ :emphasis:`otrunc`\ is specified, the system does not change the mode and owner. \ :emphasis:`otrunc`\ has no effect on FIFO special files or character special files. + When *otrunc* is specified, the system does not change the mode and owner. *otrunc* has no effect on FIFO special files or character special files. | **required**: False | **type**: list @@ -548,7 +552,7 @@ dds access_group - The kind of access to request for the UNIX file specified in \ :emphasis:`path`\ . + The kind of access to request for the UNIX file specified in *path*. | **required**: False | **type**: str @@ -556,7 +560,7 @@ dds file_data_type - The type of data that is (or will be) stored in the file specified in \ :emphasis:`path`\ . + The type of data that is (or will be) stored in the file specified in *path*. Maps to FILEDATA on z/OS. @@ -569,7 +573,7 @@ dds block_size The block size, in bytes, for the UNIX file. - Default is dependent on \ :emphasis:`record\_format`\ + Default is dependent on *record_format* | **required**: False | **type**: int @@ -578,7 +582,7 @@ dds record_length The logical record length for the UNIX file. - \ :emphasis:`record\_length`\ is required in situations where the data will be processed as records and therefore, \ :emphasis:`record\_length`\ , \ :emphasis:`block\_size`\ and \ :emphasis:`record\_format`\ need to be supplied since a UNIX file would normally be treated as a stream of bytes. + *record_length* is required in situations where the data will be processed as records and therefore, *record_length*, *block_size* and *record_format* need to be supplied since a UNIX file would normally be treated as a stream of bytes. Maps to LRECL on z/OS. @@ -589,7 +593,7 @@ dds record_format The record format for the UNIX file. - \ :emphasis:`record\_format`\ is required in situations where the data will be processed as records and therefore, \ :emphasis:`record\_length`\ , \ :emphasis:`block\_size`\ and \ :emphasis:`record\_format`\ need to be supplied since a UNIX file would normally be treated as a stream of bytes. + *record_format* is required in situations where the data will be processed as records and therefore, *record_length*, *block_size* and *record_format* need to be supplied since a UNIX file would normally be treated as a stream of bytes. | **required**: False | **type**: str @@ -608,11 +612,11 @@ dds type The type of the content to be returned. - \ :literal:`text`\ means return content in encoding specified by \ :emphasis:`response\_encoding`\ . + ``text`` means return content in encoding specified by *response_encoding*. - \ :emphasis:`src\_encoding`\ and \ :emphasis:`response\_encoding`\ are only used when \ :emphasis:`type=text`\ . + *src_encoding* and *response_encoding* are only used when *type=text*. - \ :literal:`base64`\ means return content in binary mode. + ``base64`` means return content in binary mode. | **required**: True | **type**: str @@ -638,7 +642,7 @@ dds dd_input - \ :emphasis:`dd\_input`\ is used to specify an in-stream data set. + *dd_input* is used to specify an in-stream data set. Input will be saved to a temporary data set with a record length of 80. @@ -656,15 +660,15 @@ dds content The input contents for the DD. - \ :emphasis:`dd\_input`\ supports single or multiple lines of input. + *dd_input* supports single or multiple lines of input. Multi-line input can be provided as a multi-line string or a list of strings with 1 line per list item. If a list of strings is provided, newlines will be added to each of the lines when used as input. - If a multi-line string is provided, use the proper block scalar style. YAML supports both \ `literal <https://yaml.org/spec/1.2.2/#literal-style>`__\ and \ `folded <https://yaml.org/spec/1.2.2/#line-folding>`__\ scalars. It is recommended to use the literal style indicator "|" with a block indentation indicator, for example; \ :emphasis:`content: | 2`\ is a literal block style indicator with a 2 space indentation, the entire block will be indented and newlines preserved. The block indentation range is 1 - 9. While generally unnecessary, YAML does support block \ `chomping <https://yaml.org/spec/1.2.2/#8112-block-chomping-indicator>`__\ indicators "+" and "-" as well. + If a multi-line string is provided, use the proper block scalar style. YAML supports both `literal <https://yaml.org/spec/1.2.2/#literal-style>`_ and `folded <https://yaml.org/spec/1.2.2/#line-folding>`_ scalars. It is recommended to use the literal style indicator "|" with a block indentation indicator, for example; *content: | 2* is a literal block style indicator with a 2 space indentation, the entire block will be indented and newlines preserved. The block indentation range is 1 - 9. While generally unnecessary, YAML does support block `chomping <https://yaml.org/spec/1.2.2/#8112-block-chomping-indicator>`_ indicators "+" and "-" as well. - When using the \ :emphasis:`content`\ option for instream-data, the module will ensure that all lines contain a blank in columns 1 and 2 and add blanks when not present while retaining a maximum length of 80 columns for any line. This is true for all \ :emphasis:`content`\ types; string, list of strings and when using a YAML block indicator. + When using the *content* option for instream-data, the module will ensure that all lines contain a blank in columns 1 and 2 and add blanks when not present while retaining a maximum length of 80 columns for any line. This is true for all *content* types; string, list of strings and when using a YAML block indicator. | **required**: True | **type**: raw @@ -682,11 +686,11 @@ dds type The type of the content to be returned. - \ :literal:`text`\ means return content in encoding specified by \ :emphasis:`response\_encoding`\ . + ``text`` means return content in encoding specified by *response_encoding*. - \ :emphasis:`src\_encoding`\ and \ :emphasis:`response\_encoding`\ are only used when \ :emphasis:`type=text`\ . + *src_encoding* and *response_encoding* are only used when *type=text*. - \ :literal:`base64`\ means return content in binary mode. + ``base64`` means return content in binary mode. | **required**: True | **type**: str @@ -696,7 +700,7 @@ dds src_encoding The encoding of the data set on the z/OS system. - for \ :emphasis:`dd\_input`\ , \ :emphasis:`src\_encoding`\ should generally not need to be changed. + for *dd_input*, *src_encoding* should generally not need to be changed. | **required**: False | **type**: str @@ -714,7 +718,7 @@ dds dd_output - Use \ :emphasis:`dd\_output`\ to specify - Content sent to the DD should be returned to the user. + Use *dd_output* to specify - Content sent to the DD should be returned to the user. | **required**: False | **type**: dict @@ -739,11 +743,11 @@ dds type The type of the content to be returned. - \ :literal:`text`\ means return content in encoding specified by \ :emphasis:`response\_encoding`\ . + ``text`` means return content in encoding specified by *response_encoding*. - \ :emphasis:`src\_encoding`\ and \ :emphasis:`response\_encoding`\ are only used when \ :emphasis:`type=text`\ . + *src_encoding* and *response_encoding* are only used when *type=text*. - \ :literal:`base64`\ means return content in binary mode. + ``base64`` means return content in binary mode. | **required**: True | **type**: str @@ -753,7 +757,7 @@ dds src_encoding The encoding of the data set on the z/OS system. - for \ :emphasis:`dd\_input`\ , \ :emphasis:`src\_encoding`\ should generally not need to be changed. + for *dd_input*, *src_encoding* should generally not need to be changed. | **required**: False | **type**: str @@ -771,9 +775,9 @@ dds dd_dummy - Use \ :emphasis:`dd\_dummy`\ to specify - No device or external storage space is to be allocated to the data set. - No disposition processing is to be performed on the data set. + Use *dd_dummy* to specify - No device or external storage space is to be allocated to the data set. - No disposition processing is to be performed on the data set. - \ :emphasis:`dd\_dummy`\ accepts no content input. + *dd_dummy* accepts no content input. | **required**: False | **type**: dict @@ -788,7 +792,7 @@ dds dd_vio - \ :emphasis:`dd\_vio`\ is used to handle temporary data sets. + *dd_vio* is used to handle temporary data sets. VIO data sets reside in the paging space; but, to the problem program and the access method, the data sets appear to reside on a direct access storage device. @@ -807,7 +811,7 @@ dds dd_concat - \ :emphasis:`dd\_concat`\ is used to specify a data set concatenation. + *dd_concat* is used to specify a data set concatenation. | **required**: False | **type**: dict @@ -821,7 +825,7 @@ dds dds - A list of DD statements, which can contain any of the following types: \ :emphasis:`dd\_data\_set`\ , \ :emphasis:`dd\_unix`\ , and \ :emphasis:`dd\_input`\ . + A list of DD statements, which can contain any of the following types: *dd_data_set*, *dd_unix*, and *dd_input*. | **required**: False | **type**: list @@ -831,7 +835,7 @@ dds dd_data_set Specify a data set. - \ :emphasis:`dd\_data\_set`\ can reference an existing data set. The data set referenced with \ :literal:`data\_set\_name`\ must be allocated before the module \ `zos\_mvs\_raw <./zos_mvs_raw.html>`__\ is run, you can use \ `zos\_data\_set <./zos_data_set.html>`__\ to allocate a data set. + *dd_data_set* can reference an existing data set. The data set referenced with ``data_set_name`` must be allocated before the module `zos_mvs_raw <./zos_mvs_raw.html>`_ is run, you can use `zos_data_set <./zos_data_set.html>`_ to allocate a data set. | **required**: False | **type**: dict @@ -840,12 +844,16 @@ dds data_set_name The data set name. + A data set name can be a GDS relative name. + + When using GDS relative name and it is a positive generation, *disposition=new* must be used. + | **required**: False | **type**: str type - The data set type. Only required when \ :emphasis:`disposition=new`\ . + The data set type. Only required when *disposition=new*. Maps to DSNTYPE on z/OS. @@ -855,7 +863,7 @@ dds disposition - \ :emphasis:`disposition`\ indicates the status of a data set. + *disposition* indicates the status of a data set. Defaults to shr. @@ -865,7 +873,7 @@ dds disposition_normal - \ :emphasis:`disposition\_normal`\ indicates what to do with the data set after normal termination of the program. + *disposition_normal* indicates what to do with the data set after normal termination of the program. | **required**: False | **type**: str @@ -873,7 +881,7 @@ dds disposition_abnormal - \ :emphasis:`disposition\_abnormal`\ indicates what to do with the data set after abnormal termination of the program. + *disposition_abnormal* indicates what to do with the data set after abnormal termination of the program. | **required**: False | **type**: str @@ -881,15 +889,15 @@ dds reuse - Determines if data set should be reused if \ :emphasis:`disposition=new`\ and a data set with matching name already exists. + Determines if data set should be reused if *disposition=new* and a data set with matching name already exists. - If \ :emphasis:`reuse=true`\ , \ :emphasis:`disposition`\ will be automatically switched to \ :literal:`SHR`\ . + If *reuse=true*, *disposition* will be automatically switched to ``SHR``. - If \ :emphasis:`reuse=false`\ , and a data set with a matching name already exists, allocation will fail. + If *reuse=false*, and a data set with a matching name already exists, allocation will fail. - Mutually exclusive with \ :emphasis:`replace`\ . + Mutually exclusive with *replace*. - \ :emphasis:`reuse`\ is only considered when \ :emphasis:`disposition=new`\ + *reuse* is only considered when *disposition=new* | **required**: False | **type**: bool @@ -897,17 +905,17 @@ dds replace - Determines if data set should be replaced if \ :emphasis:`disposition=new`\ and a data set with matching name already exists. + Determines if data set should be replaced if *disposition=new* and a data set with matching name already exists. - If \ :emphasis:`replace=true`\ , the original data set will be deleted, and a new data set created. + If *replace=true*, the original data set will be deleted, and a new data set created. - If \ :emphasis:`replace=false`\ , and a data set with a matching name already exists, allocation will fail. + If *replace=false*, and a data set with a matching name already exists, allocation will fail. - Mutually exclusive with \ :emphasis:`reuse`\ . + Mutually exclusive with *reuse*. - \ :emphasis:`replace`\ is only considered when \ :emphasis:`disposition=new`\ + *replace* is only considered when *disposition=new* - \ :emphasis:`replace`\ will result in loss of all data in the original data set unless \ :emphasis:`backup`\ is specified. + *replace* will result in loss of all data in the original data set unless *backup* is specified. | **required**: False | **type**: bool @@ -915,9 +923,9 @@ dds backup - Determines if a backup should be made of existing data set when \ :emphasis:`disposition=new`\ , \ :emphasis:`replace=true`\ , and a data set with the desired name is found. + Determines if a backup should be made of existing data set when *disposition=new*, *replace=true*, and a data set with the desired name is found. - \ :emphasis:`backup`\ is only used when \ :emphasis:`replace=true`\ . + *backup* is only used when *replace=true*. | **required**: False | **type**: bool @@ -925,7 +933,7 @@ dds space_type - The unit of measurement to use when allocating space for a new data set using \ :emphasis:`space\_primary`\ and \ :emphasis:`space\_secondary`\ . + The unit of measurement to use when allocating space for a new data set using *space_primary* and *space_secondary*. | **required**: False | **type**: str @@ -935,9 +943,9 @@ dds space_primary The primary amount of space to allocate for a new data set. - The value provided to \ :emphasis:`space\_type`\ is used as the unit of space for the allocation. + The value provided to *space_type* is used as the unit of space for the allocation. - Not applicable when \ :emphasis:`space\_type=blklgth`\ or \ :emphasis:`space\_type=reclgth`\ . + Not applicable when *space_type=blklgth* or *space_type=reclgth*. | **required**: False | **type**: int @@ -946,9 +954,9 @@ dds space_secondary When primary allocation of space is filled, secondary space will be allocated with the provided size as needed. - The value provided to \ :emphasis:`space\_type`\ is used as the unit of space for the allocation. + The value provided to *space_type* is used as the unit of space for the allocation. - Not applicable when \ :emphasis:`space\_type=blklgth`\ or \ :emphasis:`space\_type=reclgth`\ . + Not applicable when *space_type=blklgth* or *space_type=reclgth*. | **required**: False | **type**: int @@ -966,7 +974,7 @@ dds sms_management_class The desired management class for a new SMS-managed data set. - \ :emphasis:`sms\_management\_class`\ is ignored if specified for an existing data set. + *sms_management_class* is ignored if specified for an existing data set. All values must be between 1-8 alpha-numeric characters. @@ -977,7 +985,7 @@ dds sms_storage_class The desired storage class for a new SMS-managed data set. - \ :emphasis:`sms\_storage\_class`\ is ignored if specified for an existing data set. + *sms_storage_class* is ignored if specified for an existing data set. All values must be between 1-8 alpha-numeric characters. @@ -988,7 +996,7 @@ dds sms_data_class The desired data class for a new SMS-managed data set. - \ :emphasis:`sms\_data\_class`\ is ignored if specified for an existing data set. + *sms_data_class* is ignored if specified for an existing data set. All values must be between 1-8 alpha-numeric characters. @@ -999,7 +1007,7 @@ dds block_size The maximum length of a block in bytes. - Default is dependent on \ :emphasis:`record\_format`\ + Default is dependent on *record_format* | **required**: False | **type**: int @@ -1015,9 +1023,9 @@ dds key_label The label for the encryption key used by the system to encrypt the data set. - \ :emphasis:`key\_label`\ is the public name of a protected encryption key in the ICSF key repository. + *key_label* is the public name of a protected encryption key in the ICSF key repository. - \ :emphasis:`key\_label`\ should only be provided when creating an extended format data set. + *key_label* should only be provided when creating an extended format data set. Maps to DSKEYLBL on z/OS. @@ -1039,7 +1047,7 @@ dds Key label must have a private key associated with it. - \ :emphasis:`label`\ can be a maximum of 64 characters. + *label* can be a maximum of 64 characters. Maps to KEYLAB1 on z/OS. @@ -1048,9 +1056,9 @@ dds encoding - How the label for the key encrypting key specified by \ :emphasis:`label`\ is encoded by the Encryption Key Manager. + How the label for the key encrypting key specified by *label* is encoded by the Encryption Key Manager. - \ :emphasis:`encoding`\ can either be set to \ :literal:`l`\ for label encoding, or \ :literal:`h`\ for hash encoding. + *encoding* can either be set to ``l`` for label encoding, or ``h`` for hash encoding. Maps to KEYCD1 on z/OS. @@ -1074,7 +1082,7 @@ dds Key label must have a private key associated with it. - \ :emphasis:`label`\ can be a maximum of 64 characters. + *label* can be a maximum of 64 characters. Maps to KEYLAB2 on z/OS. @@ -1083,9 +1091,9 @@ dds encoding - How the label for the key encrypting key specified by \ :emphasis:`label`\ is encoded by the Encryption Key Manager. + How the label for the key encrypting key specified by *label* is encoded by the Encryption Key Manager. - \ :emphasis:`encoding`\ can either be set to \ :literal:`l`\ for label encoding, or \ :literal:`h`\ for hash encoding. + *encoding* can either be set to ``l`` for label encoding, or ``h`` for hash encoding. Maps to KEYCD2 on z/OS. @@ -1098,7 +1106,7 @@ dds key_length The length of the keys used in a new data set. - If using SMS, setting \ :emphasis:`key\_length`\ overrides the key length defined in the SMS data class of the data set. + If using SMS, setting *key_length* overrides the key length defined in the SMS data class of the data set. Valid values are (0-255 non-vsam), (1-255 vsam). @@ -1111,14 +1119,14 @@ dds The first byte of a logical record is position 0. - Provide \ :emphasis:`key\_offset`\ only for VSAM key-sequenced data sets. + Provide *key_offset* only for VSAM key-sequenced data sets. | **required**: False | **type**: int record_length - The logical record length. (e.g \ :literal:`80`\ ). + The logical record length. (e.g ``80``). For variable data sets, the length must include the 4-byte prefix area. @@ -1152,11 +1160,11 @@ dds type The type of the content to be returned. - \ :literal:`text`\ means return content in encoding specified by \ :emphasis:`response\_encoding`\ . + ``text`` means return content in encoding specified by *response_encoding*. - \ :emphasis:`src\_encoding`\ and \ :emphasis:`response\_encoding`\ are only used when \ :emphasis:`type=text`\ . + *src_encoding* and *response_encoding* are only used when *type=text*. - \ :literal:`base64`\ means return content in binary mode. + ``base64`` means return content in binary mode. | **required**: True | **type**: str @@ -1191,7 +1199,7 @@ dds path The path to an existing UNIX file. - Or provide the path to an new created UNIX file when \ :emphasis:`status\_group=ocreat`\ . + Or provide the path to an new created UNIX file when *status_group=ocreat*. The provided path must be absolute. @@ -1216,7 +1224,7 @@ dds mode - The file access attributes when the UNIX file is created specified in \ :emphasis:`path`\ . + The file access attributes when the UNIX file is created specified in *path*. Specify the mode as an octal number similar to chmod. @@ -1227,47 +1235,47 @@ dds status_group - The status for the UNIX file specified in \ :emphasis:`path`\ . + The status for the UNIX file specified in *path*. - If you do not specify a value for the \ :emphasis:`status\_group`\ parameter the module assumes that the pathname exists, searches for it, and fails the module if the pathname does not exist. + If you do not specify a value for the *status_group* parameter the module assumes that the pathname exists, searches for it, and fails the module if the pathname does not exist. Maps to PATHOPTS status group file options on z/OS. You can specify up to 6 choices. - \ :emphasis:`oappend`\ sets the file offset to the end of the file before each write, so that data is written at the end of the file. + *oappend* sets the file offset to the end of the file before each write, so that data is written at the end of the file. - \ :emphasis:`ocreat`\ specifies that if the file does not exist, the system is to create it. If a directory specified in the pathname does not exist, one is not created, and the new file is not created. If the file already exists and \ :emphasis:`oexcl`\ was not specified, the system allows the program to use the existing file. If the file already exists and \ :emphasis:`oexcl`\ was specified, the system fails the allocation and the job step. + *ocreat* specifies that if the file does not exist, the system is to create it. If a directory specified in the pathname does not exist, one is not created, and the new file is not created. If the file already exists and *oexcl* was not specified, the system allows the program to use the existing file. If the file already exists and *oexcl* was specified, the system fails the allocation and the job step. - \ :emphasis:`oexcl`\ specifies that if the file does not exist, the system is to create it. If the file already exists, the system fails the allocation and the job step. The system ignores \ :emphasis:`oexcl`\ if \ :emphasis:`ocreat`\ is not also specified. + *oexcl* specifies that if the file does not exist, the system is to create it. If the file already exists, the system fails the allocation and the job step. The system ignores *oexcl* if *ocreat* is not also specified. - \ :emphasis:`onoctty`\ specifies that if the PATH parameter identifies a terminal device, opening of the file does not make the terminal device the controlling terminal for the process. + *onoctty* specifies that if the PATH parameter identifies a terminal device, opening of the file does not make the terminal device the controlling terminal for the process. - \ :emphasis:`ononblock`\ specifies the following, depending on the type of file + *ononblock* specifies the following, depending on the type of file For a FIFO special file - 1. With \ :emphasis:`ononblock`\ specified and \ :emphasis:`ordonly`\ access, an open function for reading-only returns without delay. + 1. With *ononblock* specified and *ordonly* access, an open function for reading-only returns without delay. - 2. With \ :emphasis:`ononblock`\ not specified and \ :emphasis:`ordonly`\ access, an open function for reading-only blocks (waits) until a process opens the file for writing. + 2. With *ononblock* not specified and *ordonly* access, an open function for reading-only blocks (waits) until a process opens the file for writing. - 3. With \ :emphasis:`ononblock`\ specified and \ :emphasis:`owronly`\ access, an open function for writing-only returns an error if no process currently has the file open for reading. + 3. With *ononblock* specified and *owronly* access, an open function for writing-only returns an error if no process currently has the file open for reading. - 4. With \ :emphasis:`ononblock`\ not specified and \ :emphasis:`owronly`\ access, an open function for writing-only blocks (waits) until a process opens the file for reading. + 4. With *ononblock* not specified and *owronly* access, an open function for writing-only blocks (waits) until a process opens the file for reading. 5. For a character special file that supports nonblocking open - 6. If \ :emphasis:`ononblock`\ is specified, an open function returns without blocking (waiting) until the device is ready or available. Device response depends on the type of device. + 6. If *ononblock* is specified, an open function returns without blocking (waiting) until the device is ready or available. Device response depends on the type of device. - 7. If \ :emphasis:`ononblock`\ is not specified, an open function blocks (waits) until the device is ready or available. + 7. If *ononblock* is not specified, an open function blocks (waits) until the device is ready or available. - \ :emphasis:`ononblock`\ has no effect on other file types. + *ononblock* has no effect on other file types. - \ :emphasis:`osync`\ specifies that the system is to move data from buffer storage to permanent storage before returning control from a callable service that performs a write. + *osync* specifies that the system is to move data from buffer storage to permanent storage before returning control from a callable service that performs a write. - \ :emphasis:`otrunc`\ specifies that the system is to truncate the file length to zero if all the following are true: the file specified exists, the file is a regular file, and the file successfully opened with \ :emphasis:`ordwr`\ or \ :emphasis:`owronly`\ . + *otrunc* specifies that the system is to truncate the file length to zero if all the following are true: the file specified exists, the file is a regular file, and the file successfully opened with *ordwr* or *owronly*. - When \ :emphasis:`otrunc`\ is specified, the system does not change the mode and owner. \ :emphasis:`otrunc`\ has no effect on FIFO special files or character special files. + When *otrunc* is specified, the system does not change the mode and owner. *otrunc* has no effect on FIFO special files or character special files. | **required**: False | **type**: list @@ -1276,7 +1284,7 @@ dds access_group - The kind of access to request for the UNIX file specified in \ :emphasis:`path`\ . + The kind of access to request for the UNIX file specified in *path*. | **required**: False | **type**: str @@ -1284,7 +1292,7 @@ dds file_data_type - The type of data that is (or will be) stored in the file specified in \ :emphasis:`path`\ . + The type of data that is (or will be) stored in the file specified in *path*. Maps to FILEDATA on z/OS. @@ -1297,7 +1305,7 @@ dds block_size The block size, in bytes, for the UNIX file. - Default is dependent on \ :emphasis:`record\_format`\ + Default is dependent on *record_format* | **required**: False | **type**: int @@ -1306,7 +1314,7 @@ dds record_length The logical record length for the UNIX file. - \ :emphasis:`record\_length`\ is required in situations where the data will be processed as records and therefore, \ :emphasis:`record\_length`\ , \ :emphasis:`block\_size`\ and \ :emphasis:`record\_format`\ need to be supplied since a UNIX file would normally be treated as a stream of bytes. + *record_length* is required in situations where the data will be processed as records and therefore, *record_length*, *block_size* and *record_format* need to be supplied since a UNIX file would normally be treated as a stream of bytes. Maps to LRECL on z/OS. @@ -1317,7 +1325,7 @@ dds record_format The record format for the UNIX file. - \ :emphasis:`record\_format`\ is required in situations where the data will be processed as records and therefore, \ :emphasis:`record\_length`\ , \ :emphasis:`block\_size`\ and \ :emphasis:`record\_format`\ need to be supplied since a UNIX file would normally be treated as a stream of bytes. + *record_format* is required in situations where the data will be processed as records and therefore, *record_length*, *block_size* and *record_format* need to be supplied since a UNIX file would normally be treated as a stream of bytes. | **required**: False | **type**: str @@ -1336,11 +1344,11 @@ dds type The type of the content to be returned. - \ :literal:`text`\ means return content in encoding specified by \ :emphasis:`response\_encoding`\ . + ``text`` means return content in encoding specified by *response_encoding*. - \ :emphasis:`src\_encoding`\ and \ :emphasis:`response\_encoding`\ are only used when \ :emphasis:`type=text`\ . + *src_encoding* and *response_encoding* are only used when *type=text*. - \ :literal:`base64`\ means return content in binary mode. + ``base64`` means return content in binary mode. | **required**: True | **type**: str @@ -1366,7 +1374,7 @@ dds dd_input - \ :emphasis:`dd\_input`\ is used to specify an in-stream data set. + *dd_input* is used to specify an in-stream data set. Input will be saved to a temporary data set with a record length of 80. @@ -1377,15 +1385,15 @@ dds content The input contents for the DD. - \ :emphasis:`dd\_input`\ supports single or multiple lines of input. + *dd_input* supports single or multiple lines of input. Multi-line input can be provided as a multi-line string or a list of strings with 1 line per list item. If a list of strings is provided, newlines will be added to each of the lines when used as input. - If a multi-line string is provided, use the proper block scalar style. YAML supports both \ `literal <https://yaml.org/spec/1.2.2/#literal-style>`__\ and \ `folded <https://yaml.org/spec/1.2.2/#line-folding>`__\ scalars. It is recommended to use the literal style indicator "|" with a block indentation indicator, for example; \ :emphasis:`content: | 2`\ is a literal block style indicator with a 2 space indentation, the entire block will be indented and newlines preserved. The block indentation range is 1 - 9. While generally unnecessary, YAML does support block \ `chomping <https://yaml.org/spec/1.2.2/#8112-block-chomping-indicator>`__\ indicators "+" and "-" as well. + If a multi-line string is provided, use the proper block scalar style. YAML supports both `literal <https://yaml.org/spec/1.2.2/#literal-style>`_ and `folded <https://yaml.org/spec/1.2.2/#line-folding>`_ scalars. It is recommended to use the literal style indicator "|" with a block indentation indicator, for example; *content: | 2* is a literal block style indicator with a 2 space indentation, the entire block will be indented and newlines preserved. The block indentation range is 1 - 9. While generally unnecessary, YAML does support block `chomping <https://yaml.org/spec/1.2.2/#8112-block-chomping-indicator>`_ indicators "+" and "-" as well. - When using the \ :emphasis:`content`\ option for instream-data, the module will ensure that all lines contain a blank in columns 1 and 2 and add blanks when not present while retaining a maximum length of 80 columns for any line. This is true for all \ :emphasis:`content`\ types; string, list of strings and when using a YAML block indicator. + When using the *content* option for instream-data, the module will ensure that all lines contain a blank in columns 1 and 2 and add blanks when not present while retaining a maximum length of 80 columns for any line. This is true for all *content* types; string, list of strings and when using a YAML block indicator. | **required**: True | **type**: raw @@ -1403,11 +1411,11 @@ dds type The type of the content to be returned. - \ :literal:`text`\ means return content in encoding specified by \ :emphasis:`response\_encoding`\ . + ``text`` means return content in encoding specified by *response_encoding*. - \ :emphasis:`src\_encoding`\ and \ :emphasis:`response\_encoding`\ are only used when \ :emphasis:`type=text`\ . + *src_encoding* and *response_encoding* are only used when *type=text*. - \ :literal:`base64`\ means return content in binary mode. + ``base64`` means return content in binary mode. | **required**: True | **type**: str @@ -1417,7 +1425,7 @@ dds src_encoding The encoding of the data set on the z/OS system. - for \ :emphasis:`dd\_input`\ , \ :emphasis:`src\_encoding`\ should generally not need to be changed. + for *dd_input*, *src_encoding* should generally not need to be changed. | **required**: False | **type**: str @@ -1440,7 +1448,7 @@ dds tmp_hlq Override the default high level qualifier (HLQ) for temporary and backup datasets. - The default HLQ is the Ansible user used to execute the module and if that is not available, then the value \ :literal:`TMPHLQ`\ is used. + The default HLQ is the Ansible user used to execute the module and if that is not available, then the value ``TMPHLQ`` is used. | **required**: False | **type**: str @@ -1748,6 +1756,37 @@ Examples VOLUMES(222222) - UNIQUE) + - name: List data sets matching pattern in catalog, + save output to a new generation of gdgs. + zos_mvs_raw: + program_name: idcams + auth: true + dds: + - dd_data_set: + dd_name: sysprint + data_set_name: TEST.CREATION(+1) + disposition: new + return_content: + type: text + - dd_input: + dd_name: sysin + content: " LISTCAT ENTRIES('SOME.DATASET.*')" + + - name: List data sets matching pattern in catalog, + save output to a gds already created. + zos_mvs_raw: + program_name: idcams + auth: true + dds: + - dd_data_set: + dd_name: sysprint + data_set_name: TEST.CREATION(-2) + return_content: + type: text + - dd_input: + dd_name: sysin + content: " LISTCAT ENTRIES('SOME.DATASET.*')" + @@ -1755,11 +1794,11 @@ Notes ----- .. note:: - When executing programs using \ `zos\_mvs\_raw <./zos_mvs_raw.html>`__\ , you may encounter errors that originate in the programs implementation. Two such known issues are noted below of which one has been addressed with an APAR. + When executing programs using `zos_mvs_raw <./zos_mvs_raw.html>`_, you may encounter errors that originate in the programs implementation. Two such known issues are noted below of which one has been addressed with an APAR. - 1. \ `zos\_mvs\_raw <./zos_mvs_raw.html>`__\ module execution fails when invoking Database Image Copy 2 Utility or Database Recovery Utility in conjunction with FlashCopy or Fast Replication. + 1. `zos_mvs_raw <./zos_mvs_raw.html>`_ module execution fails when invoking Database Image Copy 2 Utility or Database Recovery Utility in conjunction with FlashCopy or Fast Replication. - 2. \ `zos\_mvs\_raw <./zos_mvs_raw.html>`__\ module execution fails when invoking DFSRRC00 with parm "UPB,PRECOMP", "UPB, POSTCOMP" or "UPB,PRECOMP,POSTCOMP". This issue is addressed by APAR PH28089. + 2. `zos_mvs_raw <./zos_mvs_raw.html>`_ module execution fails when invoking DFSRRC00 with parm "UPB,PRECOMP", "UPB, POSTCOMP" or "UPB,PRECOMP,POSTCOMP". This issue is addressed by APAR PH28089. 3. When executing a program, refer to the programs documentation as each programs requirments can vary fom DDs, instream-data indentation and continuation characters. @@ -1837,7 +1876,7 @@ backups | **type**: str backup_name - The name of the data set containing the backup of content from data set in original\_name. + The name of the data set containing the backup of content from data set in original_name. | **type**: str diff --git a/docs/source/modules/zos_operator.rst b/docs/source/modules/zos_operator.rst index 8f7e76df1..2bd53fc83 100644 --- a/docs/source/modules/zos_operator.rst +++ b/docs/source/modules/zos_operator.rst @@ -56,7 +56,7 @@ wait_time_s This option is helpful on a busy system requiring more time to execute commands. - Setting \ :emphasis:`wait`\ can instruct if execution should wait the full \ :emphasis:`wait\_time\_s`\ . + Setting *wait* can instruct if execution should wait the full *wait_time_s*. | **required**: False | **type**: int @@ -100,7 +100,7 @@ Notes ----- .. note:: - Commands may need to use specific prefixes like $, they can be discovered by issuing the following command \ :literal:`D OPDATA,PREFIX`\ . + Commands may need to use specific prefixes like $, they can be discovered by issuing the following command ``D OPDATA,PREFIX``. diff --git a/docs/source/modules/zos_operator_action_query.rst b/docs/source/modules/zos_operator_action_query.rst index b7956c8b8..ba9398b50 100644 --- a/docs/source/modules/zos_operator_action_query.rst +++ b/docs/source/modules/zos_operator_action_query.rst @@ -31,7 +31,7 @@ system If the system name is not specified, all outstanding messages for that system and for the local systems attached to it are returned. - A trailing asterisk, (\*) wildcard is supported. + A trailing asterisk, (*) wildcard is supported. | **required**: False | **type**: str @@ -42,7 +42,7 @@ message_id If the message identifier is not specified, all outstanding messages for all message identifiers are returned. - A trailing asterisk, (\*) wildcard is supported. + A trailing asterisk, (*) wildcard is supported. | **required**: False | **type**: str @@ -53,7 +53,7 @@ job_name If the message job name is not specified, all outstanding messages for all job names are returned. - A trailing asterisk, (\*) wildcard is supported. + A trailing asterisk, (*) wildcard is supported. | **required**: False | **type**: str @@ -69,24 +69,24 @@ message_filter filter - Specifies the substring or regex to match to the outstanding messages, see \ :emphasis:`use\_regex`\ . + Specifies the substring or regex to match to the outstanding messages, see *use_regex*. All special characters in a filter string that are not a regex are escaped. - Valid Python regular expressions are supported. See \ `the official documentation <https://docs.python.org/library/re.html>`__\ for more information. + Valid Python regular expressions are supported. See `the official documentation <https://docs.python.org/library/re.html>`_ for more information. - Regular expressions are compiled with the flag \ :strong:`re.DOTALL`\ which makes the \ :strong:`'.'`\ special character match any character including a newline." + Regular expressions are compiled with the flag **re.DOTALL** which makes the **'.'** special character match any character including a newline." | **required**: True | **type**: str use_regex - Indicates that the value for \ :emphasis:`filter`\ is a regex or a string to match. + Indicates that the value for *filter* is a regex or a string to match. - If False, the module assumes that \ :emphasis:`filter`\ is not a regex and matches the \ :emphasis:`filter`\ substring on the outstanding messages. + If False, the module assumes that *filter* is not a regex and matches the *filter* substring on the outstanding messages. - If True, the module creates a regex from the \ :emphasis:`filter`\ string and matches it to the outstanding messages. + If True, the module creates a regex from the *filter* string and matches it to the outstanding messages. | **required**: False | **type**: bool @@ -222,7 +222,7 @@ actions | **sample**: STC01537 message_text - Content of the outstanding message requiring operator action awaiting a reply. If \ :emphasis:`message\_filter`\ is set, \ :emphasis:`message\_text`\ will be filtered accordingly. + Content of the outstanding message requiring operator action awaiting a reply. If *message_filter* is set, *message_text* will be filtered accordingly. | **returned**: success | **type**: str diff --git a/docs/source/modules/zos_ping.rst b/docs/source/modules/zos_ping.rst index acb901790..a4405b473 100644 --- a/docs/source/modules/zos_ping.rst +++ b/docs/source/modules/zos_ping.rst @@ -16,9 +16,9 @@ zos_ping -- Ping z/OS and check dependencies. Synopsis -------- -- \ `zos\_ping <./zos_ping.html>`__\ verifies the presence of z/OS Web Client Enablement Toolkit, iconv, and Python. -- \ `zos\_ping <./zos_ping.html>`__\ returns \ :literal:`pong`\ when the target host is not missing any required dependencies. -- If the target host is missing optional dependencies, the \ `zos\_ping <./zos_ping.html>`__\ will return one or more warning messages. +- `zos_ping <./zos_ping.html>`_ verifies the presence of z/OS Web Client Enablement Toolkit, iconv, and Python. +- `zos_ping <./zos_ping.html>`_ returns ``pong`` when the target host is not missing any required dependencies. +- If the target host is missing optional dependencies, the `zos_ping <./zos_ping.html>`_ will return one or more warning messages. - If a required dependency is missing from the target host, an explanatory message will be returned with the module failure. @@ -44,7 +44,7 @@ Notes ----- .. note:: - This module is written in REXX and relies on the SCP protocol to transfer the source to the managed z/OS node and encode it in the managed nodes default encoding, eg IBM-1047. Starting with OpenSSH 9.0, it switches from SCP to use SFTP by default, meaning transfers are no longer treated as text and are transferred as binary preserving the source files encoding resulting in a module failure. If you are using OpenSSH 9.0 (ssh -V) or later, you can instruct SSH to use SCP by adding the entry \ :literal:`scp\_extra\_args="-O"`\ into the ini file named \ :literal:`ansible.cfg`\ . + This module is written in REXX and relies on the SCP protocol to transfer the source to the managed z/OS node and encode it in the managed nodes default encoding, eg IBM-1047. Starting with OpenSSH 9.0, it switches from SCP to use SFTP by default, meaning transfers are no longer treated as text and are transferred as binary preserving the source files encoding resulting in a module failure. If you are using OpenSSH 9.0 (ssh -V) or later, you can instruct SSH to use SCP by adding the entry ``scp_extra_args="-O"`` into the ini file named ``ansible.cfg``. diff --git a/docs/source/modules/zos_script.rst b/docs/source/modules/zos_script.rst index d2977c486..10660d38a 100644 --- a/docs/source/modules/zos_script.rst +++ b/docs/source/modules/zos_script.rst @@ -16,7 +16,7 @@ zos_script -- Run scripts in z/OS Synopsis -------- -- The \ `zos\_script <./zos_script.html>`__\ module runs a local or remote script in the remote machine. +- The `zos_script <./zos_script.html>`_ module runs a local or remote script in the remote machine. @@ -56,7 +56,7 @@ creates encoding Specifies which encodings the script should be converted from and to. - If \ :literal:`encoding`\ is not provided, the module determines which local and remote charsets to convert the data from and to. + If ``encoding`` is not provided, the module determines which local and remote charsets to convert the data from and to. | **required**: False | **type**: dict @@ -87,9 +87,9 @@ executable remote_src - If set to \ :literal:`false`\ , the module will search the script in the controller. + If set to ``false``, the module will search the script in the controller. - If set to \ :literal:`true`\ , the module will search the script in the remote machine. + If set to ``true``, the module will search the script in the remote machine. | **required**: False | **type**: bool @@ -103,13 +103,13 @@ removes use_template - Whether the module should treat \ :literal:`src`\ as a Jinja2 template and render it before continuing with the rest of the module. + Whether the module should treat ``src`` as a Jinja2 template and render it before continuing with the rest of the module. - Only valid when \ :literal:`src`\ is a local file or directory. + Only valid when ``src`` is a local file or directory. - All variables defined in inventory files, vars files and the playbook will be passed to the template engine, as well as \ `Ansible special variables <https://docs.ansible.com/ansible/latest/reference_appendices/special_variables.html#special-variables>`__\ , such as \ :literal:`playbook\_dir`\ , \ :literal:`ansible\_version`\ , etc. + All variables defined in inventory files, vars files and the playbook will be passed to the template engine, as well as `Ansible special variables <https://docs.ansible.com/ansible/latest/reference_appendices/special_variables.html#special-variables>`_, such as ``playbook_dir``, ``ansible_version``, etc. - If variables defined in different scopes share the same name, Ansible will apply variable precedence to them. You can see the complete precedence order \ `in Ansible's documentation <https://docs.ansible.com/ansible/latest/playbook_guide/playbooks_variables.html#understanding-variable-precedence>`__\ + If variables defined in different scopes share the same name, Ansible will apply variable precedence to them. You can see the complete precedence order `in Ansible's documentation <https://docs.ansible.com/ansible/latest/playbook_guide/playbooks_variables.html#understanding-variable-precedence>`_ | **required**: False | **type**: bool @@ -119,9 +119,9 @@ use_template template_parameters Options to set the way Jinja2 will process templates. - Jinja2 already sets defaults for the markers it uses, you can find more information at its \ `official documentation <https://jinja.palletsprojects.com/en/latest/templates/>`__\ . + Jinja2 already sets defaults for the markers it uses, you can find more information at its `official documentation <https://jinja.palletsprojects.com/en/latest/templates/>`_. - These options are ignored unless \ :literal:`use\_template`\ is true. + These options are ignored unless ``use_template`` is true. | **required**: False | **type**: dict @@ -200,7 +200,7 @@ template_parameters trim_blocks Whether Jinja2 should remove the first newline after a block is removed. - Setting this option to \ :literal:`False`\ will result in newlines being added to the rendered template. This could create invalid code when working with JCL templates or empty records in destination data sets. + Setting this option to ``False`` will result in newlines being added to the rendered template. This could create invalid code when working with JCL templates or empty records in destination data sets. | **required**: False | **type**: bool @@ -290,7 +290,7 @@ Notes .. note:: When executing local scripts, temporary storage will be used on the remote z/OS system. The size of the temporary storage will correspond to the size of the file being copied. - The location in the z/OS system where local scripts will be copied to can be configured through Ansible's \ :literal:`remote\_tmp`\ option. Refer to \ `Ansible's documentation <https://docs.ansible.com/ansible/latest/collections/ansible/builtin/sh_shell.html#parameter-remote_tmp>`__\ for more information. + The location in the z/OS system where local scripts will be copied to can be configured through Ansible's ``remote_tmp`` option. Refer to `Ansible's documentation <https://docs.ansible.com/ansible/latest/collections/ansible/builtin/sh_shell.html#parameter-remote_tmp>`_ for more information. All local scripts copied to a remote z/OS system will be removed from the managed node before the module finishes executing. @@ -298,13 +298,13 @@ Notes The module will only add execution permissions for the file owner. - If executing REXX scripts, make sure to include a newline character on each line of the file. Otherwise, the interpreter may fail and return error \ :literal:`BPXW0003I`\ . + If executing REXX scripts, make sure to include a newline character on each line of the file. Otherwise, the interpreter may fail and return error ``BPXW0003I``. - For supported character sets used to encode data, refer to the \ `documentation <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/resources/character_set.html>`__\ . + For supported character sets used to encode data, refer to the `documentation <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/resources/character_set.html>`_. - This module uses \ `zos\_copy <./zos_copy.html>`__\ to copy local scripts to the remote machine which uses SFTP (Secure File Transfer Protocol) for the underlying transfer protocol; SCP (secure copy protocol) and Co:Z SFTP are not supported. In the case of Co:z SFTP, you can exempt the Ansible user id on z/OS from using Co:Z thus falling back to using standard SFTP. If the module detects SCP, it will temporarily use SFTP for transfers, if not available, the module will fail. + This module uses `zos_copy <./zos_copy.html>`_ to copy local scripts to the remote machine which uses SFTP (Secure File Transfer Protocol) for the underlying transfer protocol; SCP (secure copy protocol) and Co:Z SFTP are not supported. In the case of Co:z SFTP, you can exempt the Ansible user id on z/OS from using Co:Z thus falling back to using standard SFTP. If the module detects SCP, it will temporarily use SFTP for transfers, if not available, the module will fail. - This module executes scripts inside z/OS UNIX System Services. For running REXX scripts contained in data sets or CLISTs, consider issuing a TSO command with \ `zos\_tso\_command <./zos_tso_command.html>`__\ . + This module executes scripts inside z/OS UNIX System Services. For running REXX scripts contained in data sets or CLISTs, consider issuing a TSO command with `zos_tso_command <./zos_tso_command.html>`_. The community script module does not rely on Python to execute scripts on a managed node, while this module does. Python must be present on the remote machine. diff --git a/docs/source/modules/zos_tso_command.rst b/docs/source/modules/zos_tso_command.rst index b35c13a1b..4af6b1b52 100644 --- a/docs/source/modules/zos_tso_command.rst +++ b/docs/source/modules/zos_tso_command.rst @@ -40,7 +40,7 @@ commands max_rc Specifies the maximum return code allowed for a TSO command. - If more than one TSO command is submitted, the \ :emphasis:`max\_rc`\ applies to all TSO commands. + If more than one TSO command is submitted, the *max_rc* applies to all TSO commands. | **required**: False | **type**: int @@ -119,7 +119,7 @@ output max_rc Specifies the maximum return code allowed for a TSO command. - If more than one TSO command is submitted, the \ :emphasis:`max\_rc`\ applies to all TSO commands. + If more than one TSO command is submitted, the *max_rc* applies to all TSO commands. | **returned**: always | **type**: int diff --git a/docs/source/modules/zos_unarchive.rst b/docs/source/modules/zos_unarchive.rst index ed6a26a8f..89b4b065c 100644 --- a/docs/source/modules/zos_unarchive.rst +++ b/docs/source/modules/zos_unarchive.rst @@ -16,8 +16,8 @@ zos_unarchive -- Unarchive files and data sets in z/OS. Synopsis -------- -- The \ :literal:`zos\_unarchive`\ module unpacks an archive after optionally transferring it to the remote system. -- For supported archive formats, see option \ :literal:`format`\ . +- The ``zos_unarchive`` module unpacks an archive after optionally transferring it to the remote system. +- For supported archive formats, see option ``format``. - Supported sources are USS (UNIX System Services) or z/OS data sets. - Mixing MVS data sets with USS files for unarchiving is not supported. - The archive is sent to the remote as binary, so no encoding is performed. @@ -33,11 +33,13 @@ Parameters src The remote absolute path or data set of the archive to be uncompressed. - \ :emphasis:`src`\ can be a USS file or MVS data set name. + *src* can be a USS file or MVS data set name. USS file paths should be absolute paths. - MVS data sets supported types are \ :literal:`SEQ`\ , \ :literal:`PDS`\ , \ :literal:`PDSE`\ . + MVS data sets supported types are ``SEQ``, ``PDS``, ``PDSE``. + + GDS relative names are supported ``e.g. USER.GDG(-1)``. | **required**: True | **type**: str @@ -72,14 +74,14 @@ format If the data set provided exists, the data set must have the following attributes: LRECL=255, BLKSIZE=3120, and RECFM=VB - When providing the \ :emphasis:`xmit\_log\_data\_set`\ name, ensure there is adequate space. + When providing the *xmit_log_data_set* name, ensure there is adequate space. | **required**: False | **type**: str use_adrdssu - If set to true, the \ :literal:`zos\_archive`\ module will use Data Facility Storage Management Subsystem data set services (DFSMSdss) program ADRDSSU to uncompress data sets from a portable format after using \ :literal:`xmit`\ or \ :literal:`terse`\ . + If set to true, the ``zos_archive`` module will use Data Facility Storage Management Subsystem data set services (DFSMSdss) program ADRDSSU to uncompress data sets from a portable format after using ``xmit`` or ``terse``. | **required**: False | **type**: bool @@ -87,7 +89,7 @@ format dest_volumes - When \ :emphasis:`use\_adrdssu=True`\ , specify the volume the data sets will be written to. + When *use_adrdssu=True*, specify the volume the data sets will be written to. If no volume is specified, storage management rules will be used to determine the volume where the file will be unarchived. @@ -103,7 +105,7 @@ format dest The remote absolute path or data set where the content should be unarchived to. - \ :emphasis:`dest`\ can be a USS file, directory or MVS data set name. + *dest* can be a USS file, directory or MVS data set name. If dest has missing parent directories, they will not be created. @@ -116,7 +118,7 @@ group When left unspecified, it uses the current group of the current user unless you are root, in which case it can preserve the previous ownership. - This option is only applicable if \ :literal:`dest`\ is USS, otherwise ignored. + This option is only applicable if ``dest`` is USS, otherwise ignored. | **required**: False | **type**: str @@ -125,13 +127,13 @@ group mode The permission of the uncompressed files. - If \ :literal:`dest`\ is USS, this will act as Unix file mode, otherwise ignored. + If ``dest`` is USS, this will act as Unix file mode, otherwise ignored. - It should be noted that modes are octal numbers. The user must either add a leading zero so that Ansible's YAML parser knows it is an octal number (like \ :literal:`0644`\ or \ :literal:`01777`\ )or quote it (like \ :literal:`'644'`\ or \ :literal:`'1777'`\ ) so Ansible receives a string and can do its own conversion from string into number. Giving Ansible a number without following one of these rules will end up with a decimal number which will have unexpected results. + It should be noted that modes are octal numbers. The user must either add a leading zero so that Ansible's YAML parser knows it is an octal number (like ``0644`` or ``01777``)or quote it (like ``'644'`` or ``'1777'``) so Ansible receives a string and can do its own conversion from string into number. Giving Ansible a number without following one of these rules will end up with a decimal number which will have unexpected results. - The mode may also be specified as a symbolic mode (for example, \`\`u+rwx\`\` or \`\`u=rw,g=r,o=r\`\`) or a special string \`preserve\`. + The mode may also be specified as a symbolic mode (for example, ``u+rwx`` or ``u=rw,g=r,o=r``) or a special string `preserve`. - \ :emphasis:`mode=preserve`\ means that the file will be given the same permissions as the source file. + *mode=preserve* means that the file will be given the same permissions as the source file. | **required**: False | **type**: str @@ -149,7 +151,9 @@ owner include A list of directories, files or data set names to extract from the archive. - When \ :literal:`include`\ is set, only those files will we be extracted leaving the remaining files in the archive. + GDS relative names are supported ``e.g. USER.GDG(-1)``. + + When ``include`` is set, only those files will we be extracted leaving the remaining files in the archive. Mutually exclusive with exclude. @@ -161,6 +165,8 @@ include exclude List the directory and file or data set names that you would like to exclude from the unarchive action. + GDS relative names are supported ``e.g. USER.GDG(-1)``. + Mutually exclusive with include. | **required**: False @@ -177,7 +183,7 @@ list dest_data_set - Data set attributes to customize a \ :literal:`dest`\ data set that the archive will be copied into. + Data set attributes to customize a ``dest`` data set that the archive will be copied into. | **required**: False | **type**: dict @@ -200,18 +206,18 @@ dest_data_set space_primary - If the destination \ :emphasis:`dest`\ data set does not exist , this sets the primary space allocated for the data set. + If the destination *dest* data set does not exist , this sets the primary space allocated for the data set. - The unit of space used is set using \ :emphasis:`space\_type`\ . + The unit of space used is set using *space_type*. | **required**: False | **type**: int space_secondary - If the destination \ :emphasis:`dest`\ data set does not exist , this sets the secondary space allocated for the data set. + If the destination *dest* data set does not exist , this sets the secondary space allocated for the data set. - The unit of space used is set using \ :emphasis:`space\_type`\ . + The unit of space used is set using *space_type*. | **required**: False | **type**: int @@ -220,7 +226,7 @@ dest_data_set space_type If the destination data set does not exist, this sets the unit of measurement to use when defining primary and secondary space. - Valid units of size are \ :literal:`k`\ , \ :literal:`m`\ , \ :literal:`g`\ , \ :literal:`cyl`\ , and \ :literal:`trk`\ . + Valid units of size are ``k``, ``m``, ``g``, ``cyl``, and ``trk``. | **required**: False | **type**: str @@ -228,7 +234,7 @@ dest_data_set record_format - If the destination data set does not exist, this sets the format of the data set. (e.g \ :literal:`fb`\ ) + If the destination data set does not exist, this sets the format of the data set. (e.g ``fb``) Choices are case-sensitive. @@ -265,9 +271,9 @@ dest_data_set key_offset The key offset to use when creating a KSDS data set. - \ :emphasis:`key\_offset`\ is required when \ :emphasis:`type=ksds`\ . + *key_offset* is required when *type=ksds*. - \ :emphasis:`key\_offset`\ should only be provided when \ :emphasis:`type=ksds`\ + *key_offset* should only be provided when *type=ksds* | **required**: False | **type**: int @@ -276,9 +282,9 @@ dest_data_set key_length The key length to use when creating a KSDS data set. - \ :emphasis:`key\_length`\ is required when \ :emphasis:`type=ksds`\ . + *key_length* is required when *type=ksds*. - \ :emphasis:`key\_length`\ should only be provided when \ :emphasis:`type=ksds`\ + *key_length* should only be provided when *type=ksds* | **required**: False | **type**: int @@ -327,7 +333,7 @@ dest_data_set tmp_hlq Override the default high level qualifier (HLQ) for temporary data sets. - The default HLQ is the Ansible user used to execute the module and if that is not available, then the environment variable value \ :literal:`TMPHLQ`\ is used. + The default HLQ is the Ansible user used to execute the module and if that is not available, then the environment variable value ``TMPHLQ`` is used. | **required**: False | **type**: str @@ -342,9 +348,9 @@ force remote_src - If set to true, \ :literal:`zos\_unarchive`\ retrieves the archive from the remote system. + If set to true, ``zos_unarchive`` retrieves the archive from the remote system. - If set to false, \ :literal:`zos\_unarchive`\ searches the local machine (Ansible controller) for the archive. + If set to false, ``zos_unarchive`` searches the local machine (Ansible controller) for the archive. | **required**: False | **type**: bool @@ -385,6 +391,13 @@ Examples - USER.ARCHIVE.TEST1 - USER.ARCHIVE.TEST2 + # Unarchive a GDS + - name: Unarchive a terse data set and excluding data sets from unpacking. + zos_unarchive: + src: "USER.ARCHIVE(0)" + format: + name: terse + # List option - name: List content from XMIT zos_unarchive: @@ -404,7 +417,7 @@ Notes .. note:: VSAMs are not supported. - This module uses \ `zos\_copy <./zos_copy.html>`__\ to copy local scripts to the remote machine which uses SFTP (Secure File Transfer Protocol) for the underlying transfer protocol; SCP (secure copy protocol) and Co:Z SFTP are not supported. In the case of Co:z SFTP, you can exempt the Ansible user id on z/OS from using Co:Z thus falling back to using standard SFTP. If the module detects SCP, it will temporarily use SFTP for transfers, if not available, the module will fail. + This module uses `zos_copy <./zos_copy.html>`_ to copy local scripts to the remote machine which uses SFTP (Secure File Transfer Protocol) for the underlying transfer protocol; SCP (secure copy protocol) and Co:Z SFTP are not supported. In the case of Co:z SFTP, you can exempt the Ansible user id on z/OS from using Co:Z thus falling back to using standard SFTP. If the module detects SCP, it will temporarily use SFTP for transfers, if not available, the module will fail. diff --git a/docs/source/modules/zos_volume_init.rst b/docs/source/modules/zos_volume_init.rst index a2b6f25ab..5647ad998 100644 --- a/docs/source/modules/zos_volume_init.rst +++ b/docs/source/modules/zos_volume_init.rst @@ -17,14 +17,14 @@ zos_volume_init -- Initialize volumes or minidisks. Synopsis -------- - Initialize a volume or minidisk on z/OS. -- \ :emphasis:`zos\_volume\_init`\ will create the volume label and entry into the volume table of contents (VTOC). +- *zos_volume_init* will create the volume label and entry into the volume table of contents (VTOC). - Volumes are used for storing data and executable programs. - A minidisk is a portion of a disk that is linked to your virtual machine. - A VTOC lists the data sets that reside on a volume, their location, size, and other attributes. -- \ :emphasis:`zos\_volume\_init`\ uses the ICKDSF command INIT to initialize a volume. In some cases the command could be protected by facility class \`STGADMIN.ICK.INIT\`. Protection occurs when the class is active, and the class profile is defined. Ensure the user executing the Ansible task is permitted to execute ICKDSF command INIT, otherwise, any user can use the command. -- ICKDSF is an Authorized Program Facility (APF) program on z/OS, \ :emphasis:`zos\_volume\_init`\ will run in authorized mode but if the program ICKDSF is not APF authorized, the task will end. +- *zos_volume_init* uses the ICKDSF command INIT to initialize a volume. In some cases the command could be protected by facility class `STGADMIN.ICK.INIT`. Protection occurs when the class is active, and the class profile is defined. Ensure the user executing the Ansible task is permitted to execute ICKDSF command INIT, otherwise, any user can use the command. +- ICKDSF is an Authorized Program Facility (APF) program on z/OS, *zos_volume_init* will run in authorized mode but if the program ICKDSF is not APF authorized, the task will end. - Note that defaults set on target z/OS systems may override ICKDSF parameters. -- If is recommended that data on the volume is backed up as the \ :emphasis:`zos\_volume\_init`\ module will not perform any backups. You can use the \ `zos\_backup\_restore <./zos_backup_restore.html>`__\ module to backup a volume. +- If is recommended that data on the volume is backed up as the *zos_volume_init* module will not perform any backups. You can use the `zos_backup_restore <./zos_backup_restore.html>`_ module to backup a volume. @@ -35,9 +35,9 @@ Parameters address - \ :emphasis:`address`\ is a 3 or 4 digit hexadecimal number that specifies the address of the volume or minidisk. + *address* is a 3 or 4 digit hexadecimal number that specifies the address of the volume or minidisk. - \ :emphasis:`address`\ can be the number assigned to the device (device number) when it is installed or the virtual address. + *address* can be the number assigned to the device (device number) when it is installed or the virtual address. | **required**: True | **type**: str @@ -46,15 +46,15 @@ address verify_volid Verify that the volume serial matches what is on the existing volume or minidisk. - \ :emphasis:`verify\_volid`\ must be 1 to 6 alphanumeric characters or \ :literal:`\*NONE\*`\ . + *verify_volid* must be 1 to 6 alphanumeric characters or ``*NONE*``. - To verify that a volume serial number does not exist, use \ :emphasis:`verify\_volid=\*NONE\*`\ . + To verify that a volume serial number does not exist, use *verify_volid=*NONE**. - If \ :emphasis:`verify\_volid`\ is specified and the volume serial number does not match that found on the volume or minidisk, initialization does not complete. + If *verify_volid* is specified and the volume serial number does not match that found on the volume or minidisk, initialization does not complete. - If \ :emphasis:`verify\_volid=\*NONE\*`\ is specified and a volume serial is found on the volume or minidisk, initialization does not complete. + If *verify_volid=*NONE** is specified and a volume serial is found on the volume or minidisk, initialization does not complete. - Note, this option is \ :strong:`not`\ a boolean, leave it blank to skip the verification. + Note, this option is **not** a boolean, leave it blank to skip the verification. | **required**: False | **type**: str @@ -73,11 +73,11 @@ volid Expects 1-6 alphanumeric, national ($,#,@) or special characters. - A \ :emphasis:`volid`\ with less than 6 characters will be padded with spaces. + A *volid* with less than 6 characters will be padded with spaces. - A \ :emphasis:`volid`\ can also be referred to as volser or volume serial number. + A *volid* can also be referred to as volser or volume serial number. - When \ :emphasis:`volid`\ is not specified for a previously initialized volume or minidisk, the volume serial number will remain unchanged. + When *volid* is not specified for a previously initialized volume or minidisk, the volume serial number will remain unchanged. | **required**: False | **type**: str @@ -99,7 +99,7 @@ index The VTOC index enhances the performance of VTOC access. - When set to \ :emphasis:`false`\ , no index will be created. + When set to *false*, no index will be created. | **required**: False | **type**: bool @@ -109,7 +109,7 @@ index sms_managed Specifies that the volume be managed by Storage Management System (SMS). - If \ :emphasis:`sms\_managed`\ is \ :emphasis:`true`\ then \ :emphasis:`index`\ must also be \ :emphasis:`true`\ . + If *sms_managed* is *true* then *index* must also be *true*. | **required**: False | **type**: bool @@ -127,7 +127,7 @@ verify_volume_empty tmp_hlq Override the default high level qualifier (HLQ) for temporary and backup datasets. - The default HLQ is the Ansible user used to execute the module and if that is not available, then the value \ :literal:`TMPHLQ`\ is used. + The default HLQ is the Ansible user used to execute the module and if that is not available, then the value ``TMPHLQ`` is used. | **required**: False | **type**: str diff --git a/docs/source/release_notes.rst b/docs/source/release_notes.rst index c8c2f6e96..45f3f100a 100644 --- a/docs/source/release_notes.rst +++ b/docs/source/release_notes.rst @@ -6,6 +6,110 @@ Releases ======== +Version 1.11.0-beta.1 +===================== + +Minor Changes +------------- + +- ``zos_apf`` - Added support that auto-escapes 'library' names containing symbols. +- ``zos_archive`` - Added support for GDG and GDS relative name notation to archive data sets. Added support for data set names with special characters like $, /#, /- and @. +- ``zos_backup_restore`` - Added support for GDS relative name notation to include or exclude data sets when operation is backup. Added support for data set names with special characters like $, /#, and @. +- ``zos_blockinfile`` - Added support for GDG and GDS relative name notation to specify a data set. And backup in new generations. Added support for data set names with special characters like $, /#, /- and @. +- ``zos_copy`` - Added support for copying from and copying to generation data sets (GDS) and generation data groups (GDG) including using a GDS for backup. +- ``zos_data_set`` - Added support for GDG and GDS relative name notation to create, delete, catalog and uncatalog a data set. Added support for data set names with special characters like $, /#, /- and @. +- ``zos_encode`` - Added support for converting the encodings of generation data sets (GDS). Also added support to backup into GDS. +- ``zos_fetch`` - Added support for fetching generation data groups (GDG) and generation data sets (GDS). Added support for specifying data set names with special characters like $, /#, /- and @. +- ``zos_find`` - Added support for finding generation data groups (GDG) and generation data sets (GDS). Added support for specifying data set names with special characters like $, /#, /- and @. +- ``zos_job_submit`` + + - Improved the mechanism for copying to remote systems by removing the use of deepcopy, which had previously resulted in the module failing on some systems. + - Added support for running JCL stored in generation data groups (GDG) and generation data sets (GDS). + +- ``zos_lineinfile`` - Added support for GDG and GDS relative name notation to specify the target data set and to backup into new generations. Added support for data set names with special characters like $, /#, /- and @. +- ``zos_mount`` - Added support for data set names with special characters ($, /#, /- and @). +- ``zos_mvs_raw`` - Added support for GDG and GDS relative name notation to specify data set names. Added support for data set names with special characters like $, /#, /- and @. +- ``zos_script`` - Improved the mechanism for copying to remote systems by removing the use of deepcopy, which had previously resulted in the module failing on some systems. +- ``zos_tso_command`` - Added support for using GDG and GDS relative name notation in running TSO commands. Added support for data set names with special characters like $, /#, /- and @. +- ``zos_unarchive`` + + - Added support for data set names with special characters like $, /#, /- and @. + - Improved the mechanism for copying to remote systems by removing the use of deepcopy, which had previously resulted in the module failing on some systems. + +Bugfixes +-------- + +- ``zos_copy`` + + - a regression in version 1.4.0 made the module stop automatically computing member names when copying a single file into a PDS/E. Fix now lets a user copy a single file into a PDS/E without adding a member in the dest option. + - module would use opercmd to check if a non existent destination data set is locked. Fix now only checks if the destination is already present. + +- ``zos_data_set`` - When checking if a data set is cataloged, module failed to account for exceptions which occurred during the LISTCAT. The fix now raises an MVSCmdExecError if the return code from LISTCAT is too high. +- ``zos_job_submit`` - The module was not propagating any error types including UnicodeDecodeError, JSONDecodeError, TypeError, KeyError when encountered. The fix now shares the type error in the error message. +- ``zos_mvs_raw`` - The first character of each line in dd_output was missing. The fix now includes the first character of each line. + +Availability +------------ + +* `Galaxy`_ +* `GitHub`_ + +Requirements +------------ + +The IBM z/OS core collection has several dependencies, please review the `z/OS core support matrix`_ to understand both the +controller and z/OS managed node dependencies. + +Known Issues +------------ +- ``zos_job_submit`` - when setting 'location' to 'local' and not specifying the from and to encoding, the modules defaults are not read leaving the file in its original encoding; explicitly set the encodings instead of relying on the default. +- ``zos_job_submit`` - when submitting JCL, the response value returned for **byte_count** is incorrect. +- ``zos_apf`` - When trying to remove a library that contains the '$' character in the name from APF(authorized program facility), operation will fail. +- In the past, choices could be defined in either lower or upper case. Now, only the case that is identified in the docs can be set, this is so that the collection can continue to maintain certified status. + + +Version 1.9.2 +============= + +Bugfixes +-------- + +- ``zos_copy`` - when creating the destination data set, the module would unnecessarily check if a data set is locked by another process. The module no longer performs this check when it creates the data set. + +Availability +------------ + +* `Automation Hub`_ +* `Galaxy`_ +* `GitHub`_ + +Requirements +------------ + +The IBM z/OS core collection has several dependencies, please review the `z/OS core support matrix`_ to understand both the +controller and z/OS managed node dependencies. + +Known Issues +------------ + +- ``zos_job_submit`` - when setting 'location' to 'LOCAL' and not specifying the from and to encoding, the modules defaults are not read leaving the file in its original encoding; explicitly set the encodings instead of relying on the default. +- ``zos_job_submit`` - when submitting JCL, the response value returned for **byte_count** is incorrect. + +- ``zos_job_submit``, ``zos_job_output``, ``zos_operator_action_query`` - encounters UTF-8 decoding errors when interacting with results that contain non-printable UTF-8 characters in the response. This has been addressed in this release and corrected with **ZOAU version 1.2.5.6** or later. + + - If the appropriate level of ZOAU can not be installed, some options are to: + + - Specify that the ASA assembler option be enabled to instruct the assembler to use ANSI control characters instead of machine code control characters. + - Ignore module errors by using **ignore_errors:true** for a specific playbook task. + - If the error is resulting from a batch job, add **ignore_errors:true** to the task and capture the output into a registered variable to extract the + job ID with a regular expression. Then use ``zos_job_output`` to display the DD without the non-printable character such as the DD **JESMSGLG**. + - If the error is the result of a batch job, set option **return_output** to false so that no DDs are read which could contain the non-printable UTF-8 characters. + +- ``zos_data_set`` - An undocumented option **size** was defined in module **zos_data_set**, this has been removed to satisfy collection certification, use the intended and documented **space_primary** option. + +- In the past, choices could be defined in either lower or upper case. Now, only the case that is identified in the docs can be set, this is so that the collection can continue to maintain certified status. + + Version 1.10.0 ============== @@ -134,19 +238,6 @@ Bugfixes - ``zos_find`` - Option size failed if a PDS/E matched the pattern, now filtering on utilized size for a PDS/E is supported. - ``zos_mvs_raw`` - Option **tmp_hlq** when creating temporary data sets was previously ignored, now the option honors the High Level Qualifier for temporary data sets created during the module execution. -Availability ------------- - -* `Automation Hub`_ -* `Galaxy`_ -* `GitHub`_ - -Requirements ------------- - -The IBM z/OS core collection has several dependencies, please review the `z/OS core support matrix`_ to understand both the -controller and z/OS managed node dependencies. - Known Issues ------------ @@ -165,7 +256,18 @@ Known Issues - ``zos_data_set`` - An undocumented option **size** was defined in module **zos_data_set**, this has been removed to satisfy collection certification, use the intended and documented **space_primary** option. -- In the past, choices could be defined in either lower or upper case. Now, only the case that is identified in the docs can be set, this is so that the collection can continue to maintain certified status. +Availability +------------ + +* `Automation Hub`_ +* `Galaxy`_ +* `GitHub`_ + +Requirements +------------ + +The IBM z/OS core collection has several dependencies, please review the `z/OS core support matrix`_ to understand both the +controller and z/OS managed node dependencies. Version 1.9.0 ============= diff --git a/docs/source/resources/releases_maintenance.rst b/docs/source/resources/releases_maintenance.rst index 391456769..9a5adbce8 100644 --- a/docs/source/resources/releases_maintenance.rst +++ b/docs/source/resources/releases_maintenance.rst @@ -89,6 +89,11 @@ The z/OS managed node includes several shells, currently the only supported shel +---------+----------------------------+---------------------------------------------------+---------------+---------------+ | Version | Controller | Managed Node | GA | End of Life | +=========+============================+===================================================+===============+===============+ +| 1.11.x |- `ansible-core`_ >=2.15.x |- `z/OS`_ V2R4 - V3Rx | In preview | TBD | +| |- `Ansible`_ >=8.0.x |- `z/OS shell`_ | | | +| |- `AAP`_ >=2.4 |- IBM `Open Enterprise SDK for Python`_ | | | +| | |- IBM `Z Open Automation Utilities`_ >=1.3.1 | | | ++---------+----------------------------+---------------------------------------------------+---------------+---------------+ | 1.10.x |- `ansible-core`_ >=2.15.x |- `z/OS`_ V2R4 - V2Rx | 21 June 2024 | 21 June 2026 | | |- `Ansible`_ >=8.0.x |- `z/OS shell`_ | | | | |- `AAP`_ >=2.4 |- IBM `Open Enterprise SDK for Python`_ | | | diff --git a/galaxy.yml b/galaxy.yml index 2e9d280dc..910442ef8 100644 --- a/galaxy.yml +++ b/galaxy.yml @@ -6,7 +6,7 @@ namespace: ibm name: ibm_zos_core # The collection version -version: "1.10.0" +version: "1.11.0-beta.1" # Collection README file readme: README.md diff --git a/meta/ibm_zos_core_meta.yml b/meta/ibm_zos_core_meta.yml index 5bc58ec94..16ee31ca9 100644 --- a/meta/ibm_zos_core_meta.yml +++ b/meta/ibm_zos_core_meta.yml @@ -1,5 +1,5 @@ name: ibm_zos_core -version: "1.10.0" +version: "1.11.0-beta.1" managed_requirements: - name: "IBM Open Enterprise SDK for Python" @@ -7,4 +7,4 @@ managed_requirements: - name: "Z Open Automation Utilities" version: - - ">=1.3.0" + - ">=1.3.1" diff --git a/plugins/action/zos_fetch.py b/plugins/action/zos_fetch.py index c3e4ec1ee..4d0a0c11b 100644 --- a/plugins/action/zos_fetch.py +++ b/plugins/action/zos_fetch.py @@ -276,7 +276,7 @@ def run(self, tmp=None, task_vars=None): local_checksum = _get_file_checksum(dest) # ********************************************************** # - # Fetch remote data. + # Fetch remote data. # # ********************************************************** # try: if ds_type in SUPPORTED_DS_TYPES: diff --git a/plugins/action/zos_script.py b/plugins/action/zos_script.py index e481052a5..d51c48ddf 100644 --- a/plugins/action/zos_script.py +++ b/plugins/action/zos_script.py @@ -1,4 +1,4 @@ -# Copyright (c) IBM Corporation 2023 +# Copyright (c) IBM Corporation 2023, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/plugins/doc_fragments/template.py b/plugins/doc_fragments/template.py index 1eea4ad3d..2215c0a4a 100644 --- a/plugins/doc_fragments/template.py +++ b/plugins/doc_fragments/template.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2022, 2023 +# Copyright (c) IBM Corporation 2022, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/plugins/doc_fragments/template.py-e b/plugins/doc_fragments/template.py-e new file mode 100644 index 000000000..af96f7b9d --- /dev/null +++ b/plugins/doc_fragments/template.py-e @@ -0,0 +1,120 @@ +# -*- coding: utf-8 -*- + +# Copyright (c) IBM Corporation 2022, 2024 +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + + +class ModuleDocFragment(object): + + DOCUMENTATION = r''' +options: + use_template: + description: + - Whether the module should treat C(src) as a Jinja2 template and + render it before continuing with the rest of the module. + - Only valid when C(src) is a local file or directory. + - All variables defined in inventory files, vars files and the playbook + will be passed to the template engine, + as well as L(Ansible special variables,https://docs.ansible.com/ansible/latest/reference_appendices/special_variables.html#special-variables), + such as C(playbook_dir), C(ansible_version), etc. + - If variables defined in different scopes share the same name, Ansible will + apply variable precedence to them. You can see the complete precedence order + L(in Ansible's documentation,https://docs.ansible.com/ansible/latest/playbook_guide/playbooks_variables.html#understanding-variable-precedence) + type: bool + default: false + template_parameters: + description: + - Options to set the way Jinja2 will process templates. + - Jinja2 already sets defaults for the markers it uses, you can find more + information at its L(official documentation,https://jinja.palletsprojects.com/en/latest/templates/). + - These options are ignored unless C(use_template) is true. + required: false + type: dict + suboptions: + variable_start_string: + description: + - Marker for the beginning of a statement to print a variable in Jinja2. + type: str + default: '{{' + variable_end_string: + description: + - Marker for the end of a statement to print a variable in Jinja2. + type: str + default: '}}' + block_start_string: + description: + - Marker for the beginning of a block in Jinja2. + type: str + default: '{%' + block_end_string: + description: + - Marker for the end of a block in Jinja2. + type: str + default: '%}' + comment_start_string: + description: + - Marker for the beginning of a comment in Jinja2. + type: str + default: '{#' + comment_end_string: + description: + - Marker for the end of a comment in Jinja2. + type: str + default: '#}' + line_statement_prefix: + description: + - Prefix used by Jinja2 to identify line-based statements. + type: str + required: false + line_comment_prefix: + description: + - Prefix used by Jinja2 to identify comment lines. + type: str + required: false + lstrip_blocks: + description: + - Whether Jinja2 should strip leading spaces from the start of a line + to a block. + type: bool + default: false + trim_blocks: + description: + - Whether Jinja2 should remove the first newline after a block is removed. + - Setting this option to C(False) will result in newlines being added to + the rendered template. This could create invalid code when working with + JCL templates or empty records in destination data sets. + type: bool + default: true + keep_trailing_newline: + description: + - Whether Jinja2 should keep the first trailing newline at the end of a + template after rendering. + type: bool + default: false + newline_sequence: + description: + - Sequence that starts a newline in a template. + type: str + default: '\\n' + choices: + - '\\n' + - '\\r' + - "\r\n" + auto_reload: + description: + - Whether to reload a template file when it has changed after the task + has started. + type: bool + default: false +''' diff --git a/plugins/module_utils/backup.py b/plugins/module_utils/backup.py index 8499361b6..716e0d3b2 100644 --- a/plugins/module_utils/backup.py +++ b/plugins/module_utils/backup.py @@ -139,7 +139,10 @@ def mvs_file_backup(dsn, bk_dsn=None, tmphlq=None): rc, out, err = _copy_pds(dsn, bk_dsn) if rc != 0: raise BackupError( - "Unable to backup data set {0} to {1}".format(dsn, bk_dsn) + "Unable to backup data set {0} to {1}.".format(dsn, bk_dsn), + rc=rc, + stdout=out, + stderr=err ) return bk_dsn diff --git a/plugins/module_utils/data_set.py b/plugins/module_utils/data_set.py index d3d8123c3..7b81fe2d1 100644 --- a/plugins/module_utils/data_set.py +++ b/plugins/module_utils/data_set.py @@ -1999,24 +1999,24 @@ def create(self, tmp_hlq=None, replace=True, force=False): Indicates if changes were made. """ arguments = { - "name" : self.name, - "raw_name" : self.raw_name, - "type" : self.data_set_type, - "space_primary" : self.space_primary, - "space_secondary" : self.space_secondary, - "space_type" : self.space_type, - "record_format" : self.record_format, - "record_length" : self.record_length, - "block_size" : self.block_size, - "directory_blocks" : self.directory_blocks, - "key_length" : self.key_length, - "key_offset" : self.key_offset, - "sms_storage_class" : self.sms_storage_class, - "sms_data_class" : self.sms_data_class, - "sms_management_class" : self.sms_management_class, - "volumes" : self.volumes, - "tmp_hlq" : tmp_hlq, - "force" : force, + "name": self.name, + "raw_name": self.raw_name, + "type": self.data_set_type, + "space_primary": self.space_primary, + "space_secondary": self.space_secondary, + "space_type": self.space_type, + "record_format": self.record_format, + "record_length": self.record_length, + "block_size": self.block_size, + "directory_blocks": self.directory_blocks, + "key_length": self.key_length, + "key_offset": self.key_offset, + "sms_storage_class": self.sms_storage_class, + "sms_data_class": self.sms_data_class, + "sms_management_class": self.sms_management_class, + "volumes": self.volumes, + "tmp_hlq": tmp_hlq, + "force": force, } formatted_args = DataSet._build_zoau_args(**arguments) changed = False @@ -2048,25 +2048,25 @@ def ensure_present(self, tmp_hlq=None, replace=False, force=False): Indicates if changes were made. """ arguments = { - "name" : self.name, - "raw_name" : self.raw_name, - "type" : self.data_set_type, - "space_primary" : self.space_primary, - "space_secondary" : self.space_secondary, - "space_type" : self.space_type, - "record_format" : self.record_format, - "record_length" : self.record_length, - "block_size" : self.block_size, - "directory_blocks" : self.directory_blocks, - "key_length" : self.key_length, - "key_offset" : self.key_offset, - "sms_storage_class" : self.sms_storage_class, - "sms_data_class" : self.sms_data_class, - "sms_management_class" : self.sms_management_class, - "volumes" : self.volumes, - "replace" : replace, - "tmp_hlq" : tmp_hlq, - "force" : force, + "name": self.name, + "raw_name": self.raw_name, + "type": self.data_set_type, + "space_primary": self.space_primary, + "space_secondary": self.space_secondary, + "space_type": self.space_type, + "record_format": self.record_format, + "record_length": self.record_length, + "block_size": self.block_size, + "directory_blocks": self.directory_blocks, + "key_length": self.key_length, + "key_offset": self.key_offset, + "sms_storage_class": self.sms_storage_class, + "sms_data_class": self.sms_data_class, + "sms_management_class": self.sms_management_class, + "volumes": self.volumes, + "replace": replace, + "tmp_hlq": tmp_hlq, + "force": force, } rc = DataSet.ensure_present(**arguments) self.set_state("present") diff --git a/plugins/module_utils/vtoc.py b/plugins/module_utils/vtoc.py index 309d73c1e..3cae4fd92 100644 --- a/plugins/module_utils/vtoc.py +++ b/plugins/module_utils/vtoc.py @@ -1,4 +1,4 @@ -# Copyright (c) IBM Corporation 2020 +# Copyright (c) IBM Corporation 2020, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/plugins/modules/zos_apf.py b/plugins/modules/zos_apf.py index 024ef8baa..ceeea04de 100644 --- a/plugins/modules/zos_apf.py +++ b/plugins/modules/zos_apf.py @@ -292,6 +292,7 @@ type: str ''' +import os import re import json from ansible.module_utils._text import to_text @@ -312,7 +313,7 @@ # supported data set types -DS_TYPE = ['PS', 'PO'] +DS_TYPE = data_set.DataSet.MVS_SEQ.union(data_set.DataSet.MVS_PARTITIONED) def backupOper(module, src, backup, tmphlq=None): @@ -340,11 +341,15 @@ def backupOper(module, src, backup, tmphlq=None): fail_json Creating backup has failed. """ - # analysis the file type - ds_utils = data_set.DataSetUtils(src) - file_type = ds_utils.ds_type() + file_type = None + if data_set.is_data_set(src): + file_type = data_set.DataSet.data_set_type(src) + else: + if os.path.exists(src): + file_type = 'USS' + if file_type != 'USS' and file_type not in DS_TYPE: - message = "{0} data set type is NOT supported".format(str(file_type)) + message = "Dataset {0} of type {1} is NOT supported".format(src, str(file_type)) module.fail_json(msg=message) # backup can be True(bool) or none-zero length string. string indicates that backup_name was provided. @@ -357,8 +362,17 @@ def backupOper(module, src, backup, tmphlq=None): backup_name = Backup.uss_file_backup(src, backup_name=backup, compress=False) else: backup_name = Backup.mvs_file_backup(dsn=src, bk_dsn=backup, tmphlq=tmphlq) + except Backup.BackupError as exc: + module.fail_json( + msg=exc.msg, + rc=exc.rc, + stdout=exc.stdout, + stderr=exc.stderr + ) except Exception: - module.fail_json(msg="creating backup has failed") + module.fail_json( + msg="An error ocurred during backup." + ) return backup_name diff --git a/plugins/modules/zos_archive.py b/plugins/modules/zos_archive.py index b9c825902..52fdd9585 100644 --- a/plugins/modules/zos_archive.py +++ b/plugins/modules/zos_archive.py @@ -380,7 +380,7 @@ format: name: terse format_options: - use_adrdssu: True + use_adrdssu: true - name: Archive multiple data sets into a new GDS zos_archive: @@ -389,7 +389,7 @@ format: name: terse format_options: - use_adrdssu: True + use_adrdssu: true ''' RETURN = r''' diff --git a/plugins/modules/zos_blockinfile.py b/plugins/modules/zos_blockinfile.py index a5fd05f45..ab6d2a0dd 100644 --- a/plugins/modules/zos_blockinfile.py +++ b/plugins/modules/zos_blockinfile.py @@ -39,7 +39,7 @@ PS (sequential data set), member of a PDS or PDSE, PDS, PDSE. - The USS file must be an absolute pathname. - Generation data set (GDS) relative name of generation already - created. C(e.g. SOME.CREATION(-1).) + created. ``e.g. SOME.CREATION(-1).`` type: str aliases: [ path, destfile, name ] required: true @@ -293,7 +293,7 @@ zos_blockinfile: src: SOME.CREATION.TEST insertbefore: BOF - backup: True + backup: true backup_name: CREATION.GDS(+1) block: "{{ CONTENT }}" ''' diff --git a/plugins/modules/zos_encode.py b/plugins/modules/zos_encode.py index 40b70a0fd..a17fcb7ed 100644 --- a/plugins/modules/zos_encode.py +++ b/plugins/modules/zos_encode.py @@ -616,7 +616,7 @@ def run_module(): result["dest"] = dest if ds_type_dest == "GDG": - raise EncodeError("Encoding of a whole generation data group is not yet supported.") + raise EncodeError("Encoding of a whole generation data group is not supported.") new_src = src_data_set.name if src_data_set else src new_dest = dest_data_set.name if dest_data_set else dest diff --git a/plugins/modules/zos_find.py b/plugins/modules/zos_find.py index 4bea0539d..e45595133 100644 --- a/plugins/modules/zos_find.py +++ b/plugins/modules/zos_find.py @@ -234,7 +234,6 @@ limit: 30 scratch: true purge: true - """ diff --git a/plugins/modules/zos_job_submit.py b/plugins/modules/zos_job_submit.py index e6e191060..d91b511c3 100644 --- a/plugins/modules/zos_job_submit.py +++ b/plugins/modules/zos_job_submit.py @@ -36,9 +36,9 @@ description: - The source file or data set containing the JCL to submit. - It could be a physical sequential data set, a partitioned data set - qualified by a member or a path (e.g. C(USER.TEST), V(USER.JCL(TEST\))), + qualified by a member or a path (e.g. C(USER.TEST), ``USER.JCL(TEST)``), or a generation data set from a generation data group - (for example, V(USER.TEST.GDG(-2\))). + (for example, ``USER.TEST.GDG(-2)``). - Or a USS file. (e.g C(/u/tester/demo/sample.jcl)) - Or a LOCAL file in ansible control node. (e.g C(/User/tester/ansible-playbook/sample.jcl)) diff --git a/plugins/modules/zos_lineinfile.py b/plugins/modules/zos_lineinfile.py index d3aa3b6b6..c5f262fe0 100644 --- a/plugins/modules/zos_lineinfile.py +++ b/plugins/modules/zos_lineinfile.py @@ -37,7 +37,7 @@ PS (sequential data set), member of a PDS or PDSE, PDS, PDSE. - The USS file must be an absolute pathname. - Generation data set (GDS) relative name of generation already - created. C(e.g. SOME.CREATION(-1).) + created. ``e.g. SOME.CREATION(-1).`` type: str aliases: [ path, destfile, name ] required: true @@ -251,7 +251,7 @@ zos_lineinfile: src: SOME.CREATION.TEST insertafter: EOF - backup: True + backup: true backup_name: CREATION.GDS(+1) line: 'Should be a working test now' """ diff --git a/plugins/modules/zos_mvs_raw.py b/plugins/modules/zos_mvs_raw.py index e3c8d4c6d..0a9394b67 100644 --- a/plugins/modules/zos_mvs_raw.py +++ b/plugins/modules/zos_mvs_raw.py @@ -89,7 +89,7 @@ description: - The data set name. - A data set name can be a GDS relative name. - - When using GDS relative name and it is a positive generation, disposition new must be used. + - When using GDS relative name and it is a positive generation, I(disposition=new) must be used. type: str required: false type: @@ -708,7 +708,7 @@ description: - The data set name. - A data set name can be a GDS relative name. - - When using GDS relative name and it is a positive generation, disposition new must be used. + - When using GDS relative name and it is a positive generation, I(disposition=new) must be used. type: str required: false type: diff --git a/plugins/modules/zos_unarchive.py b/plugins/modules/zos_unarchive.py index 258d9972b..f5febbf90 100644 --- a/plugins/modules/zos_unarchive.py +++ b/plugins/modules/zos_unarchive.py @@ -36,7 +36,7 @@ - I(src) can be a USS file or MVS data set name. - USS file paths should be absolute paths. - MVS data sets supported types are C(SEQ), C(PDS), C(PDSE). - - GDS relative names are supported C(e.g. USER.GDG(-1)). + - GDS relative names are supported ``e.g. USER.GDG(-1)``. type: str required: true format: @@ -146,7 +146,7 @@ description: - A list of directories, files or data set names to extract from the archive. - - GDS relative names are supported C(e.g. USER.GDG(-1)). + - GDS relative names are supported ``e.g. USER.GDG(-1)``. - When C(include) is set, only those files will we be extracted leaving the remaining files in the archive. - Mutually exclusive with exclude. @@ -157,7 +157,7 @@ description: - List the directory and file or data set names that you would like to exclude from the unarchive action. - - GDS relative names are supported C(e.g. USER.GDG(-1)). + - GDS relative names are supported ``e.g. USER.GDG(-1)``. - Mutually exclusive with include. type: list elements: str diff --git a/tests/functional/modules/test_module_security.py b/tests/functional/modules/test_module_security.py index 744d8f595..4c3af3c15 100644 --- a/tests/functional/modules/test_module_security.py +++ b/tests/functional/modules/test_module_security.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2020 +# Copyright (c) IBM Corporation 2020, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/tests/functional/modules/test_zos_apf_func.py b/tests/functional/modules/test_zos_apf_func.py index 918a4d36c..8fe0f4455 100644 --- a/tests/functional/modules/test_zos_apf_func.py +++ b/tests/functional/modules/test_zos_apf_func.py @@ -267,11 +267,6 @@ def test_add_del_volume_persist(ansible_zos_module, volumes_with_vvds): clean_test_env(hosts, test_info) -# keyword: ENABLE-FOR-1-3 -# Test commented because there is a failure in ZOAU 1.2.x, that should be fixed in 1.3.x, so -# whoever works in issue https://github.com/ansible-collections/ibm_zos_core/issues/726 -# should uncomment this test as part of the validation process. - def test_batch_add_del(ansible_zos_module, volumes_with_vvds): try: hosts = ansible_zos_module diff --git a/tests/functional/modules/test_zos_backup_restore.py b/tests/functional/modules/test_zos_backup_restore.py index aa25110f8..1b01bebc7 100644 --- a/tests/functional/modules/test_zos_backup_restore.py +++ b/tests/functional/modules/test_zos_backup_restore.py @@ -852,5 +852,5 @@ def test_backup_into_gds(ansible_zos_module, dstype): assert result.get("changed") is True assert result.get("module_stderr") is None finally: - hosts.all.shell(cmd=f"drm ANSIBLE.* ") + hosts.all.shell(cmd=f"drm ANSIBLE.* ; drm OMVSADM.*") diff --git a/tests/functional/modules/test_zos_blockinfile_func.py b/tests/functional/modules/test_zos_blockinfile_func.py index 2f9e6d3c2..84d0850da 100644 --- a/tests/functional/modules/test_zos_blockinfile_func.py +++ b/tests/functional/modules/test_zos_blockinfile_func.py @@ -1539,17 +1539,39 @@ def test_uss_encoding(ansible_zos_module, encoding): results = hosts.all.zos_blockinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - results = hosts.all.shell(cmd="cat {0}".format(params["path"])) + results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["src"])) + for result in results.contacted.values(): + assert result.get("stdout") == "# BEGIN ANSIBLE MANAGED BLOCK\nZOAU_ROOT=/mvsutil-develop_dsed\nZOAU_HOME=$ZOAU_ROOT\nZOAU_DIR=$ZOAU_ROOT\n# END ANSIBLE MANAGED BLOCK" + + params["src"] = ds_name + "(-1)" + results = hosts.all.zos_blockinfile(**params) + for result in results.contacted.values(): + assert result.get("changed") == 1 + results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["src"])) + for result in results.contacted.values(): + assert result.get("stdout") == "# BEGIN ANSIBLE MANAGED BLOCK\nZOAU_ROOT=/mvsutil-develop_dsed\nZOAU_HOME=$ZOAU_ROOT\nZOAU_DIR=$ZOAU_ROOT\n# END ANSIBLE MANAGED BLOCK" + + params_w_bck = dict(insertafter="eof", block="export ZOAU_ROOT\nexport ZOAU_HOME\nexport ZOAU_DIR", state="present", backup=True, backup_name=ds_name + "(+1)") + params_w_bck["src"] = ds_name + "(-1)" + results = hosts.all.zos_blockinfile(**params_w_bck) + for result in results.contacted.values(): + assert result.get("changed") == 1 + assert result.get("rc") == 0 + backup = ds_name + "(0)" + results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(backup)) + for result in results.contacted.values(): + assert result.get("stdout") == "# BEGIN ANSIBLE MANAGED BLOCK\nZOAU_ROOT=/mvsutil-develop_dsed\nZOAU_HOME=$ZOAU_ROOT\nZOAU_DIR=$ZOAU_ROOT\n# END ANSIBLE MANAGED BLOCK" + + params["src"] = ds_name + "(-3)" + results = hosts.all.zos_blockinfile(**params) for result in results.contacted.values(): - assert result.get("stdout") == EXPECTED_ENCODING + assert result.get("changed") == 0 finally: - remove_uss_environment(ansible_zos_module) + hosts.all.shell(cmd="""drm "ANSIBLE.*" """) @pytest.mark.ds -@pytest.mark.parametrize("dstype", DS_TYPE) -@pytest.mark.parametrize("encoding", ["IBM-1047"]) -def test_ds_encoding(ansible_zos_module, encoding, dstype): +def test_special_characters_ds_insert_block(ansible_zos_module): hosts = ansible_zos_module ds_type = dstype insert_data = "Insert this string" @@ -1592,9 +1614,21 @@ def test_ds_encoding(ansible_zos_module, encoding, dstype): ) results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) for result in results.contacted.values(): - assert result.get("stdout") == EXPECTED_ENCODING + assert result.get("stdout") == "# BEGIN ANSIBLE MANAGED BLOCK\nZOAU_ROOT=/mvsutil-develop_dsed\nZOAU_HOME=$ZOAU_ROOT\nZOAU_DIR=$ZOAU_ROOT\n# END ANSIBLE MANAGED BLOCK" + + params_w_bck = dict(insertafter="eof", block="export ZOAU_ROOT\nexport ZOAU_HOME\nexport ZOAU_DIR", state="present", backup=True, backup_name=backup) + params_w_bck["src"] = ds_name + results = hosts.all.zos_blockinfile(**params_w_bck) + for result in results.contacted.values(): + assert result.get("changed") == 1 + assert result.get("rc") == 0 + backup = backup.replace('$', "\$") + results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(backup)) + for result in results.contacted.values(): + assert result.get("stdout") == "# BEGIN ANSIBLE MANAGED BLOCK\nZOAU_ROOT=/mvsutil-develop_dsed\nZOAU_HOME=$ZOAU_ROOT\nZOAU_DIR=$ZOAU_ROOT\n# END ANSIBLE MANAGED BLOCK" + finally: - remove_ds_environment(ansible_zos_module, ds_name) + hosts.all.shell(cmd="""drm "ANSIBLE.*" """) ######################### diff --git a/tests/functional/modules/test_zos_copy_func.py b/tests/functional/modules/test_zos_copy_func.py index e8e37375c..76c75dd32 100644 --- a/tests/functional/modules/test_zos_copy_func.py +++ b/tests/functional/modules/test_zos_copy_func.py @@ -2348,6 +2348,18 @@ def test_copy_ps_to_existing_uss_file(ansible_zos_module, force): src_ds = TEST_PS dest = "/tmp/ddchkpt" + hosts = ansible_zos_module + mlq_size = 3 + cobol_src_pds = get_tmp_ds_name(mlq_size) + cobol_src_mem = "HELLOCBL" + cobol_src_mem2 = "HICBL2" + src_lib = get_tmp_ds_name(mlq_size) + dest_lib = get_tmp_ds_name(mlq_size) + dest_lib_aliases = get_tmp_ds_name(mlq_size) + pgm_mem = "HELLO" + pgm2_mem = "HELLO2" + pgm_mem_alias = "ALIAS1" + pgm2_mem_alias = "ALIAS2" try: hosts.all.file(path=dest, state="touch") @@ -2372,6 +2384,23 @@ def test_copy_ps_to_existing_uss_file(ansible_zos_module, force): finally: hosts.all.file(path=dest, state="absent") + else: + # copy src loadlib to dest library pds w/o aliases + copy_res = hosts.all.zos_copy( + src="{0}".format(src_lib), + dest="{0}".format(dest_lib), + remote_src=True, + executable=True, + aliases=False + ) + # copy src loadlib to dest library pds w aliases + copy_res_aliases = hosts.all.zos_copy( + src="{0}".format(src_lib), + dest="{0}".format(dest_lib_aliases), + remote_src=True, + executable=True, + aliases=True + ) @pytest.mark.uss @pytest.mark.seq @@ -2414,6 +2443,69 @@ def test_copy_ps_to_non_existing_ps(ansible_zos_module): cmd="cat \"//'{0}'\"".format(dest), executable=SHELL_EXECUTABLE ) + # Copying the remote loadlibs in USS to a local dir. + # This section ONLY handles ONE host, so if we ever use multiple hosts to + # test, we will need to update this code. + remote_user = hosts["options"]["user"] + # Removing a trailing comma because the framework saves the hosts list as a + # string instead of a list. + remote_host = hosts["options"]["inventory"].replace(",", "") + + tmp_folder = tempfile.TemporaryDirectory(prefix="tmpfetch") + cmd = [ + "sftp", + "-r", + f"{remote_user}@{remote_host}:{uss_location}", + f"{tmp_folder.name}" + ] + with subprocess.Popen(cmd, shell=False, stdout=subprocess.PIPE) as sftp_proc: + result = sftp_proc.stdout.read() + + source_path = os.path.join(tmp_folder.name, os.path.basename(uss_location)) + + if not is_created: + # ensure dest data sets absent for this variation of the test case. + hosts.all.zos_data_set(name=dest_lib, state="absent") + else: + # allocate dest loadlib to copy over without an alias. + hosts.all.zos_data_set( + name=dest_lib, + state="present", + type="pdse", + record_format="u", + record_length=0, + block_size=32760, + space_primary=2, + space_type="m", + replace=True + ) + + if not is_created: + # dest data set does not exist, specify it in dest_dataset param. + # copy src loadlib to dest library pds w/o aliases + copy_res = hosts.all.zos_copy( + src=source_path, + dest="{0}".format(dest_lib), + executable=True, + aliases=False, + dest_data_set={ + 'type': "pdse", + 'record_format': "u", + 'record_length': 0, + 'block_size': 32760, + 'space_primary': 2, + 'space_type': "m", + } + ) + else: + # copy src loadlib to dest library pds w/o aliases + copy_res = hosts.all.zos_copy( + src=source_path, + dest="{0}".format(dest_lib), + executable=True, + aliases=False + ) + for result in copy_res.contacted.values(): assert result.get("msg") is None assert result.get("changed") is True @@ -2480,6 +2572,7 @@ def test_copy_ps_to_non_empty_ps(ansible_zos_module, force): assert result.get("rc") == 0 assert result.get("stdout") != "" finally: + hosts.all.shell(cmd='rm -r /tmp/c') hosts.all.zos_data_set(name=dest, state="absent") From 895ecfb533bb9512928d834501b21627469124bb Mon Sep 17 00:00:00 2001 From: Ivan Moreno <iamorenosoto@gmail.com> Date: Fri, 16 Aug 2024 15:38:29 -0600 Subject: [PATCH 453/495] [Enhancement] [zos_operator] Case sensitivity in zos_operator commands (#1641) * Add case_sensitive option to module * Update module doc * Add test for case sensitivity * Fix pylint issues in tests * Update module doc * Add changelog fragment * Update copyright year * Change case in test * Change changelog fragment --- .../1641-case-sensitivity-zos_operator.yml | 4 + docs/source/modules/zos_operator.rst | 12 ++- plugins/modules/zos_operator.py | 22 ++++- .../modules/test_zos_operator_func.py | 90 +++++++++++-------- 4 files changed, 89 insertions(+), 39 deletions(-) create mode 100644 changelogs/fragments/1641-case-sensitivity-zos_operator.yml diff --git a/changelogs/fragments/1641-case-sensitivity-zos_operator.yml b/changelogs/fragments/1641-case-sensitivity-zos_operator.yml new file mode 100644 index 000000000..1079776f9 --- /dev/null +++ b/changelogs/fragments/1641-case-sensitivity-zos_operator.yml @@ -0,0 +1,4 @@ +minor_changes: + - zos_operator - Added new option ``case_sensitive`` to module, allowing users + to control how case in a command is handled by it. + (https://github.com/ansible-collections/ibm_zos_core/pull/1641) \ No newline at end of file diff --git a/docs/source/modules/zos_operator.rst b/docs/source/modules/zos_operator.rst index 2bd53fc83..5bc803962 100644 --- a/docs/source/modules/zos_operator.rst +++ b/docs/source/modules/zos_operator.rst @@ -33,10 +33,12 @@ cmd For example, change the command "...,P='DSN3EPX,-DBC1,S'" to "...,P=''DSN3EPX,-DBC1,S'' ". - If the command contains any special characters ($, &, etc), they must be escaped using double backslashes like \\\\\\$. + If the command contains any special characters ($, &, etc), they must be escaped using double backslashes like \\\\$. For example, to display job by job name the command would be ``cmd:"\\$dj''HELLO''"`` + By default, the command will be converted to uppercase before execution, to control this behavior, see the \ :emphasis:`case\_sensitive`\ option below. + | **required**: True | **type**: str @@ -63,6 +65,14 @@ wait_time_s | **default**: 1 +case_sensitive + If \ :literal:`true`\ , the command will not be converted to uppercase before execution. Instead, the casing will be preserved just as it was written in a task. + + | **required**: False + | **type**: bool + | **default**: False + + Examples diff --git a/plugins/modules/zos_operator.py b/plugins/modules/zos_operator.py index 54817936d..b2d8c0c52 100644 --- a/plugins/modules/zos_operator.py +++ b/plugins/modules/zos_operator.py @@ -29,6 +29,7 @@ - "Demetrios Dimatos (@ddimatos)" - "Rich Parker (@richp405)" - "Oscar Fernando Flores (@fernandofloresg)" + - "Ivan Moreno (@rexemin)" options: cmd: description: @@ -38,6 +39,8 @@ - If the command contains any special characters ($, &, etc), they must be escaped using double backslashes like \\\\\\$. - For example, to display job by job name the command would be C(cmd:"\\$dj''HELLO''") + - By default, the command will be converted to uppercase before execution, to control this + behavior, see the I(case_sensitive) option below. type: str required: true verbose: @@ -58,6 +61,14 @@ type: int required: false default: 1 + case_sensitive: + description: + - If C(true), the command will not be converted to uppercase before + execution. Instead, the casing will be preserved just as it was + written in a task. + type: bool + required: false + default: false notes: - Commands may need to use specific prefixes like $, they can be discovered by issuing the following command C(D OPDATA,PREFIX). @@ -177,7 +188,7 @@ opercmd = ZOAUImportError(traceback.format_exc()) -def execute_command(operator_cmd, timeout_s=1, *args, **kwargs): +def execute_command(operator_cmd, timeout_s=1, preserve=False, *args, **kwargs): """ Executes an operator command. @@ -187,6 +198,8 @@ def execute_command(operator_cmd, timeout_s=1, *args, **kwargs): Command to execute. timeout : int Time until it stops whether it finished or not. + preserve : bool + Whether to tell opercmd to preserve the case in the command. *args : dict Some arguments to pass on. **kwargs : dict @@ -201,7 +214,7 @@ def execute_command(operator_cmd, timeout_s=1, *args, **kwargs): timeout_c = 100 * timeout_s start = timer() - response = opercmd.execute(operator_cmd, timeout=timeout_c, *args, **kwargs) + response = opercmd.execute(operator_cmd, timeout=timeout_c, preserve=preserve, *args, **kwargs) end = timer() rc = response.rc stdout = response.stdout_response @@ -228,6 +241,7 @@ def run_module(): cmd=dict(type="str", required=True), verbose=dict(type="bool", required=False, default=False), wait_time_s=dict(type="int", required=False, default=1), + case_sensitive=dict(type="bool", required=False, default=False), ) result = dict(changed=False) @@ -314,6 +328,7 @@ def parse_params(params): cmd=dict(arg_type="str", required=True), verbose=dict(arg_type="bool", required=False), wait_time_s=dict(arg_type="int", required=False), + case_sensitive=dict(arg_type="bool", required=False), ) parser = BetterArgParser(arg_defs) new_params = parser.parse_args(params) @@ -344,6 +359,7 @@ def run_operator_command(params): wait_s = params.get("wait_time_s") cmdtxt = params.get("cmd") + preserve = params.get("case_sensitive") use_wait_arg = False if zoau_version_checker.is_zoau_version_higher_than("1.2.4"): @@ -353,7 +369,7 @@ def run_operator_command(params): kwargs.update({"wait": True}) args = [] - rc, stdout, stderr, elapsed = execute_command(cmdtxt, timeout_s=wait_s, *args, **kwargs) + rc, stdout, stderr, elapsed = execute_command(cmdtxt, timeout_s=wait_s, preserve=preserve, *args, **kwargs) if rc > 0: message = "\nOut: {0}\nErr: {1}\nRan: {2}".format(stdout, stderr, cmdtxt) diff --git a/tests/functional/modules/test_zos_operator_func.py b/tests/functional/modules/test_zos_operator_func.py index eb1bf1f60..fa06ca2ee 100644 --- a/tests/functional/modules/test_zos_operator_func.py +++ b/tests/functional/modules/test_zos_operator_func.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019, 2023 +# Copyright (c) IBM Corporation 2019, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -15,9 +15,8 @@ __metaclass__ = type -import pytest -import yaml import os +import yaml from shellescape import quote from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import ( @@ -97,10 +96,10 @@ def test_zos_operator_invalid_command_to_ensure_transparency(ansible_zos_module) results = hosts.all.zos_operator(cmd="DUMP COMM=('ERROR DUMP')", verbose=False) for result in results.contacted.values(): assert result.get("changed") is True - transparency = False - if any('DUMP COMMAND' in str for str in result.get("content")): - transparency = True - assert transparency + transparency = False + if any('DUMP COMMAND' in str for str in result.get("content")): + transparency = True + assert transparency def test_zos_operator_positive_path(ansible_zos_module): @@ -120,6 +119,7 @@ def test_zos_operator_positive_path_verbose(ansible_zos_module): assert result.get("changed") is True assert result.get("content") is not None # Traverse the content list for a known verbose keyword and track state + is_verbose = False if any('BGYSC0804I' in str for str in result.get("content")): is_verbose = True assert is_verbose @@ -171,6 +171,24 @@ def test_zos_operator_positive_verbose_blocking(ansible_zos_module): assert result.get('elapsed') >= wait_time_s +def test_zos_operator_positive_path_preserve_case(ansible_zos_module): + hosts = ansible_zos_module + command = "D U,all" + results = hosts.all.zos_operator( + cmd=command, + verbose=False, + case_sensitive=True + ) + + for result in results.contacted.values(): + assert result["rc"] == 0 + assert result.get("changed") is True + assert result.get("content") is not None + # Making sure the output from opercmd logged the command + # exactly as it was written. + assert len(result.get("content")) > 1 + assert command in result.get("content")[1] + def test_response_come_back_complete(ansible_zos_module): hosts = ansible_zos_module @@ -185,31 +203,33 @@ def test_response_come_back_complete(ansible_zos_module): def test_zos_operator_parallel_terminal(get_config): - path = get_config - with open(path, 'r') as file: - enviroment = yaml.safe_load(file) - ssh_key = enviroment["ssh_key"] - hosts = enviroment["host"].upper() - user = enviroment["user"].upper() - python_path = enviroment["python_path"] - cut_python_path = python_path[:python_path.find('/bin')].strip() - zoau = enviroment["environment"]["ZOAU_ROOT"] - try: - playbook = "playbook.yml" - inventory = "inventory.yml" - os.system("echo {0} > {1}".format(quote(PARALLEL_RUNNING.format( - zoau, - cut_python_path, - )), playbook)) - os.system("echo {0} > {1}".format(quote(INVENTORY.format( - hosts, - ssh_key, - user, - )), inventory)) - command = "(ansible-playbook -i {0} {1}) & (ansible-playbook -i {0} {1})".format(inventory, playbook) - stdout = os.system(command) - assert stdout == 0 - finally: - os.remove("inventory.yml") - os.remove("playbook.yml") - + path = get_config + with open(path, 'r') as file: + enviroment = yaml.safe_load(file) + ssh_key = enviroment["ssh_key"] + hosts = enviroment["host"].upper() + user = enviroment["user"].upper() + python_path = enviroment["python_path"] + cut_python_path = python_path[:python_path.find('/bin')].strip() + zoau = enviroment["environment"]["ZOAU_ROOT"] + try: + playbook = "playbook.yml" + inventory = "inventory.yml" + os.system("echo {0} > {1}".format(quote(PARALLEL_RUNNING.format( + zoau, + cut_python_path, + )), playbook)) + os.system("echo {0} > {1}".format(quote(INVENTORY.format( + hosts, + ssh_key, + user, + )), inventory)) + command = "(ansible-playbook -i {0} {1}) & (ansible-playbook -i {0} {1})".format( + inventory, + playbook + ) + stdout = os.system(command) + assert stdout == 0 + finally: + os.remove("inventory.yml") + os.remove("playbook.yml") From edd387817b084c06a6505ff5da64edf9a546573c Mon Sep 17 00:00:00 2001 From: Demetri <dimatos@gmail.com> Date: Fri, 16 Aug 2024 15:26:23 -0700 Subject: [PATCH 454/495] Initial commit for load balancer (#828) * Initial commit for test case load balancer Signed-off-by: ddimatos <dimatos@gmail.com> * Updated load balancer with more messaging and bug fixes Signed-off-by: ddimatos <dimatos@gmail.com> * Update to load balancer Signed-off-by: ddimatos <dimatos@gmail.com> * Cleaned up a typo Signed-off-by: ddimatos <dimatos@gmail.com> * Update with typo correction Signed-off-by: ddimatos <dimatos@gmail.com> * test edit Signed-off-by: ddimatos <dimatos@gmail.com> * test edit Signed-off-by: ddimatos <dimatos@gmail.com> * Add load balancer updates Signed-off-by: ddimatos <dimatos@gmail.com> * remove print stmt Signed-off-by: ddimatos <dimatos@gmail.com> * Test executor updates Signed-off-by: ddimatos <dimatos@gmail.com> * Updates to test framwork to allow for dictionary of configs vs file Signed-off-by: ddimatos <dimatos@gmail.com> * Updates using the new zinventory-raw fixture option Signed-off-by: ddimatos <dimatos@gmail.com> * Commented out unused code Signed-off-by: ddimatos <dimatos@gmail.com> * Refactored and commented code Signed-off-by: ddimatos <dimatos@gmail.com> * Added back commented runtime error Signed-off-by: ddimatos <dimatos@gmail.com> * Improvments to the codes logic Signed-off-by: ddimatos <dimatos@gmail.com> * Fixed a variable name bug Signed-off-by: ddimatos <dimatos@gmail.com> * Update executor with additional pydoc Signed-off-by: ddimatos <dimatos@gmail.com> * Updated the arg parser and added new args Signed-off-by: ddimatos <dimatos@gmail.com> * Updated the arg parser and added new args Signed-off-by: ddimatos <dimatos@gmail.com> * Added ability to pass in a list of z/OS managed nodes that overrides the default behavior Signed-off-by: ddimatos <dimatos@gmail.com> * Add updated sub process command with separate stdout and stderr Signed-off-by: ddimatos <dimatos@gmail.com> * Update balacer with new logic Signed-off-by: ddimatos <dimatos@gmail.com> * Update with advanced rebalance logic Signed-off-by: ddimatos <dimatos@gmail.com> * Update verobse and capture logic Signed-off-by: ddimatos <dimatos@gmail.com> * Remove prefix to ansible module utils that casues an pytest error Signed-off-by: ddimatos <dimatos@gmail.com> * Update with new doc Signed-off-by: ddimatos <dimatos@gmail.com> * Added new helpers Signed-off-by: ddimatos <dimatos@gmail.com> * Update source with new logic Signed-off-by: ddimatos <dimatos@gmail.com> * Added additional capabilities Signed-off-by: ddimatos <dimatos@gmail.com> * Updated --zinventory-raw fixture to support extra_args Signed-off-by: ddimatos <dimatos@gmail.com> * comment formatting Signed-off-by: ddimatos <dimatos@gmail.com> * udated with html generation Signed-off-by: ddimatos <dimatos@gmail.com> * udated with html generation Signed-off-by: ddimatos <dimatos@gmail.com> * Rename file Signed-off-by: ddimatos <dimatos@gmail.com> * Update CE tool Signed-off-by: ddimatos <dimatos@gmail.com> * Updates to create and manage log Signed-off-by: ddimatos <dimatos@gmail.com> * added throttle support Signed-off-by: ddimatos <dimatos@gmail.com> * Added an overall return code to CE Signed-off-by: ddimatos <dimatos@gmail.com> * Bumped up the connection time out Signed-off-by: ddimatos <dimatos@gmail.com> * Lint updates Signed-off-by: ddimatos <dimatos@gmail.com> * Update with lint Signed-off-by: ddimatos <dimatos@gmail.com> * Update module to use other modules and clean up pydoc Signed-off-by: ddimatos <dimatos@gmail.com> * scripts/utility.py Signed-off-by: ddimatos <dimatos@gmail.com> * Utlity and minor updates Signed-off-by: ddimatos <dimatos@gmail.com> * Expanded AC functionality with support for both depedency finder and pytest finder Signed-off-by: ddimatos <dimatos@gmail.com> * Fix a bug that resulted when a skip was used not in the results Signed-off-by: ddimatos <dimatos@gmail.com> * added color to messages and shuffle code Signed-off-by: ddimatos <dimatos@gmail.com> * Begin CE and AC integration Signed-off-by: ddimatos <dimatos@gmail.com> * Integration updates to AC Signed-off-by: ddimatos <dimatos@gmail.com> * Continued intergration of ac and ce Signed-off-by: ddimatos <dimatos@gmail.com> * Added support for return cocde Signed-off-by: ddimatos <dimatos@gmail.com> * Added support for return cocde Signed-off-by: ddimatos <dimatos@gmail.com> * Enhance return code status and clean up comments Signed-off-by: ddimatos <dimatos@gmail.com> * Updating simple test case for CE Signed-off-by: ddimatos <dimatos@gmail.com> * Rename file Signed-off-by: ddimatos <dimatos@gmail.com> * Updates to support AC tooling integration Signed-off-by: ddimatos <dimatos@gmail.com> * Prepare env for folders Signed-off-by: ddimatos <dimatos@gmail.com> * Configurations moved to a folder for cleaner look Signed-off-by: ddimatos <dimatos@gmail.com> * Updated AC connection Signed-off-by: ddimatos <dimatos@gmail.com> * lint cleanup Signed-off-by: ddimatos <dimatos@gmail.com> * Add fragment Signed-off-by: ddimatos <dimatos@gmail.com> * force bash shell for gh actions to use becasue of the redirect used <<< Signed-off-by: ddimatos <dimatos@gmail.com> * Update scripts to not use bash redirect, fix bug in old ac-test path Signed-off-by: ddimatos <dimatos@gmail.com> * Update AC to avoid use of bash array types Signed-off-by: ddimatos <dimatos@gmail.com> * Port bash arrays to a portable style Signed-off-by: ddimatos <dimatos@gmail.com> * Update ztest.py extra var with initialization of None --------- Signed-off-by: ddimatos <dimatos@gmail.com> --- ac | 817 ++++++- ansible.cfg | 3 + .../828-adds-concurrent-executor.yml | 7 + scripts/ce.py | 2089 +++++++++++++++++ .../requirements-2.11.env | 0 .../requirements-2.12.env | 2 +- .../requirements-2.13.env | 2 +- .../requirements-2.14.env | 4 +- .../requirements-2.15.env | 4 +- .../requirements-2.16.env | 4 +- .../requirements-2.17.env | 4 +- .../{ => configurations}/requirements-2.9.env | 0 .../requirements-common.env | 29 +- .../requirements-latest.env | 0 scripts/hosts.env | 130 +- scripts/info.env.axx | 30 +- scripts/modules/connection.py | 201 ++ scripts/modules/utils.py | 160 ++ scripts/mounts.sh | 18 +- scripts/tests/test_load_balance.py | 320 +++ scripts/venv.sh | 87 +- tests/conftest.py | 53 +- .../modules/test_zos_operator_func.py | 2 +- tests/helpers/ztest.py | 108 + 24 files changed, 3902 insertions(+), 172 deletions(-) create mode 100644 changelogs/fragments/828-adds-concurrent-executor.yml create mode 100644 scripts/ce.py rename scripts/{ => configurations}/requirements-2.11.env (100%) rename scripts/{ => configurations}/requirements-2.12.env (98%) rename scripts/{ => configurations}/requirements-2.13.env (98%) rename scripts/{ => configurations}/requirements-2.14.env (96%) rename scripts/{ => configurations}/requirements-2.15.env (96%) rename scripts/{ => configurations}/requirements-2.16.env (96%) rename scripts/{ => configurations}/requirements-2.17.env (96%) rename scripts/{ => configurations}/requirements-2.9.env (100%) rename scripts/{ => configurations}/requirements-common.env (68%) rename scripts/{ => configurations}/requirements-latest.env (100%) create mode 100644 scripts/modules/connection.py create mode 100644 scripts/modules/utils.py create mode 100644 scripts/tests/test_load_balance.py diff --git a/ac b/ac index 016b760ea..14fa159d4 100755 --- a/ac +++ b/ac @@ -28,7 +28,7 @@ VENV_HOME_MANAGED=${PWD%/venv}/venv -# Lest normalize the version from 3.10.2 to 3010002000 +# Normalize the version from 3.10.2 to 3010002000 # Do we we need that 4th octet? normalize_version() { echo "$@" | awk -F. '{ printf("%d%03d%03d%03d\n", $1,$2,$3,$4); }'; @@ -103,15 +103,32 @@ DOCKER_INFO=`podman info> /dev/null 2>&1;echo $?` # ============================================================================== # Arg parsing helpers # ============================================================================== -terminate() { - printf '%s\n' "$1" >&2 + +# ------------------------------------------------------------------------------ +# This method generates an INFO message with green color and dividers. This +# message will always be sent to STDERR so that STDOUT can be reserved for +# return codes.. Use this method for messages to the console. +# ------------------------------------------------------------------------------ +message(){ + printf '%s\n' "${GRN}${DIV}${ENDC}" >&2 + printf '%s\n' "${GRN}INFO:${ENDC} ${1}" >&2 + printf '%s\n' "${GRN}${DIV}${ENDC}" >&2 +} + +# ------------------------------------------------------------------------------ +# This method generates an ERROR message with red color. This message +# will always be sent to STDERR so that STDOUT can be reserved for return codes. +# Use this method for error messages to the console. +# ------------------------------------------------------------------------------ +message_error(){ + ERROR_MSG="${RED}ERROR${ENDC}: $1" + printf '%s\n' "${ERROR_MSG}" >&2 exit 1 } -message(){ - echo $DIV; - echo "$1"; - echo $DIV; +message_warn(){ + WARN_MSG="${YEL}WARN${ENDC}: $1" + printf '%s\n' "${WARN_MSG}" >&2 } ensure_managed_venv_exists(){ @@ -123,6 +140,11 @@ ensure_managed_venv_exists(){ fi } +terminate() { + printf '%s\n' "$1" >&2 + exit 1 +} + # ------------------------------------------------------------------------------ # Generate simple formated but incomplete help # ------------------------------------------------------------------------------ @@ -187,6 +209,15 @@ help(){ helpMessage=" "substr($0, 3); \ print helpMessage } + } else if ($0 ~ /^##[[:space:]][[:space:]]*\$[[:space:]]*--/) { \ + helpMessage = substr($0, 3); \ + if (helpCommand && helpMessage) {\ + printf "\033[36m%-16s\033[0m %s\n", helpCommand, helpMessage; \ + helpCommand =""; \ + } else {\ + helpMessage=" "substr($0, 6); \ + print helpMessage + } } }' $0 fi @@ -200,7 +231,7 @@ option_processor(){ opt=$1 arg=$2 if [ "$arg" ]; then - echo $arg + echo "$arg" elif [ "$opt" ]; then # Split up to "=" and set the remainder value=${opt#*=} @@ -417,10 +448,12 @@ ac_sanity(){ ## defaults to all tests in file running. ## debug - enable debug for pytest (-s), choices are true and false ## Example: -## $ ac --ac-test --host ec01150a --python 3.10 --zoau 1.2.2 --file tests/functional/modules/test_zos_operator_func.py --test test_zos_operator_positive_path --debug true +## $ ac --ac-test --host ec01150a --python 3.10 --zoau 1.2.2\ +## $ --file tests/functional/modules/test_zos_operator_func.py --test test_zos_operator_positive_path --debug true ## $ ac --ac-test --host ec33012a --python 3.10 --zoau 1.2.2 --file tests/functional/modules/test_zos_operator_func.py --debug true ## $ ac --ac-test --file tests/functional/modules/test_zos_operator_func.py --debug true ## $ ac --ac-test +## $ ac --ac-test --host ec01130a --python 3.10 --zoau 1.3.1 --file invalid/test/returns/rc/of/4/to/stderr 2>>/dev/null ac_test(){ host=$1 python=$2 @@ -449,23 +482,390 @@ ac_test(){ exit 1 fi - #cd ${VENV_BIN} - if [ "$file" ]; then - . ${VENV_BIN}/activate && ${VENV_BIN}/pytest --ignore="${skip}" $CURR_DIR/${file} --host-pattern=all --zinventory=${VENV}/config.yml "${debug}" + . ${VENV_BIN}/activate && ${VENV_BIN}/pytest $CURR_DIR/${file} --ignore="${skip}" --host-pattern=all --zinventory=${VENV}/config.yml ${debug} >&2 ; echo $? >&1 else for file in `ls tests/functional/modules/*.py`; do - # For some reason '--ignore not being honored so injecting a work around if [ "$file" != "$skip" ]; then - . ${VENV_BIN}/activate && ${VENV_BIN}/pytest --ignore="${skip}" $CURR_DIR/${file} --host-pattern=all --zinventory=${VENV}/config.yml "${debug}" + . ${VENV_BIN}/activate && ${VENV_BIN}/pytest $CURR_DIR/${file} --ignore="${skip}" --host-pattern=all --zinventory=${VENV}/config.yml ${debug} >&2 ; echo $? >&1 fi done fi # Clean up the collections folder after running the tests, temporary work around. rm -rf collections/ansible_collections +} - #cd ${CURR_DIR} +# ------------------------------------------------------------------------------ +# Run concurrent executor: +# ------------------------------------------------------------------------------ +#->test-concurrent: +## Run the conncurrent executor (CE) that can drive test cases to a cluster of hosts. +## Usage: ac --test-concurrent [--host <str, str>] [--user <str>] --python <str> [--zoau <str>] [--pythonpath <str>] +## [--volumes <str, str>] [--file <str, str>] [--skip <str, str>] [--itr <int>] [--replay <int>] +## [--timeout <int>] [--throttle <bool>] [--workers <int>] [--maxjob <int>] [--maxnode <int>] +## [--bal <int>] [--verbose <bool>] [--verbosity <int>] [--debug <bool>] [--extra <str>] +## Options: +## host (optional): +## - Space or comma delimited managed nodes to use. +## - Entering one more managed nodes overrries the auto detection feature which +## will build a cluster of managed nodes to run on. +## - Only the host prefix is needed, e.g. 'ec01150a' +## user (optional): +## - Ansible user authorized to run tests on the managed node. +## python (requred): -> +## - IBM enterprise python version, e.g 3.10', '3.11', '3.12' +## zoau (optional): +## - ZOAU version to use. e.g. 1.2.5, 1.3.0, 1.3.1 +## pythonpath (optional): +## - The absolute path to where the ZOAU python module is located. +## - The can be for the precopiled binary, wheels or setup tools installation home. +## - Default is to use the precompiled binary (until we establish wheel locations) +## volumes (optional): +## - The volumes to use with the test cases, overrides the auto volume assignment. +# - Defaults to, "222222,000000" +## file (optional): +## - Space or comma delimited test suites that should be included in the result. +## - A test suite is a collection of test cases in a file that starts with +## 'test' and ends in '.py'. +## - Do not include the absolute path, this is automatically deteremined. +## - For all functional tests, use the `functional/*` notation. +## - For all unit tests, use the `unit/*` notation for directories. +## - Default is all functional and unit tests. +## - A directory of test cases is such that it contains test suites. +## skip (optional): +## - Space or comma delimited test suites that should not be included +## in the result. +## - Supply only the test suite name, the tooling will prepend the +## necessay path. +## - Default is to skip 'test_module_security.py', this can not be removed but +## it can be replaced with another test or tests. +## itr (optional): +## - Configure the number of iterations to rerun failed test cases. +## - Each iteration will run only the prior iterations failed tests until +## either their are no more iterations left or there are no more failed +## tests to run. +## - Default is 50 so that full regression can succeed. +## replay (optional): +## - Instruct the CE to replay the entire command with all provided options +## for only the failed tests. +## - The idea behind this is if you did not set enough iterations, rather than +## start all over you could instruce CE to rerun with the failed test cases +## it has recorded, giving a higher probabity there will be success. +## - Each replay will run only the prior iterations failed tests until +## either their are no more replay's left or there are no more failed +## tests to run. +## - Default is 5, so that full regression can succeed. +## timeout (optional): +## - The maximum time in seconds a job should wait for completion. +## - When set, a subprocess call executing pytest will waith this amount of time. +## - Default is 300 seconds (5 minutes). +## throttle (optional): +## - Configuration throttles the managed node test execution such that a node will +## only run one one job at at time, no matter the threads. +## - If disabled (False), concurrency will increase, but has the risk of encountering +## increased ansible connnection failures, while this could result in shorter regression +## it could also result in longer times because of failed connections. +## - Default is True, managed nodes will only execute one test at time. +## workers (optional): +## - The numerical multiplier used to increase the number of worker threads. +## - This value is multiplied by the number of managed nodes to calculate the +## number of threads to start the CE thread pool with. +## - Default is 1, so CE will have 1 thread for each managed node. +## - Any value greater than 1, will automatically disable throttle. +## - At this time, setting more threads could result in connection failures, see throttle. +## maxjob (optional): +## - The maximum number of times a test case can fail before its removed from the job queue. +## - This is helpful in indentifying a bug, possibly in a test case or module. +## - Setting this value sets an upper bound limit on how many times a test case is permitted +## to fail. +## - Default is 10, such that the test will no longer be permitted to execute after 10. +## maxnode (optional): +## - The maximum number tests that can fail on a managed node before the node is removed +## from the node queue. +## - This helpful in identifying a problematic managed node such that it may require an IPL. +## - Default is 30, such that the managede will no longer be permitted to run tests after 30. +## - After the default is exceeded, the managde node is set to OFFLINE status. +## bal (optional): +## - The maximum number of times a test is perimtted to fail on a given managed node +## before be assigned to a new managed node. +## - This is helpful in identifying test cases that may be experiencing managned node latency, +## this allows CE to assign the test case to a new less active managed node such that it might +## a higher chance of success. +## - Default is 10, after a test case fails 10 times on a node it will be assigned to a new managed node. +## verbose (optional): +## - Instruct CE to run with verbose stdout to the console. +## - This will instruct CE to write all statistics to stdout. +## - Default is 'False', no verbosity to the console. +## - Statistics are always written to directory '/tmp' as text and HTML files. +## - Files in '/tmp' will follow this name pattern, eg conncurrent-excutor-log-<replay>-<status>-<date>.<ext> +## - examples are: +## - concurrent-executor-log-00:21:24.txt +## - concurrent-executor-log-replay-1-failure-00:21:24.html +## - concurrent-executor-tests-replay-1-success-00:21:24.html +## verbosity (optional): +## - Configure pytest verbosity level. +## - Integer value corresponds to verbosity level. +## - 1 = -v, 2 = -vv, 3 = -vvv, 4 = -vvvv +## - Default is 0, no verbosity. +## debug (optional): +## - Instruct Pytest whether to capture any output (stdout/stderr), equivalent of pytest -s. +## - Default False +## extra (optional): +## - Extra commands passed to subprocess before pytest execution +## - This is helpful if you want to expose insert an enviroment var or even +## run a shell command before exeucting, e.g 'cd ../..' +## returncode (optional): +## - Instruct CE whether to return a return code. +## - If 'True', the stdout is surpressed and a return code is sent to stdout. +## - A zero return code means the overall execution has successed for the configuration submitted, +## where a non-zero return code represents the number of failed tests. +## - Default is False +## Example: +## $ ac --test-concurrent --host ec01130a --python 3.11 --zoau 1.3.0 +## $ ac --test-concurrent --host ec01130a --python 3.11 --zoau 1.3.0 --file test_zos_operator_func.py --debug true +## $ ac --test-concurrent --host "ec01130a,ec33012a,ec33017a" --python 3.11 --zoau 1.3.0\ +## $ --file test_zos_operator_func.py,test_zos_job_submit_func.py\ +## $ --skip "test_zos_job_submit_func.py::test_job_from_gdg_source[0]" --debug true +## $ ac --test-concurrent --host ec01130a --python 3.11 --zoau 1.3.0 --file test_zos_operator_func.py --returncode True --itr 1 +## $ ac --test-concurrent --host ec01130a --python 3.11 --zoau 1.3.1 --file test_zos_data_set_func.py --itr 1 --replay 1 +## test_case_1 +test_concurrent(){ + + # ---------------------------------------------------------------------------------------------------------------------------------- + # CE -> AC -> AC vars -> var mapping -> defaults + # ---------------------------------------------------------------------------------------------------------------------------------- + # --hostnames -> --host -> host=$1 -> pass through -> adhoc else auto discovered + # --user -> --user -> user=$2 -> pass through -> adhoc else auto discovered + # --pyz -> --python -> python=$3 -> pass through -> adhoc (auto translated to absolute path) + # --zoau -> --zoau -> zoau=$4 -> pass through -> adhoc (auto translated to absolute path) + # --pythonpath -> --pythonpath -> pythonpath=$5 -> pass through -> 'zoau/lib' or 'zoau/lib/<pyz version>' + # --volumes -> --volumes -> volumes=$6 -> pass through -> "222222,000000" + # --paths -> --file -> file=$7 -> pass through -> "functional/*,unit/*" + # --skip -> --skip -> skip=$8 -> pass through -> "test_module_security.py" + # --itr -> --itr -> itr=$9 -> pass through -> 50 + # --replay -> --replay -> replay=$10 -> pass through -> 5 + # --timeout -> --timeout -> timeout=$11 -> pass through -> 300 + # --throttle -> --throttle -> throttle=$12 -> True = '--throttle', else '--no-throttle' -> True + # --workers -> --workers -> workers=$13 -> pass through -> 1 + # --maxjob -> --maxjob -> maxjob=$14 -> pass through -> 10 + # --maxnode -> --maxnode -> maxnode=$15 -> pass through -> 30 + # --bal -> --bal -> bal=$16 -> pass through -> 10 + # --verbose -> --verbose -> verbose=$17 -> True = '--verbose', else '--no-verbose' -> False + # --verbosity -> --verbosity -> verbosity=$18 -> pass through -> 0 + # --capture -> --debug -> debug=$19 -> True = '--capture', else '--no-capture' -> False + # --extra -> --extr -> extra=$20 -> pass through -> "cd `pwd`" + # ---------------------------------------------------------------------------------------------------------------------------------- + + # echo "host=${1} user=${2} python=${3} zoau=${4} pythonpath=${5} volumes=${6} file=${7} skip=${8} itr=${9} replay=${10}"\ + # "timeout=${11} throttle=${12} workers=${13} maxjob=${14} maxnode=${15} bal=${16} verbose=${17} verbosity=${18} debug=${19} extra=${20} returncode=${21}" + + host="${1}" + user="${2}" + python="${3}" + zoau="${4}" + pythonpath="${5}" + volumes="${6}" + file="${7}" + skip="${8}" + itr="${9}" + replay="${10}" + timeout="${11}" + throttle="${12}" + workers="${13}" + maxjob="${14}" + maxnode="${15}" + bal="${16}" + verbose="${17}" + verbosity="${18}" + debug="${19}" + extra="${20}" + returncode="${21}" + + # Invoke shell script helpers to set variables if host is not null + if [ ! -z "${host}" ]; then + hostname=$($VENV/./venv.sh --host-credentials "${host}") + + if [ -z "${user}" ]; then + user=$($VENV/./venv.sh --user-credentials "${host}") + fi + + if [ -z "${pass}" ]; then + pass=$($VENV/./venv.sh --pass-credentials "${host}") + fi + + host=$hostname + fi + + # Convert the python from short notation to absolute path + python=$($VENV/./mounts.sh --get-python-mount "${python}") + + zoau=$($VENV/./mounts.sh --get-zoau-mount "${zoau}") + + # Build a zoau precompiled binary path if $pythonpath is null + if [ -z "${pythonpath}" ]; then + zoau_version=`echo ${zoau#*/v}` + if [ $(normalize_version "${zoau_version}") -lt 1003000000 ]; then + pythonpath=$zoau/lib + else + suffix=`echo ${python#*cyp/v}` + version=`echo ${suffix%*/pyz}` + pythonpath_version=`echo $version|sed 's/r/./g'` + pythonpath=$zoau/lib/$pythonpath_version + fi + fi + + first_entry=true + file_tests="" + strings_func=$(ac_test_pytest_finder "functional/*" "" true true) + strings_unit=$(ac_test_pytest_finder "unit/*" "" true true) + + corrected_file="" + # Support shorter notation passed to the utils module 'get_test_case()'', the python module + # does not support such notation because the module can be run from many locations and requires + # absolute paths. This notation is translated to absolute paths. + # TODO: Add support for invidual tests, --file test_load_balance_full.py::test_case_1, issue 1636 + for i in $(echo $file | sed "s/,/ /g") # Optionally: skip=\"`echo $2 | tr ',' ' '`\" + do + if [ "$i" == "functional/*" ];then + if [ "$first_entry" == "true" ];then + first_entry=false + file_tests="$CURR_DIR/tests/functional/modules/" + else + file_tests="$file_tests $CURR_DIR/tests/functional/modules/" + fi + elif [ "$i" == "unit/*" ];then + if [ "$first_entry" == "true" ];then + first_entry=false + file_tests="$CURR_DIR/tests/unit/" + else + file_tests="$file_tests $CURR_DIR/tests/unit/" + fi + elif echo $strings_func | tr ' ' '\n'|grep $i >/dev/null; then + if [ ! -e "$CURR_DIR/tests/functional/modules/$i" ]; then + message_error "File = $CURR_DIR/tests/functional/modules/$i not found." + fi + + if [ "$first_entry" == "true" ];then + first_entry=false + file_tests="$CURR_DIR/tests/functional/modules/$i" + else + file_tests="$file_tests $CURR_DIR/tests/functional/modules/$i" + fi + elif echo $strings_unit | tr ' ' '\n'|grep $i >/dev/null; then + if [ ! -e "$CURR_DIR/tests/unit/$i" ]; then + message_error "File = $CURR_DIR/tests/unit/$i not found." + fi + + if [ "$first_entry" == "true" ];then + first_entry=false + file_tests="$CURR_DIR/tests/unit/" + else + file_tests="$file_tests $CURR_DIR/tests/unit/$i" + fi + elif [[ $i == tests/functional/modules/* ]] || [[ $i == $CURR_DIR/tests/functional/modules/* ]] ;then + message_warn "It appears an absolute path has been used, 'ac' will try to truncate it to the test suite." + suffix=`echo ${i#*/modules/}` + + if [ "$first_entry" == "true" ];then + first_entry=false + file_tests="$CURR_DIR/tests/functional/modules/$suffix" + else + file_tests="$file_tests $CURR_DIR/tests/functional/modules/$suffix" + fi + elif [[ $i == tests/unit/* ]] || [[ $i == $CURR_DIR/tests/unit/* ]] ;then + message_warn "It appears an absolute path has been used, 'ac' will try to truncate it to the test suite." + suffix=`echo ${i#*/modules/}` + + if [ "$first_entry" == "true" ];then + first_entry=false + file_tests="$CURR_DIR/tests/unit/$suffix" + else + file_tests="$file_tests $CURR_DIR/tests/unit/$suffix" + fi + else + message_error "File = $i, not found in project path = $CURR_DIR." + fi + done + first_entry=true + file=$file_tests + + # Convert any comma separated strings to space delimited as needed by the tooling. + first_entry=true + skip_tests="" + for i in $(echo $skip | sed "s/,/ /g") # Optionally: skip=\"`echo $2 | tr ',' ' '`\" + do + if [ "$first_entry" == "true" ];then + first_entry=false + skip_tests="$CURR_DIR/tests/functional/modules/$i" + else + skip_tests="$skip_tests $CURR_DIR/tests/functional/modules/$i" + fi + done + skip=$skip_tests + + # Uppercase value for --throttle + throttle=`echo $throttle | tr '[:lower:]' '[:upper:]'` + if [ "$throttle" == "TRUE" ];then + throttle="--throttle" + else + throttle="--no-throttle" + fi + + # Uppercase value for --verbose + verbose=`echo $verbose | tr '[:lower:]' '[:upper:]'` + if [ "$verbose" == "TRUE" ];then + verbose="--verbose" + else + verbose="--no-verbose" + fi + + # Uppercase value for --capture + debug=`echo $debug | tr '[:lower:]' '[:upper:]'` + if [ "$debug" == "TRUE" ];then + debug="--capture" + else + debug="--no-capture" + fi + + # Uppercase value for --capture + returncode=`echo $returncode | tr '[:lower:]' '[:upper:]'` + if [ "$returncode" == "TRUE" ];then + returncode="--returncode" + else + returncode="--no-returncode" + fi + + # Useful for debug to see what is acutally passed what ./ac will pass to ce.py + # echo "host=${host} user=${user} python=${python} zoau=${zoau} pythonpath=${pythonpath} volumes=${volumes},"\ + # "file=${file} skip=${skip} itr=${itr} replay=${replay} timeout=${timeout} throttle=${throttle}"\ + # "workers=${workers} maxjob=${maxjob} maxnode=${maxnode} bal=${bal} verbose=${verbose}"\ + # "verbosity=${verbosity} debug=${debug} extra=${extra} returncode=${returncode}" + + # read _host _user _pass <<<$($VENV/./venv.sh --host-credentials "${host}") + message "Concurrent executor testing is evaluating supplied options and preparing to execute." + . $VENV_BIN/activate && python3 $VENV/ce.py\ + --hostnames "${host}"\ + --user "${user}"\ + --pyz "${python}"\ + --zoau "${zoau}"\ + --pythonpath "${pythonpath}"\ + --volumes "${volumes}"\ + --paths "${file}"\ + --skip "${skip}"\ + --itr "${itr}"\ + --replay "${replay}"\ + --timeout "${timeout}"\ + "${throttle}"\ + --workers "${workers}"\ + --maxjob "${maxjob}"\ + --maxnode "${maxnode}"\ + --bal "${bal}"\ + "${verbose}"\ + --verbosity "${verbosity}"\ + "${debug}"\ + --extra "${extra}"\ + "${returncode}" } # ------------------------------------------------------------------------------ @@ -485,6 +885,203 @@ ac_test_config(){ fi } +# ------------------------------------------------------------------------------ +# Get a list of all test cases from the dependency finder depending on options +# ------------------------------------------------------------------------------ +#->test-dep-find: +## Determine which test suites to run given the options selected. +## Usage: ac --test-dep-find [--branch <str,str>] [--skip <str, str>] +## Options: +## branch (optional): +## - The branch to compare to when performing dependency analaysis. The +## comparison always uses the currently checked out local branch and +## compares that to the 'branch' supplied. +## - The default branch is 'dev' +## skip (optional): +## - Space or comma delimited test suites that should not be included +## in the result. +## - Supply only the test suite name, the tooling will prepend the +## necessay path. +## - Default is to skip 'test_module_security.py', this can not be removed but +## it can be replaced with another test or tests. +## pretty (optional): +## - Pretty formatting where each value is a line follwoed by a line feed, +## otherwise a list[str] format is returned. +## Example: +## $ ac --test-dep-find --branch main --skip "test_module_security.py,test_zos_apf_func.py" --pretty False +## $ ac --test-dep-find --branch dev --skip "test_zos_apf_func.py" +## $ ac --test-dep-find --branch main +## $ ac --test-dep-find + +ac_test_dep_finder(){ + branch=$1 + skip="$2" + gh_branch=`git branch |grep "*" | cut -d" " -f2` + + # Convert any comma separated strings to space delimited as needed by the tooling. + first_entry=true + skip_tests="" + for i in $(echo $skip | sed "s/,/ /g") # Optionally: skip=\"`echo $2 | tr ',' ' '`\" + do + if [ "$first_entry" == "true" ];then + first_entry=false + skip_tests="$CURR_DIR/tests/functional/modules/$i" + else + skip_tests="$skip_tests $CURR_DIR/tests/functional/modules/$i" + fi + done + skip=$skip_tests + + # If branch is defined provide results on that comparison branch else default to the `dev` branch. + if [ -z "$branch" ]; then + branch="dev" + message "Compiling a list functional and unit tests suites excluding skipped tests." + . $VENV_BIN/activate && result=`$VENV_BIN/python ${VENV}/dependencyfinder.py -p ${CURRENT_DIR} -a -s "${skip}"` + else + message "Compiling a list dependent tests cases to run based on the changes between local branch '$gh_branch' and target branch '$branch', excluding skipped tests." + . $VENV_BIN/activate && result=`$VENV_BIN/python ${VENV}/dependencyfinder.py -p ${CURRENT_DIR} -b ${branch} -s "${skip}" -m` + fi + + # Uppercase value for --pretty + pretty=`echo $3 | tr '[:lower:]' '[:upper:]'` + if [ "$pretty" == "TRUE" ];then + echo $result |tr '[[:space:]]' '\n' + else + export env_result=${result} + result=`$VENV_BIN/python3 -c "from os import environ;all = environ['env_result'].split(',');print(str(all))"` + echo $result + fi +} + +# ------------------------------------------------------------------------------ +# Get a list of all test cases using pytest including parameterization +# ------------------------------------------------------------------------------ +#->test-pytest-find: +## Get a list of parametizd test cases used by pytest +## Usage: ac --test-pytest-find [--file <str, str>] [--skip <str, str>] [--pretty <bool>] +## Options: +## file (optional): +## - Space or comma delimited test suites that should be included +## in the result. +## - A test suite is a collection of test cases in a file that starts with +## 'test' and ends in '.py'. +## - For all functional tests, use the `functional/*` notation for directories. +## - For all unit tests, use the `unit/*` notation for directories. +## - Default is all functional and unit tests. +## - A directory of test cases is such that it contains test suites. +## skip (optional) - (str): +## - Space or comma delimited test suites that should be omitted +## in the result. +## - A test suite is a collection of test cases in a file that starts with +## 'test' and ends in '.py'. +## - Default is to skip 'test_module_security.py', this can not be removed but +## it can be replaced with another test or tests. +## - Test cases can be parametrized such they use the '::' syntax or not. +## - Skip does not support directories. +## pretty (optional): +## - Pretty formatting where each value is a line follwoed by a line feed, +## otherwise a list[str] format is returned. +## Example: +## $ ac --test-pytest-find --file "test_zos_copy_func.py,test_zos_mvs_raw_unit.py" --skip "test_zos_job_submit_func.py,test_module_security.py" --pretty false +## $ ac --test-pytest-find --file "functional/*,unit/*" --skip "test_module_security.py" +## $ ac --test-pytest-find --file "test_zos_copy_func.py" +## $ ac --test-pytest-find --pretty true +## $ ac --test-pytest-find|wc -l +ac_test_pytest_finder(){ + file=$1 + skip="$2" + slience_messages="$4" # Undocumented internal interface option to disable INFO messages + + first_entry=true + file_tests="" + strings_func=$(ls "$CURR_DIR/tests/functional/modules/") + strings_unit=$(ls "$CURR_DIR/tests/unit/") + + # As a courtesy, aid in shorter notation supplied to the utils module get_test_case(), the python module + # does not support such notation because the module can be run from many locations and requires absolute + # paths. + for i in $(echo $file | sed "s/,/ /g") # Optionally: skip=\"`echo $2 | tr ',' ' '`\" + do + if [ "$i" == "functional/*" ];then + if [ "$first_entry" == "true" ];then + first_entry=false + file_tests="$CURR_DIR/tests/functional/modules/" + else + file_tests="$file_tests $CURR_DIR/tests/functional/modules/" + fi + elif [ "$i" == "unit/*" ];then + if [ "$first_entry" == "true" ];then + first_entry=false + file_tests="$CURR_DIR/tests/unit/" + else + file_tests="$file_tests $CURR_DIR/tests/unit/" + fi + elif echo $strings_func | tr ' ' '\n'|grep $i >/dev/null; then + if [ ! -e "$CURR_DIR/tests/functional/modules/$i" ]; then + message_error "File = $CURR_DIR/tests/functional/modules/$i not found." + fi + + if [ "$first_entry" == "true" ];then + first_entry=false + file_tests="$CURR_DIR/tests/functional/modules/$i" + else + file_tests="$file_tests $CURR_DIR/tests/functional/modules/$i" + fi + elif echo $strings_unit | tr ' ' '\n'|grep $i >/dev/null; then + if [ ! -e "$CURR_DIR/tests/unit/$i" ]; then + message_error "File = $CURR_DIR/tests/unit/$i not found." + fi + + if [ "$first_entry" == "true" ];then + first_entry=false + file_tests="$CURR_DIR/tests/unit/" + else + file_tests="$file_tests $CURR_DIR/tests/unit/$i" + fi + else + message_error "File = $i, not found in project path = $CURR_DIR." + fi + done + first_entry=true + file=$file_tests + export env_file=${file} + + # Convert any comma separated strings to space delimited as needed by the tooling. + first_entry=true + skip_tests="" + for i in $(echo $skip | sed "s/,/ /g") # Optionally: skip=\"`echo $2 | tr ',' ' '`\" + do + if [ "$first_entry" == "true" ];then + first_entry=false + skip_tests="$CURR_DIR/tests/functional/modules/$i" + else + skip_tests="$skip_tests $CURR_DIR/tests/functional/modules/$i" + fi + done + + skip=$skip_tests + export env_skip=${skip} + + + slience_messages=`echo $slience_messages | tr '[:lower:]' '[:upper:]'` + if [ "$slience_messages" != "TRUE" ];then + message "Compiling a list of test cases for the provided test suites excluding any skip tests.." $slience_messages + fi + + . $VENV_BIN/activate && result=`export PYTHONPATH=$VENV;$VENV_BIN/python -c "from modules.utils import get_test_cases;from os import environ;\ + tests = get_test_cases(paths=environ['env_file'], skip=environ['env_skip']);all=','.join(tests);print(all)"` + + # Uppercase value for --pretty + pretty=`echo $3 | tr '[:lower:]' '[:upper:]'` + if [ "$pretty" == "TRUE" ];then + echo $result |tr ',' '\n'; + else + export env_result=${result} + result=`$VENV_BIN/python3 -c "from os import environ;all = environ['env_result'].split(',');print(str(all))"` + echo $result + fi +} + # ------------------------------------------------------------------------------ # Check the version of the ibm_zos_core collection installed # ------------------------------------------------------------------------------ @@ -637,16 +1234,28 @@ host_mounts(){ } # ------------------------------------------------------------------------------ -# Print the managed z/OS node IDs +# Print the z/OS node IDs and hostnames # ------------------------------------------------------------------------------ #->host-nodes: -## Display the z/OS managed node IDs. -## Usage: ac [--host-nodes] +## Display the z/OS node IDs and hostnames +## Usage: ac [--host-nodes --all <boolean>] +## Options: +## all - A list of all nodes, default is true. If all is set to false, +## only a list space delimited nodes are returned. ## Example: ## $ ac --host-nodes +## $ ac --host-nodes --all false host_nodes(){ - message "Print local managed node IDs." - $VENV/venv.sh --targets + + + if [ "$all" == "false" ]; then + message "Print z/OS production hostnames." + result=`$VENV/venv.sh --targets-production` + else + message "Print z/OS node IDs and hostnames." + result=`$VENV/venv.sh --targets` + fi + echo $result } # ------------------------------------------------------------------------------ @@ -672,7 +1281,6 @@ venv_setup(){ # ------------------------------------------------------------------------------ # Allows you to activate the lastet ansible managed virtual enviroments -# TODO: Allow user to specify which venv they can start # ------------------------------------------------------------------------------ #->venv-start: ## Activate the latest ansible managed virtual environment or optionally start @@ -695,12 +1303,11 @@ venv_start(){ message "Starting managed python virtual environment: $VENV_BASENAME" #. $VENV_BIN/activate; exec /bin/sh -i - /bin/bash -c ". $VENV_BIN/activate; exec /bin/sh -i" + /bin/bash -c ". $VENV_BIN/activate; exec /bin/sh -i;" } # ------------------------------------------------------------------------------ # Allows you to deactivate the lastet ansible managed virtual enviroments -# TODO: Allow user to specify which venv they can stop # ------------------------------------------------------------------------------ #->venv-stop: ## Deactivate the latest ansible managed virtual environment or optionally deactivate @@ -722,8 +1329,13 @@ venv_stop(){ fi message "Stopping managed ansible virtual environment located at: $VENV_BASENAME" - message "ac --venv-stop does not actually currently work, use CNTL-D" - . deactivate $VENV_BASENAME 2>/dev/null; + # message "ac --venv-stop does not actually currently work, use CNTL-D" + # . deactivate $VENV_BASENAME; + # deactivate venv/$VENV_BASENAME + venv_tty=`tty` + venv_tty=`basename $venv_tty` + venv_pid=`ps -ef |grep $venv_tty | grep -v "grep" | grep "/bin/sh -i" | awk '{print $3}'` + kill -9 $venv_pid > /dev/null 2>&1 } # ============================================================================== @@ -749,35 +1361,35 @@ while true; do fi exit ;; - --ac-bandit) # Command + --ac-bandit) # Command ensure_managed_venv_exists $1 option_submitted="--ac-bandit" ;; - --ac-build) # Command + --ac-build) # Command ensure_managed_venv_exists $1 option_submitted="--ac-build" ;; - --ac-galaxy-importer) # Command + --ac-galaxy-importer) # Command ensure_managed_venv_exists $1 option_submitted="--ac-galaxy-importer" ;; - --ac-changelog) # Command + --ac-changelog) # Command ensure_managed_venv_exists $1 option_submitted="--ac-changelog" ;; - --ac-module-doc) # Command + --ac-module-doc) # Command ensure_managed_venv_exists $1 option_submitted="--ac-module-doc" ;; - --ac-install) - ensure_managed_venv_exists $1 # Command + --ac-install) # Command + ensure_managed_venv_exists $1 option_submitted="--ac-install" ;; - --ac-lint) - ensure_managed_venv_exists $1 # Command + --ac-lint) # Command + ensure_managed_venv_exists $1 option_submitted="--ac-lint" ;; - --ac-sanity |--ac-sanity=?*) # Command + --ac-sanity |--ac-sanity=?*) # Command ensure_managed_venv_exists $1 option_submitted="--ac-sanity" ;; @@ -789,6 +1401,18 @@ while true; do ensure_managed_venv_exists $1 option_submitted="--ac-test-config" ;; + --test-concurrent|--test-concurrent=?*) # command + ensure_managed_venv_exists $1 + option_submitted="--test-concurrent" + ;; + --test-dep-find|--test-dep-find=?*) # command + ensure_managed_venv_exists $1 + option_submitted="--test-dep-find" + ;; + --test-pytest-find|--test-pytest-find=?*) # command + ensure_managed_venv_exists $1 + option_submitted="--test-pytest-find" + ;; --ac-version) # Command ensure_managed_venv_exists $1 option_submitted="--ac-version" @@ -805,7 +1429,7 @@ while true; do ensure_managed_venv_exists $1 option_submitted="--host-auth" ;; - --host-config) # Command + --host-config) # Command ensure_managed_venv_exists $1 option_submitted="--host-config" ;; @@ -813,8 +1437,8 @@ while true; do ensure_managed_venv_exists $1 option_submitted="--host-mount" ;; - --host-mounts) - ensure_managed_venv_exists $1 # Command + --host-mounts) # Command + ensure_managed_venv_exists $1 option_submitted="--host-mounts" ;; --host-nodes) # Command @@ -832,16 +1456,36 @@ while true; do ensure_managed_venv_exists $1 option_submitted="--venv-stop" ;; + --all|--all=?*) # option + all=`option_processor $1 $2` + option_sanitize $all + shift + ;; + --bal|--bal=?*) # option + bal=`option_processor $1 $2` + option_sanitize $bal + shift + ;; --command|--command=?*) # option command=`option_processor $1 $2` option_sanitize $command shift ;; + --branch|--branch=?*) # option + branch=`option_processor $1 $2` + option_sanitize $branch + shift + ;; --debug|--debug=?*) # option debug=`option_processor $1 $2` option_sanitize $debug shift ;; + --extra|--extra=?*) # option + extra=`option_processor $1 $2` + option_sanitize $extra + shift + ;; --file|--file=?*) # option file=`option_processor $1 $2` option_sanitize $file @@ -852,12 +1496,32 @@ while true; do option_sanitize $host shift ;; + --itr|--itr=?*) # option + itr=`option_processor $1 $2` + option_sanitize $itr + shift + ;; --level|--level=?*) # option level=`option_processor $1 $2` option_sanitize $level shift ;; - --name|--name=?*) # option + --level|--level=?*) # option + level=`option_processor $1 $2` + option_sanitize $level + shift + ;; + --maxjob|--maxjob=?*) # option + maxjob=`option_processor $1 $2` + option_sanitize $maxjob + shift + ;; + --maxnode|--maxnode=?*) # option + maxnode=`option_processor $1 $2` + option_sanitize $maxnode + shift + ;; + --name|--name=?*) # option name=`option_processor $1 $2` option_sanitize $name shift @@ -872,26 +1536,86 @@ while true; do option_sanitize $password shift ;; + --pretty|--pretty=?*) # option + pretty=`option_processor $1 $2` + option_sanitize $pretty + shift + ;; --python|--python=?*) # option python=`option_processor $1 $2` option_sanitize $python shift ;; + --pythonpath|--pythonpath=?*) # option + pythonpath=`option_processor $1 $2` + option_sanitize $pythonpath + shift + ;; + --replay|--replay=?*) # option + replay=`option_processor $1 "$2"` + option_sanitize "$replay" + shift + ;; + --returncode|--returncode=?*) # option + returncode=`option_processor $1 "$2"` + option_sanitize "$returncode" + shift + ;; + --skip|--skip=?*) # option + skip=`option_processor $1 "$2"` + option_sanitize "$skip" + shift + ;; --test|--test=?*) # option test=`option_processor $1 $2` option_sanitize $test shift ;; - # --tests|--tests=?*) # option + --timeout|--timeout=?*) # option + timeout=`option_processor $1 $2` + option_sanitize $timeout + shift + ;; + --throttle|--throttle=?*) # option + throttle=`option_processor $1 $2` + option_sanitize $throttle + shift + ;; + # --tests|--tests=?*) # option # tests=`option_processor $1 $2` # option_sanitize $tests # shift # ;; + --user|--user=?*) # option + user=`option_processor $1 $2` + option_sanitize $user + shift + ;; + --verbose|--verbose=?*) # option + verbose=`option_processor $1 $2` + option_sanitize $verbose + shift + ;; + --verbosity|--verbosity=?*) # option + verbosity=`option_processor $1 $2` + option_sanitize $verbosity + shift + ;; --version|--version=?*) # option version=`option_processor $1 $2` option_sanitize $version shift ;; + --workers|--workers=?*) # option + workers=`option_processor $1 $2` + option_sanitize $workers + shift + ;; + --volumes|--volumes=?*) # option + volumes=`option_processor $1 $2` + option_sanitize $volumes + shift + ;; --zoau|--zoau=?*) # option zoau=`option_processor $1 $2` option_sanitize $zoau @@ -940,8 +1664,19 @@ elif [ "$option_submitted" ] && [ "$option_submitted" = "--ac-sanity" ] ; then ac_sanity $version elif [ "$option_submitted" ] && [ "$option_submitted" = "--ac-test" ] ; then ac_test ${host:=""} ${python:=""} ${zoau:=""} ${file:=""} ${test:=""} ${debug:=""} +elif [ "$option_submitted" ] && [ "$option_submitted" = "--test-concurrent" ] ; then + test_concurrent ${host:=""} ${user:=""} ${python:=""} ${zoau:=""} ${pythonpath:=""}\ + ${volumes:="222222,000000"} ${file:="functional/*,unit/*"} "${skip:="test_module_security.py"}"\ + ${itr:="50"} ${replay:="5"} ${timeout:="300"} ${throttle:="True"} ${workers:="1"}\ + ${maxjob:="10"} ${maxnode:="30"} ${bal:="10"} ${verbose:="False"} ${verbosity:="0"}\ + ${debug:="False"} ${extra:="cd `pwd`"} ${returncode:="False"} elif [ "$option_submitted" ] && [ "$option_submitted" = "--ac-test-config" ] ; then ac_test_config +elif [ "$option_submitted" ] && [ "$option_submitted" = "--test-dep-find" ] ; then + ac_test_dep_finder ${branch:=""} "${skip:="test_module_security.py"}" ${pretty:="true"} +elif [ "$option_submitted" ] && [ "$option_submitted" = "--test-pytest-find" ] ; then + ac_test_pytest_finder ${file:="functional/*,unit/*"} "${skip:="test_module_security.py"}"\ + ${pretty:="true"} elif [ "$option_submitted" ] && [ "$option_submitted" = "--ac-version" ] ; then ac_version elif [ "$option_submitted" ] && [ "$option_submitted" = "--file-encrypt" ] ; then @@ -955,7 +1690,7 @@ elif [ "$option_submitted" ] && [ "$option_submitted" = "--host-mount" ] ; then elif [ "$option_submitted" ] && [ "$option_submitted" = "--host-mounts" ] ; then host_mounts elif [ "$option_submitted" ] && [ "$option_submitted" = "--host-nodes" ] ; then - host_nodes + host_nodes ${all} elif [ "$option_submitted" ] && [ "$option_submitted" = "--venv-setup" ] ; then venv_setup $password elif [ "$option_submitted" ] && [ "$option_submitted" = "--venv-start" ] ; then diff --git a/ansible.cfg b/ansible.cfg index a6d62f711..b452495ff 100644 --- a/ansible.cfg +++ b/ansible.cfg @@ -42,3 +42,6 @@ pipelining = True [colors] verbose = green + +[persistent_connection] +command_timeout = 60 diff --git a/changelogs/fragments/828-adds-concurrent-executor.yml b/changelogs/fragments/828-adds-concurrent-executor.yml new file mode 100644 index 000000000..9da97836e --- /dev/null +++ b/changelogs/fragments/828-adds-concurrent-executor.yml @@ -0,0 +1,7 @@ +trivial: + - ce.py - Adds the concurrent executor capable of running + test cases concurrently against a pool of managed nodes. + (https://github.com/ansible-collections/ibm_zos_core/pull/828). + - zinventory-raw - a new pytest fixture that can accept a JSON + vs a configuration file. + (https://github.com/ansible-collections/ibm_zos_core/pull/828). diff --git a/scripts/ce.py b/scripts/ce.py new file mode 100644 index 000000000..75bb142ca --- /dev/null +++ b/scripts/ce.py @@ -0,0 +1,2089 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- + +# Copyright (c) IBM Corporation 2024 +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +""" +Module CE is used to run ansible test cases concurrently to a pool of managed +nodes. This module is tailored to z/OS managed nodes and currently has a dependency +on a shell script and the managed venv's provided by the 'ac' tool. +""" + +# pylint: disable=line-too-long, too-many-lines, fixme, too-many-instance-attributes +# pylint: disable=redefined-builtin, too-many-public-methods,too-many-arguments, too-many-locals +# pylint: disable=consider-using-f-string, too-many-branches, too-many-statements + + +import argparse +import json +import sys +import subprocess +import textwrap +import threading +from enum import Enum +from threading import Lock +import time +from concurrent.futures import ThreadPoolExecutor +from concurrent.futures import as_completed +from contextlib import contextmanager +from datetime import datetime +from collections import OrderedDict, namedtuple +from typing import List, Tuple +from prettytable import PrettyTable, ALL +from modules.utils import get_test_cases + + +# ------------------------------------------------------------------------------ +# Enums +# ------------------------------------------------------------------------------ +class Status (Enum): + """ + Represents the online/offline status of a managed node. + + Attributes: + ONLINE : Status - The node is online. + OFFLINE : Status - The node is offline. + + Methods: + number() - Returns the integer value of the status. + string() - Returns the string representation of the status. + is_equal(other) - Checks if this status is equal to another status. + is_online() - Checks if this status is online. + default() - Returns the default status (ONLINE). + """ + + ONLINE=(1, "online") + OFFLINE=(0, "offline") + + def __str__(self) -> str: + """ + Convert the name of the project to lowercase when converting it to a string. + + Return: + str: The lowercase name of the project. + """ + return self.name.lower() + + def number(self) -> int: + """ + Returns the numerical element of the tuple. + 1 for ONLINE and 0 for OFFLINE. + + Return: + int: The numerical element of the tuple. + 1 for ONLINE and 0 for OFFLINE. + """ + return self.value[0] + + def string(self) -> str: + """ + Returns the string value contained in the tuple. + 'online' for ONLINE and 'offline' for OFFLINE. + + Return: + str: The string value contained in the tuple. + 'online' for ONLINE and 'offline' for OFFLINE. + """ + return self.value[1] + + def is_equal(self, other) -> bool: + """ + Checks if two tuples numerical value are the same. + + Parameters: + other (status): The other tuple to compare to. + + Return: + bool: True if the numerical tuple values are the same, False otherwise. + """ + return self.number() == other.number() + + def is_online(self) -> bool: + """ + Checks if the tuple is ONLINE, if it equates to 1 + + Return: + bool: True if the tuple is ONLINE, False otherwise. + """ + return self.number() == 1 + + @classmethod + def default(cls): + """ + Return default status of ONLINE. + + Return: + Status: Return the ONLINE status. + """ + return cls.ONLINE + +class State (Enum): + """ + This class represents the state of a job. It has three + possible values: success, failure, and exceeded-max-failure. + + Attributes: + SUCCESS (State): A job succeeded execution. + FAILURE (State): A job failed to execute. + EXCEEDED (State): A job has exceeded its maximum allowable. + failures and will no longer be run in the thread pool. + """ + SUCCESS=(1, "success") + FAILURE=(0, "failure") + EXCEEDED=(2, "exceeded-max-failure") + + def __str__(self) -> str: + """ + Returns the name of the state in uppercase letters. + + Return: + str: The name of the state in uppercase letters. + 'SUCCESS' a job succeeded execution. + 'FAILURE' a job failed to execute. + 'EXCEEDED' a job has exceeded its maximum allowable failures. + """ + return self.name.upper() + + + def number(self) -> int: + """ + Returns the numeric value of the state. + + Return: + int: The numeric value of the state. + 1 for 'SUCCESS' a job succeeded execution. + 2 for 'FAILURE' a job failed to execute. + 3 for 'EXCEEDED' a job has exceeded its maximum allowable failures. + """ + return self.value[0] + + def string(self) -> str: + """ + Returns the string representation of the state. + + Return: + str: The string value of the state. + 'success' a job succeeded execution. + 'failure' a job failed to execute. + 'exceeded-max-failure' a job has exceeded its maximum allowable failures. + """ + return self.value[1] + + def is_equal(self, other: Enum) -> bool: + """ + Checks if this state is equal to another state by comparing + the numerical values for the two states. + + Args: + other (State): The other state to compare to. + + Return: + bool: True if the states are equal, False otherwise. + """ + return self.number() == other.number() + + def is_success(self) -> bool: + """ + Checks if this state is successful (SUCCESS) by + ensuring the numerical value is 1. + + Return: + bool: True if the state is successful, False otherwise. + """ + return self.number() == 1 + + def is_failure(self) -> bool: + """ + Checks if this state is a failure (FAILURE) by + ensuring the numerical value is 0. + + Return: + bool: True if the state is a failure, False otherwise. + """ + return self.number() == 0 + + def is_balanced(self) -> bool: + """ + Checks if this state has exceeded (EXCEEDED) by + ensuring the numerical value is 2. + + Return: + bool: True if the state has exceeded, False otherwise. + """ + return self.number() == 2 + +# ------------------------------------------------------------------------------ +# Class Dictionary +# ------------------------------------------------------------------------------ + +class Dictionary(): + """ + This is a wrapper class around a dictionary that provides additional locks + and logic for when interacting with any of the entries being accessed by + a thread pool to ensure safe access. + """ + + def __init__(self): + self._shared_dictionary = {} + self._lock = Lock() + + @contextmanager + def _acquire_with_timeout(self, timeout: int = -1) -> bool: + """ + Acquires a lock with a timeout in milliseconds. + + Parameters: + timeout (int): The maximum time to wait for the lock in milliseconds. + If -1, waits indefinitely. + + Return: + bool: True if the lock was acquired, False otherwise. + """ + result = self._lock.acquire(timeout=timeout) + try: + yield result + finally: + if result: + self._lock.release() + + # Likely works but not tested but also saw no need for this. + # def remove_items(self, remove): + # for key in list(remove.keys()): + # with self._lock: + # if key in self._shared_dictionary: + # self._shared_dictionary.pop(key) + + def pop(self, key, timeout: int = 100) -> object: + """ + Removes the entry from the dictionary and returns it. + Entry will no longer in remain in the dictionary. + + Parameters: + key (str): The key of the item to remove. + timeout (int): The maximum time to wait for acquiring the lock. + Default is 100ms. + + Return: + object: The value of the removed item. + """ + with self._acquire_with_timeout(timeout) as acquired: + if acquired: + if self._shared_dictionary: + if key in self._shared_dictionary: + return self._shared_dictionary.pop(key) + return None + + def get(self, key, timeout: int = 10) -> object: + """ + Retrieves the value associated with the given key from the dictionary. + + Args: + key (str): The key of the entry to retrieve. + timeout (int): The maximum time to wait for the lock, in seconds. + Defaults to 10 seconds. + + Return: + Any: The value associated with the given key. + + Raises: + KeyError: If the key does not exist in the dictionary. + TimeoutError: If the lock cannot be acquired before the timeout expires. + """ + with self._acquire_with_timeout(timeout) as acquired: + if acquired: + return self._shared_dictionary[key] + return None + + def update(self, key, obj) -> None: + """ + Update the dictionary with a new entry, functions same as add(...). + If the entry exists, it will be replaced. + + Parameters: + key (str): The key for the dictionary entry. + obj (object): The object to be stored in the dictionary. + """ + with self._lock: + self._shared_dictionary[key]=obj + + def add(self, key, obj) -> None: + """ + Add an entry to the dictionary, functions same as update(...). + If the entry exists, it will be replaced. + + Parameters: + key (str): The key for the dictionary entry. + obj (object): The object to be stored in the dictionary. + """ + with self._lock: + self._shared_dictionary[key]=obj + + def items(self) -> None: + """ + Returns a tuple (key, value) for each entry in the dictionary. + + Returns: + A tuple containing the key and value of each entry in the dictionary. + """ + with self._lock: + return self._shared_dictionary.items() + + def len(self) -> int: + """ + Returns the length of the dictionary. + + Returns: + int: The length of the dictionary. + + Example: + <dictionary>.len() + """ + with self._lock: + return len(self._shared_dictionary) + + def keys(self) -> List[str]: + """ + Returns a list of all keys in the dictionary. + + Returns: + List[str]: A list of all keys in the shared dictionary. + """ + with self._lock: + return self._shared_dictionary.keys() + +# ------------------------------------------------------------------------------ +# Class job +# ------------------------------------------------------------------------------ +class Job: + """ + Job represents a unit of work that the ThreadPoolExecutor will execute. A job + maintains all necessary attributes to allow the test case to execute on a + z/OS managed node. + + Parameters: + hostname (str): Full hostname for the z/OS manage node the Ansible workload will be executed on. + nodes (str): Node object that represents a z/OS managed node and all its attributes. + testcase (str): The USS absolute path to a testcase using '/path/to/test_suite.py::test_case' + id (int): The id that will be assigned to this job, a unique identifier. The id will be used + as the key in a dictionary. + """ + + def __init__(self, hostname: str, nodes: Dictionary, testcase: str, id: int): + """ + Parameters: + hostname (str): Full hostname for the z/OS manage node the Ansible workload + will be executed on. + nodes (str): Node object that represents a z/OS managed node and all its + attributes. + testcase (str): The USS absolute path to a testcase using + '/path/to/test_suite.py::test_case' + id (int): The id that will be assigned to this job, a unique identifier. The id will + be used as the key in a dictionary. + """ + self._hostnames: list = [] + self._hostnames.append(hostname) + self._testcase: str = testcase + self._capture: str = None + self._failures: int = 0 + self._id: int = id + self._rc: int = -1 + self._successful: bool = False + self._elapsed: str = None + self._hostpattern: str = "all" + self._nodes: Dictionary = nodes + self._stdout_and_stderr: list[Tuple[str, str, str]] = [] + self._stdout: list[Tuple[str, str, str]] = [] + self._verbose: str = None + + def __str__(self) -> str: + """ + This function returns a string representation of the Job. + + Parameters: + self (Job): The Job object to be represented as a string. + + Returns: + A string representation of the Job object. + """ + temp = { + "_hostname": self.get_hostname(), + "_testcase": self._testcase, + "_capture": self._capture, + "_failures": self._failures, + "_id": self._id, + "_rc": self._rc, + "_successful": self._successful, + "_elapsed": self._elapsed, + "_hostpattern": self._hostpattern, + "_pytest-command": self.get_command(), + "verbose": self._verbose + } + + return str(temp) + + def get_command(self) -> str: + """ + Returns a command designed to run with the projects pytest fixture. The command + is created specifically based on the args defined, such as ZOAU or test cases to run. + + Parameters: + self (Job) An instance of the class containing the method. + + Returns: + str: A string representing the pytest command to be executed. + + Example Return: + pytest tests/functional/modules/test_zos_job_submit_func.py::test_job_submit_pds[location1]\ + --host-pattern=allNoneNone --zinventory-raw='{"host": "ec33025a.vmec.svl.ibm.com",\ + "user": "omvsadm", "zoau": "/zoau/v1.3.1",\ + "pyz": "/allpython/3.10/usr/lpp/IBM/cyp/v3r10/pyz",\ + "pythonpath": "/zoau/v1.3.1/lib/3.10", "extra_args": {"volumes": ["222222", "000000"]}}' + + """ + node_temp = self._nodes.get(self.get_hostname()) + node_inventory = node_temp.get_inventory_as_string() + + return f"""pytest {self._testcase} --host-pattern={self._hostpattern} + {self._capture if self._capture else ""} + {self._verbose if self._verbose else ""} --zinventory-raw='{node_inventory}'""" + + + def get_hostnames(self) -> list[str]: + """ + Return all hostnames that have been assigned to this job over time as a list. + Includes hostnames that later replaced with new hostnames because the host is + considered no longer functioning. + + Return: + list[str]: A list of all hosts. + """ + return self._hostnames + + def get_hostname(self) -> str: + """ + Return the current hostname assigned to this node, in other words, the active hostname. + + Return: + str: The current hostname assigned to this job. + """ + return self._hostnames[-1] + + def get_testcase(self) -> str: + """ + Return a pytest parametrized testcase that is assigned to this job. + Incudes absolute path, testcase, and parametrization, eg <path/test.py::test[parameter]> + + Return: + str: Returns absolute path, testcase, and parametrization, + eg <path/test.py::test[parameter]> + """ + return self._testcase + + def get_failure_count(self) -> int: + """ + Return the number of failed job executions that have occurred for this job. + Failures can be a result of the z/OS managed node, a bug in the test case or even a + connection issue. This is used for statistical purposes or reason to assign the test + to a new hostname. + + Return: + int: Number representing number of failed executions. + """ + return self._failures + + def get_rc(self) -> int: + """ + The return code for the jobs execution. + + Return: + int: + Return code 0 All tests were collected and passed successfully (pytest) + Return code 1 Tests were collected and run but some of the tests failed (pytest) + Return code 2 Test execution was interrupted by the user (pytest) + Return code 3 Internal error happened while executing tests (pytest) + Return code 4 pytest command line usage error (pytest) + Return code 5 No tests were collected (pytest) + Return code 6 No z/OS nodes available. + Return code 7 Re-balancing of z/OS nodes were performed + Return code 8 Job has exceeded permitted job failures + Return code 9 Job has exceeded timeout + """ + return self._rc + + def get_id(self) -> int: + """ + Returns the job id used as the key in the dictionary to identify the job. + + Return: + int: Id of the job + """ + return self._id + + def get_successful(self) -> bool: + """ + Returns True if the job has completed execution. + + Return: + bool: True if the job completed, otherwise False. + + See Also: + get_rc() - Returns 0 for success, otherwise non-zero. + """ + return self._successful + + def get_elapsed_time(self) -> str: + """ + Returns the elapsed time for this job, in other words, + how long it took this job to run. + + Return: + str: Time formatted as <HH:MM:SS.ss> , eg 00:05:30.64 + """ + return self._elapsed + + def get_nodes(self) -> Dictionary: + """ + Returns a dictionary of all the z/OS managed nodes available. + z/OS managed nodes are passed to a job so that a job can + interact with the nodes configuration, for example, + if a job needs to mark the node as offline, it can easily + access the dictionary of z/OS managed nodes to do so. + + Return: + Dictionary[str, node]: Thread safe Dictionary of z/OS managed nodes. + """ + return self._nodes + + def get_stdout_and_stderr_msgs(self) -> list[Tuple[str, str, str]]: + """ + Return all stdout and stderr messages that have been assigned to + this job over time as a list. + + Return: + list[str]: A list of all stderr and stdout messages. + """ + return self._stdout_and_stderr + + def get_stdout_msgs(self) -> list[Tuple[str, str, str]]: + """ + Return all stdout messages that have been assigned to this job + over time as a list. + + Return: + list[str]: A list of all stderr and stdout messages. + """ + return self._stdout + + def get_stdout_and_stderr_msg(self) -> Tuple[str, str, str]: + """ + Return the current stdout and stderr message assigned to this node, in + other words, the last message resulting from this jobs execution. + + Return: + str: The current concatenated stderr and stdout message. + """ + return self._stdout_and_stderr[-1] + + def get_stdout_msg(self) -> Tuple[str, str, str]: + """ + Return the current stdout message assigned to this node, in other + words, the last message resulting from this jobs execution. + + Return: + str: The current concatenated stderr and stdout message. + """ + return self._stdout[-1] + + def set_rc(self, rc: int) -> None: + """ + Set the jobs return code obtained from execution. + + Parameters: + rc (int): Value that is returned from the jobs execution + """ + self._rc = rc + + def set_success(self) -> None: + """ + Mark the job as having completed successfully. + + Parameters: + completed (bool): True if the job has successfully returned + with a RC 0, otherwise False. + """ + self._successful = True + + def add_hostname(self, hostname: str) -> None: + """ + Set the hostname of where the job will be run. + + Parameters: + hostname (str): Hostname of the z/OS managed node. + """ + self._hostnames.append(hostname) + + def increment_failure(self) -> None: + """ + Increment the failure by 1 for this jobs. Each time the job + returns with a non-zero return code, increment the value + so this statistic can be reused in other logic. + """ + self._failures +=1 + + def set_elapsed_time(self, start_time: time) -> None: + """ + Set the start time to obtain the elapsed time this + job took to run. Should only set this when RC is zero. + + Parameters: + start_time (time): The time the job started. A start time should be + captured before the job is run, and passed to this + function after the job completes for accuracy of + elapsed time. + """ + self._elapsed = elapsed_time(start_time) + + def set_capture(self, capture: bool) -> None: + """ + Indicate if pytest should run with '-s', which will + show output and not to capture any output. Pytest + captures all output sent to stdout and stderr, + so you won't see the printed output in the console + when running tests unless a test fails. + """ + if capture is True: + self._capture = " -s" + + def set_verbose(self, verbosity: int) -> None: + """ + Indicate if pytest should run with verbosity to show + detailed console outputs and debug failing tests. + Verbosity is defined by the number of v's passed + to py test. + + If verbosity is outside of the numerical range, no + verbosity is set. + + Parameters: + int: Integer range 1 - 4 + 1 = -v + 2 = -vv + 3 = -vvv + 4 = -vvvv + """ + if verbosity == 1: + self._verbose = " -v" + elif verbosity == 2: + self._verbose = " -vv" + elif verbosity == 3: + self._verbose = " -vvv" + elif verbosity == 4: + self._verbose = " -vvvv" + + def set_stdout_and_stderr(self, message: str, std_out_err: str, date_time: str) -> None: + """ + Add a stdout and stderr concatenated message resulting from the jobs + execution (generally std out/err resulting from pytest) the job. + + Parameters: + message (str): Message associated with the stdout and stderr output. Message + describes the std_out_err entry. + stdout_stderr (str): Stdout and stderr concatenated into one string. + date_time (str): Date and time when the stdout and stderr output was generated. + """ + + Joblog = namedtuple('Joblog',['id', 'hostname', 'command', 'message', 'std_out_err', 'date_time']) + + joblog = Joblog(self._id, self._hostnames[-1], self.get_command(), message, std_out_err, date_time) + self._stdout_and_stderr.append(joblog) + + def set_stdout(self, message: str, std_out_err: str, date_time: str) -> None: + """ + Add a stdout concatenated message resulting from the jobs + execution (generally std out/err resulting from pytest) the job. + + Parameters: + message (str): Message associated with the stdout/stderr output. + stdout_stderr (str): Stdout and stderr concatenated into one string. + date_time (str): Date and time when the stdout/stderr was generated. + """ + Joblog = namedtuple('Joblog',['id', 'hostname', 'command', 'message', 'std_out_err', 'date_time']) + + joblog = Joblog(self._id, self._hostnames[-1], self.get_command(), message, std_out_err, date_time) + self._stdout.append(joblog) + +# ------------------------------------------------------------------------------ +# Class Node +# ------------------------------------------------------------------------------ + + +class Node: + """ + A z/OS node suitable for Ansible tests to execute. Attributes such as 'host', + 'zoau', 'user' and 'pyz' , etc are maintained in this class instance because + these attributes can vary between nodes. These attributes are then used to + create a dictionary for use with pytest fixture 'zinventory-raw'. + + This node will also track the health of the node, whether its status.ONLINE + meaning its discoverable and useable or status.OFFLINE meaning over time, + since being status.ONLINE, it has been determined unusable and thus marked + as status.OFFLINE. + + Parameters: + hostname (str): Hostname for the z/OS managed node the Ansible workload + will be executed on. + user (str): The USS user who will run the Ansible workload on z/OS. + zoau (str): The USS absolute path to where ZOAU is installed. + pyz( str): The USS absolute path to where python is installed. + """ + + + def __init__(self, hostname: str, user: str, zoau: str, pyz: str, pythonpath: str, volumes: str): + """ + parser.add_argument('--pythonpath', type=str, help='Absolute path to the + ZOAU Python modules, precompiled or wheels.', required=True, + metavar='<str>', default="") parser.add_argument('--volumes' + Parameters: + hostname (str): Hostname for the z/OS managed node the Ansible + workload + will be executed on. + user (str): The USS user who will run the Ansible workload on z/OS. + zoau (str): The USS absolute path to where ZOAU is installed. pyz( + str): The USS absolute path to where python is installed. + + """ + self._hostname: str = hostname + self._user: str = user + self._zoau: str = zoau + self._pyz: str = pyz + self._pythonpath: str = pythonpath + self._volumes: str = volumes + self._state: Status = Status.ONLINE + self._failures: set[int] = set() + self._balanced: set[int] = set() + self._inventory: dict [str, str] = {} + self._inventory.update({'host': self._hostname}) + self._inventory.update({'user': self._user}) + self._inventory.update({'zoau': self._zoau}) + self._inventory.update({'pyz': self._pyz}) + self._inventory.update({'pythonpath': self._pythonpath}) + self._extra_args = {} + self._extra_args.update({'extra_args':{'volumes':self._volumes.split(",")}}) + self._inventory.update(self._extra_args) + self._assigned = Dictionary() + self._failure_count: int = 0 + self._assigned_count: int = 0 + self._balanced_count: int = 0 + self._running_job_id: int = -1 + + def __str__(self) -> str: + """ + String representation of the Node class. Not every class + variable is returned, some of the dictionaries which track + state are large and should be accessed directly from those + class members. + """ + temp = { + "_hostname": self._hostname, + "_user": self._user, + "_zoau": self._zoau, + "_pyz": self._pyz, + "_pythonpath": self._pythonpath, + "_volumes": self._volumes, + "_state": str(self._state), + "inventory": self.get_inventory_as_string(), + "_failure_count": str(self._failure_count), + "_assigned_count": str(self._assigned_count), + "_balanced_count": str(self._balanced_count), + "_running_job_id": str(self._running_job_id) + } + return str(temp) + + def set_state(self, state: Status) -> None: + """ + Set status of the node, is the z/OS node ONLINE (useable) + or OFFLINE (not usable). + + Parameters: + state (Status): Set state to Status.ONLINE or Status.OFFLINE. + Use Status.ONLINE to signal the managed node is healthy, use + Status.OFFLINE to signal the managed node should not used + to run any jobs. + """ + self._state = state + + def set_failure_job_id(self, id: int) -> None: + """ + Update the node with any jobs which fail to run. If a job fails to run, + add the job ID to the nodes class. A Job failure occurs when the + execution of the job is a non-zero return code. + + Parameters: + id (int): The ID of the job that failed to run. + """ + self._failures.add(id) + self._failure_count = len(self._failures) + + def set_assigned_job(self, job: Job) -> None: + """ + Add a job to the Node that has been assigned to this node (z/OS managed node). + + Parameters: + job (Job): The job that has been assigned to this node. + """ + self._assigned.add(job.get_id(),job) + self._assigned_count +=1 + + def set_balanced_job_id(self, id: int) -> None: + """ + Add a jobs ID to the node, when a job has been rebalanced. + + Parameters: + id (int): The job ID to add to the set of balanced jobs. + """ + self._balanced.add(id) + + def set_running_job_id(self, running_job_id: int) -> None: + """ + Set the ID of the currently running job. + + Parameters: + running_job_id (int): The ID of the currently running job. + """ + self._running_job_id = running_job_id + + def get_state(self) -> Status: + """ + Get the z/OS manage node status. + + Return: + Status.ONLINE: If the z/OS managed node state is usable. + Status.OFFLINE: If the z/OS managed node state is unusable. + """ + return self._state + + def get_hostname(self) -> str: + """ + Get the hostname for this managed node. A node is a + z/OS host capable of running an Ansible unit of work. + + Return: + str: The managed nodes hostname. + """ + return self._hostname + + def get_user(self) -> str: + """ + Get the users id that is permitted to run an Ansible workload on + the managed node. + + Return: + str: Unix System Services (USS) user name + """ + return self._user + + def get_zoau(self) -> str: + """ + Get the ZOAU home directory path found on the managed node. + + Return: + str: Unix System Services (USS) absolute path of where + ZOAU is installed. + """ + return self._zoau + + def get_pyz(self) -> str: + """ + Get the Python home directory path found on the managed node. + + Return: + str: Unix System Services (USS) absolute path of where + python is installed. + """ + return self._pyz + + def get_inventory_as_string(self) -> str: + """ + Get a JSON string of the inventory that can be used with + the 'zinventory-raw' pytest fixture. This JSON string can be + passed directly to the option 'zinventory-raw', for example: + + pytest .... --zinventory-raw='{.....}' + + Return: + str: A JSON string of the managed node inventory attributes. + """ + return json.dumps(self._inventory) + + def get_inventory_as_dict(self) -> dict [str, str]: + """ + Get a dictionary that can be used with the 'zinventory-raw' + pytest fixture. This is the dict() not a string, you might + choose this so you can dynamically update the dictionary and + then use json.dumps(...) to convert it to string and pass it + to zinventory-raw'. + + Return: + dict [str, str]: A dictionary of the managed node + inventory attributes. + """ + return self._inventory + + def get_failure_jobs_as_dictionary(self) -> Dictionary: + """ + Get a Dictionary() of all jobs which have failed on this node. + + Return: + Dictionary[int, Job]: A Dictionary() of all Job(s) that have + been assigned and failed on this Node. + """ + return self._failures + + def get_assigned_jobs_as_string(self) -> str: + """ + Get a JSON string of all jobs which have been assigned to this node. + + Return: + str: A JSON string representation of a job. + """ + return json.dumps(self._assigned) + + def get_assigned_jobs_as_dictionary(self) -> Dictionary: + """ + Get a Dictionary of all jobs which have been assigned to this node. + + Return: + Dictionary[int, Job]: A Dictionary of all jobs which have + failed on this node. + """ + return self._assigned + + def get_failure_job_count(self) -> int: + """ + Get the numerical count of how many Job(s) have failed on this + Node with a non-zero return code. + + Returns: + int: The number of failed Jobs. + """ + return self._failure_count + + def get_assigned_job_count(self) -> int: + """ + Get the numerical count of how many Job(s) have been assigned + to this Node. + + Returns: + int: The number of Jobs assigned to this Node. + """ + return self._assigned_count + + def get_balanced_job_count(self) -> int: + """ + Get the numerical count of how many Job(s) have been + reassigned (balanced) to this Node. + + Returns: + int: The number of jobs which have been balanced onto + this node. + """ + self._balanced_count = len(self._balanced) + return self._balanced_count + + def get_running_job_id(self) -> int: + """ + Get the job id of the currently running job. + + Returns: + int: The job id of the currently running job. + """ + return self._running_job_id + +# ------------------------------------------------------------------------------ +# Helper methods +# ------------------------------------------------------------------------------ + +def get_jobs(nodes: Dictionary, paths:str, skip: str, capture: bool, verbosity: int, replay: bool = False) -> Dictionary: + """ get_test_cases(test_suites: str, test_directories: str = None, skip: str = None): + Get a thread safe dictionary of job(s). + A job represents a test case, a unit of work the ThreadPoolExecutor will run. + A job manages the state of a test case as well as the necessary information + to run on a z/OS managed node. + + Parameters: + paths (str): Absolute path of directories containing test suites or absolute + path of individual test suites comma or space delimited. + A directory of test cases is such that it contains test suites. + A test suite is a collection of test cases in a file that starts with + 'test' and ends in '.py'. + skip (str): (Optional) Absolute path of either test suites, or test cases. + Test cases can be parametrized such they use the '::' syntax or not. + Skip does not support directories. + + Raises: + FileNotFoundError : If a test suite, test case or skipped test cannot be found. + RuntimeError : When no z/OS managed hosts were online. + + Returns: + Dictionary [int, Job]: A thread safe Dictionary containing numeric keys (ID) with value + type Job, each Dictionary item is a testcase with supporting + attributes necessary to execute on a z/OS managed node. + """ + + hostnames=list(nodes.keys()) + hostnames_length = nodes.len() + parametrized_test_cases = [] + if hostnames_length == 0: + raise RuntimeError('No z/OS managed hosts were online, please check host availability.') + + # Thread safe dictionary of Jobs + jobs = Dictionary() + index = 0 + hostnames_index = 0 + + if not replay: + parametrized_test_cases = get_test_cases(paths, skip) + else: + parametrized_test_cases = paths.split(',') + + for parametrized_test_case in parametrized_test_cases: + + # Assign each job a hostname using round robin (modulus % division) + if hostnames_index % hostnames_length == 0: + hostnames_index = 0 + + # Create a job, add it jobs Dictionary, update node reference + hostname = hostnames[hostnames_index] + _job = Job(hostname = hostname, nodes = nodes, testcase=parametrized_test_case, id=index) + _job.set_verbose(verbosity) + _job.set_capture(capture) + jobs.update(index, _job) + nodes.get(hostname).set_assigned_job(_job) + index += 1 + hostnames_index += 1 + + # for key, value in jobs.items(): + # print(f"The job count = {str(jobs.len())}, job id = {str(key)} , job = {str(value)}") + + return jobs + + +def update_job_hostname(job: Job): + """ + Updates the job with a new hostname. Jobs rely on healthy hostnames and when + its determine that the z/OS hostname that is being accessed has become + incapable of addressing any unit of work, this method will append a new + z/os hostname for the job to execute its job on. This method ensures + that it is a randomly different node then the one previously assigned + to the job. + + This is referred to as re-balancing a jobs hostname, this happens when + a job has consistently failed N number of times. + + TODO: + - Iterate over all jobs looking for inactive ones and balance all of the + job nodes. - after giving this some thou + """ + + unsorted_items = {} + nodes = job.get_nodes() + + # We need the Jobs assigned host names (job.get_hostnames() -> list[str]) + set_of_nodes_assigned_to_job: set = set(job.get_hostnames()) + + set_of_nodes_online: set = set() + for key, value in job.get_nodes().items(): + if value.get_state().is_online(): + set_of_nodes_online.add(key) + + # The difference of all available z/OS zos_nodes and ones assigned to a job. + nodes_available_and_online = list(set_of_nodes_online - set_of_nodes_assigned_to_job) + + for hostname in nodes_available_and_online: + count = nodes.get(hostname).get_assigned_job_count() + unsorted_items[hostname] = count + + sorted_items_by_assigned = OrderedDict(sorted(unsorted_items.items(), key=lambda x: x[1])) + # for key, value in sorted_items_by_assigned.items(): + # print(f" Sorted by assigned are; key = {key}, value = {value}.") + + # After being sorted ascending, assign the first index which will have been the lease used connection. + if len(sorted_items_by_assigned) > 0: + hostname = list(sorted_items_by_assigned)[0] + job.add_hostname(hostname) + nodes.get(hostname).set_assigned_job(job) + + +def get_nodes(user: str, zoau: str, pyz: str, hostnames: list[str] = None, pythonpath: str = None, volumes: str = None) -> Dictionary: + """ + Get a thread safe Dictionary of active z/OS managed nodes. + + Parameters: + user (str): The USS user name who will run the Ansible workload on z/OS. + zoau (str): The USS absolute path to where ZOAU is installed. + pyz (str): The USS absolute path to where python is installed. + + Returns: + Dictionary [str, Node]: Thread safe Dictionary containing all the active z/OS managed nodes. + The dictionary key will be the z/OS managed node's hostname and the value + will be of type Node. + """ + nodes = Dictionary() + + if hostnames is None: + hostnames = [] + + # Calling venv.sh directly to avoid the ac dependency, ac usually lives in project root so an + # additional arg would have to be passed like so: "cd ..;./ac --host-nodes --all false" + result = subprocess.run(["echo `./venv.sh --targets-production`"], shell=True, capture_output=True, text=True, check=False) + hostnames = result.stdout.split() + else: + hostnames = hostnames[0].split(',') + + # Prune any production system that fails to ping + for hostname in hostnames: + command = ['ping', '-c', '1', hostname] + result = subprocess.run(args=command, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL, check=False) + + # TODO: Use the connection class to connection and validate ZOAU and Python before adding the nodes + if result.returncode == 0: + node=Node(hostname = hostname, user = user, zoau = zoau, pyz = pyz, pythonpath = pythonpath, volumes = volumes) + node.set_state(Status.ONLINE) + nodes.update(key = hostname, obj = node) + + return nodes + +def get_nodes_online_count(nodes: Dictionary) -> int: + """ + Get a count of how many managed Node(s) have status that is equal to Status.ONLINE. + A value greater than or equal to 1 signifies that Job(s) can continue to execute, + otherwise there are no managed nodes capable or running a job. + + A Node is set to Status.OFFLINE when the value used for --bal (balance) is + surpassed. Balance (--bal) is used to signal that Job has run N number of times + on a particular host and had a non-zero return code and should be used by any other Job. + + Parameters: + nodes (Dictionary [ str, node]): Thread safe dictionary z/OS managed nodes. + + Returns: + int: The numerical count of nodes that are online. + """ + nodes_online_count = 0 + for _, value in nodes.items(): + if value.get_state().is_online(): + nodes_online_count += 1 + + return nodes_online_count + +def get_nodes_offline_count(nodes: Dictionary) -> int: + """ + Get a count of how many managed Node(s) have status that is equal to Status.OFFLINE. + A value greater than or equal to 1 signifies that Job(s) have failed to run on this + node and that this node should not be used any further. + + A Node is set to Status.OFFLINE when the value used for --bal (balance) is + surpassed. Balance (--bal) is used to signal that Job has run N number of times + on a particular host and had a non-zero return code and should be used by any other Job. + + Parameters: + nodes (dictionary [ str, node]) Thread safe dictionary z/OS managed nodes. + + Returns: + int - The numerical count of nodes that are offline. + """ + nodes_offline_count = 0 + for _, value in nodes.items(): + if not value.get_state().is_online(): + nodes_offline_count += 1 + + return nodes_offline_count + +# def set_nodes_offline(nodes: Dictionary, maxnode: int) -> None: +# for key, value in nodes.items(): +# if value.get_balanced_count() > maxnode: +# value.set_state(Status.OFFLINE) + +def set_node_offline(node: Node, maxnode: int) -> None: + """ + Sets a node offline if it has exceeded maxnode, the number of permitted + balanced jobs for a node. 'maxnode' is defined as the maximum number of + times a node can fail to run a job before its set to 'offline' indicating + the node is no longer suitable for job execution. + + Parameters: + node (Node): The node to check for balanced jobs. + maxnode (int): The maximum number of balanced jobs + allowed on a node before it is set offline. + """ + if node.get_balanced_job_count() > maxnode: + node.set_state(Status.OFFLINE) + +def get_jobs_statistics(jobs: Dictionary, maxjob: int) -> Tuple[int, list[str], int, list[str], int, list[str], list[str], int, int, list[str], list[str]]: + """ + Collect result data that can be used to generate a log/history of the + programs execution, such as how many jobs ran, how many failed, etc. + + Parameters: + jobs (Dictionary [int, job]) - A dictionary of jobs keyed by their id. + maxjob (int): The maximum number of times a job can fail before its disabled + in the job queue. + + Returns + jobs_total_count (int): The number of jobs that have been scheduled to run. + jobs_success_tests (ist[str]): A list of test cases that were successful. + jobs_success_log (list[str]): A list of log messages associated with the + successful test cases. + jobs_failed_count (int): The total number of jobs that failed. + jobs_failed_tests (list[str]): A list of test cases that failed. + jobs_failed_log: (list[str]): A list of log messages associated with the + failed test cases. + jobs_rebalanced_count (int): The total number of jobs that had their + hostnames rebalanced. + jobs_failed_count_maxjob (int): The total number of jobs that failed + multiple times (exceeded maxjob). + jobs_failed_maxjob_tests (list[str]): A list of test cases that failed + multiple times (exceeded maxjob). + jobs_failed_maxjob_log (list[str]): A list of log messages associated with + the failed test cases that exceeded maxjob. + + Example: + >>>> stats = get_jobs_statistics(jobs, args.maxjob) + >>>> print(f" {stats.jobs_success_count}, {stats.jobs_total_count}, etc) + + Raises: + TypeError: + - If the input argument jobs is not a dictionary + - If any of the values in the jobs dictionary are not instances of the Job class + """ + jobs_success_count = 0 + jobs_success_tests = [] + jobs_failed_count = 0 + jobs_failed_tests = [] + jobs_total_count = 0 + jobs_success_log = [] + jobs_failed_log = [] + jobs_rebalanced_count = 0 + jobs_failed_count_maxjob = 0 + jobs_failed_maxjob_tests =[] + jobs_failed_maxjob_log = [] + + for _, value in jobs.items(): + # Total count of jobs (same as len(jobs)) + jobs_total_count +=1 + + # Total of jobs that have been rebalanced + if len(value.get_hostnames()) > 1: + jobs_rebalanced_count +=1 + + # Total of jobs have a successful status + if value.get_successful(): + jobs_success_count += 1 + jobs_success_tests.append(value.get_testcase()) + jobs_success_log.extend(value.get_stdout_msgs()) + else: + # Total of jobs that have a failure status + if not value.get_successful(): + jobs_failed_count += 1 + jobs_failed_tests.append(value.get_testcase()) + jobs_failed_log.extend(value.get_stdout_and_stderr_msgs()) + # Total of jobs that have failure status and exceeded maxjob, this + # differs from the total of that have a failure status in that maxjob + # has exceeded, while a job can fail and never exceed maxjob because + # there are no healthy z/OS managed nodes to execute on. + if value.get_failure_count() >= maxjob: + jobs_failed_count_maxjob += 1 + jobs_failed_maxjob_tests.append(value.get_testcase()) + jobs_failed_maxjob_log.extend(value.get_stdout_and_stderr_msgs()) + + Statistics = namedtuple('Statistics', + ['jobs_total_count', + 'jobs_success_count', + 'jobs_success_tests', + 'jobs_success_log', + 'jobs_failed_count', + 'jobs_failed_tests', + 'jobs_failed_log', + 'jobs_rebalanced_count', + 'jobs_failed_count_maxjob', + 'jobs_failed_maxjob_tests', + 'jobs_failed_maxjob_log']) + result = Statistics(jobs_total_count, + jobs_success_count, + jobs_success_tests, + jobs_success_log, + jobs_failed_count, + jobs_failed_tests, + jobs_failed_log, + jobs_rebalanced_count, + jobs_failed_count_maxjob, + jobs_failed_maxjob_tests, + jobs_failed_maxjob_log) + + return result + +def get_failed_count_gt_maxjob(jobs: Dictionary, maxjob: int) -> Tuple[int, list[str], dict[int, str], int]: + """ + This function takes in a dictionary of jobs and a maximum job failure count threshold, and returns a tuple containing: + 1. The number of jobs that have failed more than the maximum job failure count threshold. + 2. A list of test cases for those jobs that have failed more than the maximum job failure count threshold. + 3. A dictionary mapping each failed job's ID to its stdout and stderr messages. + 4. The number of jobs that were rebalanced after the maximum job failure count threshold was exceeded. + + Parameters: + jobs (Dictionary): A dictionary mapping job IDs to Job objects. + maxjob (int): The maximum number of times a job can fail before it is considered a failure. + + Returns: + Tuple[int, list[str], dict[int, str], int]: A tuple containing the number of jobs that have + failed more than the maximum job failure count threshold, a list of test cases for those + jobs that have failed more than the maximum job failure count threshold, a dictionary + mapping each failed job's ID to its stdout and stderr messages, and the number of jobs + that were rebalanced after the maximum job failure count threshold was exceeded. + """ + jobs_failed_count = 0 + jobs_failed_list = [] + jobs_failed_log = [] + jobs_rebalanced = 0 + for key, value in jobs.items(): + if value.get_failure_count() >= maxjob: + jobs_failed_count += 1 + jobs_failed_list.append(value.get_testcase()) + jobs_failed_log.append({key : value.get_stdout_and_stderr_msgs()}) + if len(value.get_hostnames()) > 1: + jobs_rebalanced +=1 + #TODO: refactor these tuples to include gt or max to not confused with get jobs statistics + return (jobs_failed_count, jobs_failed_list, jobs_failed_log, jobs_rebalanced) + +def run(id: int, jobs: Dictionary, nodes: Dictionary, timeout: int, maxjob: int, bal: int, extra: str, maxnode: int, throttle: bool) -> Tuple[int, str]: + """ + Runs a job (test case) on a managed node and ensures the job has the necessary + managed node available. If not, it will manage the node and collect the statistics + so that it can be properly run when a resource becomes available. + + Parameters + id (int): Numerical ID assigned to a job. + jobs (Dictionary): A dictionary of jobs, the ID is paired to a job. + A job is a test cased designed to be run by pytest. + nodes (Dictionary): Managed nodes that jobs will run on. These are z/OS + managed nodes. + timeout (int):The maximum time in seconds a job should run on z/OS for, + default is 300 seconds. + maxjob (int): The maximum number of times a job can fail before its + disabled in the job queue + bal (int): The count at which a job is balanced from one z/OS node + to another for execution. + extra (str): Extra commands passed to subprocess before pytest execution + maxnode (int): The maximum number of times a node can fail to run a + job before its set to 'offline' in the node queue. + + Returns: + A tuple of (rc: int, message: str) is returned. + rc (int): + Return code 0 All tests were collected and passed successfully (pytest). + Return code 1 Tests were collected and run but some of the tests failed (pytest). + Return code 2 Test execution was interrupted by the user (pytest). + Return code 3 Internal error happened while executing tests (pytest). + Return code 4 pytest command line usage error (pytest). + Return code 5 No tests were collected (pytest). + Return code 6 No z/OS nodes available. + Return code 7 Re-balancing of z/OS nodes were performed. + Return code 8 Job has exceeded permitted job failures. + Return code 9 Job has exceeded timeout. + Return code 10 Job is being passed over because the node that + is going to run the job is executing another job. + + message (str): Description and details of the jobs execution, contains + return code, hostname, job id, etc. Informational and useful when + understanding the job's lifecycle. + """ + + job = jobs.get(id) + hostname = job.get_hostname() + #id = str(job.get_id()) + elapsed = 0 + message = None + rc = None + result = None + + node_count_online = get_nodes_online_count(nodes) + if node_count_online > 0: + node = nodes.get(hostname) + # Temporary solution to avoid nodes running concurrent work loads + # if get_nodes_offline_count(nodes) == 0 and node.get_running_job_id() == -1: + # TODO: Why check if there are no offline nodes , feels like node.get_running_job_id() would have been enough. + + if throttle and (node.get_running_job_id() != -1): + rc = 10 + job.set_rc(rc) + nodes_count = nodes.len() + node_count_offline = get_nodes_offline_count(nodes) + #other = node.get_assigned_jobs_as_dictionary().get(id) + date_time = datetime.now().strftime("%H:%M:%S") #("%d/%m/%Y %H:%M:%S") + rsn = f"Managed node is not able to execute job id={node.get_running_job_id()}, nodes={nodes_count}, offline={node_count_offline}, online={node_count_online}." + message = f"Job id={id}, host={hostname}, start={date_time}, elapsed={0}, rc={rc}, msg={rsn}" + node.set_running_job_id(-1) # Set it to false after message string + node.set_balanced_job_id(id) + #set_node_offline(node, maxnode) + update_job_hostname(job) + else: + node.set_running_job_id(id) + start_time = time.time() + date_time = datetime.now().strftime("%H:%M:%S") #"%d/%m/%Y %H:%M:%S") + thread_name = threading.current_thread().name + try: + # Build command and strategically map stdout and stderr so that both are mapped to stderr and the pytest rc goes to stdout. + cmd = f"{extra};{job.get_command()} 1>&2; echo $? >&1" + result = subprocess.run([cmd], shell=True, capture_output=True, text=True, timeout=timeout, check=False) + node.set_running_job_id(-1) + job.set_elapsed_time(start_time) + elapsed = job.get_elapsed_time() + rc = int(result.stdout) + + if rc == 0: + job.set_rc(rc) + job.set_success() + rsn = "Job successfully executed." + message = f"Job id={id}, host={hostname}, start={date_time}, elapsed={elapsed}, rc={rc}, thread={thread_name}, msg={rsn}" + pytest_std_out_err = result.stderr + job.set_stdout(message, pytest_std_out_err, date_time) + else: + job_failures = job.get_failure_count() + + if job_failures >= maxjob: + rc = 8 + job.set_rc(rc) + rsn = f"Test exceeded allowable failures={maxjob}." + message = f"Job id={id}, host={hostname}, start time={date_time}, elapsed={elapsed}, rc={rc}, thread={thread_name}, msg={rsn}" + elif job_failures == bal: + rc = 7 + job.set_rc(rc) + node.set_balanced_job_id(id) + set_node_offline(node, maxnode) + update_job_hostname(job) + rsn = f"Job is reassigned to managed node={job.get_hostname()}, job exceeded allowable balance={bal}." + message = f"Job id={id}, host={hostname}, start={date_time}, elapsed={elapsed}, rc={rc}, thread={thread_name}, msg={rsn}" + elif rc == 1: + job.set_rc(rc) + rsn = "Test case failed with an error." + message = f"Job id={id}, host={hostname}, start={date_time}, elapsed={elapsed}, rc={rc}, thread={thread_name}, msg={rsn}" + elif rc == 2: + job.set_rc(int(rc)) + rsn = "Test case execution was interrupted by the user." + message = f"Job id={id}, host={hostname}, start={date_time}, elapsed={elapsed}, rc={rc}, thread={thread_name}, msg={rsn}" + elif rc == 3: + job.set_rc(int(rc)) + rsn = "Internal error occurred while executing test." + message = f"Job id={id}, host={hostname}, start={date_time}, elapsed={elapsed}, rc={rc}, thread={thread_name}, msg={rsn}" + elif rc == 4: + job.set_rc(int(rc)) + rsn = "Pytest command line usage error." + message = f"Job id={id}, host={hostname}, start={date_time}, elapsed={elapsed}, rc={rc}, thread={thread_name}, msg={rsn}" + elif rc == 5: + job.set_rc(int(rc)) + rsn = "No tests were collected." + message = f"Job id={id}, host={hostname}, start={date_time}, elapsed={elapsed}, rc={rc}, thread={thread_name}, msg={rsn}" + + # Only increment a job failure after evaluating all the RCs + job.increment_failure() + + # Update the node with which jobs failed. A node has all assigned jobs so this ID can be used later for eval. + node.set_failure_job_id(id) + job.set_stdout_and_stderr(message, result.stderr, date_time) + + except subprocess.TimeoutExpired: + node.set_running_job_id(-1) + rc = 9 + job.set_rc(rc) + job.set_elapsed_time(start_time) + elapsed = job.get_elapsed_time() + rsn = f"Job has exceeded subprocess timeout={str(timeout)}" + message = f"Job id={id}, host={hostname}, start={date_time}, elapsed={elapsed}, rc={rc}, thread={thread_name}, msg={rsn}" + job.set_stdout_and_stderr(message, rsn, date_time) + job.increment_failure() + node.set_failure_job_id(id) + else: + # TODO: Is it possible there are no nodes, had an error once but not been able to recreate here. + node.set_running_job_id(-1) + rc = 6 + nodes_count = nodes.len() + node_count_offline = get_nodes_offline_count(nodes) + rsn = f"There are no managed nodes online to run jobs, nodes={nodes_count}, offline={node_count_offline}, online={node_count_online}." + message = f"Job id={id}, host={hostname}, elapsed={job.get_elapsed_time()}, rc={rc}, msg={rsn}" + job.set_stdout_and_stderr(message, rsn, date_time) + job.increment_failure() + node.set_failure_job_id(id) + + return rc, message + + +def runner(jobs: Dictionary, nodes: Dictionary, timeout: int, max: int, bal: int, extra: str, maxnode: int, workers: int, throttle: bool, returncode: bool) -> list[str]: + """ + Method creates an executor to run a job found in the jobs dictionary concurrently. + This method is the key function that allows for concurrent execution of jobs. + + Parameters: + jobs: Dictionary + A dictionary of jobs, the ID is paired to a job. + A job is a test cased designed to be run by pytest. + nodes: Dictionary + Managed nodes that jobs will run on. These are z/OS + managed nodes. + timeout: int + The maximum time in seconds a job should run on z/OS for, + default is 300 seconds. + maxjob: int + The maximum number of times a job can fail before its + disabled in the job queue + bal: int + The count at which a job is balanced from one z/OS node + to another for execution. + extra: str + Extra commands passed to subprocess before pytest execution + maxnode: int + The maximum number of times a node can fail to run a + job before its set to 'offline' in the node queue. + workers: int + The numerical value used to increase the number of worker + threads by proportionally. By default this is 3 that will + yield one thread per node. With one thread per node, test + cases run one at a time on a managed node. This value + is used as a multiple to grow the number of threads and + test concurrency. For example, if there are 5 nodes and + the workers = 3, then 15 threads will be created + resulting in 3 test cases running concurrently. + + Returns: + list[str]: A list of strings, each list entry is describes the jobs lifecycle. + """ + + if workers > 1: + number_of_threads = nodes.len() * workers + else: + number_of_threads = nodes.len() + + result = [] + with ThreadPoolExecutor(number_of_threads,thread_name_prefix='ansible-test') as executor: + futures = [executor.submit(run, key, jobs, nodes, timeout, max, bal, extra, maxnode, throttle) for key, value in jobs.items() if not value.get_successful()] + for future in as_completed(futures): + rc, message = future.result() + if future.exception() is not None: + msg = f"[ERROR] Executor exception occurred with error: {future.exception()}" + result.append(msg) + if not returncode: + print(msg) + elif future.cancelled(): + msg = f"[ERROR] Executor cancelled job, message = {message}" + result.append(msg) + if not returncode: + print(msg) + elif future.done(): + msg = f"[{"INFO" if rc == 0 else "WARN"}] Executor message = {message}" + result.append(msg) + if not returncode: + print(msg) + elif future.running(): + msg = f"[{"INFO" if rc == 0 else "WARN"}] Thread pool is still running = {message}" + result.append(msg) + if not returncode: + print(msg) + + # try: + # for future in as_completed(futures, timeout=200): + # rc, message = future.result() + # print("JOB RC is " + str(rc) + " with message " + message) + # except concurrent.futures.TimeoutError: + # print("this took too long...") + return result + +def elapsed_time(start_time: time): + """ + Given a start time, this will return a formatted string of time matching + pattern HH:MM:SS.SS , eg 00:02:38.36 + + Parameters: + start_time (time): The time the test case has began. This is generally + captured before a test is run. + + Returns: + str: The elapsed time, how long it took a job to run. A string + is returned representing the elapsed time, , eg 00:02:38.36 + """ + + hours, rem = divmod(time.time() - start_time, 3600) + minutes, seconds = divmod(rem, 60) + elapsed = "{:0>2}:{:0>2}:{:05.2f}".format(int(hours),int(minutes),seconds) + return elapsed + +def print_job_logs(log: list[Tuple[str, str, str]], state: State) -> None: + """ + Prints job logs to the console. If State is of type SUCCESS, prints to stdout, + else prints to stderr. + + Parameters: + log (list[Tuple[str, str, str]]): A list of tuples containing job log information. + state (State): The current state of the program + """ + if len(log) > 0: + for entry in log: + msg=f"------------------------------------------------------------\n"\ + f"[START] [{state.string()}] log entry.\n"\ + "------------------------------------------------------------\n"\ + f"\tJob ID: {entry.id}\n"\ + f"\tHostname: {entry.hostname}\n"\ + f"\tDate time: {entry.date_time}\n"\ + f"\tCommand: {entry.command}\n"\ + f"\tMessage: {entry.message}\n"\ + f"\tStdout: \n\t{entry.std_out_err.replace('\n', '\n\t')}\n"\ + "------------------------------------------------------------\n"\ + f"[END] [{state.string()}] log entry.\n"\ + "------------------------------------------------------------" + if state.is_success(): + print(msg) + else: + print(msg,file=sys.stderr) + # sys.stderr.write(msg) + # sys.stderr.flush() + +def print_job_tests(tests: list[str], state: State) -> None: + """ + Prints the test cases for a job. + + Parameters: + tests (list[str]): A list of strings representing the test cases for a job. + state (State): The current state of the job. + """ + + if len(tests) > 0: + msg_header =f"------------------------------------------------------------\n"\ + f"[START] [{state.string()}] test cases.\n"\ + "------------------------------------------------------------" + if state.is_success(): + print(msg_header) + else: + print(msg_header,file=sys.stderr) + + for entry in tests: + if state.is_success(): + print(f"\t{entry}") + else: + print(f"\t{entry}",file=sys.stderr) + + msg_tail = f"------------------------------------------------------------\n"\ + f"[END] [{state.string()}] test cases.\n"\ + "------------------------------------------------------------" + if state.is_success(): + print(msg_tail) + else: + print(msg_tail,file=sys.stderr) + +def write_job_logs_to_html(log: list[Tuple[str, str, str]], state: State, replay: str) -> str: + """ + Prints job logs to an HTML file using the PrettyTable library. + + Parameters: + log (list[Tuple[str, str, str]]): A list of tuples containing job information. + state (State): The current state of the program. + replay (str): A string indicating whether the user wants to replay the logs or not. + + Returns: + str: An HTML string generated from the job logs. + """ + if len(log) > 0: + table = PrettyTable() + table.hrules=ALL + table.format = False + table.header = True + table.left_padding_width = 1 + table.right_padding_width = 1 + table.field_names = ["Count", "Job ID", "z/OS Managed Node", "Pytest Command", "Message", "Standard Out & Error", "Date and Time"] + table.align["Message"] = "l" + table.align["Standard Out & Error"] = "l" + table.sortby = "Job ID" + + count = 0 + for entry in log: + table.add_row([count, entry.id,entry.hostname, entry.command, entry.message, entry.std_out_err, entry.date_time]) + count +=1 + + html = table.get_html_string(attributes={'border': '1', "style":"white-space:nowrap;width:100%;border-collapse: collapse"}) + date_time = datetime.now().strftime("%H:%M:%S") + with open(f"/tmp/concurrent-executor-log-replay-{replay}-{state.string()}-{date_time}.html", "w", encoding="utf-8") as file: + file.write(html) + file.close() + +def write_results_to_file(results: list[str]) -> None: + """ + Write the results of a replay to a file. + + Parameters: + results (list[str]): A list of strings representing the results of each action taken during the replay. + replay (str): The name of the replay. + """ + date_time = datetime.now().strftime("%H:%M:%S") + with open(f"/tmp/concurrent-executor-log-{date_time}.txt", "w", encoding="utf-8") as file: + for result in results: + file.write(f"{result}\n") + file.close() + +def write_job_tests_to_html(tests: list[str], state: State, replay: str) -> None: + """ + Prints job tests to HTML. + + Parameters: + tests (list[str]): A list of test cases. + state (State): The current state of the job. + replay (str): The replay ID of the job. + """ + if len(tests) > 0: + table = PrettyTable() + table.hrules=ALL + table.format = False + table.header = True + table.left_padding_width = 1 + table.right_padding_width = 1 + table.field_names = ["Count", "Test Case"] + table.align["Test Case"] = "l" + table.sortby = "Count" + + count = 0 + for entry in tests: + table.add_row([count, entry]) + count +=1 + + html = table.get_html_string(attributes={'border': '1', "style":"white-space:nowrap;width:100%;border-collapse: collapse"}) + date_time = datetime.now().strftime("%H:%M:%S") + with open(f"/tmp/concurrent-executor-tests-replay-{replay}-{state.string()}-{date_time}.html", "w", encoding="utf-8") as file: + file.write(html) + file.close() + +def print_nodes(nodes: Dictionary) -> list[str]: + """ + Prints the names of all z/OS nodes in the provided dictionary. + + Parameters: + nodes (Dictionary): A dictionary containing z/OS node names as keys and values. + + Returns: + list[str] - A list of strings representing the names of all z/OS nodes in the provided dictionary. + + """ + result = [] + count = 1 + if nodes.len() > 0: + msg = f"[INFO] There are {nodes.len()} managed nodes serving this play." + result.append(msg) + print(msg) + + for key, _ in nodes.items(): + msg = f"[INFO] Node {count} = {key}" + result.append(msg) + print(msg) + count +=1 + return result + +def execute(args) -> int: + """ + This function is responsible for executing the tests on the nodes. It takes in several arguments such as the user, + the tests to be run, the maximum number of times a job can fail, and more. The function returns no value. + + Args: + args (Namespace): A Namespace object containing various arguments passed to the script. + + Returns: + int: The exit code of the executor. + - Non-zero means there was an error during execution and at least one test case has failed. + - Zero return code means all tests cases successfully passed. + + Notes: + The concurrent executor will always produce a textual log in /tmp with this named file pattern + 'concurrent-executor-log-{date_time}.txt'. While there are logs it will produce, those are + selected with the command line options. On non-zero return code, its advised the textual log + be evaluated. + """ + play_result = [] + count_play = 1 + count = 1 + replay = False + return_code = 0 + + while count_play <= args.replay: + message = f"\n=================================================\n[START] PLAY {count_play} {f"of {args.replay} " if args.replay > 1 else ""}started.\n=================================================" + if not args.returncode: + print(message) + play_result.append(message) + + start_time_full_run = time.time() + + # Get a dictionary of all active zos_nodes to run tests on + nodes = get_nodes(user = args.user, zoau = args.zoau, pyz = args.pyz, hostnames = args.hostnames, pythonpath = args.pythonpath, volumes = args.volumes) + if not args.returncode: + play_result.extend(print_nodes(nodes)) + + # Get a dictionary of jobs containing the work to be run on a node. + jobs = get_jobs(nodes, paths=args.paths, skip=args.skip, capture=args.capture, verbosity=args.verbosity, replay=replay) + iterations_result="" + number_of_threads = nodes.len() * args.workers + + stats = get_jobs_statistics(jobs, args.maxjob) + job_count_progress = 0 + while stats.jobs_success_count != stats.jobs_total_count and count <= int(args.itr): + message = f"\n-----------------------------------------------------------\n[START] Thread pool iteration = {count} {f"of {args.itr} " if args.itr > 1 else ""}, pending = {stats.jobs_total_count - stats.jobs_success_count}.\n-----------------------------------------------------------" + play_result.append(message) + if not args.returncode: + print(message) + + start_time = time.time() + play_result.extend(runner(jobs, nodes, args.timeout, args.maxjob, args.bal, args.extra, args.maxnode, args.workers, args.throttle, args.returncode)) + + stats = get_jobs_statistics(jobs, args.maxjob) + iterations_result += f"- Thread pool iteration {count} completed {stats.jobs_success_count - job_count_progress} job(s) in {elapsed_time(start_time)} time, pending {stats.jobs_failed_count} job(s).\n" + + info = f"-----------------------------------------------------------\n[END] Thread pool iteration = {count} {f"of {args.itr} " if args.itr > 1 else ""}, pending = {stats.jobs_failed_count}.\n-----------------------------------------------------------" + play_result.append(info) + if not args.returncode: + print(info) + + count +=1 + job_count_progress = stats.jobs_success_count + + msg = f"\n-----------------------------------------------------------\n[RESULTS] for play {count_play} {f"of {args.replay} " if args.replay > 1 else ""}.\n-----------------------------------------------------------" + play_result.append(msg) + if not args.returncode: + print(msg) + + msg = f"All {count - 1} thread pool iterations completed in {elapsed_time(start_time_full_run)} time, with {number_of_threads} threads running concurrently." + play_result.append(msg) + if not args.returncode: + print(msg) + + if not args.returncode: + print(iterations_result) + play_result.append(iterations_result) + + msg = f"Number of jobs queued to be run = {stats.jobs_total_count}." + play_result.append(msg) + if not args.returncode: + print(msg) + + msg = f"Number of jobs that run successfully = {stats.jobs_success_count}." + play_result.append(msg) + if not args.returncode: + print(msg) + + msg = f"Total number of jobs that failed = {stats.jobs_failed_count}." + play_result.append(msg) + if not args.returncode: + print(msg) + + msg = f"Number of jobs that failed great than or equal to {str(args.maxjob)} times = {stats.jobs_failed_count_maxjob}." + play_result.append(msg) + if not args.returncode: + print(msg) + + msg = f"Number of jobs that failed less than {str(args.maxjob)} times = {stats.jobs_failed_count - stats.jobs_failed_count_maxjob}." + play_result.append(msg) + if not args.returncode: + print(msg) + + msg = f"Number of jobs that were balanced = {stats.jobs_rebalanced_count}." + play_result.append(msg) + if not args.returncode: + print(msg) + + message = f"\n=================================================\n[END] PLAY {count_play} {f"of {args.replay} " if args.replay > 1 else ""}ended.\n=================================================" + play_result.append(message) + if not args.returncode: + print(msg) + + # ---------------------------------------------- + # Print each play to STDOUT and/or write results. + # ---------------------------------------------- + if args.verbose: + # Print to stdout any failed test cases and their relevant pytest logs + print_job_tests(stats.jobs_failed_tests, State.FAILURE) + print_job_logs(stats.jobs_failed_log, State.FAILURE) + # Print to stdout any test cases that exceeded the value max number of times a job can fail. + print_job_tests(stats.jobs_failed_maxjob_tests, State.EXCEEDED) + print_job_logs(stats.jobs_failed_maxjob_log, State.EXCEEDED) + # Print to stdout all successful test cases and their relevant logs. + print_job_tests(stats.jobs_success_tests, State.SUCCESS) + print_job_logs(stats.jobs_success_log, State.SUCCESS) + + # Print to HTML any failed test cases and their relevant pytest logs + write_job_tests_to_html(stats.jobs_failed_tests, State.FAILURE, count_play) + write_job_logs_to_html(stats.jobs_failed_log, State.FAILURE, count_play) + + # Print to HTML any test cases that exceeded the value max number of times a job can fail. + write_job_tests_to_html(stats.jobs_failed_maxjob_tests, State.EXCEEDED, count_play) + write_job_logs_to_html(stats.jobs_failed_maxjob_log, State.EXCEEDED, count_play) + + # Print to HTML all successful test cases and their relevant logs. + write_job_tests_to_html(stats.jobs_success_tests, State.SUCCESS, count_play) + write_job_logs_to_html(stats.jobs_success_log, State.SUCCESS, count_play) + + # If replay, repeat concurrent executor with failed tests only, else advance count_play and end the program + if stats.jobs_failed_count > 0: + args.paths = ','.join(stats.jobs_failed_tests) + count_play +=1 + count = 1 + replay = True + # return_code = 1 + return_code = stats.jobs_failed_count + else: + count_play = args.replay + 1 + + # Print the cumulative result of all plays to a file + write_results_to_file(play_result) + + return return_code + +def main(): + """ Main """ + parser = argparse.ArgumentParser( + prog='ce.py', + formatter_class=argparse.RawDescriptionHelpFormatter, + description=textwrap.dedent(''' + Examples + -------- + 1) Execute a single test suite for up to 5 iterations for ibmuser with shared zoau and python installations. + Note, usage of --tests "../tests/functional/modules/test_zos_tso_command_func.py" + $ python3 ce.py\\ + --pyz "/allpython/3.10/usr/lpp/IBM/cyp/v3r10/pyz"\\ + --zoau "/zoau/v1.3.1"\\ + --itr 5\\ + --tests "../tests/functional/modules/test_zos_tso_command_func.py"\\ + --user "ibmuser"\\ + --timeout 100 + + 2) Execute a multiple test suites for up to 10 iterations for ibmuser with shared zoau and python installations. + Note, usage of --tests "../tests/functional/modules/test_zos_tso_command_func.py,../tests/functional/modules/test_zos_find_func.py" + $ python3 ce.py\\ + --pyz "/allpython/3.10/usr/lpp/IBM/cyp/v3r10/pyz"\\ + --zoau "/zoau/v1.3.1"\\ + --itr 10\\ + --tests "../tests/functional/modules/test_zos_tso_command_func.py,../tests/functional/modules/test_zos_find_func.py"\\ + --user "ibmuser"\\ + --timeout 100 + + 3) Execute a test suites in a directory for up to 4 iterations for ibmuser with shared zoau and python installations. + Note, usage of --directories "../tests/functional/modules/,../tests/unit/" + $ python3 ce.py\\ + --pyz "/allpython/3.10/usr/lpp/IBM/cyp/v3r10/pyz"\\ + --zoau "/zoau/v1.3.1"\\ + --itr 4\\ + --directories "../tests/functional/modules/,../tests/unit/"\\ + --user "ibmuser"\\ + --timeout 100 + + 4) Execute test suites in multiple directories for up to 5 iterations for ibmuser with shared zoau and python installations. + Note, usage of "--directories "../tests/functional/modules/,../tests/unit/" + $ python3 ce.py\\ + --pyz "/allpython/3.10/usr/lpp/IBM/cyp/v3r10/pyz"\\ + --zoau "/zoau/v1.3.1"\\ + --itr 5\\ + --directories "../tests/functional/modules/,../tests/unit/"\\ + --user "ibmuser"\\ + --timeout 100\\ + --max 6\\ + --bal 3 + + 5) Execute test suites in multiple directories with up to 5 iterations for ibmuser with attributes, zoau, pyz using a max timeout of 100, max failures of 6 and balance of 3. + Note, usage of "--directories "../tests/functional/modules/,../tests/unit/" + $ python3 ce.py\\ + --pyz "/allpython/3.10/usr/lpp/IBM/cyp/v3r10/pyz"\\ + --zoau "/zoau/v1.3.1"\\ + --itr 5\\ + --directories "../tests/functional/modules/,../tests/unit/"\\ + --user "ibmuser"\\ + --timeout 100\\ + --maxjob 6\\ + --bal 3\\ + --maxnode 4\\ + --hostnames "ec33025a.vmec.svl.ibm.com,ec33025a.vmec.svl.ibm"\\ + --verbosity 3\\ + --capture\\ + --workers 3\\ + --extra "cd .." + + python3 ce.py\\ + --pyz "/allpython/3.10/usr/lpp/IBM/cyp/v3r10/pyz"\\ + --zoau "/zoau/v1.3.1"\\ + --itr 3\\ + --paths "/Users/ddimatos/git/gh/ibm_zos_core/tests/functional/modules/test_load_balance.py.py"\\ + --user "omvsadm"\\ + --extra "cd .."\\ + --maxnode 5\\ + --verbosity 1\\ + --no-capture\\ + --workers 1\\ + --maxjob 10\\ + --hostnames "ec01130a.vmec.svl.ibm.com"\\ + --timeout 300\\ + --replay 2\\ + --bal 2\\ + --volumes "222222,000000"\\ + --pythonpath "/zoau/v1.3.1/lib/3.10"\\ + --no-verbose\\ + --no-throttle + ''')) + + # Options + parser.add_argument('--extra', type=str, help='Extra commands passed to subprocess before pytest execution', required=False, metavar='<str>', default="") + parser.add_argument('--pyz', type=str, help='Python Z home directory.', required=True, metavar='<str,str>', default="/usr/lpp/python") + parser.add_argument('--zoau', type=str, help='ZOAU home directory.', required=True, metavar='<str,str>', default="/usr/lpp/zoau") + parser.add_argument('--itr', type=int, help='How many iterations to run CE, each iteration runs only failed tests, exits early if there are no tests to run, default = 12.', required=True, metavar='<int>', default="12") + parser.add_argument('--skip', type=str, help='Skip test suites.', required=False, metavar='<str,str>', default="") + parser.add_argument('--user', type=str, help='Ansible user authorized to run tests on the managed node.', required=False, metavar='<str>', default="") + parser.add_argument('--timeout', type=int, help='The maximum time in seconds a job should wait for completion, default = 300.', required=False, metavar='<int>', default="300") + parser.add_argument('--maxjob', type=int, help='The maximum number of times a job can fail before its removed from the job queue.', required=False, metavar='<int>', default="10") + parser.add_argument('--bal', type=int, help='The failure count at which a job is assigned to a new managed node, default = 5 .', required=False, metavar='<int>', default="5") + parser.add_argument('--hostnames', help='List of managed nodes to use, overrides the auto detection, must be a comma delimited string.', required=False, metavar='<str,str,str>', default=None, nargs='*') + parser.add_argument('--maxnode', type=int, help='The maximum number of test failures permitted for a managed node before the node is set to can fail to \'offline\' in the node queue, default = 10.', required=False, metavar='<int>', default=10) + parser.add_argument('--verbosity', type=int, help='The pytest verbosity level to use, 1 = -v, 2 = -vv, 3 = -vvv, 4 = -vvvv, default = 0.', required=False, metavar='<int>', default=0) + parser.add_argument('--capture', action=argparse.BooleanOptionalAction, help='Instruct Pytest whether to capture any output, equivalent of pytest -s, default = --no-capture.', required=False, default=False) + parser.add_argument('--workers', type=int, help='The numerical multiplier used to increase the number of worker threads, this is multiplied by the managed nodes to calculate threadsf.', required=False, metavar='<int>', default=1) + parser.add_argument('--replay', type=int, help='This value will instruct the tool to replay the entire command for only the failed test cases.', required=False, metavar='<int>', default=1) + parser.add_argument('--pythonpath', type=str, help='Absolute path to the ZOAU Python modules, precompiled or wheels.', required=True, metavar='<str>', default="") + parser.add_argument('--volumes', type=str, help='The volumes to use with the test cases, overrides the auto volume assignment.', required=False, metavar='<str,str>', default="222222,000000") + parser.add_argument('--verbose', action=argparse.BooleanOptionalAction, help='Enables verbose stdout, default = --no-verbose.', required=False, default=False) + parser.add_argument('--throttle', action=argparse.BooleanOptionalAction, help='Enables managed node throttling such that a managed node will only execute one job at at time, no matter the threads, default --throttle', required=False, default=True) + parser.add_argument('--paths', type=str, help='Test paths', required=True, metavar='<str,str>', default="") + parser.add_argument('--returncode', action=argparse.BooleanOptionalAction, help='RC only, --returncode, --no-returncode', required=False, default=False) + + + args = parser.parse_args() + # A replay of 0 will result in no execution of CE + if args.replay <=0: + raise ValueError(f"Value '--replay' = {args.replay}, must be greater than or equal to 1.") + + # If workers is > 1, throttle should be disabled else the workers would not be running concurrently. + if args.workers > 1: + args.throttle = "--no--throttle" + + # Evaluate + # Maxjob should always be less than itr else it makes no sense + # if int(args.maxjob) > int(args.itr): + # raise ValueError(f"Value '--maxjob' = {args.maxjob}, must be less than --itr = {args.itr}, else maxjob will have no effect.") + + if int(args.bal) > int(args.maxjob): + raise ValueError(f"Value '--bal' = {args.bal}, must be less than --maxjob = {args.itr}, else balance will have no effect.") + + # Execute/begin running the concurrency testing with the provided args. + rc = execute(args) + if args.returncode: + print(rc) + return rc + +if __name__ == '__main__': + main() diff --git a/scripts/requirements-2.11.env b/scripts/configurations/requirements-2.11.env similarity index 100% rename from scripts/requirements-2.11.env rename to scripts/configurations/requirements-2.11.env diff --git a/scripts/requirements-2.12.env b/scripts/configurations/requirements-2.12.env similarity index 98% rename from scripts/requirements-2.12.env rename to scripts/configurations/requirements-2.12.env index 229e4edcb..4a0516fc4 100644 --- a/scripts/requirements-2.12.env +++ b/scripts/configurations/requirements-2.12.env @@ -25,7 +25,7 @@ requirements=( "ansible-core:2.12.10" "pylint" "rstcheck" -"ansible-lint:6.22.1" +"ansible-lint:24.7.0" ) python=( diff --git a/scripts/requirements-2.13.env b/scripts/configurations/requirements-2.13.env similarity index 98% rename from scripts/requirements-2.13.env rename to scripts/configurations/requirements-2.13.env index 4720e9352..7923ad23a 100644 --- a/scripts/requirements-2.13.env +++ b/scripts/configurations/requirements-2.13.env @@ -25,7 +25,7 @@ requirements=( "ansible-core:2.13.13" "pylint" "rstcheck" -"ansible-lint:6.22.1" +"ansible-lint:24.7.0" ) python=( diff --git a/scripts/requirements-2.14.env b/scripts/configurations/requirements-2.14.env similarity index 96% rename from scripts/requirements-2.14.env rename to scripts/configurations/requirements-2.14.env index 40a80dbf2..531ad47f2 100644 --- a/scripts/requirements-2.14.env +++ b/scripts/configurations/requirements-2.14.env @@ -22,10 +22,10 @@ # ============================================================================== requirements=( -"ansible-core:2.14.16" +"ansible-core:2.14.17" "pylint" "rstcheck" -"ansible-lint:6.22.1" +"ansible-lint:24.7.0" ) python=( diff --git a/scripts/requirements-2.15.env b/scripts/configurations/requirements-2.15.env similarity index 96% rename from scripts/requirements-2.15.env rename to scripts/configurations/requirements-2.15.env index 4ca546686..149819def 100644 --- a/scripts/requirements-2.15.env +++ b/scripts/configurations/requirements-2.15.env @@ -22,10 +22,10 @@ # ============================================================================== requirements=( -"ansible-core:2.15.11" +"ansible-core:2.15.12" "pylint" "rstcheck" -"ansible-lint:6.22.1" +"ansible-lint:24.7.0" ) python=( diff --git a/scripts/requirements-2.16.env b/scripts/configurations/requirements-2.16.env similarity index 96% rename from scripts/requirements-2.16.env rename to scripts/configurations/requirements-2.16.env index 050c27aca..77e8990ee 100644 --- a/scripts/requirements-2.16.env +++ b/scripts/configurations/requirements-2.16.env @@ -22,10 +22,10 @@ # ============================================================================== requirements=( -"ansible-core:2.16.3" +"ansible-core:2.16.9" "pylint" "rstcheck" -"ansible-lint:6.22.1" +"ansible-lint:24.7.0" ) python=( diff --git a/scripts/requirements-2.17.env b/scripts/configurations/requirements-2.17.env similarity index 96% rename from scripts/requirements-2.17.env rename to scripts/configurations/requirements-2.17.env index c61c03626..c0a7373db 100644 --- a/scripts/requirements-2.17.env +++ b/scripts/configurations/requirements-2.17.env @@ -22,10 +22,10 @@ # ============================================================================== requirements=( -"ansible-core:2.17.0b1" +"ansible-core:2.17.2" "pylint" "rstcheck" -"ansible-lint:6.22.2" +"ansible-lint:24.7.0" ) python=( diff --git a/scripts/requirements-2.9.env b/scripts/configurations/requirements-2.9.env similarity index 100% rename from scripts/requirements-2.9.env rename to scripts/configurations/requirements-2.9.env diff --git a/scripts/requirements-common.env b/scripts/configurations/requirements-common.env similarity index 68% rename from scripts/requirements-common.env rename to scripts/configurations/requirements-common.env index 5f76436bf..8c787701b 100644 --- a/scripts/requirements-common.env +++ b/scripts/configurations/requirements-common.env @@ -12,14 +12,20 @@ # limitations under the License. # ============================================================================== -# ============================================================================== -# File name must adhere to reqs-common.sh and not change. This supplies the -# venv's with additional packages for use by the developement work flow. -# ============================================================================== +# ------------------------------------------------------------------------------ +# Description: Supplies venv's with additional packages for use by the AC. +# ------------------------------------------------------------------------------ + +# ------------------------------------------------------------------------------ +# Note: +# ------------------------------------------------------------------------------ +# (1) "pylint", "rstcheck", "six", "voluptuous", "yamllint" is a common package +# but the requirements-xx.env have it frozen to each of their specific needs +# (2) Package "cryptography:42.0.8" is frozen becasue of this warning message +# reported with a pending PR. https://github.com/paramiko/paramiko/issues/2419 +# In time, after the above PR merges, the latest can be evaluated. +# ------------------------------------------------------------------------------ -# Notes, "pylint", "rstcheck", "six", "voluptuous", "yamllint" is common but -# various requirements.txt have it frozen so it becomes a double requement -# error if present here as well. requirements=( "alabaster" "ansible-builder" @@ -39,7 +45,7 @@ requirements=( "cffi" "charset-normalizer" "click" -"cryptography" +"cryptography:42.0.8" "dill" "distlib" "distro" @@ -116,13 +122,18 @@ requirements=( "webencodings" "wrapt" "zipp" +"paramiko" +"prettytable" ) -# This original list caused some issues with pytest seeing our conftest plugin +# This reduced list caused some issues with pytest seeing our conftest plugin # as already registered, the only time senstive solution I could come up with # was to pip freeze a working venv and use that as the common base for now, over # time, using pip show <package> on each of these packages to figure out why # this occurs or maybe using pipdeptree will visually help. +# -------------------- +# Reduced list below: +# -------------------- # requirements=( # "bandit" # "pipdeptree" diff --git a/scripts/requirements-latest.env b/scripts/configurations/requirements-latest.env similarity index 100% rename from scripts/requirements-latest.env rename to scripts/configurations/requirements-latest.env diff --git a/scripts/hosts.env b/scripts/hosts.env index 58075263d..0a5ccc70e 100644 --- a/scripts/hosts.env +++ b/scripts/hosts.env @@ -22,68 +22,68 @@ # fi # fi -host_list_str="ddimatos:ec33017a${HOST_SUFFIX}:${USER}:${PASS} "\ -"richp:ec01132a${HOST_SUFFIX}:${USER}:${PASS} "\ -"ketan:ec33018a${HOST_SUFFIX}:${USER}:${PASS} "\ -"iamorenosoto:ec01134a${HOST_SUFFIX}:${USER}:${PASS} "\ -"fernando:ec01135a${HOST_SUFFIX}:${USER}:${PASS} "\ -"ec01105a:ec01105a${HOST_SUFFIX}:${USER}:${PASS} "\ -"ec01129a:ec01129a${HOST_SUFFIX}:${USER}:${PASS} "\ -"ec01130a:ec01130a${HOST_SUFFIX}:${USER}:${PASS} "\ -"ec01131a:ec01131a${HOST_SUFFIX}:${USER}:${PASS} "\ -"ec01132a:ec01132a${HOST_SUFFIX}:${USER}:${PASS} "\ -"ec01133a:ec01133a${HOST_SUFFIX}:${USER}:${PASS} "\ -"ec01134a:ec01134a${HOST_SUFFIX}:${USER}:${PASS} "\ -"ec01135a:ec01135a${HOST_SUFFIX}:${USER}:${PASS} "\ -"ec01136a:ec01136a${HOST_SUFFIX}:${USER}:${PASS} "\ -"ec01137a:ec01137a${HOST_SUFFIX}:${USER}:${PASS} "\ -"ec01138a:ec01138a${HOST_SUFFIX}:${USER}:${PASS} "\ -"ec01139a:ec01139a${HOST_SUFFIX}:${USER}:${PASS} "\ -"ec01140a:ec01140a${HOST_SUFFIX}:${USER}:${PASS} "\ -"ec01145a:ec01145a${HOST_SUFFIX}:${USER}:${PASS} "\ -"ec01146a:ec01146a${HOST_SUFFIX}:${USER}:${PASS} "\ -"ec01147a:ec01147a${HOST_SUFFIX}:${USER}:${PASS} "\ -"ec01148a:ec01148a${HOST_SUFFIX}:${USER}:${PASS} "\ -"ec01149a:ec01149a${HOST_SUFFIX}:${USER}:${PASS} "\ -"ec01150a:ec01150a${HOST_SUFFIX}:${USER}:${PASS} "\ -"ec01151a:ec01151a${HOST_SUFFIX}:${USER}:${PASS} "\ -"ec01152a:ec01152a${HOST_SUFFIX}:${USER}:${PASS} "\ -"ec01153a:ec01153a${HOST_SUFFIX}:${USER}:${PASS} "\ -"ec01154a:ec01154a${HOST_SUFFIX}:${USER}:${PASS} "\ -"ec03071a:ec03071a${HOST_SUFFIX}:${USER}:${PASS} "\ -"ec03102a:ec03102a${HOST_SUFFIX}:${USER}:${PASS} "\ -"ec03127a:ec03127a${HOST_SUFFIX}:${USER}:${PASS} "\ -"ec03129a:ec03129a${HOST_SUFFIX}:${USER}:${PASS} "\ -"ec03173a:ec03173a${HOST_SUFFIX}:${USER}:${PASS} "\ -"ec03175a:ec03175a${HOST_SUFFIX}:${USER}:${PASS} "\ -"ec32016a:ec32016a${HOST_SUFFIX}:${USER}:${PASS} "\ -"ec32024a:ec32024a${HOST_SUFFIX}:${USER}:${PASS} "\ -"ec32051a:ec32051a${HOST_SUFFIX}:${USER}:${PASS} "\ -"ec33002a:ec33002a${HOST_SUFFIX}:${USER}:${PASS} "\ -"ec33003a:ec33003a${HOST_SUFFIX}:${USER}:${PASS} "\ -"ec33004a:ec33004a${HOST_SUFFIX}:${USER}:${PASS} "\ -"ec33005a:ec33005a${HOST_SUFFIX}:${USER}:${PASS} "\ -"ec33006a:ec33006a${HOST_SUFFIX}:${USER}:${PASS} "\ -"ec33006a:ec33006a${HOST_SUFFIX}:${USER}:${PASS} "\ -"ec33007a:ec33007a${HOST_SUFFIX}:${USER}:${PASS} "\ -"ec33008a:ec33008a${HOST_SUFFIX}:${USER}:${PASS} "\ -"ec33009a:ec33009a${HOST_SUFFIX}:${USER}:${PASS} "\ -"ec33010a:ec33010a${HOST_SUFFIX}:${USER}:${PASS} "\ -"ec33011a:ec33011a${HOST_SUFFIX}:${USER}:${PASS} "\ -"ec33012a:ec33012a${HOST_SUFFIX}:${USER}:${PASS} "\ -"ec33013a:ec33013a${HOST_SUFFIX}:${USER}:${PASS} "\ -"ec33013a:ec33013a${HOST_SUFFIX}:${USER}:${PASS} "\ -"ec33014a:ec33014a${HOST_SUFFIX}:${USER}:${PASS} "\ -"ec33015a:ec33015a${HOST_SUFFIX}:${USER}:${PASS} "\ -"ec33016a:ec33016a${HOST_SUFFIX}:${USER}:${PASS} "\ -"ec33017a:ec33017a${HOST_SUFFIX}:${USER}:${PASS} "\ -"ec33018a:ec33018a${HOST_SUFFIX}:${USER}:${PASS} "\ -"ec33019a:ec33019a${HOST_SUFFIX}:${USER}:${PASS} "\ -"ec33020a:ec33020a${HOST_SUFFIX}:${USER}:${PASS} "\ -"ec33021a:ec33021a${HOST_SUFFIX}:${USER}:${PASS} "\ -"ec33022a:ec33022a${HOST_SUFFIX}:${USER}:${PASS} "\ -"ec33023a:ec33023a${HOST_SUFFIX}:${USER}:${PASS} "\ -"ec33024a:ec33024a${HOST_SUFFIX}:${USER}:${PASS} "\ -"ec33025a:ec33025a${HOST_SUFFIX}:${USER}:${PASS} "\ -"ec33026a:ec33026a${HOST_SUFFIX}:${USER}:${PASS} "\ -"ec33027a:ec33027a${HOST_SUFFIX}:${USER}:${PASS} " +host_list_str="ddimatos:ec33017a${HOST_SUFFIX}:${USER}:${PASS}:development "\ +"richp:ec01132a${HOST_SUFFIX}:${USER}:${PASS}:development "\ +"ketan:ec33018a${HOST_SUFFIX}:${USER}:${PASS}:development "\ +"iamorenosoto:ec01134a${HOST_SUFFIX}:${USER}:${PASS}:development "\ +"fernando:ec01135a${HOST_SUFFIX}:${USER}:${PASS}:development "\ +"ec01105a:ec01105a${HOST_SUFFIX}:${USER}:${PASS}:development "\ +"ec01129a:ec01129a${HOST_SUFFIX}:${USER}:${PASS}:development "\ +"ec01130a:ec01130a${HOST_SUFFIX}:${USER}:${PASS}:production "\ +"ec01131a:ec01131a${HOST_SUFFIX}:${USER}:${PASS}:development "\ +"ec01132a:ec01132a${HOST_SUFFIX}:${USER}:${PASS}:production "\ +"ec01133a:ec01133a${HOST_SUFFIX}:${USER}:${PASS}:development "\ +"ec01134a:ec01134a${HOST_SUFFIX}:${USER}:${PASS}:development "\ +"ec01135a:ec01135a${HOST_SUFFIX}:${USER}:${PASS}:development "\ +"ec01136a:ec01136a${HOST_SUFFIX}:${USER}:${PASS}:development "\ +"ec01137a:ec01137a${HOST_SUFFIX}:${USER}:${PASS}:development "\ +"ec01138a:ec01138a${HOST_SUFFIX}:${USER}:${PASS}:development "\ +"ec01139a:ec01139a${HOST_SUFFIX}:${USER}:${PASS}:development "\ +"ec01140a:ec01140a${HOST_SUFFIX}:${USER}:${PASS}:development "\ +"ec01145a:ec01145a${HOST_SUFFIX}:${USER}:${PASS}:development "\ +"ec01146a:ec01146a${HOST_SUFFIX}:${USER}:${PASS}:development "\ +"ec01147a:ec01147a${HOST_SUFFIX}:${USER}:${PASS}:development "\ +"ec01148a:ec01148a${HOST_SUFFIX}:${USER}:${PASS}:development "\ +"ec01149a:ec01149a${HOST_SUFFIX}:${USER}:${PASS}:production "\ +"ec01150a:ec01150a${HOST_SUFFIX}:${USER}:${PASS}:development "\ +"ec01151a:ec01151a${HOST_SUFFIX}:${USER}:${PASS}:development "\ +"ec01152a:ec01152a${HOST_SUFFIX}:${USER}:${PASS}:production "\ +"ec01153a:ec01153a${HOST_SUFFIX}:${USER}:${PASS}:production "\ +"ec01154a:ec01154a${HOST_SUFFIX}:${USER}:${PASS}:production "\ +"ec03071a:ec03071a${HOST_SUFFIX}:${USER}:${PASS}:development "\ +"ec03102a:ec03102a${HOST_SUFFIX}:${USER}:${PASS}:development "\ +"ec03127a:ec03127a${HOST_SUFFIX}:${USER}:${PASS}:development "\ +"ec03129a:ec03129a${HOST_SUFFIX}:${USER}:${PASS}:development "\ +"ec03173a:ec03173a${HOST_SUFFIX}:${USER}:${PASS}:development "\ +"ec03175a:ec03175a${HOST_SUFFIX}:${USER}:${PASS}:development "\ +"ec32016a:ec32016a${HOST_SUFFIX}:${USER}:${PASS}:development "\ +"ec32024a:ec32024a${HOST_SUFFIX}:${USER}:${PASS}:development "\ +"ec32051a:ec32051a${HOST_SUFFIX}:${USER}:${PASS}:development "\ +"ec33002a:ec33002a${HOST_SUFFIX}:${USER}:${PASS}:development "\ +"ec33003a:ec33003a${HOST_SUFFIX}:${USER}:${PASS}:development "\ +"ec33004a:ec33004a${HOST_SUFFIX}:${USER}:${PASS}:development "\ +"ec33005a:ec33005a${HOST_SUFFIX}:${USER}:${PASS}:development "\ +"ec33006a:ec33006a${HOST_SUFFIX}:${USER}:${PASS}:development "\ +"ec33006a:ec33006a${HOST_SUFFIX}:${USER}:${PASS}:development "\ +"ec33007a:ec33007a${HOST_SUFFIX}:${USER}:${PASS}:development "\ +"ec33008a:ec33008a${HOST_SUFFIX}:${USER}:${PASS}:development "\ +"ec33009a:ec33009a${HOST_SUFFIX}:${USER}:${PASS}:development "\ +"ec33010a:ec33010a${HOST_SUFFIX}:${USER}:${PASS}:development "\ +"ec33011a:ec33011a${HOST_SUFFIX}:${USER}:${PASS}:development "\ +"ec33012a:ec33012a${HOST_SUFFIX}:${USER}:${PASS}:development "\ +"ec33013a:ec33013a${HOST_SUFFIX}:${USER}:${PASS}:development "\ +"ec33013a:ec33013a${HOST_SUFFIX}:${USER}:${PASS}:development "\ +"ec33014a:ec33014a${HOST_SUFFIX}:${USER}:${PASS}:development "\ +"ec33015a:ec33015a${HOST_SUFFIX}:${USER}:${PASS}:development "\ +"ec33016a:ec33016a${HOST_SUFFIX}:${USER}:${PASS}:development "\ +"ec33017a:ec33017a${HOST_SUFFIX}:${USER}:${PASS}:development "\ +"ec33018a:ec33018a${HOST_SUFFIX}:${USER}:${PASS}:development "\ +"ec33019a:ec33019a${HOST_SUFFIX}:${USER}:${PASS}:development "\ +"ec33020a:ec33020a${HOST_SUFFIX}:${USER}:${PASS}:development "\ +"ec33021a:ec33021a${HOST_SUFFIX}:${USER}:${PASS}:development "\ +"ec33022a:ec33022a${HOST_SUFFIX}:${USER}:${PASS}:production "\ +"ec33023a:ec33023a${HOST_SUFFIX}:${USER}:${PASS}:development "\ +"ec33024a:ec33024a${HOST_SUFFIX}:${USER}:${PASS}:production "\ +"ec33025a:ec33025a${HOST_SUFFIX}:${USER}:${PASS}:production "\ +"ec33026a:ec33026a${HOST_SUFFIX}:${USER}:${PASS}:production "\ +"ec33027a:ec33027a${HOST_SUFFIX}:${USER}:${PASS}:development " diff --git a/scripts/info.env.axx b/scripts/info.env.axx index b2e6308fb..c756e6dfe 100755 --- a/scripts/info.env.axx +++ b/scripts/info.env.axx @@ -1,15 +1,15 @@ -U2FsdGVkX18VoSEji2kkFCFNcDHC1mzJ+hUulvTheU5dL9E/lmDWS6qdk8R1VCPJ -WyRU1Zefxvc1fw/sqvmzliUgBXXN6dOgRv73+ap8vyp+IvUhCVAZl9efFXHZ2Eag -6loROID0Qq28Bd+5Btqk/JuC6az9QvnV1E4MRhmZBtCIJ8P/joXKIigR9KHGvL0N -7PpA20UxvMzSH/vQSFd0zkuuvjAAzxN/AVO3W0Jbw1fmHy0gqp4TxidqXF0JatdC -YtDadHqyGHCid3hDP0+GwS4yCSEL/uNEE1e3Npe5EF52YB1OE5y7WqJFmQT1OdNd -pkpPok73YNyPtetMBzIr6t3BcnXHL1j38lrDcMZvBy9RWQ2LQiSxmRiGanEg+i9L -SBapVYDJJ64eKZd7T7gY4gViytT0/i6IAqgGqoH0Dk9LQnGmQ7bOqi34zOna/iC2 -PFzx8XFZF/BmXQm3/96xJsRv15IMKCRp2t9lha0N/FRVmEYp7n5loi6oj5hCtD5k -CV1nbzO9cvMH1c85LUeWjTfcEmTA0criSCiBY3zLywrBvs6XsV6EkITMjPh1K2ht -AHXVPykPHhG6+F0LPYS4gasc0jLRTCxVyPRrl3tSf5aGvvo7ilsZrUtVh2UKUkuN -bjpUHCsrsV17LZUb5fWbY3B0EB1NxHa2rO3cb0ausUd+Mf+02SlnPsnaxjX7lTna -ymUlYs6oQcfAfhHM1mCf8miS4ES2HBdl9Urk9BiepSRJudoaBjIL/L9IsaInYpdv -BfW04gocwKJOhhGUE5ql4+DBfoCrWbz4bIGlUSfEIdFiRmsHG8723JQXgq19c4il -oFe7inTT14QHNsI7JNWmDDxsBPkItgJJ00JR+WwZd77jDTHJhlXuf8lYevQCRKla -BDZ3DlqvbK2ILBWFz6XTjPdlNu2fYsxlW4R5kLKsTyI= +U2FsdGVkX18M4UEJJX9P8cXv2ySLpbNCQ4Jf2++13npBAMHEIow8TQCKBeyiBfTa +lk6ALYivmT0ktGLtvS+/zEIBnvh1Gq2kH3GO7AQkg6JQxla8EpS9b7RpFCNW0XpY +f6+sSBpUxIqCSlE7mufKxIPMe3lNVL7F2eRqiOj87kul6zoMrOp+FSQ/y8pF97+H +xd35tygalQmsGUYdZ9F143/cYUy3t5m85AUJOD/R6a2u7OFlhLcqWYhw9kzduThA +sauoYKeiNlnRJ5mNq87tQXWCYWquxBk+TXrQXZ2qxmR0Njz3q2aIPcPZzxyK5dNu +8DdO0Ya5qAVWD685CnHrpJ9lbDGkUyEz6qXkaNdUkxsgvWHCQIwZlRfe+Tpggt9M +D2D/q06zoHmsJ3cnfpSLFSjupLoc56wmMe2dmK5kDKvKqnqFi546Du9+/xq22X+x +gwg3/S3fU2CVp89dQKWazpR9U5k7FAZzZ2lyZ3ZpPknAOoNWM+sgaczJbkKjnIeV +7agLgTzasD/bNyIOmM9NNGYEN4AHHV8iKxpbr7swl4Kfjn+l+DBidGd/L/I5Mtu6 +USiVkOF5LQzqi+Dmdf6Yk8CsRp8wh+hFCVTp5oFs0oirSqEv/BXdWWUPfER5yZUn +K+HvjElcMK3nrvPb3SfdhmonvQsRbH80Ju5i3/vWWzA9+WN88aEak4shjG1j51gU +q0Asm3qHtb0CdsFJwNMbwR8gelhUZWErC6o/APcCYwTp364Up4aIrULfsBG4CwTf +fsvqzAJIoiV2vF6wgYUC3gOzbhLWLwlUPTbS0z4xbYt36uhSniUv868c1FfTNhcm +D1o8OGq2yiJj65jHq038TjLfRkpc1ov3CRWSrYfved5U7dLBgGOZ4dBOhtCRBMn7 +/pi6FVPc8HIRFlO6ubN9UIv54MGBItJT1T+7Ie4HQTw= diff --git a/scripts/modules/connection.py b/scripts/modules/connection.py new file mode 100644 index 000000000..30dbcdd7e --- /dev/null +++ b/scripts/modules/connection.py @@ -0,0 +1,201 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- + +# Copyright (c) IBM Corporation 2024 +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +""" +Connection class wrapping paramiko. The wrapper provides methods allowing +for remote environment variables be set up so that they can interact with +ZOAU. Generally this is a wrapper to paramiko to be used to write files to +z/OS. For example, consider writing a file that is managed by this load +balancer that can know if there is another test running. The idea was to +update our pytest fixture to look for that file so that a remote pipeline or +other instance does not try to run concurrently until the functional tests +can be properly vetted out for concurrent support. +Usage: + key_file_name = os.path.expanduser('~') + "/.ssh/id_dsa" + connection = Connection(hostname="ibm.com", username="root", key_filename=key_file_name) + client = connection.connect() + result = connection.execute(client, "ls") + print(result) +""" + +# pylint: disable=too-many-instance-attributes, too-many-arguments + +from socket import error +from paramiko import SSHClient, AutoAddPolicy, BadHostKeyException, \ + AuthenticationException, SSHException, ssh_exception + +class Connection: + """ + Connection class wrapping paramiko. The wrapper provides methods allowing + for remote environment variables be set up so that they can interact with + ZOAU. Generally this is a wrapper to paramiko to be used to write files to + z/OS. For example, consider writing a file that is managed by this load + balancer that can know if there is another test running. The idea was to + update our pytest fixture to look for that file so that a remote pipeline or + other instance does not try to run concurrently until the functional tests + can be properly vetted out for concurrent support. + + Usage: + key_file_name = os.path.expanduser('~') + "/.ssh/id_dsa" + connection = Connection(hostname="ibm.com", username="root", key_filename=key_file_name) + client = connection.connect() + result = connection.execute(client, "ls") + print(result) + """ + + def __init__(self, hostname, username, password = None, key_filename = None, + passphrase = None, port=22, environment= None ): + self._hostname = hostname + self.port = port + self._username = username + self.password = password + self.key_filename = key_filename + self.passphrase = passphrase + self.environment = environment + self.env_str = "" + if self.environment is not None: + self.env_str = self.set_environment_variable(**self.environment) + + + def __to_dict(self) -> str: + """ + Method returns constructor arguments to a dictionary, must remain private to + protect credentials. + """ + temp = { + "hostname": self._hostname, + "port": self.port, + "username": self._username, + "password": self.password, + "key_filename": self.key_filename, + "passphrase": self.passphrase, + } + + for k,v in dict(temp).items(): + if v is None: + del temp[k] + return temp + + def connect(self) -> SSHClient: + """ + Create the connection after the connection class has been initialized. + + Return + SSHClient: paramiko SSHClient, client used the execution of commands. + + Raises: + BadHostKeyException + AuthenticationException + SSHException + FileNotFoundError + error + """ + ssh = None + + n = 0 + while n <= 10: + try: + ssh = SSHClient() + ssh.set_missing_host_key_policy(AutoAddPolicy()) + ssh.connect(**self.__to_dict(), disabled_algorithms= + {'pubkeys': ['rsa-sha2-256', 'rsa-sha2-512']}) + except BadHostKeyException as e: + print('Host key could not be verified.', str(e)) + raise e + except AuthenticationException as e: + print('Authentication failed.', str(e)) + raise e + except ssh_exception.SSHException as e: + print(e, str(e)) + raise e + except FileNotFoundError as e: + print('Missing key filename.', str(e)) + raise e + except error as e: + print('Socket error occurred while connecting.', str(e)) + raise e + return ssh + + def execute(self, client, command): + """ + Parameters: + client (paramiko SSHClient) SSH Client created through connection.connect() + command (str): command to run + + Returns: + dict: a dictionary with stdout, stderr and command executed + + Raises + SSHException + """ + + response = None + get_pty_bool = True + out = "" + try: + # We may need to create a channel and make this synchronous + # but get_pty should help avoid having to do that + (_, stdout, stderr) = client.exec_command(self.env_str+command, get_pty=get_pty_bool) + + if get_pty_bool is True: + out = stdout.read().decode().strip('\r\n') + error_msg = stderr.read().decode().strip('\r\n') + else: + out = stdout.read().decode().strip('\n') + error_msg = stderr.read().decode().strip('\n') + + # Don't shutdown stdin, we are reusing this connection in the services instance + # client.get_transport().open_session().shutdown_write() + + response = {'stdout': out, + 'stderr': error_msg, + 'command': command + } + + except SSHException as e: + # if there was any other error connecting or establishing an SSH session + print(e) + finally: + client.close() + + return response + + def set_environment_variable(self, **kwargs): + """ + Provide the connection with environment variables needed to be exported + such as ZOAU env vars. + + Example: + env={"_BPXK_AUTOCVT":"ON", + "ZOAU_HOME":"/zoau/v1.2.0f", + "PATH":"/zoau/v1.2.0f/bin:/python/usr/lpp/IBM/cyp/v3r8/pyz/bin:/bin:.", + "LIBPATH":"/zoau/v1.2.0f/lib:/lib:/usr/lib:.", + "PYTHONPATH":"/zoau/v1.2.0f/lib", + "_CEE_RUNOPTS":"FILETAG(AUTOCVT,AUTOTAG) POSIX(ON)", + "_TAG_REDIR_ERR":"txt", + "_TAG_REDIR_IN":"txt", + "_TAG_REDIR_OUT":"txt", + "LANG":"C" + } + connection = Connection(hostname="ibm.com", username="root", + key_filename=key_filename, environment=env) + + """ + env_vars = "" + export="export" + if kwargs is not None: + for key, value in kwargs.items(): + env_vars = f"{env_vars}{export} {key}=\"{value}\";" + return env_vars diff --git a/scripts/modules/utils.py b/scripts/modules/utils.py new file mode 100644 index 000000000..4315cc9c4 --- /dev/null +++ b/scripts/modules/utils.py @@ -0,0 +1,160 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- + +# Copyright (c) IBM Corporation 2024 +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +Utility classes +""" + +# pylint: disable=too-many-locals, modified-iterating-list, too-many-nested-blocks +# pylint: disable=too-many-branches, too-many-statements, line-too-long + +from pathlib import Path +import subprocess + + +def get_test_cases(paths: str, skip: str = None) -> list[str]: + """ + Returns a list of test cases suitable for pytest to execute. Can discover test + cases from either a directory of test suites or a list of test suites. Will also + remove any skipped tests if specified , wether a directory or a specific test. + + Parameters: + paths (str): Absolute path of directories containing test suites or absolute + path of individual test suites comma or space delimited. + A directory of test cases is such that it contains test suites. + A test suite is a collection of test cases in a file that starts with + 'test' and ends in '.py'. + skip (str): (Optional) Absolute path of either test suites, or test cases. + Test cases can be parametrized such they use the '::' syntax or not. + Skip does not support directories. + + Returns: + list[str] A list of strings containing a modified path to each test suite. + The absolute path is truncated to meet the needs of pytest which starts at + the `tests` directory. + + Raises: + FileNotFoundError : If a test suite, test case or skipped test cannot be found. + ValueError: If paths is not provided. + + Examples: + Example collects all test cases for test suites`test_zos_job_submit_func.py` , `test_zos_copy_func.py`, all + unit tests in directory `tests/unit/` then skips all tests in test suite `test_zos_copy_func.py` + (for demonstration) and parametrized tests `test_zos_backup_restore_unit.py::test_invalid_operation[restorE]` + and test_zoau_version_checker_unit.py::test_is_zoau_version_higher_than[True-sys_zoau1-1.2.1]/ + - get_test_cases(paths="/Users/ddimatos/git/gh/ibm_zos_core/tests/functional/modules/test_zos_job_submit_func.py,\\ + /Users/ddimatos/git/gh/ibm_zos_core/tests/functional/modules/test_zos_copy_func.py,\\ + /Users/ddimatos/git/gh/ibm_zos_core/tests/unit/",\\ + skip="/Users/ddimatos/git/gh/ibm_zos_core/tests/functional/modules/test_zos_copy_func.py,\\ + /Users/ddimatos/git/gh/ibm_zos_core/tests/unit/test_zos_backup_restore_unit.py::test_invalid_operation[restorE],\\ + /Users/ddimatos/git/gh/ibm_zos_core/tests/unit/test_zoau_version_checker_unit.py::test_is_zoau_version_higher_than[True-sys_zoau1-1.2.1]") + """ + + files =[] + parametrized_test_cases = [] + parametrized_test_cases_filtered_test_suites = [] + parametrized_test_cases_filtered_test_cases = [] + parameterized_tests = [] + ignore_test_suites = [] + ignore_test_cases = [] + + # Remove whitespace and replace CSV with single space delimiter. + # Build a command that will yield all test cases including parametrized tests. + cmd = ['pytest', '--collect-only', '-q'] + if paths: + files = " ".join(paths.split()) + files = files.strip().replace(',', ' ').split() + + for file in files: + file_path = Path(file) + try: + file_path.resolve(strict=True) + except FileNotFoundError as e: + raise FileNotFoundError(f'{file_path} does not exist.') from e + cmd.extend(files) + else: + raise ValueError("Required files have not been provided.") + + cmd.append('| grep ::') + cmd_str = ' '.join(cmd) + + # Run the pytest collect-only command and grep on '::' so to avoid warnings + parametrized_test_cases = subprocess.run([cmd_str], shell=True, capture_output=True, text=True, check=False) + # Remove duplicates in case test_suites or test_directories were repeated + parametrized_test_cases = list(set(parametrized_test_cases.stdout.split('\n'))) + # Remove the trailing line feed from the list else it will leave an empty list index and error. + parametrized_test_cases = list(filter(None, parametrized_test_cases)) + + # Skip can take any input, but note that test suites which start with 'test' and in in `.py` + # will supersede individual test cases. That is because if a test suite is being skipped it + # it should remove all test cases that match that test suite, hence the skipped are put into + # two buckets, 'ignore_test_cases' and 'ignore_test_suites' and 'ignore_test_suites' is evaluated + # first. + if skip: + skip=" ".join(skip.split()) + skip = skip.strip().replace(',', ' ').split() + for skipped in skip: + if '::' in skipped: # it's a test case + skipped_path = Path(skipped.split('::')[0]) + try: + skipped_path.resolve(strict=True) + except FileNotFoundError as e: + raise FileNotFoundError(f'{file_path} does not exist.') from e + # Only retain the sub-str because that is what pytest collect-only will yield + skipped = skipped.split("tests/")[1] + ignore_test_cases.append(skipped) + + if skipped.endswith('.py'): # it's a test suite + skipped_path = Path(skipped) + try: + skipped_path.resolve(strict=True) + except FileNotFoundError as e: + raise FileNotFoundError(f'{file_path} does not exist.') from e + # Only retain the sub-str because that is what pytest collect-only will yield + skipped = skipped.split("tests/")[1] + ignore_test_suites.append(skipped) + + # pytest --ignore,--deselect did not work as expected, will manually replicate the functionality + # If a path is in ignore_test_suites, it supersedes any ignore_test_cases substrings. + if len(ignore_test_suites) > 0: + parametrized_test_cases_filtered_test_suites = [p for p in parametrized_test_cases if all(t not in p for t in ignore_test_suites)] + if len(ignore_test_cases) > 0: + parametrized_test_cases_filtered_test_cases = [p for p in parametrized_test_cases if all(t not in p for t in ignore_test_cases)] + + if len(parametrized_test_cases_filtered_test_suites) > 0 and len(parametrized_test_cases_filtered_test_cases) > 0: + parametrized_test_cases_filtered_test_suites.extend(parametrized_test_cases_filtered_test_cases) + elif len(parametrized_test_cases_filtered_test_cases) > 0: + parameterized_tests = [f"tests/{parametrized}" for parametrized in parametrized_test_cases_filtered_test_cases] + + parameterized_tests = [f"tests/{parametrized}" for parametrized in parametrized_test_cases_filtered_test_suites] + return parameterized_tests + + parameterized_tests = [f"tests/{parametrized}" for parametrized in parametrized_test_cases] + + return parameterized_tests + +# Some adhoc testing until some test cases can be structured. +# def main(): +# print("Main") +# # plist = get_test_cases(paths="/Users/ddimatos/git/gh/ibm_zos_core/tests/functional/modules/test_zos_job_submit_func.py,\ +# # /Users/ddimatos/git/gh/ibm_zos_core/tests/functional/modules/test_zos_copy_func.py,\ +# # /Users/ddimatos/git/gh/ibm_zos_core/tests/unit/",\ +# # skip="/Users/ddimatos/git/gh/ibm_zos_core/tests/functional/modules/test_zos_copy_func.py,\ +# # /Users/ddimatos/git/gh/ibm_zos_core/tests/unit/test_zos_backup_restore_unit.py::test_invalid_operation[restorE],\ +# # /Users/ddimatos/git/gh/ibm_zos_core/tests/unit/test_zoau_version_checker_unit.py::test_is_zoau_version_higher_than[True-sys_zoau1-1.2.1]") +# # plist = get_test_cases(paths="/Users/ddimatos/git/gh/ibm_zos_core/tests/unit/") +# plist = get_test_cases(paths="/Users/ddimatos/git/gh/ibm_zos_core/tests/functional/modules/test_zos_tso_command_func.py,/Users/ddimatos/git/gh/ibm_zos_core/tests/functional/modules/test_zos_operator_func.py") +# print(str(plist)) +# if __name__ == '__main__': +# main() diff --git a/scripts/mounts.sh b/scripts/mounts.sh index a244bc6d6..765b57714 100755 --- a/scripts/mounts.sh +++ b/scripts/mounts.sh @@ -1,6 +1,6 @@ #!/bin/sh # ============================================================================== -# Copyright (c) IBM Corporation 2022, 2023 +# Copyright (c) IBM Corporation 2022, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -480,6 +480,10 @@ get_python_mount(){ fi } +get_python_mount_echo(){ + get_python_mount $1 + echo "${PYZ_HOME}" +} # Get the zoau home/path given $1/arg else error get_zoau_mount(){ @@ -507,6 +511,11 @@ get_zoau_mount(){ fi } +get_zoau_mount_echo(){ + get_zoau_mount $1 + echo "${ZOAU_HOME}" +} + # ============================================================================== # ********************* Print functions ********************* # ============================================================================== @@ -619,6 +628,12 @@ _test_arrays(){ # Main arg parser ################################################################################ case "$1" in + --get-python-mount) + get_python_mount_echo $2 + ;; + --get-zoau-mount) + get_zoau_mount_echo $2 + ;; --mount) mount "-r -t zfs -f" ;; @@ -653,4 +668,3 @@ case "$1" in fi fi esac - diff --git a/scripts/tests/test_load_balance.py b/scripts/tests/test_load_balance.py new file mode 100644 index 000000000..ea6b928e6 --- /dev/null +++ b/scripts/tests/test_load_balance.py @@ -0,0 +1,320 @@ +# -*- coding: utf-8 -*- + +# Copyright (c) IBM Corporation 2024 +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import, division, print_function +import random +import os +import sys + +__metaclass__ = type + +def test_case_1(): + assert 1 == 1 + +def test_case_2(): + assert 2 == 2 + +def test_case_3(): + assert 3 == 3 + +def test_case_4(): + assert 4 == 4 + +def test_case_5(): + assert 5 == 5 + +def test_case_6(): + assert 6 == 6 + +def test_case_7(): + assert 7 == 7 + +def test_case_8(): + assert 8 == 8 + +def test_case_9(): + assert 9 == 9 + +def test_case_10(): + assert 10 == 10 + +def test_case_11(): + assert 11 == 11 + +def test_case_12(): + assert 12 == 12 + +def test_case_13(): + print("FOO") + assert 13 == 13 + +def test_case_14(): + assert 14 == 14 + +def test_case_15(): + assert 15 == 15 + +def test_case_16(): + assert 16 == 16 + +def test_case_17(): + assert 17 == 17 + +def test_case_18(): + assert 18 == 18 + +def test_case_19(): + assert 19 == 19 + +def test_case_20(): + assert 20 == 20 + +def test_case_21(): + assert 21 == 21 + +def test_case_22(): + assert 22 == 22 + +def test_case_23(): + assert 23 == 23 + +def test_case_24(): + assert 24 == 24 + +def test_case_25(): + assert 25 == 25 + +def test_case_26(): + assert 26 == 26 + +def test_case_27(): + assert 27 == 27 + +def test_case_28(): + assert 28 == 28 + +def test_case_29(): + assert 29 == 29 + +def test_case_30(): + assert 30 == 30 + +def test_case_31(): + assert 31 == 31 + +def test_case_32(): + assert 32 == 32 + +def test_case_33(): + assert 33 == 33 + +def test_case_34(): + assert 34 == 34 + +def test_case_35(): + assert 35 == 35 + +def test_case_36(): + assert 36 == 36 + +def test_case_37(): + assert 37 == 37 + +def test_case_38(): + assert 38 == 38 + +def test_case_39(): + assert 39 == 39 + +def test_case_40(): + assert 40 == 40 + +def test_case_41(): + assert 41 == 41 + +def test_case_42(): + assert 42 == 42 + +def test_case_43(): + assert 43 == 43 + +def test_case_44(): + assert 44 == 44 + +def test_case_45(): + assert 45 == 45 + +def test_case_46(): + assert 46 == 46 + +def test_case_47(): + assert 47 == 47 + +def test_case_48(): + assert 48 == 48 + +def test_case_49(): + assert 49 == 49 + +def test_case_50(): + assert 50 == 50 + +def test_case_51(): + assert 51 == 51 + +def test_case_52(): + assert 52 == 52 + +def test_case_53(): + assert 53 == 53 + +def test_case_54(): + assert 54 == 54 + +def test_case_55(): + assert 55 == 55 + +def test_case_56(): + assert 56 == 56 + +def test_case_57(): + assert 57 == 57 + +def test_case_58(): + assert 58 == 58 + +def test_case_59(): + assert 59 == 59 + +def test_case_60(): + assert 60 == 60 + +def test_case_61(): + assert 61 == 61 + +def test_case_62(): + assert 62 == 62 + +def test_case_63(): + assert 63 == 63 + +def test_case_64(): + assert 64 == 64 + +def test_case_65(): + assert 65 == 65 + +def test_case_66(): + assert 66 == 66 + +def test_case_67(): + assert 67 == 67 + +def test_case_68(): + assert 68 == 68 + +def test_case_69(): + assert 69 == 69 + +def test_case_70(): + assert 70 == 70 + +def test_case_71(): + assert 71 == 71 + +def test_case_72(): + assert 72 == 72 + +def test_case_73(): + assert 73 == 73 + +def test_case_74(): + assert 74 == 74 + +def test_case_75(): + assert 75 == 75 + +def test_case_76(): + assert 76 == 76 + +def test_case_77(): + assert 77 == 77 + +def test_case_78(): + assert 78 == 78 + +def test_case_79(): + assert 79 == 79 + +def test_case_80(): + assert 80 == 80 + +def test_case_81(): + assert 81 == 81 + +def test_case_82(): + assert 82 == 82 + +def test_case_83(): + assert 83 == 83 + +def test_case_84(): + assert 84 == 84 + +def test_case_85(): + assert 85 == 85 + +def test_case_86(): + assert 86 == 86 + +def test_case_87(): + assert 87 == 87 + +def test_case_88(): + assert 88 == 88 + +def test_case_89(): + assert 89 == 89 + +def test_case_90(): + assert 90 == 90 + +def test_case_91(): + assert 91 == 91 + +def test_case_92(): + assert 8 == random.randrange(7, 9) + +def test_case_93(): + assert 8 == random.randrange(7, 9) + +def test_case_94(): + assert 8 == random.randrange(7, 9) + +def test_case_95(): + assert 8 == random.randrange(7, 9) + +def test_case_96(): + assert 8 == random.randrange(7, 9) + +def test_case_97(): + assert 8 == random.randrange(7, 9) + +def test_case_98(): + assert 98 == -1 + +def test_case_99(): + assert 99 == -1 + +def test_case_100(): + assert 100 == -1 diff --git a/scripts/venv.sh b/scripts/venv.sh index 45c3d130e..3b662536b 100755 --- a/scripts/venv.sh +++ b/scripts/venv.sh @@ -81,7 +81,7 @@ echo_requirements(){ unset requirements_common unset requirements - requirements_common="requirements-common.env" + requirements_common="configurations/requirements-common.env" unset REQ_COMMON if [ -f "$requirements_common" ]; then @@ -103,9 +103,7 @@ echo_requirements(){ fi done - # for file in `ls requirements-*.sh`; do - # for file in `ls requirements-[0-9].[0-9]*.env`; do - for file in `ls *requirements-[0-9].[0-9]*.env* *requirements-latest* 2>/dev/null`; do + for file in `ls configurations/*requirements-[0-9].[0-9]*.env* configurations/*requirements-latest* 2>/dev/null`; do # Unset the vars from any prior sourced files unset REQ unset requirements @@ -118,11 +116,11 @@ echo_requirements(){ fi if [[ "$file" =~ "latest" ]]; then - # eg extract 'latest' from requirements-latest file name + # eg extract 'latest' from configurations/requirements-latest file name ansible_version=`echo $file | cut -d"-" -f2|cut -d"." -f1` venv_name="venv"-$ansible_version else - # eg extract 2.14 from requirements-2.14.sh file name + # eg extract 2.14 from configurations/requirements-2.14.sh file name ansible_version=`echo $file | cut -d"-" -f2|cut -d"." -f1,2` venv_name="venv"-$ansible_version #echo $venv_name @@ -169,14 +167,13 @@ make_venv_dirs(){ # We should think about the idea of allowing: # --force, --synch, --update thus not sure we need this method and better to # manage this logic inline to write_req - # for file in `ls requirements-[0-9].[0-9]*.env`; do - for file in `ls *requirements-[0-9].[0-9]*.env* *requirements-latest* 2>/dev/null`; do + for file in `ls configurations/*requirements-[0-9].[0-9]*.env* configurations/*requirements-latest* 2>/dev/null`; do if [[ "$file" =~ "latest" ]]; then - # eg extract 'latest' from requirements-latest file name + # eg extract 'latest' from configurations/requirements-latest file name ansible_version=`echo $file | cut -d"-" -f2|cut -d"." -f1` venv_name="venv"-$ansible_version else - # eg extract 2.14 from requirements-2.14.sh file name + # eg extract 2.14 from configurations/requirements-2.14.sh file name ansible_version=`echo $file | cut -d"-" -f2|cut -d"." -f1,2` venv_name="venv"-$ansible_version #echo $venv_name @@ -191,7 +188,7 @@ write_requirements(){ unset requirements unset REQ unset REQ_COMMON - requirements_common_file="requirements-common.env" + requirements_common_file="configurations/requirements-common.env" # Source the requirements file for now, easy way to do this. Exit may not # not be needed but leave it for now. @@ -214,9 +211,7 @@ write_requirements(){ fi done - # for file in `ls requirements-*.sh`; do - # for file in `ls requirements-[0-9].[0-9]*.env`; do - for file in `ls *requirements-[0-9].[0-9]*.env* *requirements-latest* 2>/dev/null`; do + for file in `ls configurations/*requirements-[0-9].[0-9]*.env* configurations/*requirements-latest* 2>/dev/null`; do # Unset the vars from any prior sourced files unset REQ unset requirements @@ -229,12 +224,12 @@ write_requirements(){ fi if [[ "$file" =~ "latest" ]]; then - # eg extract 'latest' from requirements-latest file name + # eg extract 'latest' from configurations/requirements-latest file name ansible_version=`echo $file | cut -d"-" -f2|cut -d"." -f1` venv_name="venv"-$ansible_version echo $venv_name else - # eg extract 2.14 from requirements-2.14.sh file name + # eg extract 2.14 from configurations/requirements-2.14.sh file name ansible_version=`echo $file | cut -d"-" -f2|cut -d"." -f1,2` venv_name="venv"-$ansible_version echo $venv_name @@ -288,6 +283,9 @@ write_requirements(){ cp hosts.env "${VENV_HOME_MANAGED}"/"${venv_name}"/ cp venv.sh "${VENV_HOME_MANAGED}"/"${venv_name}"/ cp profile.sh "${VENV_HOME_MANAGED}"/"${venv_name}"/ + cp ../tests/dependencyfinder.py "${VENV_HOME_MANAGED}"/"${venv_name}"/ + cp ce.py "${VENV_HOME_MANAGED}"/"${venv_name}"/ + cp -R modules "${VENV_HOME_MANAGED}"/"${venv_name}"/ # Decrypt file if [ "$option_pass" ]; then @@ -318,16 +316,15 @@ write_requirements(){ create_venv_and_pip_install_req(){ - # for file in `ls requirements-[0-9].[0-9]*.env`; do - for file in `ls *requirements-[0-9].[0-9]*.env* *requirements-latest* 2>/dev/null`; do + for file in `ls configurations/*requirements-[0-9].[0-9]*.env* configurations/*requirements-latest* 2>/dev/null`; do unset venv if [[ "$file" =~ "latest" ]]; then - # eg extract 'latest' from requirements-latest file name + # eg extract 'latest' from configurations/requirements-latest file name ansible_version=`echo $file | cut -d"-" -f2|cut -d"." -f1` venv_name="venv"-$ansible_version else - # eg extract 2.14 from requirements-2.14.sh file name + # eg extract 2.14 from configurations/requirements-2.14.sh file name ansible_version=`echo $file | cut -d"-" -f2|cut -d"." -f1,2` venv_name="venv"-$ansible_version #echo $venv_name @@ -382,7 +379,7 @@ discover_python(){ # for python_found in `which python3 | cut -d" " -f3`; do # # The 'pys' array will search for pythons in reverse order, once it finds one that matches - # the requirements-x.xx.env it does not continue searching. Reverse order is important to + # the configurations/requirements-x.xx.env it does not continue searching. Reverse order is important to # maintain. pys=("python3.14" "python3.13" "python3.12" "python3.11" "python3.10" "python3.9" "python3.8") rc=1 @@ -536,6 +533,39 @@ get_host_ids(){ done } +get_host_ids_production(){ + set_hosts_to_array + unset host_index + unset host_prefix + unset host_production + first_entry=true + for tgt in "${HOSTS_ALL[@]}" ; do + host_index=`echo "${tgt}" | cut -d ":" -f 1` + host_prefix=`echo "${tgt}" | cut -d ":" -f 2` + host_production=`echo "${tgt}" | cut -d ":" -f 5` + if [ "$host_production" == "production" ];then + if [ "$first_entry" == "true" ];then + first_entry=false + echo "$host_prefix" + else + echo " $host_prefix" + fi + fi + done +} + + first_entry=true + skip_tests="" + for i in $(echo $skip | sed "s/,/ /g") + do + if [ "$first_entry" == "true" ];then + first_entry=false + skip_tests="$CURR_DIR/tests/functional/modules/$i" + else + skip_tests="$skip_tests $CURR_DIR/tests/functional/modules/$i" + fi + done + # Should renane this with a prefix of set_ to make it more readable ssh_host_credentials(){ arg=$1 @@ -647,6 +677,18 @@ case "$1" in ssh_host_credentials $2 ssh_copy_key ;; +--host-credentials) + ssh_host_credentials $2 + echo "$host" + ;; +--user-credentials) + ssh_host_credentials $2 + echo "$user" + ;; +--pass-credentials) + ssh_host_credentials $2 + echo "$pass" + ;; --host-setup-files) #ec33017a "mounts.env" "mounts.sh" "shell-helper.sh" "profile.sh" ssh_host_credentials $2 ssh_copy_files_and_mount $3 $4 $5 @@ -654,6 +696,9 @@ case "$1" in --targets) get_host_ids ;; +--targets-production) + get_host_ids_production + ;; --config) write_test_config $2 $3 $4 $5 ;; diff --git a/tests/conftest.py b/tests/conftest.py index 9a9cc9ad6..bfdb3fb4b 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -21,21 +21,39 @@ def pytest_addoption(parser): - """ Add CLI options and modify optons for pytest-ansible where needed. """ + """ + Add CLI options and modify options for pytest-ansible where needed. + Note: Set the default to to None, otherwise when evaluating with `request.config.getoption("--zinventory"):` + will always return true because a default will be returned. + """ parser.addoption( "--zinventory", "-Z", action="store", - default="test_config.yml", + default=None, help="Absolute path to YAML file containing inventory info for functional testing.", ) + parser.addoption( + "--zinventory-raw", + "-R", + action="store", + default=None, + help="Str - dictionary with values {'host': 'ibm.com', 'user': 'root', 'zoau': '/usr/lpp/zoau', 'pyz': '/usr/lpp/IBM/pyz'}", + ) @pytest.fixture(scope="session") def z_python_interpreter(request): """ Generate temporary shell wrapper for python interpreter. """ - path = request.config.getoption("--zinventory") - helper = ZTestHelper.from_yaml_file(path) + src = None + helper = None + if request.config.getoption("--zinventory"): + src = request.config.getoption("--zinventory") + helper = ZTestHelper.from_yaml_file(src) + elif request.config.getoption("--zinventory-raw"): + src = request.config.getoption("--zinventory-raw") + helper = ZTestHelper.from_args(src) + interpreter_str = helper.build_interpreter_string() inventory = helper.get_inventory_info() python_path = helper.get_python_path() @@ -90,8 +108,17 @@ def ansible_zos_module(request, z_python_interpreter): def volumes_on_systems(ansible_zos_module, request): """ Call the pytest-ansible plugin to check volumes on the system and work properly a list by session.""" path = request.config.getoption("--zinventory") - list_Volumes = get_volumes(ansible_zos_module, path) - yield list_Volumes + list_volumes = None + + # If path is None, check if zinventory-raw is used instead and if so, extract the + # volumes dictionary and pass it along. + if path is None: + src = request.config.getoption("--zinventory-raw") + helper = ZTestHelper.from_args(src) + list_volumes = helper.get_volumes_list() + else: + list_volumes = get_volumes(ansible_zos_module, path) + yield list_volumes @pytest.fixture(scope="session") @@ -100,8 +127,18 @@ def volumes_with_vvds(ansible_zos_module, request): then it will try to create one for each volume found and return volumes only if a VVDS was successfully created for it.""" path = request.config.getoption("--zinventory") - volumes = get_volumes(ansible_zos_module, path) - volumes_with_vvds = get_volumes_with_vvds(ansible_zos_module, volumes) + list_volumes = None + + # If path is None, check if zinventory-raw is used instead and if so, extract the + # volumes dictionary and pass it along. + if path is None: + src = request.config.getoption("--zinventory-raw") + helper = ZTestHelper.from_args(src) + list_volumes = helper.get_volumes_list() + else: + list_volumes = get_volumes(ansible_zos_module, path) + + volumes_with_vvds = get_volumes_with_vvds(ansible_zos_module, list_volumes) yield volumes_with_vvds diff --git a/tests/functional/modules/test_zos_operator_func.py b/tests/functional/modules/test_zos_operator_func.py index fa06ca2ee..123537d8b 100644 --- a/tests/functional/modules/test_zos_operator_func.py +++ b/tests/functional/modules/test_zos_operator_func.py @@ -19,7 +19,7 @@ import yaml from shellescape import quote -from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import ( +from ibm_zos_core.plugins.module_utils import ( zoau_version_checker, ) diff --git a/tests/helpers/ztest.py b/tests/helpers/ztest.py index af198d6f0..e471dfb26 100644 --- a/tests/helpers/ztest.py +++ b/tests/helpers/ztest.py @@ -13,6 +13,7 @@ __metaclass__ = type +import json import os import stat import uuid @@ -47,6 +48,96 @@ def from_yaml_file(cls, path): testvars = safe_load(varfile) return cls(**testvars) + @classmethod + def from_args(cls, src): + """ + ZTestHelper provides helper methods to deal with added complexities when testing against a z/OS system. + Similar to method `from_yaml_file(path)`, this method takes a dictionary of required keywords instead + of dictionary from a file so to increase performance. + + Args: + src - (dictionary) with keywords {'host': 'required', 'user': 'required', 'zoau': 'required', 'pyz': 'required', 'pythonpath': 'required', 'extra_args': 'optional'}" + host - z/OS managed node + user - user/omvs segment authorized to run ansible playbooks + zoau - home directory where z Open Automation Utilities is installed + pyz - python home + pythonpath - environment variable that is used to specify the location of Python libraries, eg ZOAU python modules + extra_args - dictionary used to include properties such as 'volumes' or other dynamic content. + + Code Example: + if request.config.getoption("--zinventory-raw"): + src = request.config.getoption("--zinventory-raw") + helper = ZTestHelper.from_args(src) + interpreter_str = helper.build_interpreter_string() + inventory = helper.get_inventory_info() + python_path = helper.get_python_path() + Shell example with pytest: + pytest tests/functional/modules/test_zos_mount_func.py::test_basic_mount --host-pattern=all -s -v --zinventory-raw='{"host": "zvm.ibm.com", "user": "ibmuser", "zoau": "/zoau/v1.3.1", "pyz": "/allpython/3.10/usr/lpp/IBM/cyp/v3r10/pyz", "pythonpath": "/zoau/v1.3.1/lib/3.10", "extra_args":{"volumes":["222222","000000"],"other":"something else"}}' -s + + { + "host":"zvm.ibm.com", + "user":"ibmuser", + "zoau":"/zoau/v1.3.1", + "pyz":"/allpython/3.10/usr/lpp/IBM/cyp/v3r10/pyz", + "pythonpath": "/zoau/v1.3.1/lib/3.10", + "extra_args":{ + "volumes":[ + "vol1", + "vol2" + ], + "other": "something else" } + } + """ + #TODO: add support for a positional string, eg "host,user,zoau,pyz" then convert it as needed + + host, user, zoau, pyz, pythonpath, extra_args, extra = None, None, None, None, None, None, None + + src = json.loads(src) + # Traverse the src here , can we trow an exception? + for key, value in src.items(): + if key == "host": + host = value + elif key == "user": + user = value + elif key == "zoau": + zoau = value + elif key == "pyz": + pyz = value + elif key == "pythonpath": + pythonpath = value + elif key == "extra_args": + extra = value + + for prop in [host, user, zoau, pyz, pythonpath]: + if prop is None: + message = f"Invalid value for use with keyword, the value must not be None" + raise ValueError(message) + + environment_vars = dict() + environment_vars.update({'_BPXK_AUTOCVT': 'ON'}) + environment_vars.update({'_CEE_RUNOPTS': '\'FILETAG(AUTOCVT,AUTOTAG) POSIX(ON)\''}) + environment_vars.update({'_TAG_REDIR_IN': 'txt'}) + environment_vars.update({'_TAG_REDIR_OUT': 'txt'}) + environment_vars.update({'LANG': 'C'}) + environment_vars.update({'ZOAU_HOME': zoau}) + environment_vars.update({'LIBPATH': f"{zoau}/lib:{pyz}/lib:/lib:/usr/lib:."}) + environment_vars.update({'PYTHONPATH': f"{pythonpath}"}) # type: ignore + environment_vars.update({'PATH': f"{zoau}/bin:{pyz}/bin:/bin:/usr/sbin:/var/bin"}) + environment_vars.update({'PYTHONSTDINENCODING': 'cp1047'}) + + testvars = dict() + testvars.update({'host': host}) + testvars.update({'user': user}) + testvars.update({'python_path': f"{pyz}/bin/python3"}) + testvars.update({'environment': environment_vars}) + + if(extra): + extra_args = dict() + extra_args.update(extra) + testvars.update(extra_args) + + return cls(**testvars) + def get_inventory_info(self): """ Returns dictionary containing basic info needed to generate a single-host inventory file. """ inventory_info = { @@ -68,3 +159,20 @@ def build_interpreter_string(self): def get_python_path(self): """ Returns python path """ return self._python_path + + def get_extra_args(self) -> dict: + """ Extra args dictionary """ + return self._extra_args + + def get_extra_args(self, key: str): + """ Extra args dictionary """ + return self._extra_args.get(key) or self._extra_args.get(key.lower()) + + def get_volumes_list(self) -> list[str]: + """ Get volumes as a list if its been defined in extra args""" + for key, value in self._extra_args.items(): + if key.lower() == "volumes": + if not isinstance(value, list): + message = f"Invalid value for use with property [{key}], value must be type list[]." + raise ValueError(message) + return value From 8651241dc4deaa6f092cd13b28385c7b5b14bcd2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Mon, 19 Aug 2024 09:42:57 -0600 Subject: [PATCH 455/495] [Enabler][1592]job_submit_portability (#1658) * First iteration to resolve * Add imports * Add imports * Add fragment --- .../fragments/1658-job_submit_portability.yml | 3 + .../modules/test_zos_job_submit_func.py | 109 ++++++++++-------- 2 files changed, 65 insertions(+), 47 deletions(-) create mode 100644 changelogs/fragments/1658-job_submit_portability.yml diff --git a/changelogs/fragments/1658-job_submit_portability.yml b/changelogs/fragments/1658-job_submit_portability.yml new file mode 100644 index 000000000..83aeb281d --- /dev/null +++ b/changelogs/fragments/1658-job_submit_portability.yml @@ -0,0 +1,3 @@ +trivial: + - zos_job_submit - Remove the use of hard coded dataset and files names. + (https://github.com/ansible-collections/ibm_zos_core/pull/1658). \ No newline at end of file diff --git a/tests/functional/modules/test_zos_job_submit_func.py b/tests/functional/modules/test_zos_job_submit_func.py index 927dcfaad..ffd920259 100644 --- a/tests/functional/modules/test_zos_job_submit_func.py +++ b/tests/functional/modules/test_zos_job_submit_func.py @@ -18,8 +18,11 @@ import tempfile import re import os +import string +import random from shellescape import quote import pytest +from datetime import datetime from ibm_zos_core.tests.helpers.volumes import Volume_Handler from ibm_zos_core.tests.helpers.dataset import get_tmp_ds_name @@ -398,8 +401,9 @@ // """ -TEMP_PATH = "/tmp/jcl" -DATA_SET_NAME_SPECIAL_CHARS = "imstestl.im@1.x#$xx05" +def get_unique_uss_file_name(): + unique_str = "n" + datetime.now().strftime("%H:%M:%S").replace("-", "").replace(":", "") + ".dzp" + return "/tmp/{0}".format(unique_str) @pytest.mark.parametrize( "location", [ @@ -422,9 +426,10 @@ def test_job_submit_pds(ansible_zos_module, location): results = None hosts = ansible_zos_module data_set_name = get_tmp_ds_name() - hosts.all.file(path=TEMP_PATH, state="directory") + temp_path = get_unique_uss_file_name() + hosts.all.file(path=temp_path, state="directory") hosts.all.shell( - cmd="echo {0} > {1}/SAMPLE".format(quote(JCL_FILE_CONTENTS), TEMP_PATH) + cmd="echo {0} > {1}/SAMPLE".format(quote(JCL_FILE_CONTENTS), temp_path) ) hosts.all.zos_data_set( @@ -432,7 +437,7 @@ def test_job_submit_pds(ansible_zos_module, location): ) hosts.all.shell( - cmd="cp {0}/SAMPLE \"//'{1}(SAMPLE)'\"".format(TEMP_PATH, data_set_name) + cmd="cp {0}/SAMPLE \"//'{1}(SAMPLE)'\"".format(temp_path, data_set_name) ) if bool(location.get("default_location")): results = hosts.all.zos_job_submit( @@ -448,30 +453,32 @@ def test_job_submit_pds(ansible_zos_module, location): assert result.get("jobs")[0].get("ret_code").get("code") == 0 assert result.get("changed") is True finally: - hosts.all.file(path=TEMP_PATH, state="absent") + hosts.all.file(path=temp_path, state="absent") hosts.all.zos_data_set(name=data_set_name, state="absent") def test_job_submit_pds_special_characters(ansible_zos_module): try: hosts = ansible_zos_module - hosts.all.file(path=TEMP_PATH, state="directory") + temp_path = get_unique_uss_file_name() + data_set_name_special_chars = get_tmp_ds_name(symbols=True) + hosts.all.file(path=temp_path, state="directory") hosts.all.shell( - cmd="echo {0} > {1}/SAMPLE".format(quote(JCL_FILE_CONTENTS), TEMP_PATH) + cmd="echo {0} > {1}/SAMPLE".format(quote(JCL_FILE_CONTENTS), temp_path) ) results = hosts.all.zos_data_set( - name=DATA_SET_NAME_SPECIAL_CHARS, state="present", type="pds", replace=True + name=data_set_name_special_chars, state="present", type="pds", replace=True ) hosts.all.shell( - cmd="echo {0} > {1}/SAMPLE".format(quote(JCL_FILE_CONTENTS), TEMP_PATH) + cmd="echo {0} > {1}/SAMPLE".format(quote(JCL_FILE_CONTENTS), temp_path) ) hosts.all.shell( cmd="cp {0}/SAMPLE \"//'{1}(SAMPLE)'\"".format( - TEMP_PATH, DATA_SET_NAME_SPECIAL_CHARS.replace('$', '\$') + temp_path, data_set_name_special_chars.replace('$', '\$') ) ) results = hosts.all.zos_job_submit( - src="{0}(SAMPLE)".format(DATA_SET_NAME_SPECIAL_CHARS), + src="{0}(SAMPLE)".format(data_set_name_special_chars), location="data_set", ) for result in results.contacted.values(): @@ -479,26 +486,27 @@ def test_job_submit_pds_special_characters(ansible_zos_module): assert result.get("jobs")[0].get("ret_code").get("code") == 0 assert result.get("changed") is True finally: - hosts.all.file(path=TEMP_PATH, state="absent") - hosts.all.zos_data_set(name=DATA_SET_NAME_SPECIAL_CHARS, state="absent") + hosts.all.file(path=temp_path, state="absent") + hosts.all.zos_data_set(name=data_set_name_special_chars, state="absent") def test_job_submit_uss(ansible_zos_module): try: hosts = ansible_zos_module - hosts.all.file(path=TEMP_PATH, state="directory") + temp_path = get_unique_uss_file_name() + hosts.all.file(path=temp_path, state="directory") hosts.all.shell( - cmd="echo {0} > {1}/SAMPLE".format(quote(JCL_FILE_CONTENTS), TEMP_PATH) + cmd="echo {0} > {1}/SAMPLE".format(quote(JCL_FILE_CONTENTS), temp_path) ) results = hosts.all.zos_job_submit( - src=f"{TEMP_PATH}/SAMPLE", location="uss", volume=None + src=f"{temp_path}/SAMPLE", location="uss", volume=None ) for result in results.contacted.values(): assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" assert result.get("jobs")[0].get("ret_code").get("code") == 0 assert result.get("changed") is True finally: - hosts.all.file(path=TEMP_PATH, state="absent") + hosts.all.file(path=temp_path, state="absent") def test_job_submit_local(ansible_zos_module): @@ -544,12 +552,13 @@ def test_job_submit_pds_volume(ansible_zos_module, volumes_on_systems): try: hosts = ansible_zos_module data_set_name = get_tmp_ds_name() + temp_path = get_unique_uss_file_name() volumes = Volume_Handler(volumes_on_systems) volume_1 = volumes.get_available_vol() - hosts.all.file(path=TEMP_PATH, state="directory") + hosts.all.file(path=temp_path, state="directory") hosts.all.shell( - cmd="echo {0} > {1}/SAMPLE".format(quote(JCL_FILE_CONTENTS), TEMP_PATH) + cmd="echo {0} > {1}/SAMPLE".format(quote(JCL_FILE_CONTENTS), temp_path) ) hosts.all.zos_data_set( @@ -557,7 +566,7 @@ def test_job_submit_pds_volume(ansible_zos_module, volumes_on_systems): ) hosts.all.shell( - cmd="cp {0}/SAMPLE \"//'{1}(SAMPLE)'\"".format(TEMP_PATH, data_set_name) + cmd="cp {0}/SAMPLE \"//'{1}(SAMPLE)'\"".format(temp_path, data_set_name) ) hosts.all.zos_data_set( @@ -574,7 +583,7 @@ def test_job_submit_pds_volume(ansible_zos_module, volumes_on_systems): assert result.get("jobs")[0].get("ret_code").get("code") == 0 assert result.get('changed') is True finally: - hosts.all.file(path=TEMP_PATH, state="absent") + hosts.all.file(path=temp_path, state="absent") hosts.all.zos_data_set(name=data_set_name, state="absent") @@ -582,11 +591,12 @@ def test_job_submit_pds_5_sec_job_wait_15(ansible_zos_module): try: hosts = ansible_zos_module data_set_name = get_tmp_ds_name() - hosts.all.file(path=TEMP_PATH, state="directory") + temp_path = get_unique_uss_file_name() + hosts.all.file(path=temp_path, state="directory") wait_time_s = 15 hosts.all.shell( - cmd=f"echo {quote(JCL_FILE_CONTENTS_05_SEC)} > {TEMP_PATH}/BPXSLEEP" + cmd=f"echo {quote(JCL_FILE_CONTENTS_05_SEC)} > {temp_path}/BPXSLEEP" ) hosts.all.zos_data_set( @@ -594,7 +604,7 @@ def test_job_submit_pds_5_sec_job_wait_15(ansible_zos_module): ) hosts.all.shell( - cmd=f"cp {TEMP_PATH}/BPXSLEEP \"//'{data_set_name}(BPXSLEEP)'\"" + cmd=f"cp {temp_path}/BPXSLEEP \"//'{data_set_name}(BPXSLEEP)'\"" ) hosts = ansible_zos_module @@ -607,7 +617,7 @@ def test_job_submit_pds_5_sec_job_wait_15(ansible_zos_module): assert result.get('changed') is True assert result.get('duration') <= wait_time_s finally: - hosts.all.file(path=TEMP_PATH, state="absent") + hosts.all.file(path=temp_path, state="absent") hosts.all.zos_data_set(name=data_set_name, state="absent") @@ -615,11 +625,12 @@ def test_job_submit_pds_30_sec_job_wait_60(ansible_zos_module): try: hosts = ansible_zos_module data_set_name = get_tmp_ds_name() - hosts.all.file(path=TEMP_PATH, state="directory") + temp_path = get_unique_uss_file_name() + hosts.all.file(path=temp_path, state="directory") wait_time_s = 60 hosts.all.shell( - cmd=f"echo {quote(JCL_FILE_CONTENTS_30_SEC)} > {TEMP_PATH}/BPXSLEEP" + cmd=f"echo {quote(JCL_FILE_CONTENTS_30_SEC)} > {temp_path}/BPXSLEEP" ) hosts.all.zos_data_set( @@ -627,7 +638,7 @@ def test_job_submit_pds_30_sec_job_wait_60(ansible_zos_module): ) hosts.all.shell( - cmd=f"cp {TEMP_PATH}/BPXSLEEP \"//'{data_set_name}(BPXSLEEP)'\"" + cmd=f"cp {temp_path}/BPXSLEEP \"//'{data_set_name}(BPXSLEEP)'\"" ) hosts = ansible_zos_module @@ -640,7 +651,7 @@ def test_job_submit_pds_30_sec_job_wait_60(ansible_zos_module): assert result.get('changed') is True assert result.get('duration') <= wait_time_s finally: - hosts.all.file(path=TEMP_PATH, state="absent") + hosts.all.file(path=temp_path, state="absent") hosts.all.zos_data_set(name=data_set_name, state="absent") def test_job_submit_pds_30_sec_job_wait_10_negative(ansible_zos_module): @@ -648,11 +659,12 @@ def test_job_submit_pds_30_sec_job_wait_10_negative(ansible_zos_module): try: hosts = ansible_zos_module data_set_name = get_tmp_ds_name() - hosts.all.file(path=TEMP_PATH, state="directory") + temp_path = get_unique_uss_file_name() + hosts.all.file(path=temp_path, state="directory") wait_time_s = 10 hosts.all.shell( - cmd=f"echo {quote(JCL_FILE_CONTENTS_30_SEC)} > {TEMP_PATH}/BPXSLEEP" + cmd=f"echo {quote(JCL_FILE_CONTENTS_30_SEC)} > {temp_path}/BPXSLEEP" ) hosts.all.zos_data_set( @@ -660,7 +672,7 @@ def test_job_submit_pds_30_sec_job_wait_10_negative(ansible_zos_module): ) hosts.all.shell( - cmd=f"cp {TEMP_PATH}/BPXSLEEP \"//'{data_set_name}(BPXSLEEP)'\"" + cmd=f"cp {temp_path}/BPXSLEEP \"//'{data_set_name}(BPXSLEEP)'\"" ) hosts = ansible_zos_module @@ -674,7 +686,7 @@ def test_job_submit_pds_30_sec_job_wait_10_negative(ansible_zos_module): # expecting at least "long running job that exceeded its maximum wait" assert re.search(r'exceeded', repr(result.get("msg"))) finally: - hosts.all.file(path=TEMP_PATH, state="absent") + hosts.all.file(path=temp_path, state="absent") hosts.all.zos_data_set(name=data_set_name, state="absent") @@ -823,12 +835,13 @@ def test_job_submit_jinja_template(ansible_zos_module, args): def test_job_submit_full_input(ansible_zos_module): try: hosts = ansible_zos_module - hosts.all.file(path=TEMP_PATH, state="directory") + temp_path = get_unique_uss_file_name() + hosts.all.file(path=temp_path, state="directory") hosts.all.shell( - cmd=f"echo {quote(JCL_FULL_INPUT)} > {TEMP_PATH}/SAMPLE" + cmd=f"echo {quote(JCL_FULL_INPUT)} > {temp_path}/SAMPLE" ) results = hosts.all.zos_job_submit( - src=f"{TEMP_PATH}/SAMPLE", + src=f"{temp_path}/SAMPLE", location="uss", volume=None, # This job used to set wait=True, but since it has been deprecated @@ -840,7 +853,7 @@ def test_job_submit_full_input(ansible_zos_module): assert result.get("jobs")[0].get("ret_code").get("code") == 0 assert result.get("changed") is True finally: - hosts.all.file(path=TEMP_PATH, state="absent") + hosts.all.file(path=temp_path, state="absent") def test_negative_job_submit_local_jcl_no_dsn(ansible_zos_module): tmp_file = tempfile.NamedTemporaryFile(delete=True) @@ -975,18 +988,19 @@ def test_job_from_gdg_source(ansible_zos_module, generation): try: # Creating a GDG for the test. source = get_tmp_ds_name() + temp_path = get_unique_uss_file_name() gds_name = f"{source}({generation})" hosts.all.zos_data_set(name=source, state="present", type="gdg", limit=3) hosts.all.zos_data_set(name=f"{source}(+1)", state="present", type="seq") hosts.all.zos_data_set(name=f"{source}(+1)", state="present", type="seq") # Copying the JCL to the GDS. - hosts.all.file(path=TEMP_PATH, state="directory") + hosts.all.file(path=temp_path, state="directory") hosts.all.shell( - cmd="echo {0} > {1}/SAMPLE".format(quote(JCL_FILE_CONTENTS), TEMP_PATH) + cmd="echo {0} > {1}/SAMPLE".format(quote(JCL_FILE_CONTENTS), temp_path) ) hosts.all.shell( - cmd="dcp '{0}/SAMPLE' '{1}'".format(TEMP_PATH, gds_name) + cmd="dcp '{0}/SAMPLE' '{1}'".format(temp_path, gds_name) ) results = hosts.all.zos_job_submit(src=gds_name, location="data_set") @@ -995,7 +1009,7 @@ def test_job_from_gdg_source(ansible_zos_module, generation): assert result.get("jobs")[0].get("ret_code").get("code") == 0 assert result.get("changed") is True finally: - hosts.all.file(path=TEMP_PATH, state="absent") + hosts.all.file(path=temp_path, state="absent") hosts.all.zos_data_set(name=f"{source}(0)", state="absent") hosts.all.zos_data_set(name=f"{source}(-1)", state="absent") hosts.all.zos_data_set(name=source, state="absent") @@ -1049,16 +1063,17 @@ def test_inexistent_positive_gds(ansible_zos_module): def test_zoau_bugfix_invalid_utf8_chars(ansible_zos_module): try: hosts = ansible_zos_module + temp_path = get_unique_uss_file_name() # Copy C source and compile it. - hosts.all.file(path=TEMP_PATH, state="directory") + hosts.all.file(path=temp_path, state="directory") hosts.all.shell( - cmd=f"echo {quote(C_SRC_INVALID_UTF8)} > {TEMP_PATH}/noprint.c" + cmd=f"echo {quote(C_SRC_INVALID_UTF8)} > {temp_path}/noprint.c" ) - hosts.all.shell(cmd=f"xlc -o {TEMP_PATH}/noprint {TEMP_PATH}/noprint.c") + hosts.all.shell(cmd=f"xlc -o {temp_path}/noprint {temp_path}/noprint.c") # Create local JCL and submit it. tmp_file = tempfile.NamedTemporaryFile(delete=True) with open(tmp_file.name, "w",encoding="utf-8") as f: - f.write(JCL_INVALID_UTF8_CHARS_EXC.format(TEMP_PATH)) + f.write(JCL_INVALID_UTF8_CHARS_EXC.format(temp_path)) results = hosts.all.zos_job_submit( src=tmp_file.name, @@ -1073,4 +1088,4 @@ def test_zoau_bugfix_invalid_utf8_chars(ansible_zos_module): assert result.get("jobs")[0].get("ret_code").get("code") == 0 assert result.get("changed") is True finally: - hosts.all.file(path=TEMP_PATH, state="absent") + hosts.all.file(path=temp_path, state="absent") From f55d6a7215c5ce7b0c098db922d17ff46f5cec1c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Thu, 22 Aug 2024 17:44:19 -0600 Subject: [PATCH 456/495] [Enabler][1594]test_fetch_portability (#1657) * Fix all hardcoded content * Add fix to fetch * Bad declaration fixed * Bad declaration fixed * Bad declaration fixed * Add fragment * Fix comments * Fix on close * Fix delete * Fix delete * Fix delete * Fix delete * Fix delete * Fix delete * Fix delete * Fix delete --- .../fragments/1657-test_fetch_portability.yml | 3 + .../functional/modules/test_zos_fetch_func.py | 55 +++++++++++-------- 2 files changed, 34 insertions(+), 24 deletions(-) create mode 100644 changelogs/fragments/1657-test_fetch_portability.yml diff --git a/changelogs/fragments/1657-test_fetch_portability.yml b/changelogs/fragments/1657-test_fetch_portability.yml new file mode 100644 index 000000000..97c5bbc5b --- /dev/null +++ b/changelogs/fragments/1657-test_fetch_portability.yml @@ -0,0 +1,3 @@ +trivial: + - zos_fetch - Remove the use of hard coded dataset and files names. + (https://github.com/ansible-collections/ibm_zos_core/pull/1657). \ No newline at end of file diff --git a/tests/functional/modules/test_zos_fetch_func.py b/tests/functional/modules/test_zos_fetch_func.py index 83d0c4947..45ca96dd5 100644 --- a/tests/functional/modules/test_zos_fetch_func.py +++ b/tests/functional/modules/test_zos_fetch_func.py @@ -18,9 +18,14 @@ import stat import re import pytest +import string +import random +import tempfile from hashlib import sha256 from ansible.utils.hashing import checksum +from datetime import datetime + from shellescape import quote # pylint: disable-next=import-error @@ -38,7 +43,7 @@ FROM_ENCODING = "IBM-1047" TO_ENCODING = "ISO8859-1" -USS_FILE = "/tmp/fetch.data" + TEST_DATA = """0001This is for encode conversion testsing 0002This is for encode conversion testsing 0003This is for encode conversion testsing @@ -81,6 +86,11 @@ 00000003A record """ +def get_unique_uss_file_name(): + unique_str = "EN" + datetime.now().strftime("%H:%M:%S").replace("-", "").replace(":", "") + "CODE" + random.choice(string.ascii_letters) + return "/tmp/{0}".format(unique_str) + + def extract_member_name(data_set): start = data_set.find("(") member = "" @@ -184,7 +194,7 @@ def test_fetch_uss_file_present_on_local_machine(ansible_zos_module): hosts = ansible_zos_module params = { "src":"/etc/profile", - "dest":"/tmp/", + "dest": "/tmp/", "flat":True } dest_path = "/tmp/profile" @@ -286,11 +296,12 @@ def test_fetch_partitioned_data_set(ansible_zos_module): def test_fetch_vsam_data_set(ansible_zos_module, volumes_on_systems): hosts = ansible_zos_module - temp_jcl_path = "/tmp/ansible" + temp_jcl_path = get_unique_uss_file_name() test_vsam = get_tmp_ds_name() dest_path = "/tmp/" + test_vsam volumes = Volume_Handler(volumes_on_systems) volume_1 = volumes.get_available_vol() + uss_file = get_unique_uss_file_name() try: # start by creating the vsam dataset (could use a helper instead? ) hosts.all.file(path=temp_jcl_path, state="directory") @@ -300,9 +311,9 @@ def test_fetch_vsam_data_set(ansible_zos_module, volumes_on_systems): hosts.all.zos_job_submit( src=f"{temp_jcl_path}/SAMPLE", location="uss", wait_time_s=30 ) - hosts.all.shell(cmd=f"echo \"{TEST_DATA}\c\" > {USS_FILE}") + hosts.all.shell(cmd=f"echo \"{TEST_DATA}\c\" > {uss_file}") hosts.all.zos_encode( - src=USS_FILE, + src=uss_file, dest=test_vsam, encoding={ "from": FROM_ENCODING, @@ -331,7 +342,7 @@ def test_fetch_vsam_data_set(ansible_zos_module, volumes_on_systems): if os.path.exists(dest_path): None os.remove(dest_path) - hosts.all.file(path=USS_FILE, state="absent") + hosts.all.file(path=uss_file, state="absent") hosts.all.file(path=temp_jcl_path, state="absent") @@ -511,14 +522,14 @@ def test_fetch_partitioned_data_set_member_empty(ansible_zos_module): record_format="fba", record_length=25, ) + dest_path = get_unique_uss_file_name() hosts.all.zos_data_set(name=pds_name, type="pds") hosts.all.zos_data_set(name=pds_name + "(MYDATA)", type="member", replace="yes") params = { "src":pds_name + "(MYDATA)", - "dest":"/tmp/", + "dest": dest_path, "flat":True } - dest_path = "/tmp/MYDATA" try: results = hosts.all.zos_fetch(**params) for result in results.contacted.values(): @@ -630,7 +641,6 @@ def test_fetch_sequential_data_set_replace_on_local_machine(ansible_zos_module): space_type="m", space_primary=5 ) - ds_name = TEST_PS hosts.all.zos_data_set(name=TEST_PS, state="present") hosts.all.shell(cmd=f"decho \"{TEST_DATA}\" \"{TEST_PS}\"") dest_path = "/tmp/" + TEST_PS @@ -660,7 +670,6 @@ def test_fetch_partitioned_data_set_replace_on_local_machine(ansible_zos_module) pds_name = get_tmp_ds_name() dest_path = "/tmp/" + pds_name full_path = dest_path + "/MYDATA" - pds_name_mem = pds_name + "(MYDATA)" hosts.all.zos_data_set( name=pds_name, type="pds", @@ -696,22 +705,17 @@ def test_fetch_partitioned_data_set_replace_on_local_machine(ansible_zos_module) def test_fetch_uss_file_insufficient_write_permission_fails(ansible_zos_module): hosts = ansible_zos_module - dest_path = "/tmp/profile" - with open(dest_path, "w",encoding="utf-8") as dest_file: - dest_file.close() - os.chmod(dest_path, stat.S_IREAD) + dest_path = tempfile.NamedTemporaryFile(mode='r+b') + os.chmod(dest_path.name, stat.S_IREAD) params = { "src":"/etc/profile", - "dest":"/tmp/", + "dest": dest_path.name, "flat":True } - try: - results = hosts.all.zos_fetch(**params) - for result in results.contacted.values(): - assert "msg" in result.keys() - finally: - if os.path.exists(dest_path): - os.remove(dest_path) + results = hosts.all.zos_fetch(**params) + for result in results.contacted.values(): + assert "msg" in result.keys() + dest_path.close() def test_fetch_pds_dir_insufficient_write_permission_fails(ansible_zos_module): @@ -738,7 +742,10 @@ def test_fetch_use_data_set_qualifier(ansible_zos_module): hosts = ansible_zos_module src = get_tmp_ds_name()[:25] dest_path = "/tmp/"+ src - hosts.all.zos_data_set(name="OMVSADM." + src, type="seq", state="present") + results = hosts.all.shell(cmd="echo $USER") + for result in results.contacted.values(): + hlq = result.get("stdout") + hosts.all.zos_data_set(name=hlq + '.' + src, type="seq", state="present") params = { "src":src, "dest":"/tmp/", @@ -755,7 +762,7 @@ def test_fetch_use_data_set_qualifier(ansible_zos_module): finally: if os.path.exists(dest_path): os.remove(dest_path) - hosts.all.zos_data_set(name="OMVSADM." + src, state="absent") + hosts.all.zos_data_set(name="{0}.".format(hlq) + src, state="absent") def test_fetch_flat_create_dirs(ansible_zos_module, z_python_interpreter): From b7a4e360b5e219c9cf2bdc781a9295ff8a4b5148 Mon Sep 17 00:00:00 2001 From: Ivan Moreno <iamorenosoto@gmail.com> Date: Mon, 26 Aug 2024 13:16:03 -0600 Subject: [PATCH 457/495] [Bugfix] [zos_job_output] [zos_job_query] Fix username validator in job util (#1661) * Change username validation in job util * Fix username arg parser * Add test for new username type validator * Add changelog fragment * Fix encoding test * Add more information to fragment * Move username validator to BetterArgHandler class --- .../1661-job-owner-valid-characters.yml | 9 +++++ plugins/module_utils/better_arg_parser.py | 37 +++++++++++++++++++ plugins/module_utils/job.py | 9 +++-- .../module_utils/test_arg_parser.py | 22 +++++++++++ .../modules/test_zos_encode_func.py | 2 +- 5 files changed, 74 insertions(+), 5 deletions(-) create mode 100644 changelogs/fragments/1661-job-owner-valid-characters.yml diff --git a/changelogs/fragments/1661-job-owner-valid-characters.yml b/changelogs/fragments/1661-job-owner-valid-characters.yml new file mode 100644 index 000000000..7632ca96d --- /dev/null +++ b/changelogs/fragments/1661-job-owner-valid-characters.yml @@ -0,0 +1,9 @@ +bugfixes: + - zos_job_output - RACF user names containing a ``@``, ``$``, or ``#`` + raised an invalid argument error. Fix now allows the use of all valid + characters for a RACF user. + (https://github.com/ansible-collections/ibm_zos_core/pull/1661). + - zos_job_query - RACF user names containing a ``@``, ``$``, or ``#`` + raised an invalid argument error. Fix now allows the use of all valid + characters for a RACF user. + (https://github.com/ansible-collections/ibm_zos_core/pull/1661). \ No newline at end of file diff --git a/plugins/module_utils/better_arg_parser.py b/plugins/module_utils/better_arg_parser.py index 449b25314..e5dd8e975 100644 --- a/plugins/module_utils/better_arg_parser.py +++ b/plugins/module_utils/better_arg_parser.py @@ -163,6 +163,7 @@ def __init__(self, arg_name, contents, resolved_args, arg_defs): "qualifier": self._qualifier_type, "qualifier_or_empty": self._qualifier_or_empty_type, "qualifier_pattern": self._qualifier_pattern_type, + "username_pattern": self._username_pattern_type, "volume": self._volume_type, "data_set_or_path": self._data_set_or_path_type, "encoding": self._encoding_type, @@ -565,6 +566,42 @@ def _qualifier_pattern_type(self, contents, resolve_dependencies): ) return str(contents) + def _username_pattern_type(self, contents, resolve_dependencies): + """Resolver for username_pattern type arguments. + + Parameters + ---------- + contents : bool + The contents of the argument. + resolved_dependencies : dict + Contains all of the dependencies and their contents, + which have already been handled, + for use during current arguments handling operations. + + Returns + ------- + str + The arguments contents after any necessary operations. + + Raises + ------ + ValueError + When contents is invalid argument type. + """ + # Valid characters are the following: + # A - Z, 0 - 9, $, @, # + if not fullmatch( + r"^(?:[A-Z$#@]{1}[A-Z0-9$#@]{0,7})|(?:\*{1})|(?:[A-Z$#@]{1}[A-Z0-9$#@]{0,6}\*{1})$", + str(contents), + IGNORECASE, + ): + raise ValueError( + 'Invalid argument type for "{0}". Expected a valid username.'.format( + contents + ) + ) + return str(contents) + def _volume_type(self, contents, resolve_dependencies): """Resolver for volume type arguments. diff --git a/plugins/module_utils/job.py b/plugins/module_utils/job.py index ab2c98a62..536602dc4 100644 --- a/plugins/module_utils/job.py +++ b/plugins/module_utils/job.py @@ -90,7 +90,7 @@ def job_output(job_id=None, owner=None, job_name=None, dd_name=None, dd_scan=Tru """ arg_defs = dict( job_id=dict(arg_type="qualifier_pattern"), - owner=dict(arg_type="qualifier_pattern"), + owner=dict(arg_type="username_pattern"), job_name=dict(arg_type="qualifier_pattern"), dd_name=dict(arg_type=_ddname_pattern), ) @@ -224,7 +224,7 @@ def job_status(job_id=None, owner=None, job_name=None, dd_name=None): """ arg_defs = dict( job_id=dict(arg_type="str"), - owner=dict(arg_type="qualifier_pattern"), + owner=dict(arg_type="username_pattern"), job_name=dict(arg_type="str"), ) @@ -505,7 +505,8 @@ def _ddname_pattern(contents, resolve_dependencies): ---------- contents : bool The contents of the argument. - resolved_dependencies {dict} -- Contains all of the dependencies and their contents, + resolved_dependencies : dict + Contains all of the dependencies and their contents, which have already been handled, for use during current arguments handling operations. @@ -525,7 +526,7 @@ def _ddname_pattern(contents, resolve_dependencies): re.IGNORECASE, ): raise ValueError( - 'Invalid argument type for "{0}". expected "ddname_pattern"'.format( + 'Invalid argument type for "{0}". Expected "ddname_pattern"'.format( contents ) ) diff --git a/tests/functional/module_utils/test_arg_parser.py b/tests/functional/module_utils/test_arg_parser.py index 0dd52eb0f..294f6c5d7 100644 --- a/tests/functional/module_utils/test_arg_parser.py +++ b/tests/functional/module_utils/test_arg_parser.py @@ -930,6 +930,28 @@ def special_names_get_uppercase(value, dependencies, kwargs): assert result.get("person").get("name") == "john" +def test_username_type_valid(): + # Testing all valid characters for a TSO/RACF user. + username = "@4$user#" + + # Mocking the arg definition from module_utils/job.py. + arg_defs = { + "job_id": {"arg_type": "str"}, + "owner": {"arg_type": "username_pattern"}, + "job_name": {"arg_type": "str"} + } + + parser = BetterArgParser(arg_defs) + result = parser.parse_args({ + "job_id": "*", + "owner": username, + "job_name": "*" + }) + + # The parser should accept the username as a valid value. + assert result.get("owner") == username + + @pytest.mark.parametrize( ("arg_type", "name"), [ diff --git a/tests/functional/modules/test_zos_encode_func.py b/tests/functional/modules/test_zos_encode_func.py index 85f977660..f696d821a 100644 --- a/tests/functional/modules/test_zos_encode_func.py +++ b/tests/functional/modules/test_zos_encode_func.py @@ -1131,7 +1131,7 @@ def test_gdg_encoding_conversion_invalid_gdg(ansible_zos_module): for result in results.contacted.values(): assert result.get("msg") is not None - assert "not yet supported" in result.get("msg") + assert "not supported" in result.get("msg") assert result.get("backup_name") is None assert result.get("changed") is False assert result.get("failed") is True From 5f5a6a2dca0b2fa684133b18117c8ccf843f6864 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Wed, 28 Aug 2024 10:05:30 -0600 Subject: [PATCH 458/495] [zos_copy] Enable zos_copy test cases portability (#1664) * Updated test cases to allow portability * removed KSDS hardcoded data set * Continued removing hardcoded data sets * Fixed merge * Updated uss file names to use temporary names * Added utils * Fixed test case * Fixed test case * Fixed test case by creating folder * Updated other test cases to use module_utils/get_random_file_name instead of implementing its own function * Added changelog * Updated randome file names --- .../fragments/1664-portability-zos_copy.yml | 11 + .../modules/test_zos_backup_restore.py | 18 +- .../functional/modules/test_zos_copy_func.py | 410 ++++++++---------- .../modules/test_zos_encode_func.py | 70 ++- .../functional/modules/test_zos_fetch_func.py | 13 +- .../modules/test_zos_job_submit_func.py | 28 +- tests/helpers/utils.py | 43 ++ 7 files changed, 289 insertions(+), 304 deletions(-) create mode 100644 changelogs/fragments/1664-portability-zos_copy.yml create mode 100644 tests/helpers/utils.py diff --git a/changelogs/fragments/1664-portability-zos_copy.yml b/changelogs/fragments/1664-portability-zos_copy.yml new file mode 100644 index 000000000..267027273 --- /dev/null +++ b/changelogs/fragments/1664-portability-zos_copy.yml @@ -0,0 +1,11 @@ +trivial: + - zos_copy - Remove the use of hard coded dataset and files names using a module_utils function. + (https://github.com/ansible-collections/ibm_zos_core/pull/1664). + - zos_backup_restore - Remove the use of hard coded dataset and files names using a module_utils function. + (https://github.com/ansible-collections/ibm_zos_core/pull/1664). + - zos_encode - Remove the use of hard coded dataset and files names using a module_utils function. + (https://github.com/ansible-collections/ibm_zos_core/pull/1664). + - zos_fetch - Remove the use of hard coded dataset and files names using a module_utils function. + (https://github.com/ansible-collections/ibm_zos_core/pull/1664). + - zos_job_submit - Remove the use of hard coded dataset and files names using a module_utils function. + (https://github.com/ansible-collections/ibm_zos_core/pull/1664). \ No newline at end of file diff --git a/tests/functional/modules/test_zos_backup_restore.py b/tests/functional/modules/test_zos_backup_restore.py index 1b01bebc7..e74fc0aa4 100644 --- a/tests/functional/modules/test_zos_backup_restore.py +++ b/tests/functional/modules/test_zos_backup_restore.py @@ -23,13 +23,11 @@ from re import search, IGNORECASE, MULTILINE import string import random -from datetime import datetime +from ibm_zos_core.tests.helpers.utils import get_random_file_name DATA_SET_CONTENTS = "HELLO WORLD" +TMP_DIRECTORY = "/tmp/" -def get_unique_uss_file_name(): - unique_str = "n" + datetime.now().strftime("%H:%M:%S").replace("-", "").replace(":", "") + ".dzp" - return "/tmp/{0}".format(unique_str) # ---------------------------------------------------------------------------- # # Helper functions # @@ -203,7 +201,7 @@ def test_backup_of_data_set(ansible_zos_module, backup_name, overwrite, recover) if backup_name == "DATA_SET": backup_name = get_tmp_ds_name(1,1) else: - backup_name = get_unique_uss_file_name() + backup_name = get_random_file_name(dir=TMP_DIRECTORY, prefix='.dzp') try: if not overwrite: delete_data_set_or_file(hosts, backup_name) @@ -243,7 +241,7 @@ def test_backup_of_data_set_when_backup_dest_exists( if backup_name == "DATA_SET": backup_name = get_tmp_ds_name(1,1) else: - backup_name = get_unique_uss_file_name() + backup_name = get_random_file_name(dir=TMP_DIRECTORY, prefix='.dzp') try: create_data_set_or_file_with_contents(hosts, backup_name, DATA_SET_CONTENTS) assert_data_set_or_file_exists(hosts, backup_name) @@ -291,7 +289,7 @@ def test_backup_and_restore_of_data_set( if backup_name == "DATA_SET": backup_name = get_tmp_ds_name(1,1) else: - backup_name = get_unique_uss_file_name() + backup_name = get_random_file_name(dir=TMP_DIRECTORY, prefix='.dzp') try: delete_data_set_or_file(hosts, data_set_name) delete_data_set_or_file(hosts, backup_name) @@ -349,7 +347,7 @@ def test_backup_and_restore_of_data_set_various_space_measurements( if backup_name == "DATA_SET": backup_name = get_tmp_ds_name(1,1) else: - backup_name = get_unique_uss_file_name() + backup_name = get_random_file_name(dir=TMP_DIRECTORY, prefix='.dzp') try: delete_data_set_or_file(hosts, data_set_name) delete_data_set_or_file(hosts, backup_name) @@ -405,7 +403,7 @@ def test_backup_and_restore_of_data_set_when_restore_location_exists( if backup_name == "DATA_SET": backup_name = get_tmp_ds_name(1,1) else: - backup_name = get_unique_uss_file_name() + backup_name = get_random_file_name(dir=TMP_DIRECTORY, prefix='.dzp') try: delete_data_set_or_file(hosts, data_set_name) delete_data_set_or_file(hosts, backup_name) @@ -583,7 +581,7 @@ def test_restore_of_data_set_when_backup_does_not_exist( if backup_name == "DATA_SET": backup_name = get_tmp_ds_name(1,1) else: - backup_name = get_unique_uss_file_name() + backup_name = get_random_file_name(dir=TMP_DIRECTORY, prefix='.dzp') new_hlq = "N" + get_random_q(4) hlqs.append(new_hlq) try: diff --git a/tests/functional/modules/test_zos_copy_func.py b/tests/functional/modules/test_zos_copy_func.py index 76c75dd32..9df423fb8 100644 --- a/tests/functional/modules/test_zos_copy_func.py +++ b/tests/functional/modules/test_zos_copy_func.py @@ -19,14 +19,16 @@ import re import time import tempfile -from tempfile import mkstemp import subprocess from ibm_zos_core.tests.helpers.volumes import Volume_Handler from ibm_zos_core.tests.helpers.dataset import get_tmp_ds_name +from ibm_zos_core.tests.helpers.utils import get_random_file_name __metaclass__ = type +# Test temporary folder +TMP_DIRECTORY = '/tmp/' DUMMY_DATA = """DUMMY DATA ---- LINE 001 ------ DUMMY DATA ---- LINE 002 ------ DUMMY DATA ---- LINE 003 ------ @@ -113,13 +115,6 @@ # SHELL_EXECUTABLE = "/usr/lpp/rsusr/ported/bin/bash" SHELL_EXECUTABLE = "/bin/sh" -TEST_PS = "IMSTESTL.IMS01.DDCHKPT" -TEST_PDS = "IMSTESTL.COMNUC" -TEST_PDS_MEMBER = "IMSTESTL.COMNUC(ATRQUERY)" -TEST_VSAM = "IMSTESTL.LDS01.WADS2" -TEST_VSAM_KSDS = "SYS1.STGINDEX" -TEST_PDSE = "SYS1.NFSLIBE" -TEST_PDSE_MEMBER = "SYS1.NFSLIBE(GFSAMAIN)" COBOL_PRINT_STR = "HELLO WORLD ONE" COBOL_PRINT_STR2 = "HELLO WORLD TWO" @@ -197,7 +192,7 @@ call_c_hello_jcl="""//PDSELOCK JOB MSGCLASS=A,MSGLEVEL=(1,1),NOTIFY=&SYSUID,REGION=0M //LOCKMEM EXEC PGM=BPXBATCH //STDPARM DD * -SH /tmp/c/hello_world +SH {0}/hello_world //STDIN DD DUMMY //STDOUT DD SYSOUT=* //STDERR DD SYSOUT=* @@ -221,7 +216,7 @@ call_c_jcl="""//PDSELOCK JOB MSGCLASS=A,MSGLEVEL=(1,1),NOTIFY=&SYSUID,REGION=0M //LOCKMEM EXEC PGM=BPXBATCH //STDPARM DD * -SH /tmp/disp_shr/pdse-lock '{0}' +SH {0}/pdse-lock '{1}' //STDIN DD DUMMY //STDOUT DD SYSOUT=* //STDERR DD SYSOUT=* @@ -263,7 +258,7 @@ def populate_partitioned_data_set(hosts, name, ds_type, members=None): members = ["MEMBER1", "MEMBER2", "MEMBER3"] ds_list = ["{0}({1})".format(name, member) for member in members] - hosts.all.zos_data_set(name=name, type=ds_type, state="present") + hosts.all.shell(cmd=f"dtouch -t{ds_type} {name}") for member in ds_list: hosts.all.shell( @@ -324,9 +319,9 @@ def create_vsam_data_set(hosts, name, ds_type, add_data=False, key_length=None, hosts.all.zos_data_set(**params) if add_data: - record_src = "/tmp/zos_copy_vsam_record" + record_src = get_random_file_name(dir=TMP_DIRECTORY) - hosts.all.zos_copy(content=VSAM_RECORDS, dest=record_src) + hosts.all.shell(cmd=f"echo '{VSAM_RECORDS}' > '{record_src}' ") hosts.all.zos_encode(src=record_src, dest=name, encoding={"from": "ISO8859-1", "to": "IBM-1047"}) hosts.all.file(path=record_src, state="absent") @@ -354,7 +349,7 @@ def link_loadlib_from_cobol(hosts, cobol_src_pds, cobol_src_mem, loadlib_pds, lo loadlib_mem (str) - candidate loadlib member loadlib_alias_mem (str) - alias member name """ - temp_jcl_uss_path = "/tmp/link.jcl" + temp_jcl_uss_path = get_random_file_name(dir=TMP_DIRECTORY) rc = 0 try: # Copy over the Link program to USS @@ -365,7 +360,7 @@ def link_loadlib_from_cobol(hosts, cobol_src_pds, cobol_src_mem, loadlib_pds, lo ) # Submit link JCL. job_result = hosts.all.zos_job_submit( - src="/tmp/link.jcl", + src=temp_jcl_uss_path, location="uss", wait_time_s=60 ) @@ -413,12 +408,12 @@ def generate_loadlib(hosts, cobol_src_pds, cobol_src_mems, loadlib_pds, loadlib_ validate_loadlib_pgm(hosts, steplib=loadlib_pds, pgm_name=loadlib_alias_mems[1], expected_output_str=COBOL_PRINT_STR2) -def generate_executable_uss(hosts, src, src_jcl_call): +def generate_executable_uss(hosts, dir, src, src_jcl_call): hosts.all.zos_copy(content=hello_world, dest=src, force=True) - hosts.all.zos_copy(content=call_c_hello_jcl, dest=src_jcl_call, force=True) - hosts.all.shell(cmd="xlc -o hello_world hello_world.c", chdir="/tmp/c/") + hosts.all.zos_copy(content=call_c_hello_jcl.format(dir), dest=src_jcl_call, force=True) + hosts.all.shell(cmd="xlc -o hello_world hello_world.c", chdir=dir) hosts.all.shell(cmd="submit {0}".format(src_jcl_call)) - verify_exe_src = hosts.all.shell(cmd="/tmp/c/hello_world") + verify_exe_src = hosts.all.shell(cmd="{0}/hello_world".format(dir)) for res in verify_exe_src.contacted.values(): assert res.get("rc") == 0 stdout = res.get("stdout") @@ -436,7 +431,7 @@ def generate_executable_uss(hosts, src, src_jcl_call): ]) def test_copy_file_to_non_existing_uss_file(ansible_zos_module, src): hosts = ansible_zos_module - dest_path = "/tmp/zos_copy_test_profile" + dest_path = get_random_file_name(dir=TMP_DIRECTORY) try: hosts.all.file(path=dest_path, state="absent") @@ -469,10 +464,9 @@ def test_copy_file_to_non_existing_uss_file(ansible_zos_module, src): ]) def test_copy_file_to_existing_uss_file(ansible_zos_module, src): hosts = ansible_zos_module - dest_path = "/tmp/test_profile" + dest_path = get_random_file_name(dir=TMP_DIRECTORY) try: - hosts.all.file(path=dest_path, state="absent") hosts.all.file(path=dest_path, state="touch") stat_res = list(hosts.all.stat(path=dest_path).contacted.values()) timestamp = stat_res[0].get("stat").get("atime") @@ -509,11 +503,11 @@ def test_copy_file_to_existing_uss_file(ansible_zos_module, src): ]) def test_copy_file_to_uss_dir(ansible_zos_module, src): hosts = ansible_zos_module - dest = "/tmp" - dest_path = "/tmp/profile" + dest = get_random_file_name(suffix='/', dir=TMP_DIRECTORY) + # This name is kept because we copy from /etc/profile file and keep the original file name. + dest_path = dest + "profile" try: - hosts.all.file(path=dest_path, state="absent") copy_res = hosts.all.zos_copy(src=src["src"], dest=dest, is_binary=src["is_binary"], remote_src=src["is_remote"]) @@ -526,14 +520,14 @@ def test_copy_file_to_uss_dir(ansible_zos_module, src): for st in stat_res.contacted.values(): assert st.get("stat").get("exists") is True finally: - hosts.all.file(path=dest_path, state="absent") + hosts.all.file(path=dest, state="absent") @pytest.mark.uss def test_copy_file_to_uss_dir_missing_parents(ansible_zos_module): hosts = ansible_zos_module src = "/etc/profile" - dest_dir = "/tmp/parent_dir" + dest_dir = get_random_file_name(dir=TMP_DIRECTORY) dest = "{0}/subdir/profile".format(dest_dir) try: @@ -555,8 +549,8 @@ def test_copy_file_to_uss_dir_missing_parents(ansible_zos_module): @pytest.mark.uss def test_copy_local_symlink_to_uss_file(ansible_zos_module): hosts = ansible_zos_module - src_lnk = "/tmp/etclnk" - dest_path = "/tmp/profile" + src_lnk = get_random_file_name(dir=TMP_DIRECTORY) + dest_path = get_random_file_name(dir=TMP_DIRECTORY) try: try: os.symlink("/etc/profile", src_lnk) @@ -582,9 +576,8 @@ def test_copy_local_symlink_to_uss_file(ansible_zos_module): @pytest.mark.uss def test_copy_local_file_to_uss_file_convert_encoding(ansible_zos_module): hosts = ansible_zos_module - dest_path = "/tmp/profile" + dest_path = get_random_file_name(dir=TMP_DIRECTORY) + "/profile" try: - hosts.all.file(path=dest_path, state="absent") copy_res = hosts.all.zos_copy( src="/etc/profile", dest=dest_path, @@ -605,8 +598,8 @@ def test_copy_local_file_to_uss_file_convert_encoding(ansible_zos_module): @pytest.mark.uss def test_copy_inline_content_to_uss_dir(ansible_zos_module): hosts = ansible_zos_module - dest = "/tmp/" - dest_path = "/tmp/inline_copy" + dest = get_random_file_name(dir=TMP_DIRECTORY, suffix='/') + dest_path = dest + "inline_copy" try: copy_res = hosts.all.zos_copy(content="Inline content", dest=dest) @@ -619,15 +612,15 @@ def test_copy_inline_content_to_uss_dir(ansible_zos_module): for result in stat_res.contacted.values(): assert result.get("stat").get("exists") is True finally: - hosts.all.file(path=dest_path, state="absent") + hosts.all.file(path=dest, state="absent") @pytest.mark.uss def test_copy_dir_to_existing_uss_dir_not_forced(ansible_zos_module): hosts = ansible_zos_module - src_dir = "/tmp/new_dir/" + src_dir = get_random_file_name(dir=TMP_DIRECTORY, suffix='/') src_file = "{0}profile".format(src_dir) - dest_dir = "/tmp/test_dir" + dest_dir = get_random_file_name(dir=TMP_DIRECTORY, suffix='/') dest_old_content = "{0}/old_dir".format(dest_dir) try: @@ -657,7 +650,7 @@ def test_copy_subdirs_folders_and_validate_recursive_encoding(ansible_zos_module hosts = ansible_zos_module # Remote path - path = "/tmp/ansible" + path = get_random_file_name(dir=TMP_DIRECTORY, suffix='/') # Remote src path with original files src_path = path + "/src" @@ -753,7 +746,7 @@ def test_copy_subdirs_folders_and_validate_recursive_encoding(ansible_zos_module @pytest.mark.uss def test_copy_subdirs_folders_and_validate_recursive_encoding_local(ansible_zos_module): hosts = ansible_zos_module - dest_path = "/tmp/test/" + dest_path = get_random_file_name(dir=TMP_DIRECTORY, suffix='/') try: source_1 = tempfile.TemporaryDirectory(prefix="level_", suffix="_1") @@ -797,7 +790,7 @@ def test_copy_local_dir_to_non_existing_dir(ansible_zos_module, copy_directory): this means we only copy that directory contents without creating it on the remote. """ hosts = ansible_zos_module - dest_path = "/tmp/new_dir" + dest_path = get_random_file_name(dir=TMP_DIRECTORY) temp_path = tempfile.mkdtemp() src_basename = "source" if copy_directory else "source/" @@ -845,9 +838,9 @@ def test_copy_local_dir_to_non_existing_dir(ansible_zos_module, copy_directory): def test_copy_uss_dir_to_non_existing_dir(ansible_zos_module, copy_directory): hosts = ansible_zos_module src_basename = "source_dir" if copy_directory else "source_dir/" - src_dir = "/tmp/{0}".format(src_basename) + src_dir = get_random_file_name(dir=TMP_DIRECTORY) + "/" + src_basename src_file = os.path.normpath("{0}/profile".format(src_dir)) - dest_dir = "/tmp/dest_dir" + dest_dir = get_random_file_name(dir=TMP_DIRECTORY) try: hosts.all.file(path=src_dir, state="directory") @@ -894,7 +887,7 @@ def test_copy_uss_dir_to_non_existing_dir(ansible_zos_module, copy_directory): @pytest.mark.parametrize("copy_directory", [False, True]) def test_copy_local_dir_to_existing_dir_forced(ansible_zos_module, copy_directory): hosts = ansible_zos_module - dest_path = "/tmp/new_dir" + dest_path = get_random_file_name(dir=TMP_DIRECTORY) dest_file = "{0}/profile".format(dest_path) temp_path = tempfile.mkdtemp() @@ -954,10 +947,11 @@ def test_copy_local_dir_to_existing_dir_forced(ansible_zos_module, copy_director @pytest.mark.parametrize("copy_directory", [False, True]) def test_copy_uss_dir_to_existing_dir_forced(ansible_zos_module, copy_directory): hosts = ansible_zos_module - src_basename = "source_dir" if copy_directory else "source_dir/" - src_dir = "/tmp/{0}".format(src_basename) + src_basename = get_random_file_name() if copy_directory else get_random_file_name(suffix='/') + src_parent = get_random_file_name(dir=TMP_DIRECTORY, suffix='/') + src_dir = src_parent + src_basename src_file = os.path.normpath("{0}/profile".format(src_dir)) - dest_dir = "/tmp/dest_dir" + dest_dir = get_random_file_name(dir=TMP_DIRECTORY) dest_file = "{0}/file".format(dest_dir) try: @@ -1006,7 +1000,7 @@ def test_copy_uss_dir_to_existing_dir_forced(ansible_zos_module, copy_directory) assert result.get("stat").get("isdir") is False finally: - hosts.all.file(path=src_dir, state="absent") + hosts.all.file(path=src_parent, state="absent") hosts.all.file(path=dest_dir, state="absent") @@ -1014,7 +1008,7 @@ def test_copy_uss_dir_to_existing_dir_forced(ansible_zos_module, copy_directory) @pytest.mark.parametrize("create_dest", [False, True]) def test_copy_local_nested_dir_to_uss(ansible_zos_module, create_dest): hosts = ansible_zos_module - dest_path = "/tmp/new_dir" + dest_path = get_random_file_name(dir=TMP_DIRECTORY) source_path = tempfile.mkdtemp() if not source_path.endswith("/"): @@ -1060,8 +1054,8 @@ def test_copy_local_nested_dir_to_uss(ansible_zos_module, create_dest): @pytest.mark.parametrize("create_dest", [False, True]) def test_copy_uss_nested_dir_to_uss(ansible_zos_module, create_dest): hosts = ansible_zos_module - source_path = "/tmp/old_dir/" - dest_path = "/tmp/new_dir" + source_path = get_random_file_name(dir=TMP_DIRECTORY, suffix='/') + dest_path = get_random_file_name(dir=TMP_DIRECTORY) subdir_a_path = "{0}subdir_a".format(source_path) subdir_b_path = "{0}subdir_b".format(source_path) @@ -1108,7 +1102,7 @@ def test_copy_local_dir_and_change_mode(ansible_zos_module, copy_directory): source_path = "{0}/{1}".format(source_parent_path, source_basename) mode = "0755" - dest_path = "/tmp/new_dir" + dest_path = get_random_file_name(dir=TMP_DIRECTORY) dest_profile = "{0}/profile".format(dest_path) dest_subdir = "{0}/{1}".format(dest_path, source_basename) if copy_directory: @@ -1195,11 +1189,11 @@ def test_copy_local_dir_and_change_mode(ansible_zos_module, copy_directory): def test_copy_uss_dir_and_change_mode(ansible_zos_module, copy_directory): hosts = ansible_zos_module - source_basename = "source" if copy_directory else "source/" - source_path = "/tmp/{0}".format(source_basename) + source_basename = get_random_file_name() if copy_directory else get_random_file_name(suffix='/') + source_path = "/{0}/{1}".format(TMP_DIRECTORY, source_basename) mode = "0755" - dest_path = "/tmp/new_dir" + dest_path = get_random_file_name(dir=TMP_DIRECTORY) dest_subdir = "{0}/{1}".format(dest_path, source_basename) dest_profile = "{0}/profile".format(dest_path) if copy_directory: @@ -1285,43 +1279,47 @@ def test_copy_uss_dir_and_change_mode(ansible_zos_module, copy_directory): @pytest.mark.uss -@pytest.mark.parametrize("backup", [None, "/tmp/uss_backup"]) +@pytest.mark.parametrize("backup", [False, True]) def test_backup_uss_file(ansible_zos_module, backup): hosts = ansible_zos_module src = "/etc/profile" - dest = "/tmp/profile" + dest_dir = get_random_file_name(dir=TMP_DIRECTORY) + dest = f"{dest_dir}/profile" backup_name = None try: + hosts.all.file(path=dest_dir, state="directory") hosts.all.file(path=dest, state="touch") + if backup: - copy_res = hosts.all.zos_copy(src=src, dest=dest, force=True, backup=True, backup_name=backup) + backup_name = get_random_file_name(dir=TMP_DIRECTORY) + copy_res = hosts.all.zos_copy(src=src, dest=dest, force=True, backup=True, backup_name=backup_name) else: copy_res = hosts.all.zos_copy(src=src, dest=dest, force=True, backup=True) for result in copy_res.contacted.values(): assert result.get("msg") is None - backup_name = result.get("backup_name") + backup_name_result = result.get("backup_name") if backup: - assert backup_name == backup + assert backup_name_result == backup_name else: - assert backup_name is not None + assert backup_name_result is not None - stat_res = hosts.all.stat(path=backup_name) + stat_res = hosts.all.stat(path=backup_name_result) for result in stat_res.contacted.values(): assert result.get("stat").get("exists") is True finally: - hosts.all.file(path=dest, state="absent") - if backup_name: - hosts.all.file(path=backup_name, state="absent") + hosts.all.file(path=dest_dir, state="absent") + if backup_name_result: + hosts.all.file(path=backup_name_result, state="absent") @pytest.mark.uss def test_copy_file_insufficient_read_permission_fails(ansible_zos_module): hosts = ansible_zos_module - src_path = "/tmp/testfile" + src_path = get_random_file_name(dir=TMP_DIRECTORY) dest = "/tmp" try: open(src_path, "w").close() @@ -1340,7 +1338,7 @@ def test_copy_file_insufficient_read_permission_fails(ansible_zos_module): def test_copy_non_existent_file_fails(ansible_zos_module, is_remote): hosts = ansible_zos_module src_path = "/tmp/non_existent_src" - dest = "/tmp" + dest = TMP_DIRECTORY copy_res = hosts.all.zos_copy(src=src_path, dest=dest, remote_src=is_remote) for result in copy_res.contacted.values(): @@ -1353,7 +1351,7 @@ def test_copy_non_existent_file_fails(ansible_zos_module, is_remote): @pytest.mark.parametrize("encoding", ["utf-8", "iso8859-1"]) def test_copy_template_file(ansible_zos_module, encoding): hosts = ansible_zos_module - dest_path = "/tmp/new_dir" + dest_path = get_random_file_name(dir=TMP_DIRECTORY) temp_dir = tempfile.mkdtemp() try: @@ -1414,7 +1412,7 @@ def test_copy_template_file(ansible_zos_module, encoding): @pytest.mark.template def test_copy_template_dir(ansible_zos_module): hosts = ansible_zos_module - dest_path = "/tmp/new_dir" + dest_path = get_random_file_name(dir=TMP_DIRECTORY) # Ensuring there's a traling slash to copy the contents of the directory. temp_dir = os.path.normpath(tempfile.mkdtemp()) @@ -1501,7 +1499,7 @@ def test_copy_template_dir(ansible_zos_module): @pytest.mark.template def test_copy_template_file_with_non_default_markers(ansible_zos_module): hosts = ansible_zos_module - dest_path = "/tmp/new_dir" + dest_path = get_random_file_name(dir=TMP_DIRECTORY) temp_dir = tempfile.mkdtemp() try: @@ -1898,7 +1896,7 @@ def test_copy_asa_data_set_to_text_file(ansible_zos_module): remote_src=False ) - dest = "/tmp/zos_copy_asa_test.txt" + dest = get_random_file_name(dir=TMP_DIRECTORY) copy_result = hosts.all.zos_copy( src=src, @@ -1933,7 +1931,7 @@ def test_copy_asa_data_set_to_text_file(ansible_zos_module): dict(src="/etc/profile", is_remote=True),]) def test_ensure_copy_file_does_not_change_permission_on_dest(ansible_zos_module, src): hosts = ansible_zos_module - dest_path = "/tmp/test/" + dest_path = get_random_file_name(dir=TMP_DIRECTORY, suffix='/') mode = "750" other_mode = "744" mode_overwrite = "0777" @@ -1984,15 +1982,16 @@ def test_copy_dest_lock(ansible_zos_module, ds_type): # copy text_in source hosts.all.shell(cmd="decho \"{0}\" \"{1}\"".format(DUMMY_DATA, src_data_set)) # copy/compile c program and copy jcl to hold data set lock for n seconds in background(&) - hosts.all.zos_copy(content=c_pgm, dest='/tmp/disp_shr/pdse-lock.c', force=True) + temp_dir = get_random_file_name(dir=TMP_DIRECTORY) + hosts.all.zos_copy(content=c_pgm, dest=f'{temp_dir}/pdse-lock.c', force=True) hosts.all.zos_copy( - content=call_c_jcl.format(dest_data_set), - dest='/tmp/disp_shr/call_c_pgm.jcl', + content=call_c_jcl.format(temp_dir, dest_data_set), + dest=f'{temp_dir}/call_c_pgm.jcl', force=True ) - hosts.all.shell(cmd="xlc -o pdse-lock pdse-lock.c", chdir="/tmp/disp_shr/") + hosts.all.shell(cmd="xlc -o pdse-lock pdse-lock.c", chdir=f"{temp_dir}/") # submit jcl - hosts.all.shell(cmd="submit call_c_pgm.jcl", chdir="/tmp/disp_shr/") + hosts.all.shell(cmd="submit call_c_pgm.jcl", chdir=f"{temp_dir}/") # pause to ensure c code acquires lock time.sleep(5) results = hosts.all.zos_copy( @@ -2028,7 +2027,7 @@ def test_copy_dest_lock(ansible_zos_module, ds_type): pid = list(ps_list_res.contacted.values())[0].get('stdout').strip().split(' ')[0] hosts.all.shell(cmd="kill 9 {0}".format(pid.strip())) # clean up c code/object/executable files, jcl - hosts.all.shell(cmd='rm -r /tmp/disp_shr') + hosts.all.shell(cmd=f'rm -r {temp_dir}') # remove pdse hosts.all.zos_data_set(name=data_set_1, state="absent") hosts.all.zos_data_set(name=data_set_2, state="absent") @@ -2176,7 +2175,7 @@ def test_copy_local_binary_file_without_encoding_conversion(ansible_zos_module): @pytest.mark.seq def test_copy_remote_binary_file_without_encoding_conversion(ansible_zos_module): hosts = ansible_zos_module - src = "/tmp/zos_copy_binary_file" + src = get_random_file_name(dir=TMP_DIRECTORY) dest = get_tmp_ds_name() try: @@ -2317,10 +2316,11 @@ def test_copy_file_to_non_empty_sequential_data_set(ansible_zos_module, src): @pytest.mark.seq def test_copy_ps_to_non_existing_uss_file(ansible_zos_module): hosts = ansible_zos_module - src_ds = TEST_PS - dest = "/tmp/ddchkpt" + src_ds = get_tmp_ds_name() + dest = get_random_file_name(dir=TMP_DIRECTORY) try: + hosts.all.shell(cmd=f"decho '{DUMMY_DATA_SPECIAL_CHARS}' '{src_ds}' ") copy_res = hosts.all.zos_copy(src=src_ds, dest=dest, remote_src=True) stat_res = hosts.all.stat(path=dest) verify_copy = hosts.all.shell( @@ -2338,6 +2338,7 @@ def test_copy_ps_to_non_existing_uss_file(ansible_zos_module): assert result.get("stdout") != "" finally: hosts.all.file(path=dest, state="absent") + hosts.all.zos_data_set(name=src_ds, state="absent") @pytest.mark.uss @@ -2345,8 +2346,8 @@ def test_copy_ps_to_non_existing_uss_file(ansible_zos_module): @pytest.mark.parametrize("force", [False, True]) def test_copy_ps_to_existing_uss_file(ansible_zos_module, force): hosts = ansible_zos_module - src_ds = TEST_PS - dest = "/tmp/ddchkpt" + src_ds = get_tmp_ds_name() + dest = get_random_file_name(dir=TMP_DIRECTORY) hosts = ansible_zos_module mlq_size = 3 @@ -2362,6 +2363,7 @@ def test_copy_ps_to_existing_uss_file(ansible_zos_module, force): pgm2_mem_alias = "ALIAS2" try: hosts.all.file(path=dest, state="touch") + hosts.all.shell(cmd=f"decho 'test line' '{src_ds}' ") copy_res = hosts.all.zos_copy(src=src_ds, dest=dest, remote_src=True, force=force) stat_res = hosts.all.stat(path=dest) @@ -2382,35 +2384,20 @@ def test_copy_ps_to_existing_uss_file(ansible_zos_module, force): for result in verify_copy.contacted.values(): assert result.get("rc") == 0 finally: + hosts.all.shell(cmd=f"drm '{src_ds}' ") hosts.all.file(path=dest, state="absent") - else: - # copy src loadlib to dest library pds w/o aliases - copy_res = hosts.all.zos_copy( - src="{0}".format(src_lib), - dest="{0}".format(dest_lib), - remote_src=True, - executable=True, - aliases=False - ) - # copy src loadlib to dest library pds w aliases - copy_res_aliases = hosts.all.zos_copy( - src="{0}".format(src_lib), - dest="{0}".format(dest_lib_aliases), - remote_src=True, - executable=True, - aliases=True - ) @pytest.mark.uss @pytest.mark.seq def test_copy_ps_to_existing_uss_dir(ansible_zos_module): hosts = ansible_zos_module - src_ds = TEST_PS - dest = "/tmp/ddchkpt" - dest_path = dest + "/" + TEST_PS + src_ds = get_tmp_ds_name() + dest = get_random_file_name(dir=TMP_DIRECTORY) + dest_path = dest + "/" + src_ds try: + hosts.all.shell(cmd=f"decho 'test line' '{src_ds}' " ) hosts.all.file(path=dest, state="directory") copy_res = hosts.all.zos_copy(src=src_ds, dest=dest, remote_src=True) stat_res = hosts.all.stat(path=dest_path) @@ -2433,79 +2420,16 @@ def test_copy_ps_to_existing_uss_dir(ansible_zos_module): @pytest.mark.seq def test_copy_ps_to_non_existing_ps(ansible_zos_module): hosts = ansible_zos_module - src_ds = TEST_PS + src_ds = get_tmp_ds_name() dest = get_tmp_ds_name() try: - hosts.all.zos_data_set(name=dest, state="absent") + hosts.all.shell(cmd=f"decho 'this is a test line' '{src_ds}'") copy_res = hosts.all.zos_copy(src=src_ds, dest=dest, remote_src=True) verify_copy = hosts.all.shell( cmd="cat \"//'{0}'\"".format(dest), executable=SHELL_EXECUTABLE ) - # Copying the remote loadlibs in USS to a local dir. - # This section ONLY handles ONE host, so if we ever use multiple hosts to - # test, we will need to update this code. - remote_user = hosts["options"]["user"] - # Removing a trailing comma because the framework saves the hosts list as a - # string instead of a list. - remote_host = hosts["options"]["inventory"].replace(",", "") - - tmp_folder = tempfile.TemporaryDirectory(prefix="tmpfetch") - cmd = [ - "sftp", - "-r", - f"{remote_user}@{remote_host}:{uss_location}", - f"{tmp_folder.name}" - ] - with subprocess.Popen(cmd, shell=False, stdout=subprocess.PIPE) as sftp_proc: - result = sftp_proc.stdout.read() - - source_path = os.path.join(tmp_folder.name, os.path.basename(uss_location)) - - if not is_created: - # ensure dest data sets absent for this variation of the test case. - hosts.all.zos_data_set(name=dest_lib, state="absent") - else: - # allocate dest loadlib to copy over without an alias. - hosts.all.zos_data_set( - name=dest_lib, - state="present", - type="pdse", - record_format="u", - record_length=0, - block_size=32760, - space_primary=2, - space_type="m", - replace=True - ) - - if not is_created: - # dest data set does not exist, specify it in dest_dataset param. - # copy src loadlib to dest library pds w/o aliases - copy_res = hosts.all.zos_copy( - src=source_path, - dest="{0}".format(dest_lib), - executable=True, - aliases=False, - dest_data_set={ - 'type': "pdse", - 'record_format': "u", - 'record_length': 0, - 'block_size': 32760, - 'space_primary': 2, - 'space_type': "m", - } - ) - else: - # copy src loadlib to dest library pds w/o aliases - copy_res = hosts.all.zos_copy( - src=source_path, - dest="{0}".format(dest_lib), - executable=True, - aliases=False - ) - for result in copy_res.contacted.values(): assert result.get("msg") is None assert result.get("changed") is True @@ -2522,11 +2446,12 @@ def test_copy_ps_to_non_existing_ps(ansible_zos_module): @pytest.mark.parametrize("force", [False, True]) def test_copy_ps_to_empty_ps(ansible_zos_module, force): hosts = ansible_zos_module - src_ds = TEST_PS + src_ds = get_tmp_ds_name() dest = get_tmp_ds_name() try: - hosts.all.zos_data_set(name=dest, type="seq", state="present") + hosts.all.shell(cmd=f"decho 'test line ' '{src_ds}'") + hosts.all.shell(cmd=f"dtouch -tseq '{src_ds}'") copy_res = hosts.all.zos_copy(src=src_ds, dest=dest, remote_src=True, force=force) verify_copy = hosts.all.shell( @@ -2548,12 +2473,12 @@ def test_copy_ps_to_empty_ps(ansible_zos_module, force): @pytest.mark.parametrize("force", [False, True]) def test_copy_ps_to_non_empty_ps(ansible_zos_module, force): hosts = ansible_zos_module - src_ds = TEST_PS + src_ds = get_tmp_ds_name() dest = get_tmp_ds_name() try: - hosts.all.zos_data_set(name=dest, type="seq", state="absent") - hosts.all.zos_copy(content="Inline content", dest=dest) + hosts.all.shell(cmd=f"decho 'This is a test ' '{src_ds}' ") + hosts.all.shell(cmd=f"decho 'This is a test ' '{dest}' ") copy_res = hosts.all.zos_copy(src=src_ds, dest=dest, remote_src=True, force=force) verify_copy = hosts.all.shell( @@ -2572,7 +2497,6 @@ def test_copy_ps_to_non_empty_ps(ansible_zos_module, force): assert result.get("rc") == 0 assert result.get("stdout") != "" finally: - hosts.all.shell(cmd='rm -r /tmp/c') hosts.all.zos_data_set(name=dest, state="absent") @@ -2580,12 +2504,12 @@ def test_copy_ps_to_non_empty_ps(ansible_zos_module, force): @pytest.mark.parametrize("force", [False, True]) def test_copy_ps_to_non_empty_ps_with_special_chars(ansible_zos_module, force): hosts = ansible_zos_module - src_ds = TEST_PS + src_ds = get_tmp_ds_name() dest = get_tmp_ds_name() try: - hosts.all.zos_data_set(name=dest, type="seq", state="absent") - hosts.all.zos_copy(content=DUMMY_DATA_SPECIAL_CHARS, dest=dest) + hosts.all.shell(cmd=f"decho '{DUMMY_DATA_SPECIAL_CHARS}' '{src_ds}' ") + hosts.all.shell(cmd=f"decho '{DUMMY_DATA_SPECIAL_CHARS}' '{dest}' ") copy_res = hosts.all.zos_copy(src=src_ds, dest=dest, remote_src=True, force=force) verify_copy = hosts.all.shell( @@ -2608,7 +2532,7 @@ def test_copy_ps_to_non_empty_ps_with_special_chars(ansible_zos_module, force): @pytest.mark.seq -@pytest.mark.parametrize("backup", [None, "USER.TEST.SEQ.FUNCTEST.BACK"]) +@pytest.mark.parametrize("backup", [None, True]) def test_backup_sequential_data_set(ansible_zos_module, backup): hosts = ansible_zos_module src = "/etc/profile" @@ -2618,17 +2542,20 @@ def test_backup_sequential_data_set(ansible_zos_module, backup): hosts.all.zos_data_set(name=dest, type="seq", state="present") if backup: - copy_res = hosts.all.zos_copy(src=src, dest=dest, force=True, backup=True, backup_name=backup) + backup_name = get_tmp_ds_name() + copy_res = hosts.all.zos_copy(src=src, dest=dest, force=True, backup=True, backup_name=backup_name) else: copy_res = hosts.all.zos_copy(src=src, dest=dest, force=True, backup=True) for result in copy_res.contacted.values(): assert result.get("msg") is None - backup_name = result.get("backup_name") - assert backup_name is not None + assert result.get("backup_name") is not None + result_backup_name = result.get("backup_name") + if backup: + assert backup_name == result.get("backup_name") stat_res = hosts.all.shell( - cmd="tsocmd \"LISTDS '{0}'\"".format(backup_name), + cmd="tsocmd \"LISTDS '{0}'\"".format(result_backup_name), executable=SHELL_EXECUTABLE, ) for result in stat_res.contacted.values(): @@ -2638,7 +2565,7 @@ def test_backup_sequential_data_set(ansible_zos_module, backup): finally: hosts.all.zos_data_set(name=dest, state="absent") - if backup_name: + if backup: hosts.all.zos_data_set(name=backup_name, state="absent") @@ -2912,7 +2839,7 @@ def test_copy_file_to_non_existing_pdse(ansible_zos_module, is_remote): @pytest.mark.pdse def test_copy_dir_to_non_existing_pdse(ansible_zos_module): hosts = ansible_zos_module - src_dir = "/tmp/testdir" + src_dir = get_random_file_name(dir=TMP_DIRECTORY) dest = get_tmp_ds_name() try: @@ -2976,7 +2903,7 @@ def test_copy_dir_crlf_endings_to_non_existing_pdse(ansible_zos_module): @pytest.mark.parametrize("src_type", ["pds", "pdse"]) def test_copy_dir_to_existing_pdse(ansible_zos_module, src_type): hosts = ansible_zos_module - src_dir = "/tmp/testdir" + src_dir = get_random_file_name(dir=TMP_DIRECTORY) dest = get_tmp_ds_name() try: @@ -3240,7 +3167,7 @@ def test_copy_pds_loadlib_member_to_uss_to_loadlib(ansible_zos_module): dest_lib_aliases = get_tmp_ds_name(mlq_s) pgm_mem_alias = "ALIAS1" - uss_dest = "/tmp/HELLO" + uss_dest = get_random_file_name(dir=TMP_DIRECTORY) try: # allocate data sets hosts.all.zos_data_set( @@ -3580,7 +3507,7 @@ def test_copy_local_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): dest_lib = get_tmp_ds_name(mlq_s) pgm_mem = "HELLO" pgm2_mem = "HELLO2" - uss_location = "/tmp/loadlib" + uss_location = get_random_file_name(dir=TMP_DIRECTORY) try: @@ -3743,7 +3670,7 @@ def test_copy_pds_loadlib_to_uss_to_pds_loadlib(ansible_zos_module): pgm2_mem_alias = "ALIAS2" # note - aliases for executables are implicitly copied over (by module design) for USS targets. - uss_dir_path = '/tmp/uss-loadlib/' + uss_dir_path = get_random_file_name(dir=TMP_DIRECTORY) try: # allocate pds for cobol src code @@ -3922,19 +3849,20 @@ def test_copy_pds_loadlib_to_uss_to_pds_loadlib(ansible_zos_module): @pytest.mark.uss def test_copy_executables_uss_to_uss(ansible_zos_module): hosts= ansible_zos_module - src= "/tmp/c/hello_world.c" - src_jcl_call= "/tmp/c/call_hw_pgm.jcl" - dest_uss="/tmp/c/hello_world_2" + c_dir = get_random_file_name(dir=TMP_DIRECTORY) + src= f"{c_dir}/hello_world.c" + src_jcl_call= f"{c_dir}/call_hw_pgm.jcl" + dest_uss=f"{c_dir}/hello_world_2" try: - generate_executable_uss(hosts, src, src_jcl_call) + generate_executable_uss(hosts, c_dir, src, src_jcl_call) copy_uss_res = hosts.all.zos_copy( - src="/tmp/c/hello_world", + src=f"{c_dir}/hello_world", dest=dest_uss, remote_src=True, executable=True, force=True ) - verify_exe_dst = hosts.all.shell(cmd="/tmp/c/hello_world_2") + verify_exe_dst = hosts.all.shell(cmd=f"{c_dir}/hello_world_2") for result in copy_uss_res.contacted.values(): assert result.get("msg") is None assert result.get("changed") is True @@ -3943,7 +3871,7 @@ def test_copy_executables_uss_to_uss(ansible_zos_module): stdout = res.get("stdout") assert "Hello World" in str(stdout) finally: - hosts.all.shell(cmd='rm -r /tmp/c') + hosts.all.shell(cmd=f'rm -r {c_dir}') @pytest.mark.pdse @@ -3951,13 +3879,14 @@ def test_copy_executables_uss_to_uss(ansible_zos_module): @pytest.mark.parametrize("is_created", ["true", "false"]) def test_copy_executables_uss_to_member(ansible_zos_module, is_created): hosts= ansible_zos_module - src= "/tmp/c/hello_world.c" + c_dir = get_random_file_name(dir=TMP_DIRECTORY) + src= f"{c_dir}/hello_world.c" mlq_size = 3 - src_jcl_call= "/tmp/c/call_hw_pgm.jcl" + src_jcl_call= f"{c_dir}/call_hw_pgm.jcl" dest = get_tmp_ds_name(mlq_size) member = "HELLOSRC" try: - generate_executable_uss(hosts, src, src_jcl_call) + generate_executable_uss(hosts, c_dir, src, src_jcl_call) if is_created: hosts.all.zos_data_set( name=dest, @@ -3971,7 +3900,7 @@ def test_copy_executables_uss_to_member(ansible_zos_module, is_created): replace=True ) copy_uss_to_mvs_res = hosts.all.zos_copy( - src="/tmp/c/hello_world", + src=f"{c_dir}/hello_world", dest="{0}({1})".format(dest, member), remote_src=True, executable=True, @@ -3989,7 +3918,7 @@ def test_copy_executables_uss_to_member(ansible_zos_module, is_created): stdout = res.get("stdout") assert "Hello World" in str(stdout) finally: - hosts.all.shell(cmd='rm -r /tmp/c') + hosts.all.shell(cmd=f'rm -r {c_dir}') hosts.all.zos_data_set(name=dest, state="absent") @@ -4134,7 +4063,7 @@ def test_copy_member_to_non_existing_uss_file(ansible_zos_module, ds_type): hosts = ansible_zos_module data_set = get_tmp_ds_name() src = "{0}(MEMBER)".format(data_set) - dest = "/tmp/member" + dest = get_random_file_name(dir=TMP_DIRECTORY) try: hosts.all.file(path=dest, state="absent") @@ -4176,7 +4105,7 @@ def test_copy_member_to_existing_uss_file(ansible_zos_module, args): hosts = ansible_zos_module data_set = get_tmp_ds_name() src = "{0}(MEMBER)".format(data_set) - dest = "/tmp/member" + dest = get_random_file_name(dir=TMP_DIRECTORY) try: hosts.all.file(path=dest, state="touch") @@ -4218,8 +4147,8 @@ def test_copy_member_to_existing_uss_file(ansible_zos_module, args): def test_copy_pdse_to_uss_dir(ansible_zos_module, src_type): hosts = ansible_zos_module src_ds = get_tmp_ds_name() - dest = "/tmp/" - dest_path = "/tmp/{0}".format(src_ds) + dest = get_random_file_name(dir=TMP_DIRECTORY) + dest_path = "{0}/{1}".format(dest, src_ds) try: hosts.all.zos_data_set(name=src_ds, type=src_type, state="present") @@ -4253,7 +4182,7 @@ def test_copy_pdse_to_uss_dir(ansible_zos_module, src_type): assert result.get("stat").get("isdir") is True finally: hosts.all.zos_data_set(name=src_ds, state="absent") - hosts.all.file(path=dest_path, state="absent") + hosts.all.file(path=dest, state="absent") @pytest.mark.uss @@ -4264,8 +4193,8 @@ def test_copy_member_to_uss_dir(ansible_zos_module, src_type): hosts = ansible_zos_module src_ds = get_tmp_ds_name() src = "{0}(MEMBER)".format(src_ds) - dest = "/tmp/" - dest_path = "/tmp/MEMBER" + dest = get_random_file_name(dir=TMP_DIRECTORY, suffix='/') + dest_path = f"{dest}MEMBER" try: hosts.all.zos_data_set(name=src_ds, type=src_type, state="present") @@ -4282,6 +4211,7 @@ def test_copy_member_to_uss_dir(ansible_zos_module, src_type): assert result.get("changed") is False assert error_msg in result.get("msg") + hosts.all.file(path=dest, state="directory") copy_res = hosts.all.zos_copy(src=src, dest=dest, remote_src=True) stat_res = hosts.all.stat(path=dest_path) verify_copy = hosts.all.shell( @@ -4299,8 +4229,8 @@ def test_copy_member_to_uss_dir(ansible_zos_module, src_type): assert result.get("rc") == 0 assert result.get("stdout") != "" finally: - hosts.all.zos_data_set(name=src_ds, state="absent") - hosts.all.file(path=dest_path, state="absent") + # hosts.all.zos_data_set(name=src_ds, state="absent") + hosts.all.file(path=dest, state="absent") @pytest.mark.seq @@ -4429,10 +4359,10 @@ def test_copy_file_to_member_convert_encoding(ansible_zos_module, dest_type): @pytest.mark.pdse @pytest.mark.parametrize("args", [ - dict(type="pds", backup=None), - dict(type="pds", backup="USER.TEST.PDS.BACKUP"), - dict(type="pdse", backup=None), - dict(type="pdse", backup="USER.TEST.PDSE.BACKUP"), + dict(type="pds", backup=False), + dict(type="pds", backup=True), + dict(type="pdse", backup=False), + dict(type="pdse", backup=True), ]) def test_backup_pds(ansible_zos_module, args): hosts = ansible_zos_module @@ -4446,7 +4376,8 @@ def test_backup_pds(ansible_zos_module, args): populate_partitioned_data_set(hosts, dest, args["type"], members) if args["backup"]: - copy_res = hosts.all.zos_copy(src=src, dest=dest, force=True, backup=True, backup_name=args["backup"]) + backup_name = get_tmp_ds_name() + copy_res = hosts.all.zos_copy(src=src, dest=dest, force=True, backup=True, backup_name=backup_name) else: copy_res = hosts.all.zos_copy(src=src, dest=dest, force=True, backup=True) @@ -4455,12 +4386,12 @@ def test_backup_pds(ansible_zos_module, args): assert result.get("changed") is True assert result.get("dest") == dest - backup_name = result.get("backup_name") - assert backup_name is not None + result_backup_name = result.get("backup_name") + assert result_backup_name is not None if args["backup"]: - assert backup_name == args["backup"] + assert result_backup_name == backup_name - verify_copy = get_listcat_information(hosts, backup_name, args["type"]) + verify_copy = get_listcat_information(hosts, result_backup_name, args["type"]) for result in verify_copy.contacted.values(): assert result.get("dd_names") is not None @@ -4525,8 +4456,9 @@ def test_copy_data_set_to_volume(ansible_zos_module, volumes_on_systems, src_typ @pytest.mark.vsam def test_copy_ksds_to_non_existing_ksds(ansible_zos_module): hosts = ansible_zos_module - src_ds = TEST_VSAM_KSDS + src_ds = get_tmp_ds_name() dest_ds = get_tmp_ds_name() + create_vsam_data_set(hosts, src_ds, "ksds", add_data=True, key_length=12, key_offset=0) try: copy_res = hosts.all.zos_copy(src=src_ds, dest=dest_ds, remote_src=True) @@ -4544,7 +4476,13 @@ def test_copy_ksds_to_non_existing_ksds(ansible_zos_module): assert "IN-CAT" in output assert re.search(r"\bINDEXED\b", output) finally: - hosts.all.zos_data_set(name=dest_ds, state="absent") + hosts.all.zos_data_set( + batch=[ + {"name": dest_ds, "state": "absent"}, + {"name": src_ds, "state": "absent"} + ] + ) + @pytest.mark.vsam @pytest.mark.parametrize("force", [False, True]) @@ -4582,7 +4520,7 @@ def test_copy_ksds_to_existing_ksds(ansible_zos_module, force): @pytest.mark.vsam -@pytest.mark.parametrize("backup", [None, "USER.TEST.VSAM.KSDS.BACK"]) +@pytest.mark.parametrize("backup", [False, True]) def test_backup_ksds(ansible_zos_module, backup): hosts = ansible_zos_module src = get_tmp_ds_name() @@ -4594,21 +4532,22 @@ def test_backup_ksds(ansible_zos_module, backup): create_vsam_data_set(hosts, dest, "ksds", add_data=True, key_length=12, key_offset=0) if backup: - copy_res = hosts.all.zos_copy(src=src, dest=dest, backup=True, backup_name=backup, remote_src=True, force=True) + backup_name = get_tmp_ds_name() + copy_res = hosts.all.zos_copy(src=src, dest=dest, backup=True, backup_name=backup_name, remote_src=True, force=True) else: copy_res = hosts.all.zos_copy(src=src, dest=dest, backup=True, remote_src=True, force=True) for result in copy_res.contacted.values(): assert result.get("msg") is None assert result.get("changed") is True - backup_name = result.get("backup_name") - assert backup_name is not None + result_backup_name = result.get("backup_name") + assert result_backup_name is not None if backup: - assert backup_name == backup + assert result_backup_name == backup_name verify_copy = get_listcat_information(hosts, dest, "ksds") - verify_backup = get_listcat_information(hosts, backup_name, "ksds") + verify_backup = get_listcat_information(hosts, result_backup_name, "ksds") for result in verify_copy.contacted.values(): assert result.get("dd_names") is not None @@ -4635,7 +4574,8 @@ def test_backup_ksds(ansible_zos_module, backup): @pytest.mark.vsam def test_copy_ksds_to_volume(ansible_zos_module, volumes_on_systems): hosts = ansible_zos_module - src_ds = TEST_VSAM_KSDS + src_ds = get_tmp_ds_name() + create_vsam_data_set(hosts, src_ds, "ksds", add_data=True, key_length=12, key_offset=0) dest_ds = get_tmp_ds_name() volumes = Volume_Handler(volumes_on_systems) volume_1 = volumes.get_available_vol() @@ -4662,7 +4602,12 @@ def test_copy_ksds_to_volume(ansible_zos_module, volumes_on_systems): assert re.search(r"\bINDEXED\b", output) assert re.search(r"\b{0}\b".format(volume_1), output) finally: - hosts.all.zos_data_set(name=dest_ds, state="absent") + hosts.all.zos_data_set( + batch=[ + {"name": dest_ds, "state": "absent"}, + {"name": src_ds, "state": "absent"} + ] + ) def test_dest_data_set_parameters(ansible_zos_module, volumes_on_systems): @@ -4793,11 +4738,11 @@ def test_copy_uss_file_to_existing_sequential_data_set_twice_with_tmphlq_option( @pytest.mark.parametrize("options", [ - dict(src="/etc/profile", dest="/tmp/zos_copy_test_profile", + dict(src="/etc/profile", force=True, is_remote=False, verbosity="-vvvvv", verbosity_level=5), - dict(src="/etc/profile", dest="/mp/zos_copy_test_profile", force=True, + dict(src="/etc/profile", force=True, is_remote=False, verbosity="-vvvv", verbosity_level=4), - dict(src="/etc/profile", dest="/tmp/zos_copy_test_profile", + dict(src="/etc/profile", force=True, is_remote=False, verbosity="", verbosity_level=0), ]) def test_display_verbosity_in_zos_copy_plugin(ansible_zos_module, options): @@ -4813,8 +4758,9 @@ def test_display_verbosity_in_zos_copy_plugin(ansible_zos_module, options): node = hosts["options"]["inventory"].rstrip(',') python_path = hosts["options"]["ansible_python_path"] + dest_path = get_random_file_name(dir=TMP_DIRECTORY) # This is an adhoc command, because there was no - cmd = "ansible all -i " + str(node) + ", -u " + user + " -m ibm.ibm_zos_core.zos_copy -a \"src=" + options["src"] + " dest=" + options["dest"] + " is_remote=" + str( + cmd = "ansible all -i " + str(node) + ", -u " + user + " -m ibm.ibm_zos_core.zos_copy -a \"src=" + options["src"] + " dest=" + dest_path + " is_remote=" + str( options["is_remote"]) + " encoding={{enc}} \" -e '{\"enc\":{\"from\": \"ISO8859-1\", \"to\": \"IBM-1047\"}}' -e \"ansible_python_interpreter=" + python_path + "\" " + options["verbosity"] + "" result = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE).stdout @@ -4826,7 +4772,7 @@ def test_display_verbosity_in_zos_copy_plugin(ansible_zos_module, options): assert ("play context verbosity:" not in output) finally: - hosts.all.file(path=options["dest"], state="absent") + hosts.all.file(path=dest_path, state="absent") @pytest.mark.parametrize("generation", ["0", "+1"]) @@ -5226,7 +5172,7 @@ def test_copy_gdg_to_uss_dir(ansible_zos_module): try: src_data_set = get_tmp_ds_name() - dest = "/tmp/zos_copy_gdg" + dest = get_random_file_name(dir=TMP_DIRECTORY) hosts.all.shell(cmd=f"dtouch -tGDG -L3 {src_data_set}") hosts.all.shell(cmd=f"""dtouch -tSEQ "{src_data_set}(+1)" """) diff --git a/tests/functional/modules/test_zos_encode_func.py b/tests/functional/modules/test_zos_encode_func.py index f696d821a..ede95e646 100644 --- a/tests/functional/modules/test_zos_encode_func.py +++ b/tests/functional/modules/test_zos_encode_func.py @@ -14,16 +14,15 @@ from __future__ import absolute_import, division, print_function from os import path from shellescape import quote -from datetime import datetime # pylint: disable-next=import-error from ibm_zos_core.tests.helpers.dataset import get_tmp_ds_name import pytest -import string -import random import re +from ibm_zos_core.tests.helpers.utils import get_random_file_name __metaclass__ = type +TMP_DIRECTORY = "/tmp/" USS_NONE_FILE = "/tmp/none" SHELL_EXECUTABLE = "/bin/sh" FROM_ENCODING = "IBM-1047" @@ -79,11 +78,6 @@ """ -def get_unique_uss_file_name(): - unique_str = "EN" + datetime.now().strftime("%H:%M:%S").replace("-", "").replace(":", "") + "CODE" + random.choice(string.ascii_letters) - return "/tmp/{0}".format(unique_str) - - def create_vsam_data_set(hosts, name, ds_type, add_data=False, key_length=None, key_offset=None): """Creates a new VSAM on the system. @@ -119,7 +113,7 @@ def create_vsam_data_set(hosts, name, ds_type, add_data=False, key_length=None, def test_uss_encoding_conversion_with_invalid_encoding(ansible_zos_module): hosts = ansible_zos_module - uss_file = get_unique_uss_file_name() + uss_file = get_random_file_name(dir=TMP_DIRECTORY, prefix='EN') try: hosts.all.copy(content=TEST_DATA, dest=uss_file) results = hosts.all.zos_encode( @@ -139,7 +133,7 @@ def test_uss_encoding_conversion_with_invalid_encoding(ansible_zos_module): def test_uss_encoding_conversion_with_the_same_encoding(ansible_zos_module): hosts = ansible_zos_module - uss_file = get_unique_uss_file_name() + uss_file = get_random_file_name(dir=TMP_DIRECTORY, prefix='EN') hosts.all.copy(content=TEST_DATA, dest=uss_file) results = hosts.all.zos_encode( src=uss_file, @@ -156,7 +150,7 @@ def test_uss_encoding_conversion_with_the_same_encoding(ansible_zos_module): def test_uss_encoding_conversion_without_dest(ansible_zos_module): - uss_file = get_unique_uss_file_name() + uss_file = get_random_file_name(dir=TMP_DIRECTORY, prefix='EN') try: hosts = ansible_zos_module hosts.all.copy(content=TEST_DATA, dest=uss_file) @@ -181,7 +175,7 @@ def test_uss_encoding_conversion_without_dest(ansible_zos_module): def test_uss_encoding_conversion_when_dest_not_exists_01(ansible_zos_module): - uss_file = get_unique_uss_file_name() + uss_file = get_random_file_name(dir=TMP_DIRECTORY, prefix='EN') try: hosts = ansible_zos_module hosts.all.copy(content=TEST_DATA, dest=uss_file) @@ -233,8 +227,8 @@ def test_uss_encoding_conversion_when_dest_not_exists_02(ansible_zos_module): def test_uss_encoding_conversion_uss_file_to_uss_file(ansible_zos_module): - uss_file = get_unique_uss_file_name() - uss_dest_file = get_unique_uss_file_name() + uss_file = get_random_file_name(dir=TMP_DIRECTORY, prefix='EN') + uss_dest_file = get_random_file_name(dir=TMP_DIRECTORY, prefix='EN') try: hosts = ansible_zos_module hosts.all.copy(content=TEST_DATA, dest=uss_file) @@ -262,8 +256,8 @@ def test_uss_encoding_conversion_uss_file_to_uss_file(ansible_zos_module): def test_uss_encoding_conversion_uss_file_to_uss_path(ansible_zos_module): - uss_file = get_unique_uss_file_name() - uss_dest_path = get_unique_uss_file_name() + uss_file = get_random_file_name(dir=TMP_DIRECTORY, prefix='EN') + uss_dest_path = get_random_file_name(dir=TMP_DIRECTORY, prefix='EN') try: hosts = ansible_zos_module hosts.all.file(path=uss_dest_path, state="directory") @@ -293,8 +287,8 @@ def test_uss_encoding_conversion_uss_file_to_uss_path(ansible_zos_module): def test_uss_encoding_conversion_uss_path_to_uss_path(ansible_zos_module): try: hosts = ansible_zos_module - uss_path = get_unique_uss_file_name() - uss_dest_path = get_unique_uss_file_name() + uss_path = get_random_file_name(dir=TMP_DIRECTORY, prefix='EN') + uss_dest_path = get_random_file_name(dir=TMP_DIRECTORY, prefix='EN') hosts.all.file(path=uss_path, state="directory") hosts.all.copy(content=TEST_DATA, dest=uss_path + "/encode1") hosts.all.copy(content=TEST_DATA, dest=uss_path + "/encode2") @@ -326,7 +320,7 @@ def test_uss_encoding_conversion_uss_path_to_uss_path(ansible_zos_module): def test_uss_encoding_conversion_uss_file_to_mvs_ps(ansible_zos_module): - uss_file = get_unique_uss_file_name() + uss_file = get_random_file_name(dir=TMP_DIRECTORY, prefix='EN') try: hosts = ansible_zos_module mvs_ps = get_tmp_ds_name() @@ -351,7 +345,7 @@ def test_uss_encoding_conversion_uss_file_to_mvs_ps(ansible_zos_module): def test_uss_encoding_conversion_mvs_ps_to_uss_file(ansible_zos_module): - uss_dest_file = get_unique_uss_file_name() + uss_dest_file = get_random_file_name(dir=TMP_DIRECTORY, prefix='EN') try: hosts = ansible_zos_module mvs_ps = get_tmp_ds_name() @@ -383,7 +377,7 @@ def test_uss_encoding_conversion_mvs_ps_to_uss_file(ansible_zos_module): def test_uss_encoding_conversion_uss_file_to_mvs_pds(ansible_zos_module): - uss_file = get_unique_uss_file_name() + uss_file = get_random_file_name(dir=TMP_DIRECTORY, prefix='EN') try: hosts = ansible_zos_module mvs_ps = get_tmp_ds_name() @@ -410,7 +404,7 @@ def test_uss_encoding_conversion_uss_file_to_mvs_pds(ansible_zos_module): def test_uss_encoding_conversion_uss_file_to_mvs_pds_member(ansible_zos_module): - uss_file = get_unique_uss_file_name() + uss_file = get_random_file_name(dir=TMP_DIRECTORY, prefix='EN') try: hosts = ansible_zos_module mvs_ps = get_tmp_ds_name() @@ -448,7 +442,7 @@ def test_uss_encoding_conversion_uss_file_to_mvs_pds_member(ansible_zos_module): def test_uss_encoding_conversion_mvs_pds_member_to_uss_file(ansible_zos_module): - uss_dest_file = get_unique_uss_file_name() + uss_dest_file = get_random_file_name(dir=TMP_DIRECTORY, prefix='EN') try: hosts = ansible_zos_module mvs_ps = get_tmp_ds_name() @@ -492,8 +486,8 @@ def test_uss_encoding_conversion_uss_path_to_mvs_pds(ansible_zos_module): try: hosts = ansible_zos_module mvs_ps = get_tmp_ds_name() - uss_path = get_unique_uss_file_name() - uss_dest_path = get_unique_uss_file_name() + uss_path = get_random_file_name(dir=TMP_DIRECTORY, prefix='EN') + uss_dest_path = get_random_file_name(dir=TMP_DIRECTORY, prefix='EN') hosts.all.file(path=uss_path, state="directory") hosts.all.copy(content=TEST_DATA, dest=uss_path + "/encode1") hosts.all.copy(content=TEST_DATA, dest=uss_path + "/encode2") @@ -571,8 +565,8 @@ def test_uss_encoding_conversion_mvs_ps_to_mvs_pds_member(ansible_zos_module): def test_uss_encoding_conversion_uss_file_to_mvs_vsam(ansible_zos_module): - uss_file = get_unique_uss_file_name() - temp_jcl_path = get_unique_uss_file_name() + uss_file = get_random_file_name(dir=TMP_DIRECTORY, prefix='EN') + temp_jcl_path = get_random_file_name(dir=TMP_DIRECTORY, prefix='EN') try: hosts = ansible_zos_module mvs_vs = get_tmp_ds_name(3) @@ -609,7 +603,7 @@ def test_uss_encoding_conversion_uss_file_to_mvs_vsam(ansible_zos_module): def test_uss_encoding_conversion_mvs_vsam_to_uss_file(ansible_zos_module): - uss_dest_file = get_unique_uss_file_name() + uss_dest_file = get_random_file_name(dir=TMP_DIRECTORY, prefix='EN') try: hosts = ansible_zos_module mlq_size = 3 @@ -707,7 +701,7 @@ def test_uss_encoding_conversion_mvs_ps_to_mvs_vsam(ansible_zos_module): hosts = ansible_zos_module mvs_vs = get_tmp_ds_name(3) mvs_ps = get_tmp_ds_name() - temp_jcl_path = get_unique_uss_file_name() + temp_jcl_path = get_random_file_name(dir=TMP_DIRECTORY, prefix='EN') hosts.all.zos_data_set(name=mvs_ps, state="present", type="seq") hosts.all.file(path=temp_jcl_path, state="directory") hosts.all.shell( @@ -772,7 +766,7 @@ def test_pds_backup(ansible_zos_module): hosts = ansible_zos_module mvs_ps = get_tmp_ds_name() backup_data_set = get_tmp_ds_name() - temp_jcl_path = get_unique_uss_file_name() + temp_jcl_path = get_random_file_name(dir=TMP_DIRECTORY, prefix='EN') hosts.all.zos_data_set(name=backup_data_set, state="absent") hosts.all.zos_data_set(name=mvs_ps, state="absent") hosts.all.zos_data_set(name=mvs_ps, state="present", type="pds") @@ -802,7 +796,7 @@ def test_pds_backup_with_tmp_hlq_option(ansible_zos_module): hosts = ansible_zos_module mvs_ps = get_tmp_ds_name() backup_data_set = get_tmp_ds_name() - temp_jcl_path = get_unique_uss_file_name() + temp_jcl_path = get_random_file_name(dir=TMP_DIRECTORY, prefix='EN') tmphlq = "TMPHLQ" hosts.all.zos_data_set(name=backup_data_set, state="absent") hosts.all.zos_data_set(name=mvs_ps, state="absent") @@ -838,7 +832,7 @@ def test_ps_backup(ansible_zos_module): hosts = ansible_zos_module mvs_ps = get_tmp_ds_name() backup_data_set = get_tmp_ds_name() - temp_jcl_path = get_unique_uss_file_name() + temp_jcl_path = get_random_file_name(dir=TMP_DIRECTORY, prefix='EN') hosts.all.zos_data_set(name=backup_data_set, state="absent") hosts.all.zos_data_set(name=mvs_ps, state="absent") hosts.all.zos_data_set(name=mvs_ps, state="present", type="seq") @@ -868,7 +862,7 @@ def test_vsam_backup(ansible_zos_module): backup_data_set = get_tmp_ds_name() mvs_vs = get_tmp_ds_name() mvs_ps = get_tmp_ds_name() - temp_jcl_path = get_unique_uss_file_name() + temp_jcl_path = get_random_file_name(dir=TMP_DIRECTORY, prefix='EN') hosts.all.zos_data_set(name=backup_data_set, state="absent") hosts.all.zos_data_set(name=mvs_vs, state="absent") hosts.all.zos_data_set(name=mvs_ps, state="absent") @@ -932,7 +926,7 @@ def test_uss_backup_entire_folder_to_default_backup_location(ansible_zos_module) hosts = ansible_zos_module mvs_ps = get_tmp_ds_name() backup_data_set = get_tmp_ds_name() - temp_jcl_path = get_unique_uss_file_name() + temp_jcl_path = get_random_file_name(dir=TMP_DIRECTORY, prefix='EN') hosts.all.zos_data_set(name=backup_data_set, state="absent") # create and fill PDS hosts.all.zos_data_set(name=mvs_ps, state="absent") @@ -999,7 +993,7 @@ def test_uss_backup_entire_folder_to_default_backup_location_compressed( hosts = ansible_zos_module mvs_ps = get_tmp_ds_name() backup_data_set = get_tmp_ds_name() - temp_jcl_path = get_unique_uss_file_name() + temp_jcl_path = get_random_file_name(dir=TMP_DIRECTORY, prefix='EN') hosts.all.zos_data_set(name=backup_data_set, state="absent") # create and fill PDS hosts.all.zos_data_set(name=mvs_ps, state="absent") @@ -1131,7 +1125,7 @@ def test_gdg_encoding_conversion_invalid_gdg(ansible_zos_module): for result in results.contacted.values(): assert result.get("msg") is not None - assert "not supported" in result.get("msg") + assert "Encoding of a whole generation data group is not supported." in result.get("msg") assert result.get("backup_name") is None assert result.get("changed") is False assert result.get("failed") is True @@ -1141,7 +1135,7 @@ def test_gdg_encoding_conversion_invalid_gdg(ansible_zos_module): def test_encoding_conversion_gds_to_uss_file(ansible_zos_module): - uss_dest_file = get_unique_uss_file_name() + uss_dest_file = get_random_file_name(dir=TMP_DIRECTORY, prefix='EN') try: hosts = ansible_zos_module ds_name = get_tmp_ds_name() @@ -1227,7 +1221,7 @@ def test_encoding_conversion_gds_no_dest(ansible_zos_module): def test_encoding_conversion_uss_file_to_gds(ansible_zos_module): - uss_file = get_unique_uss_file_name() + uss_file = get_random_file_name(dir=TMP_DIRECTORY, prefix='EN') try: hosts = ansible_zos_module ds_name = get_tmp_ds_name() diff --git a/tests/functional/modules/test_zos_fetch_func.py b/tests/functional/modules/test_zos_fetch_func.py index 45ca96dd5..5a397399b 100644 --- a/tests/functional/modules/test_zos_fetch_func.py +++ b/tests/functional/modules/test_zos_fetch_func.py @@ -32,10 +32,11 @@ from ibm_zos_core.tests.helpers.volumes import Volume_Handler # pylint: disable-next=import-error from ibm_zos_core.tests.helpers.dataset import get_tmp_ds_name +from ibm_zos_core.tests.helpers.utils import get_random_file_name __metaclass__ = type - +TMP_DIRECTORY = '/tmp/' DUMMY_DATA = """DUMMY DATA == LINE 01 == DUMMY DATA == LINE 02 == DUMMY DATA == LINE 03 == @@ -86,10 +87,6 @@ 00000003A record """ -def get_unique_uss_file_name(): - unique_str = "EN" + datetime.now().strftime("%H:%M:%S").replace("-", "").replace(":", "") + "CODE" + random.choice(string.ascii_letters) - return "/tmp/{0}".format(unique_str) - def extract_member_name(data_set): start = data_set.find("(") @@ -296,12 +293,12 @@ def test_fetch_partitioned_data_set(ansible_zos_module): def test_fetch_vsam_data_set(ansible_zos_module, volumes_on_systems): hosts = ansible_zos_module - temp_jcl_path = get_unique_uss_file_name() + temp_jcl_path = get_random_file_name(dir=TMP_DIRECTORY, prefix='FE') test_vsam = get_tmp_ds_name() dest_path = "/tmp/" + test_vsam volumes = Volume_Handler(volumes_on_systems) volume_1 = volumes.get_available_vol() - uss_file = get_unique_uss_file_name() + uss_file = get_random_file_name(dir=TMP_DIRECTORY, prefix='FE') try: # start by creating the vsam dataset (could use a helper instead? ) hosts.all.file(path=temp_jcl_path, state="directory") @@ -522,7 +519,7 @@ def test_fetch_partitioned_data_set_member_empty(ansible_zos_module): record_format="fba", record_length=25, ) - dest_path = get_unique_uss_file_name() + dest_path = get_random_file_name(dir=TMP_DIRECTORY, prefix='FE') hosts.all.zos_data_set(name=pds_name, type="pds") hosts.all.zos_data_set(name=pds_name + "(MYDATA)", type="member", replace="yes") params = { diff --git a/tests/functional/modules/test_zos_job_submit_func.py b/tests/functional/modules/test_zos_job_submit_func.py index ffd920259..481b30cc2 100644 --- a/tests/functional/modules/test_zos_job_submit_func.py +++ b/tests/functional/modules/test_zos_job_submit_func.py @@ -18,14 +18,13 @@ import tempfile import re import os -import string -import random from shellescape import quote import pytest from datetime import datetime from ibm_zos_core.tests.helpers.volumes import Volume_Handler from ibm_zos_core.tests.helpers.dataset import get_tmp_ds_name +from ibm_zos_core.tests.helpers.utils import get_random_file_name # ############################################################################## # Configure the job card as needed, most common keyword parameters: @@ -35,7 +34,7 @@ # printed in the job's output listing (SYSOUT). # MSGCLASS: assign an output class for your output listing (SYSOUT) # ############################################################################## - +TMP_DIRECTORY = "/tmp/" JCL_FILE_CONTENTS = """//* //****************************************************************************** //* Happy path job that prints hello world, returns RC 0 as is. @@ -401,9 +400,6 @@ // """ -def get_unique_uss_file_name(): - unique_str = "n" + datetime.now().strftime("%H:%M:%S").replace("-", "").replace(":", "") + ".dzp" - return "/tmp/{0}".format(unique_str) @pytest.mark.parametrize( "location", [ @@ -426,7 +422,7 @@ def test_job_submit_pds(ansible_zos_module, location): results = None hosts = ansible_zos_module data_set_name = get_tmp_ds_name() - temp_path = get_unique_uss_file_name() + temp_path = get_random_file_name(dir=TMP_DIRECTORY) hosts.all.file(path=temp_path, state="directory") hosts.all.shell( cmd="echo {0} > {1}/SAMPLE".format(quote(JCL_FILE_CONTENTS), temp_path) @@ -460,7 +456,7 @@ def test_job_submit_pds(ansible_zos_module, location): def test_job_submit_pds_special_characters(ansible_zos_module): try: hosts = ansible_zos_module - temp_path = get_unique_uss_file_name() + temp_path = get_random_file_name(dir=TMP_DIRECTORY) data_set_name_special_chars = get_tmp_ds_name(symbols=True) hosts.all.file(path=temp_path, state="directory") hosts.all.shell( @@ -493,7 +489,7 @@ def test_job_submit_pds_special_characters(ansible_zos_module): def test_job_submit_uss(ansible_zos_module): try: hosts = ansible_zos_module - temp_path = get_unique_uss_file_name() + temp_path = get_random_file_name(dir=TMP_DIRECTORY) hosts.all.file(path=temp_path, state="directory") hosts.all.shell( cmd="echo {0} > {1}/SAMPLE".format(quote(JCL_FILE_CONTENTS), temp_path) @@ -552,7 +548,7 @@ def test_job_submit_pds_volume(ansible_zos_module, volumes_on_systems): try: hosts = ansible_zos_module data_set_name = get_tmp_ds_name() - temp_path = get_unique_uss_file_name() + temp_path = get_random_file_name(dir=TMP_DIRECTORY) volumes = Volume_Handler(volumes_on_systems) volume_1 = volumes.get_available_vol() hosts.all.file(path=temp_path, state="directory") @@ -591,7 +587,7 @@ def test_job_submit_pds_5_sec_job_wait_15(ansible_zos_module): try: hosts = ansible_zos_module data_set_name = get_tmp_ds_name() - temp_path = get_unique_uss_file_name() + temp_path = get_random_file_name(dir=TMP_DIRECTORY) hosts.all.file(path=temp_path, state="directory") wait_time_s = 15 @@ -625,7 +621,7 @@ def test_job_submit_pds_30_sec_job_wait_60(ansible_zos_module): try: hosts = ansible_zos_module data_set_name = get_tmp_ds_name() - temp_path = get_unique_uss_file_name() + temp_path = get_random_file_name(dir=TMP_DIRECTORY) hosts.all.file(path=temp_path, state="directory") wait_time_s = 60 @@ -659,7 +655,7 @@ def test_job_submit_pds_30_sec_job_wait_10_negative(ansible_zos_module): try: hosts = ansible_zos_module data_set_name = get_tmp_ds_name() - temp_path = get_unique_uss_file_name() + temp_path = get_random_file_name(dir=TMP_DIRECTORY) hosts.all.file(path=temp_path, state="directory") wait_time_s = 10 @@ -835,7 +831,7 @@ def test_job_submit_jinja_template(ansible_zos_module, args): def test_job_submit_full_input(ansible_zos_module): try: hosts = ansible_zos_module - temp_path = get_unique_uss_file_name() + temp_path = get_random_file_name(dir=TMP_DIRECTORY) hosts.all.file(path=temp_path, state="directory") hosts.all.shell( cmd=f"echo {quote(JCL_FULL_INPUT)} > {temp_path}/SAMPLE" @@ -988,7 +984,7 @@ def test_job_from_gdg_source(ansible_zos_module, generation): try: # Creating a GDG for the test. source = get_tmp_ds_name() - temp_path = get_unique_uss_file_name() + temp_path = get_random_file_name(dir=TMP_DIRECTORY) gds_name = f"{source}({generation})" hosts.all.zos_data_set(name=source, state="present", type="gdg", limit=3) hosts.all.zos_data_set(name=f"{source}(+1)", state="present", type="seq") @@ -1063,7 +1059,7 @@ def test_inexistent_positive_gds(ansible_zos_module): def test_zoau_bugfix_invalid_utf8_chars(ansible_zos_module): try: hosts = ansible_zos_module - temp_path = get_unique_uss_file_name() + temp_path = get_random_file_name(dir=TMP_DIRECTORY) # Copy C source and compile it. hosts.all.file(path=temp_path, state="directory") hosts.all.shell( diff --git a/tests/helpers/utils.py b/tests/helpers/utils.py new file mode 100644 index 000000000..7ec112f18 --- /dev/null +++ b/tests/helpers/utils.py @@ -0,0 +1,43 @@ +# -*- coding: utf-8 -*- + +# Copyright (c) IBM Corporation 2024 +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +The helpers/utils.py file contains various utility functions that, +while not fitting into a specific standalone helper module, +are used across multiple test suites and can be reused from one util. +""" +from datetime import datetime + +def get_random_file_name(prefix="", suffix="", dir=""): + """ + Returns a randomly generated USS file name with options to have a specific suffix and prefix + in it. By default, returns a 8 numeric character name generated by the current seconds + milliseconds + from the local system. + + It is not guaranteed that the file name won't exist in the remote node, so this function is intended + for naming temporary files, where the responsibility for creating and deleting it is left to the function + caller. + + Parameters + ---------- + prefix : str + Prefix for the temporary file name. + suffix : str + Suffix for the temporary file name, it can be an extension e.g. '.dzp'. + dir : str + Parent temporary folder structure e.g. /tmp/temporary-folder/ + """ + if len(dir) > 0 and not dir.endswith('/'): + dir += '/' + + return dir + prefix + datetime.now().strftime("%S%f") + suffix \ No newline at end of file From 70c8831ba9affc196b3cfbd1b81e4209e681f55d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Wed, 28 Aug 2024 10:06:58 -0600 Subject: [PATCH 459/495] [Enhancement][117]Validate_to_restore_keep_orginial_hlq (#1632) * First iteration * Add test case * Change test cases * Fix * Avoid bad use of execption * Validate case * Restore * Add fragment * Fix Sanity * Update zos_backup_restore.py * Fixes default and new behaviour * Fix ansible lint * Correct test and logic * Modify fragment * Change N for R and K * Change work * Fix typos * Change logic * Changes Ansible-lint * Change behaviour * Delete case * Remove erros * Restore test * Change documentation * Fix tmphlq * Fix restore backpu * Delete remmants * Fix documentation * Update documentation * Backup restore * Fix comment * Fix comments * Update 1632-Validate_to_restore_keep_orginial_hlq.yml Updated description a bit and fixed grammar. --------- Co-authored-by: Demetri <dimatos@gmail.com> --- ...-Validate_to_restore_keep_orginial_hlq.yml | 5 +++ plugins/modules/zos_backup_restore.py | 44 +++++-------------- .../modules/test_zos_backup_restore.py | 35 ++++++++++++++- 3 files changed, 51 insertions(+), 33 deletions(-) create mode 100644 changelogs/fragments/1632-Validate_to_restore_keep_orginial_hlq.yml diff --git a/changelogs/fragments/1632-Validate_to_restore_keep_orginial_hlq.yml b/changelogs/fragments/1632-Validate_to_restore_keep_orginial_hlq.yml new file mode 100644 index 000000000..1bd6b4d6a --- /dev/null +++ b/changelogs/fragments/1632-Validate_to_restore_keep_orginial_hlq.yml @@ -0,0 +1,5 @@ +minor_changes: + - zos_backup_restore - Redefines the default behavior of module option `hlq`. + When option `operation` is set to `restore` and the `hlq` is not provided, + the original high level qualifiers in a backup will be used for a restore. + (https://github.com/ansible-collections/ibm_zos_core/pull/1632). diff --git a/plugins/modules/zos_backup_restore.py b/plugins/modules/zos_backup_restore.py index cd3f4b72a..00d48a62d 100644 --- a/plugins/modules/zos_backup_restore.py +++ b/plugins/modules/zos_backup_restore.py @@ -186,15 +186,13 @@ hlq: description: - Specifies the new HLQ to use for the data sets being restored. - - Defaults to running user's username. + - If no value is provided, the data sets will be restored with their original HLQs. type: str required: false tmp_hlq: description: - - Override the default high level qualifier (HLQ) for temporary and backup - data sets. - - The default HLQ is the Ansible user that executes the module and if - that is not available, then the value of C(TMPHLQ) is used. + - Override the default high level qualifier (HLQ) for temporary data sets. + - If original HLQ is not available, then the value of C(TMPHLQ) is used. required: false type: str notes: @@ -282,8 +280,8 @@ space: 1 space_type: g -- name: Restore data sets from backup stored in the UNIX file /tmp/temp_backup.dzp. - Use z/OS username as new HLQ. +- name: Restore data sets from a backup stored in the UNIX file /tmp/temp_backup.dzp. + Restore the data sets with the original high level qualifiers. zos_backup_restore: operation: restore backup_name: /tmp/temp_backup.dzp @@ -511,7 +509,7 @@ def parse_and_validate_args(params): overwrite=dict(type="bool", default=False), sms_storage_class=dict(type=sms_type, required=False), sms_management_class=dict(type=sms_type, required=False), - hlq=dict(type=hlq_type, default=hlq_default, dependencies=["operation"]), + hlq=dict(type=hlq_type, default=None, dependencies=["operation"]), tmp_hlq=dict(type=hlq_type, required=False), ) @@ -745,28 +743,7 @@ def hlq_type(contents, dependencies): raise ValueError("hlq_type is only valid when operation=restore.") if not match(r"^(?:[A-Z$#@]{1}[A-Z0-9$#@-]{0,7})$", contents, IGNORECASE): raise ValueError("Invalid argument {0} for hlq_type.".format(contents)) - return contents.upper() - - -def hlq_default(contents, dependencies): - """Sets the default HLQ to use if none is provided. - - Parameters - ---------- - contents : str - The HLQ to use. - dependencies : dict - Any dependent arguments. - - Returns - ------- - str - The HLQ to use. - """ - hlq = None - if dependencies.get("operation") == "restore": - hlq = datasets.get_hlq() - return hlq + return contents def sms_type(contents, dependencies): @@ -1018,11 +995,14 @@ def to_dunzip_args(**kwargs): size += kwargs.get("space_type") zoau_args["size"] = size - if kwargs.get("hlq"): + if kwargs.get("hlq") is None: + zoau_args["keep_original_hlq"] = True + else: zoau_args["high_level_qualifier"] = kwargs.get("hlq") if kwargs.get("tmp_hlq"): - zoau_args["tmphlq"] = str(kwargs.get("tmp_hlq")) + zoau_args["high_level_qualifier"] = str(kwargs.get("tmp_hlq")) + zoau_args["keep_original_hlq"] = False return zoau_args diff --git a/tests/functional/modules/test_zos_backup_restore.py b/tests/functional/modules/test_zos_backup_restore.py index e74fc0aa4..52e9040e5 100644 --- a/tests/functional/modules/test_zos_backup_restore.py +++ b/tests/functional/modules/test_zos_backup_restore.py @@ -440,11 +440,14 @@ def test_backup_and_restore_of_data_set_when_restore_location_exists( def test_backup_and_restore_of_multiple_data_sets(ansible_zos_module): + hlqs = [] hosts = ansible_zos_module data_set_name = get_tmp_ds_name() data_set_name2 = get_tmp_ds_name() data_set_include = [data_set_name, data_set_name2] data_set_backup_location = get_tmp_ds_name(1, 1) + new_hlq = get_random_q() + hlqs.append(new_hlq) try: delete_data_set_or_file(hosts, data_set_name) delete_data_set_or_file(hosts, data_set_name2) @@ -467,13 +470,14 @@ def test_backup_and_restore_of_multiple_data_sets(ansible_zos_module): backup_name=data_set_backup_location, overwrite=True, recover=True, + hlq=new_hlq, ) assert_module_did_not_fail(results) finally: delete_data_set_or_file(hosts, data_set_name) delete_data_set_or_file(hosts, data_set_name2) delete_data_set_or_file(hosts, data_set_backup_location) - delete_remnants(hosts) + delete_remnants(hosts, hlqs) def test_backup_and_restore_of_multiple_data_sets_by_hlq(ansible_zos_module): @@ -693,6 +697,35 @@ def test_restore_of_data_set_when_volume_does_not_exist(ansible_zos_module): delete_remnants(hosts, hlqs) +def test_backup_and_restore_a_data_set_with_same_hlq(ansible_zos_module): + hosts = ansible_zos_module + data_set_name = get_tmp_ds_name() + data_set_backup_location = get_tmp_ds_name() + try: + delete_data_set_or_file(hosts, data_set_name) + delete_data_set_or_file(hosts, data_set_backup_location) + hosts.all.shell(cmd="""decho "HELLO WORLD" {0}""".format(data_set_name)) + results = hosts.all.zos_backup_restore( + operation="backup", + data_sets=dict(include=data_set_name), + backup_name=data_set_backup_location, + ) + delete_data_set_or_file(hosts, data_set_name) + assert_module_did_not_fail(results) + assert_data_set_or_file_exists(hosts, data_set_backup_location) + results = hosts.all.zos_backup_restore( + operation="restore", + backup_name=data_set_backup_location, + ) + assert_module_did_not_fail(results) + # Check the HLQ in the response + assert_data_set_or_file_exists(hosts, data_set_name) + finally: + delete_data_set_or_file(hosts, data_set_name) + delete_data_set_or_file(hosts, data_set_backup_location) + delete_remnants(hosts) + + # def test_backup_and_restore_of_data_set_from_volume_to_new_volume(ansible_zos_module): # hosts = ansible_zos_module # try: From 19fb4c4effa2e95af57959dcad3bea29d813fbbf Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Fri, 30 Aug 2024 17:35:26 -0600 Subject: [PATCH 460/495] [Enabler][1591]zos_job_query_portability (#1677) * Fix harcoded job_id * Add fragment * Fix import --- .../1677-zos_job_query_portability.yaml | 3 ++ .../modules/test_zos_job_query_func.py | 50 ++++++++++++++----- 2 files changed, 41 insertions(+), 12 deletions(-) create mode 100644 changelogs/fragments/1677-zos_job_query_portability.yaml diff --git a/changelogs/fragments/1677-zos_job_query_portability.yaml b/changelogs/fragments/1677-zos_job_query_portability.yaml new file mode 100644 index 000000000..799721b28 --- /dev/null +++ b/changelogs/fragments/1677-zos_job_query_portability.yaml @@ -0,0 +1,3 @@ +trivial: + - zos_copy - Remove the use of hard coded job id for test cases. + (https://github.com/ansible-collections/ibm_zos_core/pull/1667). \ No newline at end of file diff --git a/tests/functional/modules/test_zos_job_query_func.py b/tests/functional/modules/test_zos_job_query_func.py index 525d61664..6999737e4 100644 --- a/tests/functional/modules/test_zos_job_query_func.py +++ b/tests/functional/modules/test_zos_job_query_func.py @@ -15,7 +15,6 @@ __metaclass__ = type -import tempfile import ansible.constants import ansible.errors import ansible.utils @@ -23,6 +22,26 @@ from shellescape import quote from ibm_zos_core.tests.helpers.dataset import get_tmp_ds_name +from ibm_zos_core.tests.helpers.utils import get_random_file_name + +def get_job_id(hosts, len_id=9): + """ + Returns a job id that is on the system by searching all jobs on system. + + Parameters + ---------- + hosts : obj + Connection to host machine + len_id : int + Size for search and job_id of specific or lower size + """ + results = hosts.all.shell(cmd="jls") + for result in results.contacted.values(): + all_jobs = result.get("stdout_lines") + for job_n_info in all_jobs: + job = job_n_info.split() + if len(job[2]) <= len_id: + return job[2] # Make sure job list * returns something def test_zos_job_query_func(ansible_zos_module): @@ -44,22 +63,23 @@ def test_zos_job_query_func(ansible_zos_module): // """ -TEMP_PATH = "/tmp/jcl" +TEMP_PATH = "/tmp/" # test to show multi wildcard in Job_id query won't crash the search def test_zos_job_id_query_multi_wildcards_func(ansible_zos_module): try: hosts = ansible_zos_module jdata_set_name = get_tmp_ds_name() - hosts.all.file(path=TEMP_PATH, state="directory") + temp_path = get_random_file_name(dir=TEMP_PATH) + hosts.all.file(path=temp_path, state="directory") hosts.all.shell( - cmd=f"echo {quote(JCLQ_FILE_CONTENTS)} > {TEMP_PATH}/SAMPLE" + cmd=f"echo {quote(JCLQ_FILE_CONTENTS)} > {temp_path}/SAMPLE" ) hosts.all.zos_data_set( name=jdata_set_name, state="present", type="pds", replace=True ) hosts.all.shell( - cmd=f"cp {TEMP_PATH}/SAMPLE \"//'{jdata_set_name}(SAMPLE)'\"" + cmd=f"cp {temp_path}/SAMPLE \"//'{jdata_set_name}(SAMPLE)'\"" ) results = hosts.all.zos_job_submit( src=f"{jdata_set_name}(SAMPLE)", location="data_set", wait_time_s=10 @@ -75,7 +95,7 @@ def test_zos_job_id_query_multi_wildcards_func(ansible_zos_module): assert qresult.get("jobs") is not None finally: - hosts.all.file(path=TEMP_PATH, state="absent") + hosts.all.file(path=temp_path, state="absent") hosts.all.zos_data_set(name=jdata_set_name, state="absent") @@ -84,15 +104,16 @@ def test_zos_job_name_query_multi_wildcards_func(ansible_zos_module): try: hosts = ansible_zos_module ndata_set_name = get_tmp_ds_name() - hosts.all.file(path=TEMP_PATH, state="directory") + temp_path = get_random_file_name(dir=TEMP_PATH) + hosts.all.file(path=temp_path, state="directory") hosts.all.shell( - cmd=f"echo {quote(JCLQ_FILE_CONTENTS)} > {TEMP_PATH}/SAMPLE" + cmd=f"echo {quote(JCLQ_FILE_CONTENTS)} > {temp_path}/SAMPLE" ) hosts.all.zos_data_set( name=ndata_set_name, state="present", type="pds", replace=True ) hosts.all.shell( - cmd=f"cp {TEMP_PATH}/SAMPLE \"//'{ndata_set_name}(SAMPLE)'\"" + cmd=f"cp {temp_path}/SAMPLE \"//'{ndata_set_name}(SAMPLE)'\"" ) results = hosts.all.zos_job_submit( src=f"{ndata_set_name}(SAMPLE)", location="data_set", wait_time_s=10 @@ -107,19 +128,24 @@ def test_zos_job_name_query_multi_wildcards_func(ansible_zos_module): assert qresult.get("jobs") is not None finally: - hosts.all.file(path=TEMP_PATH, state="absent") + hosts.all.file(path=temp_path, state="absent") hosts.all.zos_data_set(name=ndata_set_name, state="absent") def test_zos_job_id_query_short_ids_func(ansible_zos_module): hosts = ansible_zos_module - qresults = hosts.all.zos_job_query(job_id="STC00002") + len_id = 9 + job_id = get_job_id(hosts, len_id) + qresults = hosts.all.zos_job_query(job_id=job_id) for qresult in qresults.contacted.values(): assert qresult.get("jobs") is not None def test_zos_job_id_query_short_ids_with_wilcard_func(ansible_zos_module): hosts = ansible_zos_module - qresults = hosts.all.zos_job_query(job_id="STC00*") + len_id = 9 + job_id = get_job_id(hosts, len_id) + job_id = job_id[0:4] + '*' + qresults = hosts.all.zos_job_query(job_id=job_id) for qresult in qresults.contacted.values(): assert qresult.get("jobs") is not None From 143756c129518f3794a69ab7978fe4abb72e1e8d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Wed, 4 Sep 2024 10:37:34 -0600 Subject: [PATCH 461/495] [Bugfix][531]Validate_parameter_recover_to_tolerate_enqueue (#1643) * Add enque * Fix pass argument * Add example and changelog * Delete tests/test_config.yml * Change name of fragment * Validate force is in * Delete changelogs/fragments/1643-Add_parameter_to_tolerate_enque.yml * Change names * Fix test * Update 1643-Validate_parameter_recover_to_tolerate_enqueue.yml * Change fragment * Update 1643-Validate_parameter_recover_to_tolerate_enqueue.yml * [zos_copy] Enable zos_copy test cases portability (#1664) * Updated test cases to allow portability * removed KSDS hardcoded data set * Continued removing hardcoded data sets * Fixed merge * Updated uss file names to use temporary names * Added utils * Fixed test case * Fixed test case * Fixed test case by creating folder * Updated other test cases to use module_utils/get_random_file_name instead of implementing its own function * Added changelog * Updated randome file names * [Enhancement][117]Validate_to_restore_keep_orginial_hlq (#1632) * First iteration * Add test case * Change test cases * Fix * Avoid bad use of execption * Validate case * Restore * Add fragment * Fix Sanity * Update zos_backup_restore.py * Fixes default and new behaviour * Fix ansible lint * Correct test and logic * Modify fragment * Change N for R and K * Change work * Fix typos * Change logic * Changes Ansible-lint * Change behaviour * Delete case * Remove erros * Restore test * Change documentation * Fix tmphlq * Fix restore backpu * Delete remmants * Fix documentation * Update documentation * Backup restore * Fix comment * Fix comments * Update 1632-Validate_to_restore_keep_orginial_hlq.yml Updated description a bit and fixed grammar. --------- Co-authored-by: Demetri <dimatos@gmail.com> * Change test to be complete * Changes * Revert "[Enhancement][117]Validate_to_restore_keep_orginial_hlq (#1632)" This reverts commit 1685812f2579d012578cce361edfcb5063ef2231. * Fix comment * Change fragment --------- Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> Co-authored-by: Demetri <dimatos@gmail.com> --- ..._parameter_recover_to_tolerate_enqueue.yml | 5 ++ plugins/modules/zos_backup_restore.py | 2 +- .../modules/test_zos_backup_restore.py | 61 +++++++++++++++++++ 3 files changed, 67 insertions(+), 1 deletion(-) create mode 100644 changelogs/fragments/1643-Validate_parameter_recover_to_tolerate_enqueue.yml diff --git a/changelogs/fragments/1643-Validate_parameter_recover_to_tolerate_enqueue.yml b/changelogs/fragments/1643-Validate_parameter_recover_to_tolerate_enqueue.yml new file mode 100644 index 000000000..b7eb789f0 --- /dev/null +++ b/changelogs/fragments/1643-Validate_parameter_recover_to_tolerate_enqueue.yml @@ -0,0 +1,5 @@ +bugfixes: + - zos_backup_restore - When a recoverable error was encountered and ``recover=True``, + the module would ignore the option and fail. Fix now does not fail when a recoverable + error is raised when ``recover=True``. + (https://github.com/ansible-collections/ibm_zos_core/pull/1643). diff --git a/plugins/modules/zos_backup_restore.py b/plugins/modules/zos_backup_restore.py index 00d48a62d..cd2e0b00d 100644 --- a/plugins/modules/zos_backup_restore.py +++ b/plugins/modules/zos_backup_restore.py @@ -124,7 +124,7 @@ required: True recover: description: - - Specifies if potentially recoverable errors should be ignored. + - When I(recover=true) and I(operation=backup) then potentially recoverable errors will be ignored. type: bool default: False overwrite: diff --git a/tests/functional/modules/test_zos_backup_restore.py b/tests/functional/modules/test_zos_backup_restore.py index 52e9040e5..32b721cfb 100644 --- a/tests/functional/modules/test_zos_backup_restore.py +++ b/tests/functional/modules/test_zos_backup_restore.py @@ -29,6 +29,31 @@ TMP_DIRECTORY = "/tmp/" +c_pgm="""#include <stdio.h> +#include <stdlib.h> +#include <string.h> +int main(int argc, char** argv) +{ + char dsname[ strlen(argv[1]) + 4]; + sprintf(dsname, \\\"//'%s'\\\", argv[1]); + FILE* member; + member = fopen(dsname, \\\"rb,type=record\\\"); + sleep(300); + fclose(member); + return 0; +} +""" + +call_c_jcl="""//PDSELOCK JOB MSGCLASS=A,MSGLEVEL=(1,1),NOTIFY=&SYSUID,REGION=0M +//LOCKMEM EXEC PGM=BPXBATCH +//STDPARM DD * +SH {1}/pdse-lock '{0}' +//STDIN DD DUMMY +//STDOUT DD SYSOUT=* +//STDERR DD SYSOUT=* +//""" + + # ---------------------------------------------------------------------------- # # Helper functions # # ---------------------------------------------------------------------------- # @@ -885,3 +910,39 @@ def test_backup_into_gds(ansible_zos_module, dstype): finally: hosts.all.shell(cmd=f"drm ANSIBLE.* ; drm OMVSADM.*") + +def test_backup_tolerate_enqueue(ansible_zos_module): + hosts = ansible_zos_module + default_data_set_name_1 = get_tmp_ds_name() + default_data_set_name_2 = get_tmp_ds_name() + temp_file = get_random_file_name(dir=TMP_DIRECTORY) + data_sets_hlq = "ANSIBLE.**" + data_sets_backup_location = get_tmp_ds_name() + try: + hosts.all.shell(cmd="dtouch {0}".format(default_data_set_name_1)) + hosts.all.shell(cmd="dtouch {0}".format(default_data_set_name_2)) + hosts.all.shell(cmd="""decho "HELLO WORLD" "{0}" """.format(default_data_set_name_1)) + hosts.all.shell(cmd="""decho "HELLO WORLD" "{0}" """.format(default_data_set_name_2)) + hosts.all.file(path=temp_file, state="directory") + hosts.all.shell(cmd=f"echo \"{c_pgm}\" > {temp_file}/pdse-lock.c") + hosts.all.shell( + cmd=f"echo \"{call_c_jcl.format(default_data_set_name_1, temp_file)}\""+ " > {0}/call_c_pgm.jcl".format(temp_file) + ) + hosts.all.shell(cmd="xlc -o pdse-lock pdse-lock.c", chdir=temp_file) + hosts.all.shell(cmd="submit call_c_pgm.jcl", chdir=temp_file) + time.sleep(5) + results = hosts.all.zos_backup_restore( + operation="backup", + recover=True, + data_sets=dict(include=data_sets_hlq), + backup_name=data_sets_backup_location, + ) + assert_module_did_not_fail(results) + assert_data_set_or_file_exists(hosts, data_sets_backup_location) + finally: + hosts.all.shell(cmd="rm -rf " + temp_file) + ps_list_res = hosts.all.shell(cmd="ps -e | grep -i 'pdse-lock'") + pid = list(ps_list_res.contacted.values())[0].get('stdout').strip().split(' ')[0] + hosts.all.shell(cmd=f"kill 9 {pid.strip()}") + hosts.all.shell(cmd='rm -r {0}'.format(temp_file)) + hosts.all.shell(cmd=f"drm ANSIBLE.* ") \ No newline at end of file From cf0c0778c55b8789261a1a71a1b23dd7f562a9c0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Wed, 4 Sep 2024 11:17:31 -0600 Subject: [PATCH 462/495] [Enabler][1596]portability_zos_blockinfile (#1676) * First iteration * Fix blockinfile test * Fix remove * Add fragment * Remove remaining * Fix change * Fix blockinfile * Remove all tmp occurrences * Fix tmp * Fix tmp --------- Co-authored-by: Ivan Moreno <iamorenosoto@gmail.com> --- .../1676-portability_zos_blockinfile.yml | 3 + .../modules/test_zos_blockinfile_func.py | 281 ++++++++---------- 2 files changed, 119 insertions(+), 165 deletions(-) create mode 100644 changelogs/fragments/1676-portability_zos_blockinfile.yml diff --git a/changelogs/fragments/1676-portability_zos_blockinfile.yml b/changelogs/fragments/1676-portability_zos_blockinfile.yml new file mode 100644 index 000000000..bb0ee4b9c --- /dev/null +++ b/changelogs/fragments/1676-portability_zos_blockinfile.yml @@ -0,0 +1,3 @@ +trivial: + - zos_blockinfile- Remove the use of hard coded dataset and files names. + (https://github.com/ansible-collections/ibm_zos_core/pull/1676). \ No newline at end of file diff --git a/tests/functional/modules/test_zos_blockinfile_func.py b/tests/functional/modules/test_zos_blockinfile_func.py index 84d0850da..fd03d17f6 100644 --- a/tests/functional/modules/test_zos_blockinfile_func.py +++ b/tests/functional/modules/test_zos_blockinfile_func.py @@ -14,14 +14,16 @@ from __future__ import absolute_import, division, print_function import time import re -import inspect from shellescape import quote import pytest -from ibm_zos_core.tests.helpers.dataset import get_tmp_ds_name +from ibm_zos_core.tests.helpers.dataset import ( + get_tmp_ds_name, + get_random_q, +) +from ibm_zos_core.tests.helpers.utils import get_random_file_name __metaclass__ = type -TEST_FOLDER_BLOCKINFILE = "/tmp/ansible-core-tests/zos_blockinfile/" c_pgm="""#include <stdio.h> #include <stdlib.h> @@ -41,7 +43,7 @@ call_c_jcl="""//PDSELOCK JOB MSGCLASS=A,MSGLEVEL=(1,1),NOTIFY=&SYSUID,REGION=0M //LOCKMEM EXEC PGM=BPXBATCH //STDPARM DD * -SH /tmp/disp_shr/pdse-lock '{0}({1})' +SH {2}pdse-lock '{0}({1})' //STDIN DD DUMMY //STDOUT DD SYSOUT=* //STDERR DD SYSOUT=* @@ -434,18 +436,18 @@ # not supported data set types NS_DS_TYPE = ['esds', 'rrds', 'lds'] -USS_BACKUP_FILE = "/tmp/backup.tmp" -BACKUP_OPTIONS = [None, "BLOCKIF.TEST.BACKUP", "BLOCKIF.TEST.BACKUP(BACKUP)"] +TMP_DIRECTORY = "/tmp/" + +BACKUP_OPTIONS = [None, "SEQ", "MEM"] def set_uss_environment(ansible_zos_module, content, file): hosts = ansible_zos_module - hosts.all.shell(cmd=f"mkdir -p {TEST_FOLDER_BLOCKINFILE}") hosts.all.file(path=file, state="touch") hosts.all.shell(cmd=f"echo \"{content}\" > {file}") -def remove_uss_environment(ansible_zos_module): +def remove_uss_environment(ansible_zos_module, file): hosts = ansible_zos_module - hosts.all.shell(cmd="rm -rf" + TEST_FOLDER_BLOCKINFILE) + hosts.all.shell(cmd="rm " + file) def set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content): hosts = ansible_zos_module @@ -479,7 +481,7 @@ def test_uss_block_insertafter_regex_defaultmarker(ansible_zos_module): "block":"ZOAU_ROOT=/mvsutil-develop_dsed\nZOAU_HOME=\\$ZOAU_ROOT\nZOAU_DIR=\\$ZOAU_ROOT", "state":"present" } - full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] + full_path = get_random_file_name(dir=TMP_DIRECTORY) content = TEST_CONTENT try: set_uss_environment(ansible_zos_module, content, full_path) @@ -491,7 +493,7 @@ def test_uss_block_insertafter_regex_defaultmarker(ansible_zos_module): for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_INSERTAFTER_REGEX finally: - remove_uss_environment(ansible_zos_module) + remove_uss_environment(ansible_zos_module, full_path) @pytest.mark.uss @@ -502,7 +504,7 @@ def test_uss_block_insertbefore_regex_defaultmarker(ansible_zos_module): "block":"unset ZOAU_ROOT\nunset ZOAU_HOME\nunset ZOAU_DIR", "state":"present" } - full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] + full_path = get_random_file_name(dir=TMP_DIRECTORY) content = TEST_CONTENT try: set_uss_environment(ansible_zos_module, content, full_path) @@ -514,7 +516,7 @@ def test_uss_block_insertbefore_regex_defaultmarker(ansible_zos_module): for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_INSERTBEFORE_REGEX finally: - remove_uss_environment(ansible_zos_module) + remove_uss_environment(ansible_zos_module, full_path) @pytest.mark.uss @@ -525,7 +527,7 @@ def test_uss_block_insertafter_eof_defaultmarker(ansible_zos_module): "block":"export ZOAU_ROOT\nexport ZOAU_HOME\nexport ZOAU_DIR", "state":"present" } - full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] + full_path = get_random_file_name(dir=TMP_DIRECTORY) content = TEST_CONTENT try: set_uss_environment(ansible_zos_module, content, full_path) @@ -537,7 +539,7 @@ def test_uss_block_insertafter_eof_defaultmarker(ansible_zos_module): for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_INSERTAFTER_EOF finally: - remove_uss_environment(ansible_zos_module) + remove_uss_environment(ansible_zos_module, full_path) @pytest.mark.uss @@ -548,7 +550,7 @@ def test_uss_block_insertbefore_bof_defaultmarker(ansible_zos_module): "block":"# this is file is for setting env vars", "state":"present" } - full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] + full_path = get_random_file_name(dir=TMP_DIRECTORY) content = TEST_CONTENT try: set_uss_environment(ansible_zos_module, content, full_path) @@ -560,7 +562,7 @@ def test_uss_block_insertbefore_bof_defaultmarker(ansible_zos_module): for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_INSERTBEFORE_BOF finally: - remove_uss_environment(ansible_zos_module) + remove_uss_environment(ansible_zos_module, full_path) @pytest.mark.uss @@ -575,7 +577,7 @@ def test_uss_block_insertafter_regex_custommarker(ansible_zos_module): params["marker"] = '# {mark} IBM MANAGED BLOCK' params["marker_begin"] = 'OPEN' params["marker_end"] = 'CLOSE' - full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] + full_path = get_random_file_name(dir=TMP_DIRECTORY) content = TEST_CONTENT try: set_uss_environment(ansible_zos_module, content, full_path) @@ -587,7 +589,7 @@ def test_uss_block_insertafter_regex_custommarker(ansible_zos_module): for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_INSERTAFTER_REGEX_CUSTOM finally: - remove_uss_environment(ansible_zos_module) + remove_uss_environment(ansible_zos_module, full_path) @@ -603,7 +605,7 @@ def test_uss_block_insertbefore_regex_custommarker(ansible_zos_module): params["marker"] = '# {mark} IBM MANAGED BLOCK' params["marker_begin"] = 'OPEN' params["marker_end"] = 'CLOSE' - full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] + full_path = get_random_file_name(dir=TMP_DIRECTORY) content = TEST_CONTENT try: set_uss_environment(ansible_zos_module, content, full_path) @@ -615,7 +617,7 @@ def test_uss_block_insertbefore_regex_custommarker(ansible_zos_module): for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_INSERTBEFORE_REGEX_CUSTOM finally: - remove_uss_environment(ansible_zos_module) + remove_uss_environment(ansible_zos_module, full_path) @pytest.mark.uss @@ -630,7 +632,7 @@ def test_uss_block_insertafter_eof_custommarker(ansible_zos_module): params["marker"] = '# {mark} IBM MANAGED BLOCK' params["marker_begin"] = 'OPEN' params["marker_end"] = 'CLOSE' - full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] + full_path = get_random_file_name(dir=TMP_DIRECTORY) content = TEST_CONTENT try: set_uss_environment(ansible_zos_module, content, full_path) @@ -642,7 +644,7 @@ def test_uss_block_insertafter_eof_custommarker(ansible_zos_module): for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_INSERTAFTER_EOF_CUSTOM finally: - remove_uss_environment(ansible_zos_module) + remove_uss_environment(ansible_zos_module, full_path) @pytest.mark.uss @@ -657,7 +659,7 @@ def test_uss_block_insertbefore_bof_custommarker(ansible_zos_module): params["marker"] = '# {mark} IBM MANAGED BLOCK' params["marker_begin"] = 'OPEN' params["marker_end"] = 'CLOSE' - full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] + full_path = get_random_file_name(dir=TMP_DIRECTORY) content = TEST_CONTENT try: set_uss_environment(ansible_zos_module, content, full_path) @@ -669,7 +671,7 @@ def test_uss_block_insertbefore_bof_custommarker(ansible_zos_module): for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_INSERTBEFORE_BOF_CUSTOM finally: - remove_uss_environment(ansible_zos_module) + remove_uss_environment(ansible_zos_module, full_path) @pytest.mark.uss @@ -679,7 +681,7 @@ def test_uss_block_absent_defaultmarker(ansible_zos_module): "block":"", "state":"absent" } - full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] + full_path = get_random_file_name(dir=TMP_DIRECTORY) content = TEST_CONTENT_DEFAULTMARKER try: set_uss_environment(ansible_zos_module, content, full_path) @@ -691,7 +693,7 @@ def test_uss_block_absent_defaultmarker(ansible_zos_module): for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_ABSENT finally: - remove_uss_environment(ansible_zos_module) + remove_uss_environment(ansible_zos_module, full_path) @pytest.mark.uss @@ -704,7 +706,7 @@ def test_uss_block_absent_custommarker(ansible_zos_module): params["marker"] = '# {mark} IBM MANAGED BLOCK' params["marker_begin"] = 'OPEN' params["marker_end"] = 'CLOSE' - full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] + full_path = get_random_file_name(dir=TMP_DIRECTORY) content = TEST_CONTENT_CUSTOMMARKER try: set_uss_environment(ansible_zos_module, content, full_path) @@ -716,7 +718,7 @@ def test_uss_block_absent_custommarker(ansible_zos_module): for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_ABSENT finally: - remove_uss_environment(ansible_zos_module) + remove_uss_environment(ansible_zos_module, full_path) @pytest.mark.uss @@ -727,7 +729,7 @@ def test_uss_block_replace_insertafter_regex_defaultmarker(ansible_zos_module): "block":"ZOAU_ROOT=/mvsutil-develop_dsed\nZOAU_HOME=\\$ZOAU_ROOT\nZOAU_DIR=\\$ZOAU_ROOT", "state":"present" } - full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] + full_path = get_random_file_name(dir=TMP_DIRECTORY) content = TEST_CONTENT_DEFAULTMARKER try: set_uss_environment(ansible_zos_module, content, full_path) @@ -739,7 +741,7 @@ def test_uss_block_replace_insertafter_regex_defaultmarker(ansible_zos_module): for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_REPLACE_INSERTAFTER finally: - remove_uss_environment(ansible_zos_module) + remove_uss_environment(ansible_zos_module, full_path) @pytest.mark.uss @@ -750,7 +752,7 @@ def test_uss_block_replace_insertbefore_regex_defaultmarker(ansible_zos_module): "block":"unset ZOAU_ROOT\nunset ZOAU_HOME\nunset ZOAU_DIR", "state":"present" } - full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] + full_path = get_random_file_name(dir=TMP_DIRECTORY) content = TEST_CONTENT_DEFAULTMARKER try: set_uss_environment(ansible_zos_module, content, full_path) @@ -762,7 +764,7 @@ def test_uss_block_replace_insertbefore_regex_defaultmarker(ansible_zos_module): for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_REPLACE_INSERTBEFORE finally: - remove_uss_environment(ansible_zos_module) + remove_uss_environment(ansible_zos_module, full_path) @pytest.mark.uss @@ -773,7 +775,7 @@ def test_uss_block_replace_insertafter_eof_defaultmarker(ansible_zos_module): "block":"export ZOAU_ROOT\nexport ZOAU_HOME\nexport ZOAU_DIR", "state":"present" } - full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] + full_path = get_random_file_name(dir=TMP_DIRECTORY) content = TEST_CONTENT_DEFAULTMARKER try: set_uss_environment(ansible_zos_module, content, full_path) @@ -785,7 +787,7 @@ def test_uss_block_replace_insertafter_eof_defaultmarker(ansible_zos_module): for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_REPLACE_EOF_CUSTOM finally: - remove_uss_environment(ansible_zos_module) + remove_uss_environment(ansible_zos_module, full_path) @pytest.mark.uss @@ -796,7 +798,7 @@ def test_uss_block_replace_insertbefore_bof_defaultmarker(ansible_zos_module): "block":"# this is file is for setting env vars", "state":"present" } - full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] + full_path = get_random_file_name(dir=TMP_DIRECTORY) content = TEST_CONTENT_DEFAULTMARKER try: set_uss_environment(ansible_zos_module, content, full_path) @@ -808,7 +810,7 @@ def test_uss_block_replace_insertbefore_bof_defaultmarker(ansible_zos_module): for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_REPLACE_BOF_CUSTOM finally: - remove_uss_environment(ansible_zos_module) + remove_uss_environment(ansible_zos_module, full_path) @pytest.mark.uss @@ -822,7 +824,7 @@ def test_uss_block_replace_insertafter_regex_custommarker(ansible_zos_module): params["marker"] = '# {mark} IBM MANAGED BLOCK' params["marker_begin"] = 'OPEN' params["marker_end"] = 'CLOSE' - full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] + full_path = get_random_file_name(dir=TMP_DIRECTORY) content = TEST_CONTENT try: set_uss_environment(ansible_zos_module, content, full_path) @@ -834,7 +836,7 @@ def test_uss_block_replace_insertafter_regex_custommarker(ansible_zos_module): for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_REPLACE_EOF_REGEX_CUSTOM finally: - remove_uss_environment(ansible_zos_module) + remove_uss_environment(ansible_zos_module, full_path) @pytest.mark.uss @@ -848,7 +850,7 @@ def test_uss_block_replace_insertbefore_regex_custommarker(ansible_zos_module): params["marker"] = '# {mark} IBM MANAGED BLOCK' params["marker_begin"] = 'OPEN' params["marker_end"] = 'CLOSE' - full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] + full_path = get_random_file_name(dir=TMP_DIRECTORY) content = TEST_CONTENT_CUSTOMMARKER try: set_uss_environment(ansible_zos_module, content, full_path) @@ -860,7 +862,7 @@ def test_uss_block_replace_insertbefore_regex_custommarker(ansible_zos_module): for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_REPLACE_BOF_REGEX_CUSTOM finally: - remove_uss_environment(ansible_zos_module) + remove_uss_environment(ansible_zos_module, full_path) @pytest.mark.uss @@ -874,7 +876,7 @@ def test_uss_block_replace_insertafter_eof_custommarker(ansible_zos_module): params["marker"] = '# {mark} IBM MANAGED BLOCK' params["marker_begin"] = 'OPEN' params["marker_end"] = 'CLOSE' - full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] + full_path = get_random_file_name(dir=TMP_DIRECTORY) content = TEST_CONTENT_CUSTOMMARKER try: set_uss_environment(ansible_zos_module, content, full_path) @@ -886,7 +888,7 @@ def test_uss_block_replace_insertafter_eof_custommarker(ansible_zos_module): for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_INSERTAFTER_EOF_CUSTOM finally: - remove_uss_environment(ansible_zos_module) + remove_uss_environment(ansible_zos_module, full_path) @pytest.mark.uss @@ -900,7 +902,7 @@ def test_uss_block_replace_insertbefore_bof_custommarker(ansible_zos_module): params["marker"] = '# {mark} IBM MANAGED BLOCK' params["marker_begin"] = 'OPEN' params["marker_end"] = 'CLOSE' - full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] + full_path = get_random_file_name(dir=TMP_DIRECTORY) content = TEST_CONTENT_CUSTOMMARKER try: set_uss_environment(ansible_zos_module, content, full_path) @@ -912,7 +914,7 @@ def test_uss_block_replace_insertbefore_bof_custommarker(ansible_zos_module): for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_INSERTBEFORE_BOF_CUSTOM finally: - remove_uss_environment(ansible_zos_module) + remove_uss_environment(ansible_zos_module, full_path) @pytest.mark.uss @@ -924,7 +926,7 @@ def test_uss_block_insert_with_indentation_level_specified(ansible_zos_module): "state":"present", "indentation":16 } - full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] + full_path = get_random_file_name(dir=TMP_DIRECTORY) content = TEST_CONTENT try: set_uss_environment(ansible_zos_module, content, full_path) @@ -936,7 +938,7 @@ def test_uss_block_insert_with_indentation_level_specified(ansible_zos_module): for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_INSERT_WITH_INDENTATION finally: - remove_uss_environment(ansible_zos_module) + remove_uss_environment(ansible_zos_module, full_path) # Test case base on bug of dataset.blockifile # GH Issue #1258 @@ -944,7 +946,7 @@ def test_uss_block_insert_with_indentation_level_specified(ansible_zos_module): def test_uss_block_insert_with_doublequotes(ansible_zos_module): hosts = ansible_zos_module params = dict(insertafter="sleep 30;", block='cat "//OMVSADMI.CAT"\ncat "//OMVSADM.COPYMEM.TESTS" > test.txt', marker="// {mark} ANSIBLE MANAGED BLOCK", state="present") - full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] + full_path = get_random_file_name(dir=TMP_DIRECTORY) content = TEST_CONTENT_DOUBLEQUOTES try: set_uss_environment(ansible_zos_module, content, full_path) @@ -957,7 +959,7 @@ def test_uss_block_insert_with_doublequotes(ansible_zos_module): for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_DOUBLE_QUOTES finally: - remove_uss_environment(ansible_zos_module) + remove_uss_environment(ansible_zos_module, full_path) @pytest.mark.uss @@ -969,7 +971,7 @@ def test_uss_block_insertafter_eof_with_backup(ansible_zos_module): "state":"present", "backup":True } - full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] + full_path = get_random_file_name(dir=TMP_DIRECTORY) content = TEST_CONTENT try: set_uss_environment(ansible_zos_module, content, full_path) @@ -984,20 +986,21 @@ def test_uss_block_insertafter_eof_with_backup(ansible_zos_module): assert result.get("stdout") == EXPECTED_INSERTAFTER_EOF finally: ansible_zos_module.all.file(path=backup_name, state="absent") - remove_uss_environment(ansible_zos_module) + remove_uss_environment(ansible_zos_module, full_path) @pytest.mark.uss def test_uss_block_insertafter_eof_with_backup_name(ansible_zos_module): hosts = ansible_zos_module + uss_backup_file = get_random_file_name(dir=TMP_DIRECTORY, suffix=".tmp") params = { "insertafter":"EOF", "block":"export ZOAU_ROOT\nexport ZOAU_HOME\nexport ZOAU_DIR", "state":"present", "backup":True, - "backup_name":USS_BACKUP_FILE + "backup_name":uss_backup_file } - full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] + full_path = get_random_file_name(dir=TMP_DIRECTORY) content = TEST_CONTENT try: set_uss_environment(ansible_zos_module, content, full_path) @@ -1005,8 +1008,8 @@ def test_uss_block_insertafter_eof_with_backup_name(ansible_zos_module): results = hosts.all.zos_blockinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - assert result.get("backup_name") == USS_BACKUP_FILE - cmd_str = f"cat {USS_BACKUP_FILE}" + assert result.get("backup_name") == uss_backup_file + cmd_str = f"cat {uss_backup_file}" results = ansible_zos_module.all.shell(cmd=cmd_str) for result in results.contacted.values(): assert result.get("stdout") == TEST_CONTENT @@ -1014,8 +1017,8 @@ def test_uss_block_insertafter_eof_with_backup_name(ansible_zos_module): for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_INSERTAFTER_EOF finally: - ansible_zos_module.all.file(path=USS_BACKUP_FILE, state="absent") - remove_uss_environment(ansible_zos_module) + ansible_zos_module.all.file(path=uss_backup_file, state="absent") + remove_uss_environment(ansible_zos_module, full_path) ######################### @@ -1034,7 +1037,7 @@ def test_ds_block_insertafter_regex(ansible_zos_module, dstype): "state":"present" } ds_name = get_tmp_ds_name() - temp_file = "/tmp/" + ds_name + temp_file = get_random_file_name(dir=TMP_DIRECTORY) content = TEST_CONTENT try: ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) @@ -1060,7 +1063,7 @@ def test_ds_block_insertbefore_regex(ansible_zos_module, dstype): "state":"present" } ds_name = get_tmp_ds_name() - temp_file = "/tmp/" + ds_name + temp_file = get_random_file_name(dir=TMP_DIRECTORY) content = TEST_CONTENT try: ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) @@ -1086,7 +1089,7 @@ def test_ds_block_insertafter_eof(ansible_zos_module, dstype): "state":"present" } ds_name = get_tmp_ds_name() - temp_file = "/tmp/" + ds_name + temp_file = get_random_file_name(dir=TMP_DIRECTORY) content = TEST_CONTENT try: ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) @@ -1112,7 +1115,7 @@ def test_ds_block_insertbefore_bof(ansible_zos_module, dstype): "state":"present" } ds_name = get_tmp_ds_name() - temp_file = "/tmp/" + ds_name + temp_file = get_random_file_name(dir=TMP_DIRECTORY) content = TEST_CONTENT try: ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) @@ -1138,7 +1141,7 @@ def test_ds_block_replace_insertafter_regex(ansible_zos_module, dstype): "state":"present" } ds_name = get_tmp_ds_name() - temp_file = "/tmp/" + ds_name + temp_file = get_random_file_name(dir=TMP_DIRECTORY) content = TEST_CONTENT_DEFAULTMARKER try: ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) @@ -1164,7 +1167,7 @@ def test_ds_block_replace_insertbefore_regex(ansible_zos_module, dstype): "state":"present" } ds_name = get_tmp_ds_name() - temp_file = "/tmp/" + ds_name + temp_file = get_random_file_name(dir=TMP_DIRECTORY) content = TEST_CONTENT_DEFAULTMARKER try: ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) @@ -1190,7 +1193,7 @@ def test_ds_block_replace_insertafter_eof(ansible_zos_module, dstype): "state":"present" } ds_name = get_tmp_ds_name() - temp_file = "/tmp/" + ds_name + temp_file = get_random_file_name(dir=TMP_DIRECTORY) content = TEST_CONTENT_DEFAULTMARKER try: ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) @@ -1216,7 +1219,7 @@ def test_ds_block_replace_insertbefore_bof(ansible_zos_module, dstype): "state":"present" } ds_name = get_tmp_ds_name() - temp_file = "/tmp/" + ds_name + temp_file = get_random_file_name(dir=TMP_DIRECTORY) content = TEST_CONTENT_DEFAULTMARKER try: ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) @@ -1241,7 +1244,7 @@ def test_ds_block_absent(ansible_zos_module, dstype): "state":"absent" } ds_name = get_tmp_ds_name() - temp_file = "/tmp/" + ds_name + temp_file = get_random_file_name(dir=TMP_DIRECTORY) content = TEST_CONTENT_DEFAULTMARKER try: ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) @@ -1261,25 +1264,26 @@ def test_ds_tmp_hlq_option(ansible_zos_module): # This TMPHLQ only works with sequential datasets hosts = ansible_zos_module ds_type = "seq" + hlq = get_random_q() params={ "insertafter":"EOF", "block":"export ZOAU_ROOT\n", "state":"present", "backup":True, - "tmp_hlq":"TMPHLQ" + "tmp_hlq": hlq } kwargs = { - "backup_name":r"TMPHLQ\.." + "backup_name":"{0}".format(hlq) } content = TEST_CONTENT try: ds_full_name = get_tmp_ds_name() - temp_file = "/tmp/" + ds_full_name + temp_file = get_random_file_name(dir=TMP_DIRECTORY) hosts.all.zos_data_set(name=ds_full_name, type=ds_type, replace=True) hosts.all.shell(cmd=f"echo \"{content}\" > {temp_file}") cmd_str = f"cp {quote(temp_file)} \"//'{ds_full_name}'\" " hosts.all.shell(cmd=cmd_str) - hosts.all.shell(cmd="rm -rf " + "/tmp/zos_lineinfile/") + hosts.all.shell(cmd="rm " + ds_full_name) results = hosts.all.shell(cmd=f"cat \"//'{ds_full_name}'\" | wc -l ") for result in results.contacted.values(): assert int(result.get("stdout")) != 0 @@ -1287,9 +1291,10 @@ def test_ds_tmp_hlq_option(ansible_zos_module): results = hosts.all.zos_blockinfile(**params) for result in results.contacted.values(): for key in kwargs: - assert re.match(kwargs.get(key), result.get(key)) + assert kwargs.get(key) in result.get(key) finally: hosts.all.zos_data_set(name=ds_full_name, state="absent") + hosts.all.file(name=temp_file, state="absent") @pytest.mark.ds @@ -1304,7 +1309,7 @@ def test_ds_block_insert_with_indentation_level_specified(ansible_zos_module, ds "indentation":16 } ds_name = get_tmp_ds_name() - temp_file = "/tmp/" + ds_name + temp_file = get_random_file_name(dir=TMP_DIRECTORY) content = TEST_CONTENT try: ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) @@ -1332,9 +1337,12 @@ def test_ds_block_insertafter_eof_with_backup(ansible_zos_module, dstype, backup "backup":True } if backup_name: - params["backup_name"] = backup_name + if backup_ds_name == "SEQ": + params["backup_name"] = get_tmp_ds_name() + else: + params["backup_name"] = get_tmp_ds_name() + "(MEM)" ds_name = get_tmp_ds_name() - temp_file = "/tmp/" + ds_name + temp_file = get_random_file_name(dir=TMP_DIRECTORY) content = TEST_CONTENT try: ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) @@ -1349,8 +1357,6 @@ def test_ds_block_insertafter_eof_with_backup(ansible_zos_module, dstype, backup assert result.get("stdout") == EXPECTED_INSERTAFTER_EOF finally: remove_ds_environment(ansible_zos_module, ds_name) - if backup_name: - ansible_zos_module.all.zos_data_set(name="BLOCKIF.TEST.BACKUP", state="absent") if backup_ds_name != "": ansible_zos_module.all.zos_data_set(name=backup_ds_name, state="absent") @@ -1370,7 +1376,7 @@ def test_ds_block_insertafter_regex_force(ansible_zos_module, dstype): "force":True } member_1, member_2 = "MEM1", "MEM2" - temp_file = f"/tmp/{member_2}" + temp_file = get_random_file_name(dir=TMP_DIRECTORY) content = TEST_CONTENT if ds_type == "seq": params["path"] = f"{default_data_set_name}.{member_2}" @@ -1405,14 +1411,15 @@ def test_ds_block_insertafter_regex_force(ansible_zos_module, dstype): for result in results.contacted.values(): assert int(result.get("stdout")) != 0 # copy/compile c program and copy jcl to hold data set lock for n seconds in background(&) - hosts.all.file(path="/tmp/disp_shr/", state="directory") - hosts.all.shell(cmd=f"echo \"{c_pgm}\" > /tmp/disp_shr/pdse-lock.c") + path = get_random_file_name(suffix="/", dir=TMP_DIRECTORY) + hosts.all.file(path=path, state="directory") + hosts.all.shell(cmd=f"echo \"{c_pgm}\" > {path}pdse-lock.c") hosts.all.shell( - cmd=f"echo \"{call_c_jcl.format(default_data_set_name, member_1)}\""+ - " > /tmp/disp_shr/call_c_pgm.jcl" + cmd=f"echo \"{call_c_jcl.format(default_data_set_name, member_1, path)}\""+ + " > {0}call_c_pgm.jcl".format(path) ) - hosts.all.shell(cmd="xlc -o pdse-lock pdse-lock.c", chdir="/tmp/disp_shr/") - hosts.all.shell(cmd="submit call_c_pgm.jcl", chdir="/tmp/disp_shr/") + hosts.all.shell(cmd="xlc -o pdse-lock pdse-lock.c", chdir=path) + hosts.all.shell(cmd="submit call_c_pgm.jcl", chdir=path) time.sleep(5) # call lineinfile to see results results = hosts.all.zos_blockinfile(**params) @@ -1426,7 +1433,7 @@ def test_ds_block_insertafter_regex_force(ansible_zos_module, dstype): ps_list_res = hosts.all.shell(cmd="ps -e | grep -i 'pdse-lock'") pid = list(ps_list_res.contacted.values())[0].get('stdout').strip().split(' ')[0] hosts.all.shell(cmd=f"kill 9 {pid.strip()}") - hosts.all.shell(cmd='rm -r /tmp/disp_shr') + hosts.all.shell(cmd='rm -r {0}'.format(path)) hosts.all.zos_data_set(name=default_data_set_name, state="absent") @@ -1523,96 +1530,39 @@ def test_uss_encoding(ansible_zos_module, encoding): "state":"present" } params["encoding"] = encoding - full_path = TEST_FOLDER_BLOCKINFILE + encoding + full_path = get_random_file_name(dir=TMP_DIRECTORY) content = "SIMPLE LINE TO VERIFY" try: - hosts.all.shell(cmd=f"mkdir -p {TEST_FOLDER_BLOCKINFILE}") hosts.all.file(path=full_path, state="touch") - hosts.all.shell(cmd=f"echo \"{content}\" > {full_path}") - hosts.all.zos_encode( - src=full_path, - dest=full_path, - from_encoding="IBM-1047", - to_encoding=params["encoding"] - ) + hosts.all.shell(cmd="echo \"{0}\" > {1}".format(content, full_path)) + hosts.all.zos_encode(src=full_path, dest=full_path, from_encoding="IBM-1047", to_encoding=params["encoding"]) params["path"] = full_path results = hosts.all.zos_blockinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["src"])) - for result in results.contacted.values(): - assert result.get("stdout") == "# BEGIN ANSIBLE MANAGED BLOCK\nZOAU_ROOT=/mvsutil-develop_dsed\nZOAU_HOME=$ZOAU_ROOT\nZOAU_DIR=$ZOAU_ROOT\n# END ANSIBLE MANAGED BLOCK" - - params["src"] = ds_name + "(-1)" - results = hosts.all.zos_blockinfile(**params) - for result in results.contacted.values(): - assert result.get("changed") == 1 - results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["src"])) - for result in results.contacted.values(): - assert result.get("stdout") == "# BEGIN ANSIBLE MANAGED BLOCK\nZOAU_ROOT=/mvsutil-develop_dsed\nZOAU_HOME=$ZOAU_ROOT\nZOAU_DIR=$ZOAU_ROOT\n# END ANSIBLE MANAGED BLOCK" - - params_w_bck = dict(insertafter="eof", block="export ZOAU_ROOT\nexport ZOAU_HOME\nexport ZOAU_DIR", state="present", backup=True, backup_name=ds_name + "(+1)") - params_w_bck["src"] = ds_name + "(-1)" - results = hosts.all.zos_blockinfile(**params_w_bck) - for result in results.contacted.values(): - assert result.get("changed") == 1 - assert result.get("rc") == 0 - backup = ds_name + "(0)" - results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(backup)) - for result in results.contacted.values(): - assert result.get("stdout") == "# BEGIN ANSIBLE MANAGED BLOCK\nZOAU_ROOT=/mvsutil-develop_dsed\nZOAU_HOME=$ZOAU_ROOT\nZOAU_DIR=$ZOAU_ROOT\n# END ANSIBLE MANAGED BLOCK" - - params["src"] = ds_name + "(-3)" - results = hosts.all.zos_blockinfile(**params) + results = hosts.all.shell(cmd="cat {0}".format(params["path"])) for result in results.contacted.values(): - assert result.get("changed") == 0 + assert result.get("stdout") == EXPECTED_ENCODING finally: - hosts.all.shell(cmd="""drm "ANSIBLE.*" """) + remove_uss_environment(ansible_zos_module, full_path) + @pytest.mark.ds def test_special_characters_ds_insert_block(ansible_zos_module): hosts = ansible_zos_module - ds_type = dstype - insert_data = "Insert this string" - params = { - "insertafter":"SIMPLE", - "block":insert_data, - "state":"present" - } - params["encoding"] = encoding - ds_name = get_tmp_ds_name() - temp_file = "/tmp/" + ds_name - content = "SIMPLE LINE TO VERIFY" + params = dict(insertafter="eof", block="ZOAU_ROOT=/mvsutil-develop_dsed\nZOAU_HOME=\\$ZOAU_ROOT\nZOAU_DIR=\\$ZOAU_ROOT", state="present") + ds_name = get_tmp_ds_name(5, 5, symbols=True) + backup = get_tmp_ds_name(6, 6, symbols=True) try: - hosts.all.shell(cmd=f"echo \"{content}\" > {temp_file}") - hosts.all.zos_encode( - src=temp_file, - dest=temp_file, - from_encoding="IBM-1047", - to_encoding=params["encoding"] - ) - hosts.all.zos_data_set(name=ds_name, type=ds_type) - if ds_type in ["pds", "pdse"]: - ds_full_name = ds_name + "(MEM)" - hosts.all.zos_data_set(name=ds_full_name, state="present", type="member") - cmd_str = f"cp -CM {quote(temp_file)} \"//'{ds_full_name}'\"" - else: - ds_full_name = ds_name - cmd_str = f"cp {quote(temp_file)} \"//'{ds_full_name}'\" " - hosts.all.shell(cmd=cmd_str) - hosts.all.shell(cmd="rm -rf " + temp_file) - params["path"] = ds_full_name + result = hosts.all.zos_data_set(name=ds_name, type="seq", state="present") + + params["src"] = ds_name results = hosts.all.zos_blockinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - hosts.all.zos_encode( - src=ds_full_name, - dest=ds_full_name, - from_encoding=params["encoding"], - to_encoding="IBM-1047" - ) - results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) + src = ds_name.replace('$', "\$") + results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(src)) for result in results.contacted.values(): assert result.get("stdout") == "# BEGIN ANSIBLE MANAGED BLOCK\nZOAU_ROOT=/mvsutil-develop_dsed\nZOAU_HOME=$ZOAU_ROOT\nZOAU_DIR=$ZOAU_ROOT\n# END ANSIBLE MANAGED BLOCK" @@ -1661,7 +1611,7 @@ def test_ds_block_insertafter_nomatch_eof_insert(ansible_zos_module): } params["insertafter"] = 'SOME_NON_EXISTING_PATTERN' ds_name = get_tmp_ds_name() - temp_file = "/tmp/" + ds_name + temp_file = get_random_file_name(dir=TMP_DIRECTORY) content = TEST_CONTENT try: ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) @@ -1731,7 +1681,7 @@ def test_ds_block_insertafter_regex_fail(ansible_zos_module, dstype): "force":False } member_1, member_2 = "MEM1", "MEM2" - temp_file = f"/tmp/{member_2}" + temp_file = get_random_file_name(dir=TMP_DIRECTORY) params["path"] = f"{default_data_set_name}({member_2})" content = TEST_CONTENT try: @@ -1765,14 +1715,15 @@ def test_ds_block_insertafter_regex_fail(ansible_zos_module, dstype): for result in results.contacted.values(): assert int(result.get("stdout")) != 0 # copy/compile c program and copy jcl to hold data set lock for n seconds in background(&) - hosts.all.file(path="/tmp/disp_shr/", state="directory") - hosts.all.shell(cmd=f"echo \"{c_pgm}\" > /tmp/disp_shr/pdse-lock.c") + path = get_random_file_name(suffix="/", dir=TMP_DIRECTORY) + hosts.all.file(path=path, state="directory") + hosts.all.shell(cmd=f"echo \"{c_pgm}\" > {path}pdse-lock.c") hosts.all.shell( - cmd=f"echo \"{call_c_jcl.format(default_data_set_name, member_1)}\""+ - " > /tmp/disp_shr/call_c_pgm.jcl" + cmd=f"echo \"{call_c_jcl.format(default_data_set_name, member_1, path)}\""+ + " > {0}call_c_pgm.jcl".format(path) ) - hosts.all.shell(cmd="xlc -o pdse-lock pdse-lock.c", chdir="/tmp/disp_shr/") - hosts.all.shell(cmd="submit call_c_pgm.jcl", chdir="/tmp/disp_shr/") + hosts.all.shell(cmd="xlc -o pdse-lock pdse-lock.c", chdir=path) + hosts.all.shell(cmd="submit call_c_pgm.jcl", chdir=path) time.sleep(5) # call lineinfile to see results results = hosts.all.zos_blockinfile(**params) @@ -1783,5 +1734,5 @@ def test_ds_block_insertafter_regex_fail(ansible_zos_module, dstype): ps_list_res = hosts.all.shell(cmd="ps -e | grep -i 'pdse-lock'") pid = list(ps_list_res.contacted.values())[0].get('stdout').strip().split(' ')[0] hosts.all.shell(cmd=f"kill 9 {pid.strip()}") - hosts.all.shell(cmd='rm -r /tmp/disp_shr') + hosts.all.shell(cmd='rm -r {0}'.format(path)) hosts.all.zos_data_set(name=default_data_set_name, state="absent") From 53f3eb853bc1569ee625be0d5605f798254c8021 Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Wed, 4 Sep 2024 14:26:24 -0400 Subject: [PATCH 463/495] Update bug_issue.yml (#1679) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Customer mentioned v1.9.2 of zos_core was not on the list. Co-authored-by: André Marcel Gutiérrez Benítez <68956970+AndreMarcel99@users.noreply.github.com> --- .github/ISSUE_TEMPLATE/bug_issue.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/ISSUE_TEMPLATE/bug_issue.yml b/.github/ISSUE_TEMPLATE/bug_issue.yml index dba9db3b8..e55e884a3 100644 --- a/.github/ISSUE_TEMPLATE/bug_issue.yml +++ b/.github/ISSUE_TEMPLATE/bug_issue.yml @@ -39,6 +39,7 @@ body: - v1.11.0-beta.1 - v1.10.0 - v1.10.0-beta.1 + - v1.9.2 - v1.9.0 (default) - v1.8.0 - v1.7.0 From ef7069ea7dc0ff743c8391270e358168d1fc07b2 Mon Sep 17 00:00:00 2001 From: Ivan Moreno <iamorenosoto@gmail.com> Date: Tue, 10 Sep 2024 12:08:32 -0600 Subject: [PATCH 464/495] [Enhancement] [zos_job_*] Return address space used by jobs (#1673) * Return job type when querying jobs * Update zos_job_query docs * Remove comment * Update zos_job_output docs * Remove unused content_type * Update zos_job_submit docs * Add changelog fragment * Update tests * Fix merge error * Fix more merge errors * Update RST files * Update changelog fragment * Change 'job_type' to 'content_type' * Update module docs * Fix extra whitespace at end of content type * Correct changelog fragment --- changelogs/fragments/1673-return-job-type.yml | 10 ++++++++++ docs/source/modules/zos_job_output.rst | 2 +- docs/source/modules/zos_job_query.rst | 8 ++++++++ docs/source/modules/zos_job_submit.rst | 6 ++++++ plugins/module_utils/job.py | 5 +++-- plugins/modules/zos_job_output.py | 2 +- plugins/modules/zos_job_query.py | 8 ++++++++ plugins/modules/zos_job_submit.py | 5 +++++ tests/functional/modules/test_zos_copy_func.py | 11 ----------- tests/functional/modules/test_zos_job_output_func.py | 1 + tests/functional/modules/test_zos_job_query_func.py | 8 ++++++++ tests/functional/modules/test_zos_job_submit_func.py | 4 +++- 12 files changed, 54 insertions(+), 16 deletions(-) create mode 100644 changelogs/fragments/1673-return-job-type.yml diff --git a/changelogs/fragments/1673-return-job-type.yml b/changelogs/fragments/1673-return-job-type.yml new file mode 100644 index 000000000..1da8ca503 --- /dev/null +++ b/changelogs/fragments/1673-return-job-type.yml @@ -0,0 +1,10 @@ +minor_changes: + - zos_job_query - Added address space type used by jobs in return JSON + as `content_type`. + (https://github.com/ansible-collections/ibm_zos_core/pull/1673). + - zos_job_output - Added address space type used by jobs in return JSON + as `content_type`. + (https://github.com/ansible-collections/ibm_zos_core/pull/1673). + - zos_job_submit - Added address space type used by jobs in return JSON + as `content_type`. + (https://github.com/ansible-collections/ibm_zos_core/pull/1673). \ No newline at end of file diff --git a/docs/source/modules/zos_job_output.rst b/docs/source/modules/zos_job_output.rst index efea6ea2a..f31ff8362 100644 --- a/docs/source/modules/zos_job_output.rst +++ b/docs/source/modules/zos_job_output.rst @@ -265,7 +265,7 @@ jobs | **type**: str content_type - Type of address space. + Type of address space used by the job. | **type**: str | **sample**: JOB diff --git a/docs/source/modules/zos_job_query.rst b/docs/source/modules/zos_job_query.rst index ea320dfc3..4b72dddf5 100644 --- a/docs/source/modules/zos_job_query.rst +++ b/docs/source/modules/zos_job_query.rst @@ -134,6 +134,7 @@ jobs [ { "asid": 0, + "content_type": "JOB", "creation_date": "2023-05-03", "creation_time": "12:13:00", "job_class": "K", @@ -147,6 +148,7 @@ jobs }, { "asid": 4, + "content_type": "JOB", "creation_date": "2023-05-03", "creation_time": "12:14:00", "job_class": "A", @@ -181,6 +183,12 @@ jobs | **type**: str | **sample**: JOB01427 + content_type + Type of address space used by the job. + + | **type**: str + | **sample**: STC + ret_code Return code output collected from job log. diff --git a/docs/source/modules/zos_job_submit.rst b/docs/source/modules/zos_job_submit.rst index 573b4f4bd..4244b78da 100644 --- a/docs/source/modules/zos_job_submit.rst +++ b/docs/source/modules/zos_job_submit.rst @@ -592,6 +592,12 @@ jobs | **type**: str | **sample**: HELLO + content_type + Type of address space used by the job. + + | **type**: str + | **sample**: STC + duration The total lapsed time the JCL ran for. diff --git a/plugins/module_utils/job.py b/plugins/module_utils/job.py index 536602dc4..d9444947f 100644 --- a/plugins/module_utils/job.py +++ b/plugins/module_utils/job.py @@ -181,7 +181,6 @@ def _job_not_found(job_id, owner, job_name, dd_name): job["ret_code"]["msg_txt"] = "The job {0} could not be found.".format(job_not_found_msg) job["class"] = "" - job["content_type"] = "" job["ddnames"] = [] dd = {} @@ -350,6 +349,9 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=T job["subsystem"] = "" job["system"] = "" job["owner"] = entry.owner + # Sometimes, with job type STC, the first entry will have an extra + # space at the end of it. + job["content_type"] = entry.job_type.strip() # From v1.3.0, ZOAU sets unavailable job fields as None, instead of '?'. job["ret_code"] = {} @@ -371,7 +373,6 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=T job["queue_position"] = entry.queue_position job["program_name"] = entry.program_name job["class"] = "" - job["content_type"] = "" job["ret_code"]["steps"] = [] job["ddnames"] = [] job["duration"] = duration diff --git a/plugins/modules/zos_job_output.py b/plugins/modules/zos_job_output.py index 21e0af3e6..986578f81 100644 --- a/plugins/modules/zos_job_output.py +++ b/plugins/modules/zos_job_output.py @@ -113,7 +113,7 @@ sample: content_type: description: - Type of address space. + Type of address space used by the job. type: str sample: JOB creation_date: diff --git a/plugins/modules/zos_job_query.py b/plugins/modules/zos_job_query.py index be2bb513f..328426ada 100644 --- a/plugins/modules/zos_job_query.py +++ b/plugins/modules/zos_job_query.py @@ -119,6 +119,11 @@ Unique job identifier assigned to the job by JES. type: str sample: JOB01427 + content_type: + description: + Type of address space used by the job. + type: str + sample: STC ret_code: description: Return code output collected from job log. @@ -225,6 +230,7 @@ "job_id": "JOB01427", "ret_code": "null", "job_class": "K", + "content_type": "JOB", "svc_class": "?", "priority": 1, "asid": 0, @@ -236,6 +242,7 @@ "job_name": "LINKCBL", "owner": "ADMIN", "job_id": "JOB16577", + "content_type": "JOB", "ret_code": { "msg": "CANCELED", "code": "null" }, "job_class": "A", "svc_class": "E", @@ -410,6 +417,7 @@ def parsing_jobs(jobs_raw): "job_name": job.get("job_name"), "owner": job.get("owner"), "job_id": job.get("job_id"), + "content_type": job.get("content_type"), "system": job.get("system"), "subsystem": job.get("subsystem"), "ret_code": ret_code, diff --git a/plugins/modules/zos_job_submit.py b/plugins/modules/zos_job_submit.py index d91b511c3..ce472e266 100644 --- a/plugins/modules/zos_job_submit.py +++ b/plugins/modules/zos_job_submit.py @@ -158,6 +158,11 @@ The name of the batch job. type: str sample: HELLO + content_type: + description: + Type of address space used by the job. + type: str + sample: STC duration: description: The total lapsed time the JCL ran for. type: int diff --git a/tests/functional/modules/test_zos_copy_func.py b/tests/functional/modules/test_zos_copy_func.py index 9df423fb8..61ba9982d 100644 --- a/tests/functional/modules/test_zos_copy_func.py +++ b/tests/functional/modules/test_zos_copy_func.py @@ -2350,17 +2350,6 @@ def test_copy_ps_to_existing_uss_file(ansible_zos_module, force): dest = get_random_file_name(dir=TMP_DIRECTORY) hosts = ansible_zos_module - mlq_size = 3 - cobol_src_pds = get_tmp_ds_name(mlq_size) - cobol_src_mem = "HELLOCBL" - cobol_src_mem2 = "HICBL2" - src_lib = get_tmp_ds_name(mlq_size) - dest_lib = get_tmp_ds_name(mlq_size) - dest_lib_aliases = get_tmp_ds_name(mlq_size) - pgm_mem = "HELLO" - pgm2_mem = "HELLO2" - pgm_mem_alias = "ALIAS1" - pgm2_mem_alias = "ALIAS2" try: hosts.all.file(path=dest, state="touch") hosts.all.shell(cmd=f"decho 'test line' '{src_ds}' ") diff --git a/tests/functional/modules/test_zos_job_output_func.py b/tests/functional/modules/test_zos_job_output_func.py index 606e93aab..7252f4c22 100644 --- a/tests/functional/modules/test_zos_job_output_func.py +++ b/tests/functional/modules/test_zos_job_output_func.py @@ -114,6 +114,7 @@ def test_zos_job_output_job_exists(ansible_zos_module): assert result.get("jobs") is not None assert result.get("jobs")[0].get("ret_code").get("steps") is not None assert result.get("jobs")[0].get("ret_code").get("steps")[0].get("step_name") == "STEP0001" + assert result.get("jobs")[0].get("content_type") == "JOB" finally: hosts.all.file(path=TEMP_PATH, state="absent") diff --git a/tests/functional/modules/test_zos_job_query_func.py b/tests/functional/modules/test_zos_job_query_func.py index 6999737e4..d34aeda4d 100644 --- a/tests/functional/modules/test_zos_job_query_func.py +++ b/tests/functional/modules/test_zos_job_query_func.py @@ -147,5 +147,13 @@ def test_zos_job_id_query_short_ids_with_wilcard_func(ansible_zos_module): job_id = get_job_id(hosts, len_id) job_id = job_id[0:4] + '*' qresults = hosts.all.zos_job_query(job_id=job_id) + + # Assuming we'll mostly deal with started tasks or normal jobs. + if "STC" in job_id: + content_type = "STC" + else: + content_type = "JOB" + for qresult in qresults.contacted.values(): assert qresult.get("jobs") is not None + assert qresult.get("jobs")[0].get("content_type") == content_type diff --git a/tests/functional/modules/test_zos_job_submit_func.py b/tests/functional/modules/test_zos_job_submit_func.py index 481b30cc2..93844ed4c 100644 --- a/tests/functional/modules/test_zos_job_submit_func.py +++ b/tests/functional/modules/test_zos_job_submit_func.py @@ -409,7 +409,7 @@ { "default_location":False }, - ] + ] ) def test_job_submit_pds(ansible_zos_module, location): """ @@ -500,6 +500,7 @@ def test_job_submit_uss(ansible_zos_module): for result in results.contacted.values(): assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" assert result.get("jobs")[0].get("ret_code").get("code") == 0 + assert result.get("jobs")[0].get("content_type") == "JOB" assert result.get("changed") is True finally: hosts.all.file(path=temp_path, state="absent") @@ -851,6 +852,7 @@ def test_job_submit_full_input(ansible_zos_module): finally: hosts.all.file(path=temp_path, state="absent") + def test_negative_job_submit_local_jcl_no_dsn(ansible_zos_module): tmp_file = tempfile.NamedTemporaryFile(delete=True) with open(tmp_file.name, "w",encoding="utf-8") as f: From 9a1a0c9401e5abe578c1d92ca9fef19eea3c51e1 Mon Sep 17 00:00:00 2001 From: Demetri <dimatos@gmail.com> Date: Tue, 17 Sep 2024 14:54:08 -0700 Subject: [PATCH 465/495] Update/1601/ce linux posix support (#1682) * Initial commit for test case load balancer Signed-off-by: ddimatos <dimatos@gmail.com> * Updated load balancer with more messaging and bug fixes Signed-off-by: ddimatos <dimatos@gmail.com> * Update to load balancer Signed-off-by: ddimatos <dimatos@gmail.com> * Cleaned up a typo Signed-off-by: ddimatos <dimatos@gmail.com> * Update with typo correction Signed-off-by: ddimatos <dimatos@gmail.com> * test edit Signed-off-by: ddimatos <dimatos@gmail.com> * test edit Signed-off-by: ddimatos <dimatos@gmail.com> * Add load balancer updates Signed-off-by: ddimatos <dimatos@gmail.com> * remove print stmt Signed-off-by: ddimatos <dimatos@gmail.com> * Test executor updates Signed-off-by: ddimatos <dimatos@gmail.com> * Updates to test framwork to allow for dictionary of configs vs file Signed-off-by: ddimatos <dimatos@gmail.com> * Updates using the new zinventory-raw fixture option Signed-off-by: ddimatos <dimatos@gmail.com> * Commented out unused code Signed-off-by: ddimatos <dimatos@gmail.com> * Refactored and commented code Signed-off-by: ddimatos <dimatos@gmail.com> * Added back commented runtime error Signed-off-by: ddimatos <dimatos@gmail.com> * Improvments to the codes logic Signed-off-by: ddimatos <dimatos@gmail.com> * Fixed a variable name bug Signed-off-by: ddimatos <dimatos@gmail.com> * Update executor with additional pydoc Signed-off-by: ddimatos <dimatos@gmail.com> * Updated the arg parser and added new args Signed-off-by: ddimatos <dimatos@gmail.com> * Updated the arg parser and added new args Signed-off-by: ddimatos <dimatos@gmail.com> * Added ability to pass in a list of z/OS managed nodes that overrides the default behavior Signed-off-by: ddimatos <dimatos@gmail.com> * Add updated sub process command with separate stdout and stderr Signed-off-by: ddimatos <dimatos@gmail.com> * Update balacer with new logic Signed-off-by: ddimatos <dimatos@gmail.com> * Update with advanced rebalance logic Signed-off-by: ddimatos <dimatos@gmail.com> * Update verobse and capture logic Signed-off-by: ddimatos <dimatos@gmail.com> * Remove prefix to ansible module utils that casues an pytest error Signed-off-by: ddimatos <dimatos@gmail.com> * Update with new doc Signed-off-by: ddimatos <dimatos@gmail.com> * Added new helpers Signed-off-by: ddimatos <dimatos@gmail.com> * Update source with new logic Signed-off-by: ddimatos <dimatos@gmail.com> * Added additional capabilities Signed-off-by: ddimatos <dimatos@gmail.com> * Updated --zinventory-raw fixture to support extra_args Signed-off-by: ddimatos <dimatos@gmail.com> * comment formatting Signed-off-by: ddimatos <dimatos@gmail.com> * udated with html generation Signed-off-by: ddimatos <dimatos@gmail.com> * udated with html generation Signed-off-by: ddimatos <dimatos@gmail.com> * Rename file Signed-off-by: ddimatos <dimatos@gmail.com> * Update CE tool Signed-off-by: ddimatos <dimatos@gmail.com> * Updates to create and manage log Signed-off-by: ddimatos <dimatos@gmail.com> * added throttle support Signed-off-by: ddimatos <dimatos@gmail.com> * Added an overall return code to CE Signed-off-by: ddimatos <dimatos@gmail.com> * Bumped up the connection time out Signed-off-by: ddimatos <dimatos@gmail.com> * Lint updates Signed-off-by: ddimatos <dimatos@gmail.com> * Update with lint Signed-off-by: ddimatos <dimatos@gmail.com> * Update module to use other modules and clean up pydoc Signed-off-by: ddimatos <dimatos@gmail.com> * scripts/utility.py Signed-off-by: ddimatos <dimatos@gmail.com> * Utlity and minor updates Signed-off-by: ddimatos <dimatos@gmail.com> * Expanded AC functionality with support for both depedency finder and pytest finder Signed-off-by: ddimatos <dimatos@gmail.com> * Fix a bug that resulted when a skip was used not in the results Signed-off-by: ddimatos <dimatos@gmail.com> * added color to messages and shuffle code Signed-off-by: ddimatos <dimatos@gmail.com> * Begin CE and AC integration Signed-off-by: ddimatos <dimatos@gmail.com> * Integration updates to AC Signed-off-by: ddimatos <dimatos@gmail.com> * Continued intergration of ac and ce Signed-off-by: ddimatos <dimatos@gmail.com> * Added support for return cocde Signed-off-by: ddimatos <dimatos@gmail.com> * Added support for return cocde Signed-off-by: ddimatos <dimatos@gmail.com> * Enhance return code status and clean up comments Signed-off-by: ddimatos <dimatos@gmail.com> * Updating simple test case for CE Signed-off-by: ddimatos <dimatos@gmail.com> * Rename file Signed-off-by: ddimatos <dimatos@gmail.com> * Updates to support AC tooling integration Signed-off-by: ddimatos <dimatos@gmail.com> * Prepare env for folders Signed-off-by: ddimatos <dimatos@gmail.com> * Configurations moved to a folder for cleaner look Signed-off-by: ddimatos <dimatos@gmail.com> * Updated AC connection Signed-off-by: ddimatos <dimatos@gmail.com> * lint cleanup Signed-off-by: ddimatos <dimatos@gmail.com> * Add fragment Signed-off-by: ddimatos <dimatos@gmail.com> * force bash shell for gh actions to use becasue of the redirect used <<< Signed-off-by: ddimatos <dimatos@gmail.com> * Update scripts to not use bash redirect, fix bug in old ac-test path Signed-off-by: ddimatos <dimatos@gmail.com> * Update AC to avoid use of bash array types Signed-off-by: ddimatos <dimatos@gmail.com> * Port bash arrays to a portable style Signed-off-by: ddimatos <dimatos@gmail.com> * Update venv.sh with posix support Signed-off-by: ddimatos <dimatos@gmail.com> * Increase venv.sh posix compliance Signed-off-by: ddimatos <dimatos@gmail.com> * Increase venv.sh posix compliance Signed-off-by: ddimatos <dimatos@gmail.com> * Increase venv.sh posix compliance Signed-off-by: ddimatos <dimatos@gmail.com> * Increase venv.sh posix compliance Signed-off-by: ddimatos <dimatos@gmail.com> * Increase venv.sh posix compliance Signed-off-by: ddimatos <dimatos@gmail.com> * Update echo in venv script Signed-off-by: ddimatos <dimatos@gmail.com> * added OS support for RHEL and Ubuntu Signed-off-by: ddimatos <dimatos@gmail.com> * Update for RHEL and Ubuntu Signed-off-by: ddimatos <dimatos@gmail.com> * Update for RHEL and Ubuntu Signed-off-by: ddimatos <dimatos@gmail.com> * Update for RHEL and Ubuntu Signed-off-by: ddimatos <dimatos@gmail.com> * Update for RHEL and Ubuntu Signed-off-by: ddimatos <dimatos@gmail.com> * Update echo so carriage returns are honored Signed-off-by: ddimatos <dimatos@gmail.com> * add updates to ac that include support for concurrency Signed-off-by: ddimatos <dimatos@gmail.com> * added 5 more managed nodes Signed-off-by: ddimatos <dimatos@gmail.com> * deleted old poc ce Signed-off-by: ddimatos <dimatos@gmail.com> * Fixed parsing of hosts for concurrent executor Signed-off-by: ddimatos <dimatos@gmail.com> * Wrapped defaults in strings Signed-off-by: ddimatos <dimatos@gmail.com> * Wrapped defaults in strings Signed-off-by: ddimatos <dimatos@gmail.com> * scripts/hosts.env Signed-off-by: ddimatos <dimatos@gmail.com> * Update mounts.env Swapping old 1.0.1-ga for use as 1.3.3 of zoau * Fixes issue not a valid revision where it sees test_module_security.py as the first arg passed to the command Signed-off-by: ddimatos <dimatos@gmail.com> * add support for versioned precompiled binaries for zoau Signed-off-by: ddimatos <dimatos@gmail.com> * Added support for proper doc gen Signed-off-by: ddimatos <dimatos@gmail.com> * Update ac module doc to support clean Signed-off-by: ddimatos <dimatos@gmail.com> * Updates to correctly create venv's Signed-off-by: ddimatos <dimatos@gmail.com> * removed unused echo Signed-off-by: ddimatos <dimatos@gmail.com> * Updated message Signed-off-by: ddimatos <dimatos@gmail.com> * Corrected a bug on the positioning of where pip is called Signed-off-by: ddimatos <dimatos@gmail.com> * Update module doc gen with LIBRARY path Signed-off-by: ddimatos <dimatos@gmail.com> * Fixed a but that did not recoginze a single host Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: Rich Parker <richp405@gmail.com> --- ac | 822 +++++++++++++----- docs/Makefile | 26 - scripts/ce.py | 6 +- scripts/configurations/requirements-2.12.env | 2 +- scripts/configurations/requirements-2.17.env | 2 +- .../configurations/requirements-common.env | 1 + scripts/configurations/requirements-doc.env | 106 +++ scripts/hosts.env | 59 +- scripts/mounts.env | 26 +- scripts/venv.sh | 226 +++-- tests/helpers/ztest.py | 2 +- 11 files changed, 925 insertions(+), 353 deletions(-) create mode 100644 scripts/configurations/requirements-doc.env diff --git a/ac b/ac index 14fa159d4..46bd21ef4 100755 --- a/ac +++ b/ac @@ -1,6 +1,6 @@ -#!/bin/sh +#!/bin/bash # ============================================================================== -# Copyright (c) IBM Corporation 2023 +# Copyright (c) IBM Corporation 2024 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -13,23 +13,27 @@ # ============================================================================== # ============================================================================== -# Global Vars +# Global Vars and helpers # ============================================================================== -# Note: using the venv.sh script to find the latest venv puts the current 'ac' -# in the scripts directory because venv.sh performs a 'cd $(dirname $0)' and -# then other scripts can't be found in the managed venv corectly. Although this -# is probably a temporary solution, we now take the same code from venv.sh and -# use it here in 'ac' to find the latst managed venv. What should be -# done is to have some meta-data written out to venv/* that this command 'ac' -# can easily find, might be helpful to have some stats like dates created and -# so on. -# VENV=`scripts/./venv.sh --latest_venv` +# TODO: - write stats to the venv such as date created, updated, etc + +# Trap custom exit value to exit child processes. +trap "exit 100" TERM +export PARENT_PID=$$ VENV_HOME_MANAGED=${PWD%/venv}/venv +# ------------------------------------------------------------------------------ +# This method will terminate the entire script by killing the parent process. +# When exiting from within a function, use exit_all' not 'exit n' else the parent +# proccess will not exit. +# ------------------------------------------------------------------------------ +function exit_all(){ + kill -s TERM $PARENT_PID +} + # Normalize the version from 3.10.2 to 3010002000 -# Do we we need that 4th octet? normalize_version() { echo "$@" | awk -F. '{ printf("%d%03d%03d%03d\n", $1,$2,$3,$4); }'; } @@ -49,17 +53,20 @@ latest_venv(){ fi } -# Method will take a venv name such as venv-2.16 and validate that it exists +# Method will take a venv name such as venv-2.16 and validate that it exists, otherwise error and exit validate_venv(){ option_venv=$1 - #test_for_managed_venv=`ls -d "$VENV_HOME_MANAGED"/venv-[0-9].[0-9]* 2>/dev/null` if [[ "$option_venv" =~ "latest" ]]; then test_for_managed_venv=`ls -d "$VENV_HOME_MANAGED"/venv-latest* 2>/dev/null` if [[ "$test_for_managed_venv" =~ "latest" ]]; then dir_version_latest=$option_venv fi - #elif [ ! -z "$test_for_managed_venv" ]; then + elif [[ "$option_venv" =~ "doc" ]]; then + test_for_managed_venv=`ls -d "$VENV_HOME_MANAGED"/venv-doc* 2>/dev/null` + if [[ "$test_for_managed_venv" =~ "doc" ]]; then + dir_version_latest=$option_venv + fi else for dir_version in `ls -d "$VENV_HOME_MANAGED"/venv-[0-9].[0-9]* | rev | cut -d"/" -f1| rev`; do if [ $dir_version == $option_venv ]; then @@ -71,25 +78,31 @@ validate_venv(){ if [ ! -z "$dir_version_latest" ]; then echo "${VENV_HOME_MANAGED}"/$dir_version_latest else - echo "Unable to validate managed venv option $option_venv, exiting." + message_error "Unable to validate managed venv option $option_venv, exiting." exit - fi + fi } +# TODO: Wrap this with an exist check so that you can override the venv from the shell VENV=`latest_venv` -file="" -verbose=0 -DIV="-----------------------------------------------------------------------" -CURRENT_DIR=`pwd` -cd $CURRENT_DIR -# VENV_BIN should equate to same as $VIRTUAL_ENV after the venv activate if [ ! -z "$VENV" ]; then VENV_BIN=$VENV/bin VENV_BASENAME=`basename $VENV` fi +CURRENT_DIR=`pwd` +cd $CURRENT_DIR CURR_DIR=`pwd` +file="" +verbose=0 +GH_BRANCH=`git branch |grep "*" | cut -d" " -f2` +DIV="-----------------------------------------------------------------------" + +# if '0' then Docker is up, else '1' then docker is not up +DOCKER_INFO=`podman info> /dev/null 2>&1;echo $?` + +# Vars used to aid in terminal message colors RED=$'\e[1;31m' GRN=$'\e[1;32m' YEL=$'\e[1;33m' @@ -97,8 +110,6 @@ BLU=$'\e[1;34m' MAG=$'\e[1;35m' CYN=$'\e[1;36m' ENDC=$'\e[0m' -# 0 Docker is up, 1 docker is not up -DOCKER_INFO=`podman info> /dev/null 2>&1;echo $?` # ============================================================================== # Arg parsing helpers @@ -121,22 +132,33 @@ message(){ # Use this method for error messages to the console. # ------------------------------------------------------------------------------ message_error(){ - ERROR_MSG="${RED}ERROR${ENDC}: $1" - printf '%s\n' "${ERROR_MSG}" >&2 - exit 1 + printf '%s\n' "${RED}${DIV}${RED}" >&2 + printf '%s\n' "${RED}ERROR:${ENDC} ${1}" >&2 + printf '%s\n' "${RED}${DIV}${ENDC}" >&2 + exit_all } +# ------------------------------------------------------------------------------ +# This method generates an WARN message with yellow color. This message +# will always be sent to STDERR so that STDOUT can be reserved for return codes. +# Use this method for error messages to the console. +# ------------------------------------------------------------------------------ message_warn(){ - WARN_MSG="${YEL}WARN${ENDC}: $1" - printf '%s\n' "${WARN_MSG}" >&2 + printf '%s\n' "${YEL}${DIV}${YEL}" >&2 + printf '%s\n' "${YEL}WARN:${ENDC} ${1}" >&2 + printf '%s\n' "${YEL}${DIV}${YEL}" >&2 } +# ------------------------------------------------------------------------------ +# This method ehcecks to see the VENV variable has been set, if not it produces +# an error message with instructions on how to correct it. +# ------------------------------------------------------------------------------ ensure_managed_venv_exists(){ if [ -z "$VENV" ]; then - echo "Option $1 requires that a managed virtual environment be configured. "\ - "Run $0 -venv-setup to create managed viritual environments. "\ - "For additional optons, use $0 --help." - exit 1 + message_error "Option $1 requires that a managed virtual environment be configured. + Run $0 -venv-setup to create managed viritual environments. + For additional optons, use $0 --help." + exit_all fi } @@ -260,159 +282,311 @@ option_sanitize(){ # ============================================================================== # ------------------------------------------------------------------------------ -# Run a bandit security scan on the plugin directory +# Run a bandit static scan on the plugins directory on the hosts local branch +# where the 'ac' is running. # ------------------------------------------------------------------------------ #->ac-bandit: -## Run a bandit security scan on the plugins directory, set the severity level. -## Usage: ac [--bandit <level>] -## Usage: ac [--bandit <level>] -## <level> - choose from 'l', 'll', 'lll' -## - l all low, medium, high severity -## - ll all medium, high severity -## - lll all high severity +## Run bandit static scan on the plugins directory on the local GH branch. +## Usage: ac --ac-bandit [--level <str>] +## Options: +## level (optional): +## - choose from 'l', 'll', 'lll' +## - Defaults to, 'l' +## - l, all low, medium, high severities +## - ll, all medium, high severities +## - lllm all high severities ## Example: ## $ ac --ac-bandit --level ll ## $ ac --ac-bandit ac_bandit(){ option_level=$1 if [ ! "$option_level" ]; then - option_level="ll" + option_level="l" fi message "Running Bandit scan with level '$option_level'" . $VENV_BIN/activate && python3 -m bandit -r plugins/* -"${option_level}" } # ------------------------------------------------------------------------------ -# Build and install collection of the local GH branch. -# To not dirty the host, consider installing in the venv: -# ansible-galaxy -vv collection install --force -p venv/lib/python3.8/site-packages/ansible_collections +# Build and install a collection of the local branch checked out where 'ac' is +# running. Installation is set the --name option, local host or venv. # ------------------------------------------------------------------------------ #->ac-build: -## Build and install collection of the local GH branch. -## Usage: ac [--ac-build] +## Build and install collection of the local GH branch, select installation path. +## Usage: ac --ac-build [--name <str>] +## Options: +## name (optional) +## - The location to install, by default it will install the collection +## in the latest venv. If value 'local' is set, it will +## install the collection on the host. ## Example: +## $ ac --ac-build --name local +## $ ac --ac-build --name venv-2.14 ## $ ac --ac-build ac_build(){ - gh_branch=`git branch |grep "*" | cut -d" " -f2` - message "Build and install collection of the local GH branch: '$gh_branch'." + option_name=$1 + galaxy_path="" # Empty installs to host default + git_init="" + base_name="" + + # There must be a parent git directory in non-default collection installations, see issues + # https://github.com/ansible/ansible/issues/68499#issuecomment-873660057 + # https://github.com/ansible/ansible/issues/63032 + # Work around is to perform a git init . and create an empty repo where the collection is installed, + # does not seem to to be an issue with the host installation thus far, only venv's. This is required + # for ansible-test sanity tests to run, else sanity fails with 'WARNING: All targets skipped.' + + if [ "$option_name" ]; then + if [ "$option_name" == "local" ];then + base_name="$HOME/.ansible/collections/ansible_collections" + else + VENV=`validate_venv $option_name` + galaxy_path="-p ${VENV}" + base_name=`basename $VENV` + git_init="git init ${VENV}/ansible_collections --quiet" + fi + else + galaxy_path="-p ${VENV}" + base_name=`basename $VENV` + git_init="git init ${VENV}/ansible_collections --quiet" + fi + + message "Creating 'ibm_zos_core' collection from the local GH branch: '$GH_BRANCH'." . $VENV_BIN/activate && rm -rf ibm-ibm_zos_core-*.tar.gz && \ - $VENV_BIN/ansible-galaxy collection build && \ - $VENV_BIN/ansible-galaxy collection install -f ibm-ibm_zos_core-* + $VENV_BIN/ansible-galaxy collection build + + message "Installing 'ibm.ibm_zos_core' collection to ${base_name}." + . $VENV_BIN/activate && $VENV_BIN/ansible-galaxy collection install -f ibm-ibm_zos_core-* ${galaxy_path} && ${git_init} } # ------------------------------------------------------------------------------ -# Run galaxy importer on collection. +# Build, install and validate the collection with 'galaxy importer'. This operation +# is performed on the host, not on a venv. # ------------------------------------------------------------------------------ #->ac-galaxy-importer: -## Build current branch and run galaxy importer on collection. -## Usage: ac [--ac-galaxy-importer] +## Build current branch and run galaxy importer on the collection. +## Usage: ac --ac-galaxy-importer ## Example: ## $ ac --ac-galaxy-importer ac_galaxy_importer(){ - message "Running Galaxy Importer" - . $VENV_BIN/activate && collection_name=$($VENV_BIN/ansible-galaxy collection build --force | awk -F/ '{print $NF}') && python -m galaxy_importer.main $collection_name + message "Creating 'ibm_zos_core' collection with branch: '$GH_BRANCH'." + . $VENV_BIN/activate && collection_name=$($VENV_BIN/ansible-galaxy collection build --force | awk -F/ '{print $NF}') && ls -la $collection_name + + message "Running Galaxy Importer for collection $collection_name" + . $VENV_BIN/activate && python3 -m galaxy_importer.main $collection_name } -# Run a changelog lint locally +# ------------------------------------------------------------------------------ +# Perform changelog operations on th elocal branch. +# TODO: Add the ability to create a summary. # ------------------------------------------------------------------------------ #->ac-changelog: -## Runs antsibull-changelog to generate the release changelog or perform a lint -## on changelog fragments or release notes. -## Usage: ac [--ac-changelog <command>] -## <command> - choose from 'init', 'lint', 'lint-changelog-yaml', 'release', 'generate' -## - generate generate the changelog -## - init set up changelog infrastructure for collection, or an other project -## - lint check changelog fragments for syntax errors -## - lint-changelog-yaml check syntax of changelogs/changelog.yaml file -## - release add a new release to the change metadata +## Perform antsibull-changelog operations such as lint, release and generate, etc +## Usage: ac --ac-changelog [--command <str>] +## Options: +## command (optional) +## - choose from 'generate', 'lint', 'lint-changelog-yaml', 'init', 'release', +## - generate, TODO: Needs doc +## - lint, (default) check changelog fragments for syntax errors +## - lint-changelog-yaml, check syntax of changelogs/changelog.yaml file +## - init, set up changelog infrastructure for collection, or an other project +## - release, add a new release to the change metadata ## Example: ## $ ac --ac-changelog --command lint ## $ ac --ac-changelog --command release ## $ ac --ac-changelog ac_changelog(){ option_command=$1 - if [ ! "$option_command" ]; then - option_command="lint" - fi - message "Running Changelog '$option_command'" + message "Performing changelog operation '$option_command'" . $VENV_BIN/activate && antsibull-changelog "${option_command}" } # ------------------------------------------------------------------------------ -# Install an ibm_zos_core collection from galaxy (or how you have ansible.cfg configured) +# Install an ibm_zos_core collection from repository # ------------------------------------------------------------------------------ #->ac-install: -## Install collection 'ibm_zos_core' from a repository such as Galaxy. If no -## version is specified, latest GA level in repository will be installed. -## Usage: ac [--ac-install] [--version <version>] +## Install collection 'ibm_zos_core' from galaxy. If no version is specified, +## the latest GA level in repository will be installed. +## Usage: ac --ac-install [--version <int>] [--name <str>] ## Options: -## version - The collection version +## version (optional) +## - The collection version +## name (optional) +## - The location to install, valid locations are venv names or 'local'. +## - Default, latest venv, eg venv-2.xx +## - If value 'local', collection is installed on the host. ## Example: +## $ ac --ac-install --version 1.5.0-beta.1 --name venv-2.16 +## $ ac --ac-install --version 1.5.0-beta.1 --name local ## $ ac --ac-install --version 1.5.0-beta.1 ## $ ac --ac-install ac_install(){ option_version=$1 + option_name=$2 + galaxy_path="" # Empty installs to host default + git_init="" + base_name="" + + # There must be a parent git directory in non-default collection installations, see issues + # https://github.com/ansible/ansible/issues/68499#issuecomment-873660057 + # https://github.com/ansible/ansible/issues/63032 + # Work around is to perform a git init . and create an empty repo where the collection is installed, + # does not seem to to be an issue with the host installation thus far, only venv's. This is required + # for ansible-test sanity tests to run, else sanity fails with 'WARNING: All targets skipped.' + + if [ "$option_name" ]; then + if [ "$option_name" == "local" ];then + base_name="$HOME/.ansible/collections/ansible_collections" + else + VENV=`validate_venv $option_name` + galaxy_path="-p ${VENV}" + base_name=`basename $VENV` + git_init="git init ${VENV}/ansible_collections --quiet" + fi + else + galaxy_path="-p ${VENV}" + base_name=`basename $VENV` + git_init="git init ${VENV}/ansible_collections --quiet" + fi if [ "$option_version" ];then - message "Installing 'ibm.ibm_zos_core' collection version=${option_version}." - . $VENV_BIN/activate && $VENV_BIN/ansible-galaxy collection install -fc ibm.ibm_zos_core:${option_version} + message "Installing 'ibm.ibm_zos_core' collection version=${option_version} into ${base_name}." + . $VENV_BIN/activate && $VENV_BIN/ansible-galaxy collection install -fc ibm.ibm_zos_core:${option_version} ${galaxy_path} && ${git_init} else - message "Installing 'ibm.ibm_zos_core' lastet GA version." - . $VENV_BIN/activate && $VENV_BIN/ansible-galaxy collection install -fc ibm.ibm_zos_core + message "Installing 'ibm.ibm_zos_core' lastet GA version into ${VENV}." + . $VENV_BIN/activate && $VENV_BIN/ansible-galaxy collection install -fc ibm.ibm_zos_core ${galaxy_path} && ${git_init} fi } -# Run a make module doc +# ------------------------------------------------------------------------------ +# Generate module documentation, this will crate the *.rst in the local repo # ------------------------------------------------------------------------------ #->ac-module-doc: -## Runs make module-doc to generate the module documentation -## Usage: ac [--ac-module-doc] +## Generate module doc with options. Default behavior is to clean and then generate +## module doc in RST. All options are appended makefile targets clean and module-doc. +## If clean is seleceted, only clean is executed. +## Usage: ac --ac-module-doc [--command <str,str>] +## Options: +## command (optional) +## - Space or comma delimited make file targets to append to clean and module-doc. +## - If clean is selected it will be the only makefile target run. +## - choose from target 'role', 'html', 'clean'. +## - role, generate role documenation. +## - html, generate HTML and launch it in a local browser for viewing. +## - clean, remove staging directories used to generate HTML. +## - options are case sensitive. ## Example: +## $ ac --ac-module-doc --command html,role +## $ ac --ac-module-doc --command clean ## $ ac --ac-module-doc ac_module_doc(){ - message "Running make module-doc" - . $VENV_BIN/activate && cd docs/ && make module-doc + option_command=$1 + cmd="make clean; make module-doc;" + + # Invoke shell script helpers to set variables if host is not null + if [ ! -z "${option_command}" ]; then + + count_delim=`echo $option_command | awk -F "," '{print NF-1}'` + + if [ $count_delim -gt 0 ]; then + # Parse comma delimited string, clean is already in the base command so ignored. + for command in $(echo $option_command | sed "s/,/ /g"); do + if [ "$command" == "role" ];then + cmd="${cmd} make role-doc;" + elif [ "$command" == "html" ];then + cmd="${cmd} make html; make view-html;" + fi + done + else + if [ "$command" == "role" ];then + cmd="${cmd} make role-doc;" + elif [ "$command" == "html" ];then + cmd="${cmd} make html; make view-html;" + elif [ "$command" == "clean" ];then + cmd="make clean;" + fi + fi + fi + + # Must install collection on the control node to gen doc because doc needs + # the collections doc fragments + ac_build "local" + + # Force the venv-doc virtual environment designed for doc generation. + VENV_BIN="$VENV_HOME_MANAGED"/venv-doc/bin + message "Generating module documentation for branch '$GH_BRANCH'." + . $VENV_BIN/activate && export ANSIBLE_LIBRARY="$HOME/.ansible/collections/ansible_collections/ibm/ibm_zos_core/plugins/modules"; cd docs/ ; eval ${cmd} } # ------------------------------------------------------------------------------ -# Run ansible-lint on the locally checked out GH Branch +# Run ansible-lint on the local GH Branch # ------------------------------------------------------------------------------ #->ac-lint: ## Run ansible-lint on the local GH branch with the production profile. -## Usage: ac [--ac-lint] +## Usage: ac --ac-lint ## Example: ## $ ac --ac-lint ac_ansible_lint(){ - gh_branch=`git branch |grep "*" | cut -d" " -f2` - message "Linting with ansible-lint on GH branch: '$gh_branch'." + message "Linting with ansible-lint on branch: '$GH_BRANCH'." . $VENV_BIN/activate && $VENV_BIN/ansible-lint --profile production } # ------------------------------------------------------------------------------ # Run the sanity test using docker given python version else default to venv +# TODO: investigate validate: +# https://docs.ansible.com/ansible/latest/dev_guide/testing/sanity/validate-modules.html#extending-validate-modules # ------------------------------------------------------------------------------ #->ac-sanity: ## Run ansible-test in docker if the docker engine is running, else run them in ## a managed virtual environment using the installed python version. -## Usage: ac [--ac-sanity] [--version <version>] +## Usage: ac --ac-sanity [--version <float>] [--name <str>] ## Options: -## <version> - Only applies to when docker is running. -## - No version selection will run all available python versions in docker. -## - choose from '2.6', '2.7', '3.5', '3.6', '3.7', '3.8', '3.9', .... +## version (optional) +## - Only applies when a container is running. +## - choose from '2.6', '2.7', '3.5', '3.6', '3.7', '3.8', '3.9', .... +## - No version selection will run all available python versions in the container. +## name (optional) +## - The location of collection, valid locations are venv names or 'local'. +## - Default, latest venv, eg venv-2.xx +## - If value 'local', collection is installed on the host. ## Example: -## $ ac --ac-sanity +## $ ac --ac-sanity --version 3.10 --name local ## $ ac --ac-sanity --version 3.10 +## $ ac --ac-sanity ac_sanity(){ option_version=$1 + option_name=$2 + collection_path="" + base_name="" + + if [ "$option_name" ]; then + if [ "$option_name" == "local" ];then + collection_path="$HOME/.ansible/collections/ansible_collections/ibm/ibm_zos_core/" + # Must install collect to have parity with ansible-test + ac_build $option_name + else + VENV=`validate_venv $option_name` + collection_path="${VENV}/ansible_collections/ibm/ibm_zos_core/" + base_name=`basename $VENV` + # Must install collect to have parity with ansible-test + ac_build $option_name + fi + else + collection_path="${VENV}/ansible_collections/ibm/ibm_zos_core/" + base_name=`basename $VENV` + # Must install collect to have parity with ansible-test + ac_build + fi + if [ "${DOCKER_INFO}" == "0" ]; then if [ "${option_version}" ]; then - message "Running ansible-test with docker container and python version ${option_version}." - . $VENV_BIN/activate && export ANSIBLE_TEST_PREFER_PODMAN=1 && cd ~/.ansible/collections/ansible_collections/ibm/ibm_zos_core && \ - ${VENV_BIN}/ansible-test sanity --python ${option_version} --requirements --docker default && \ - cd ${CURR_DIR}; + message "Running ansible-test in a container with python ${option_version} and collection ${base_name}." + . $VENV_BIN/activate && export ANSIBLE_TEST_PREFER_PODMAN=1 && cd ${collection_path} && \ + ${VENV_BIN}/ansible-test sanity --python ${option_version} --requirements --docker default && \ + cd ${CURR_DIR}; else - message "Running ansible-test with docker container and all python versions." - . $VENV_BIN/activate && export ANSIBLE_TEST_PREFER_PODMAN=1 && cd ~/.ansible/collections/ansible_collections/ibm/ibm_zos_core && \ + message "Running ansible-test in a container all python versions and collection ${base_name}." + . $VENV_BIN/activate && export ANSIBLE_TEST_PREFER_PODMAN=1 && cd ${collection_path} && \ ${VENV_BIN}/ansible-test sanity --requirements --docker default && \ cd ${CURR_DIR}; fi @@ -423,37 +597,55 @@ ac_sanity(){ . $VENV_BIN/activate && VENV_PY_VER=`python3 --version | cut -d" " -f2 | cut -d"." -f1,2` message "Running ansible-test with managed python virtual environment: ${VENV}." - . $VENV_BIN/activate && cd ~/.ansible/collections/ansible_collections/ibm/ibm_zos_core && \ + . $VENV_BIN/activate && cd ${collection_path} && \ ${VENV_BIN}/ansible-test sanity --python ${VENV_PY_VER} --requirements && \ cd ${CURR_DIR}; fi } # ------------------------------------------------------------------------------ -# Run functional tests: +# Run collection test cases using the pytest -ziventory fixture. Setting --name, +# instructs the 'ac' tool which managed venv to use to run pytest. The collection +# being tested must reside in the same managed venv, there is no option today to +# choose the location of the collection and the named venv. For that we would need +# a --location option, thus locally installed collections are not supported, all +# collections must be installed into one of the managed venvs. +# TODO: If --location is to be supported, the ANSIBLE_LIBRARY and ANSIBLE_CONFIG , would need to point to localhost # ------------------------------------------------------------------------------ #->ac-test: -## Run the functional tests inside the managed python virtual environment. -## Usage: ac [--ac-test] [--host <host>] [--python <python>] [--zoau <zoau>] [--file <file>] [--debug <boolean>] +## Build local branch, install and run tests in the managed venv. +## Usage: ac --ac-test [--host <str>] [--python <float>] [--zoau <float>] [--file <str>] [--debug <boolean>] [--name <str>] ## Options: -## host - z/OS managed node to run test cases, no selection defaults to -## a host registerd to your user id (`whoami`). -## python - IBM enterprise python version, choices are 3.8, 3.9, 3.10, 3.11, -## no selection defauls to 3.8. -## zoau - ZOAU to use in testing, choices are 1.0.3, 1.1.1, 1.2.0, 1.2.1, -## no selection defaults to 1.1.1 . -## file - the absoulte path to a test suite to run, no selection -## defaults to all test suite running. -## test - a test case to run found in 'file', no selection -## defaults to all tests in file running. -## debug - enable debug for pytest (-s), choices are true and false +## host (optional) +## - z/OS managed node to run test cases, no selection defaults to +## a host registerd to your the user id (`whoami`). +## python (optional) +## - IBM enterprise python version, choices are 3.8, 3.9, 3.10, 3.11, 3.12 +## no selection defauls to 3.8. +## zoau (optional) +## - ZOAU to use in testing, choices are 1.0.3, 1.1.1, 1.2.0, 1.2.1, +## no selection defaults to 1.1.1 . +## file (optional) +## - the absoulte path to a test suite to run, no selection +## defaults to all test suite running. +## test (optional) +## - a test case to run found in 'file', no selection +## defaults to all tests in file running. +## debug (optional) +## - enable debug for pytest (-s), choices are true and false +## name (optional) +## - The managed venv to use to run the test instance. +## - Default, venv with largest value, eg venv-2.17 +## - A name must be a managed venv, lochost installatiosn are not supported. ## Example: -## $ ac --ac-test --host ec01150a --python 3.10 --zoau 1.2.2\ +## $ ac --ac-test --host ec01150a --python 3.11 --zoau 1.3.1\ ## $ --file tests/functional/modules/test_zos_operator_func.py --test test_zos_operator_positive_path --debug true -## $ ac --ac-test --host ec33012a --python 3.10 --zoau 1.2.2 --file tests/functional/modules/test_zos_operator_func.py --debug true +## $ ac --ac-test --host ec33012a --python 3.11 --zoau 1.3.1 --file tests/functional/modules/test_zos_operator_func.py --debug true +## $ ac --ac-test --host ec01130a --python 3.11 --zoau 1.3.1 --file invalid/test/returns/rc/of/4/to/stderr 2>>/dev/null +## $ ac --ac-test --host ec01130a --python 3.11 --zoau 1.3.1 --file tests/functional/modules/test_zos_tso_command_func.py --name venv-2.17 ## $ ac --ac-test --file tests/functional/modules/test_zos_operator_func.py --debug true ## $ ac --ac-test -## $ ac --ac-test --host ec01130a --python 3.10 --zoau 1.3.1 --file invalid/test/returns/rc/of/4/to/stderr 2>>/dev/null + ac_test(){ host=$1 python=$2 @@ -461,8 +653,19 @@ ac_test(){ file=$4 test=$5 debug=$6 + option_name=$7 - # Run test by node IDs, eg pytest -v tests/my-directory/test_demo.py::test_specific_function + # Check that a collection is installed in the named venv or default venv. + ac_version $option_name + + # If a --name has been passed, update the the VENV var path appropriately. + if [ "$option_name" ]; then + VENV=`validate_venv $option_name` + VENV_BIN=$VENV/bin + VENV_BASENAME=`basename $VENV` + fi + + # If test is parametized build a pytest string, eg pytest -v tests/my-directory/test_demo.py::test_specific_function if [ "$file" ] && [ "$test" ]; then file="${file}::${test}" fi @@ -478,16 +681,15 @@ ac_test(){ # Check configuration was created in venv/config.yml, else error and exit if test ! -e ${VENV}/config.yml; then - echo "No configuration was able to be created in ${VENV}/config.yml " - exit 1 + message_error "Unable to find test configration in ${VENV}/config.yml." fi if [ "$file" ]; then - . ${VENV_BIN}/activate && ${VENV_BIN}/pytest $CURR_DIR/${file} --ignore="${skip}" --host-pattern=all --zinventory=${VENV}/config.yml ${debug} >&2 ; echo $? >&1 + . ${VENV_BIN}/activate && export ANSIBLE_LIBRARY=$VENV/ansible_collections/ibm/ibm_zos_core/plugins/modules;export ANSIBLE_CONFIG=$VENV/ansible.cfg;${VENV_BIN}/pytest $CURR_DIR/${file} --ignore="${skip}" --host-pattern=all --zinventory=${VENV}/config.yml ${debug} >&2 ; echo $? >&1 else for file in `ls tests/functional/modules/*.py`; do if [ "$file" != "$skip" ]; then - . ${VENV_BIN}/activate && ${VENV_BIN}/pytest $CURR_DIR/${file} --ignore="${skip}" --host-pattern=all --zinventory=${VENV}/config.yml ${debug} >&2 ; echo $? >&1 + . ${VENV_BIN}/activate && export ANSIBLE_LIBRARY=$VENV/ansible_collections/ibm/ibm_zos_core/plugins/modules;export ANSIBLE_CONFIG=$VENV/ansible.cfg;${VENV_BIN}/pytest $CURR_DIR/${file} --ignore="${skip}" --host-pattern=all --zinventory=${VENV}/config.yml ${debug} >&2 ; echo $? >&1 fi done fi @@ -497,14 +699,20 @@ ac_test(){ } # ------------------------------------------------------------------------------ -# Run concurrent executor: +# Run concurrent executor for identified tests cases. Setting --name, +# instructs the 'ac' tool which managed venv to use to run pytest. The collection +# being tested must reside in the same managed venv, there is no option today to +# choose the location of the collection and the named venv. For that we would need +# a --location option, thus locally installed collections are not supported, all +# collections must be installed into one of the managed venvs. +# TODO: If --location is to be supported, the ANSIBLE_LIBRARY and ANSIBLE_CONFIG , would need to point to localhost # ------------------------------------------------------------------------------ #->test-concurrent: ## Run the conncurrent executor (CE) that can drive test cases to a cluster of hosts. ## Usage: ac --test-concurrent [--host <str, str>] [--user <str>] --python <str> [--zoau <str>] [--pythonpath <str>] ## [--volumes <str, str>] [--file <str, str>] [--skip <str, str>] [--itr <int>] [--replay <int>] ## [--timeout <int>] [--throttle <bool>] [--workers <int>] [--maxjob <int>] [--maxnode <int>] -## [--bal <int>] [--verbose <bool>] [--verbosity <int>] [--debug <bool>] [--extra <str>] +## [--bal <int>] [--verbose <bool>] [--verbosity <int>] [--debug <bool>] [--extra <str>] [--name <str>] ## Options: ## host (optional): ## - Space or comma delimited managed nodes to use. @@ -513,7 +721,7 @@ ac_test(){ ## - Only the host prefix is needed, e.g. 'ec01150a' ## user (optional): ## - Ansible user authorized to run tests on the managed node. -## python (requred): -> +## python (requred): ## - IBM enterprise python version, e.g 3.10', '3.11', '3.12' ## zoau (optional): ## - ZOAU version to use. e.g. 1.2.5, 1.3.0, 1.3.1 @@ -523,7 +731,7 @@ ac_test(){ ## - Default is to use the precompiled binary (until we establish wheel locations) ## volumes (optional): ## - The volumes to use with the test cases, overrides the auto volume assignment. -# - Defaults to, "222222,000000" +## - Defaults to, "222222,000000" ## file (optional): ## - Space or comma delimited test suites that should be included in the result. ## - A test suite is a collection of test cases in a file that starts with @@ -621,6 +829,10 @@ ac_test(){ ## - A zero return code means the overall execution has successed for the configuration submitted, ## where a non-zero return code represents the number of failed tests. ## - Default is False +## name (optional) +## - The managed venv to use to run the test instance. +## - Default, venv with largest value, eg venv-2.17 +## - A name must be a managed venv, lochost installatiosn are not supported. ## Example: ## $ ac --test-concurrent --host ec01130a --python 3.11 --zoau 1.3.0 ## $ ac --test-concurrent --host ec01130a --python 3.11 --zoau 1.3.0 --file test_zos_operator_func.py --debug true @@ -681,22 +893,61 @@ test_concurrent(){ debug="${19}" extra="${20}" returncode="${21}" + option_name="${22}" + + # Check that a collection is installed in the named venv or default venv. + ac_version $option_name + + # If a --name has been passed, update the the VENV var path appropriately. + if [ "$option_name" ]; then + VENV=`validate_venv $option_name` + VENV_BIN=$VENV/bin + VENV_BASENAME=`basename $VENV` + fi # Invoke shell script helpers to set variables if host is not null - if [ ! -z "${host}" ]; then - hostname=$($VENV/./venv.sh --host-credentials "${host}") + if [ ! -z "${host}" ]; then - if [ -z "${user}" ]; then - user=$($VENV/./venv.sh --user-credentials "${host}") - fi + count_delim=`echo $host | awk -F "," '{print NF-1}'` - if [ -z "${pass}" ]; then - pass=$($VENV/./venv.sh --pass-credentials "${host}") - fi + if [ $count_delim -gt 0 ]; then + first_entry=true + # Parse comma delimited string, for each entry perfom an operaion. + for host_entry in $(echo $host | sed "s/,/ /g"); do + if [ "$first_entry" == "true" ]; then + first_entry=false + + all_hosts=$($VENV/./venv.sh --host-credentials "${host_entry}") + + if [ -z "${user}" ]; then + user=$($VENV/./venv.sh --user-credentials "${host_entry}") + fi + + if [ -z "${pass}" ]; then + pass=$($VENV/./venv.sh --pass-credentials "${host_entry}") + fi + else + host_entry=$($VENV/./venv.sh --host-credentials "${host_entry}") + all_hosts="${all_hosts},${host_entry}" + fi + done - host=$hostname + host=$all_hosts + else + host_entry=$host + host=$($VENV/./venv.sh --host-credentials "${host_entry}") + + if [ -z "${user}" ]; then + user=$($VENV/./venv.sh --user-credentials "${host_entry}") + fi + + if [ -z "${pass}" ]; then + pass=$($VENV/./venv.sh --pass-credentials "${host_entry}") + fi + fi fi + # Convert the python from short notation to absolute path python=$($VENV/./mounts.sh --get-python-mount "${python}") @@ -843,8 +1094,9 @@ test_concurrent(){ # "verbosity=${verbosity} debug=${debug} extra=${extra} returncode=${returncode}" # read _host _user _pass <<<$($VENV/./venv.sh --host-credentials "${host}") + #export ANSIBLE_LIBRARY=$VENV/ansible_collections/ibm/ibm_zos_core/plugins/modules;export ANSIBLE_CONFIG=$VENV/ansible.cfg; message "Concurrent executor testing is evaluating supplied options and preparing to execute." - . $VENV_BIN/activate && python3 $VENV/ce.py\ + . $VENV_BIN/activate && export ANSIBLE_LIBRARY=$VENV/ansible_collections/ibm/ibm_zos_core/plugins/modules;export ANSIBLE_CONFIG=$VENV/ansible.cfg;python3 $VENV/ce.py\ --hostnames "${host}"\ --user "${user}"\ --pyz "${python}"\ @@ -873,20 +1125,31 @@ test_concurrent(){ # ------------------------------------------------------------------------------ #->ac-test-config: ## Disply the contents of configuration file used to run functional tests. -## Usage: ac [--ac-test-config] +## Usage: ac --ac-test-config [--name <str>] +## Options: +## name (optional) +## - The test configuration location which must be a managed venv name, e.g 'venv-2.16' +## - The default location is the latest managed venv. ## Example: +## $ ac --ac-test-config --name venv-2.17 ## $ ac --ac-test-config ac_test_config(){ + option_name=$1 + if [ "$option_name" ]; then + VENV=`validate_venv $option_name` + base_name=`basename $VENV` + fi + if [ -f "${VENV}/config.yml" ]; then - message "Print test configuration used for functional testing." + message "Configuration for automated testing in ${base_name}." cat ${VENV}/config.yml; else - message "No configuration was found, run '--ac-test' to generate a configuration." + message_error "Configuration was found in ${option_name}, run '--ac-test' to generate a configuration." fi } # ------------------------------------------------------------------------------ -# Get a list of all test cases from the dependency finder depending on options +# Get a list of test cases from the dependency finder depending on options. # ------------------------------------------------------------------------------ #->test-dep-find: ## Determine which test suites to run given the options selected. @@ -905,7 +1168,7 @@ ac_test_config(){ ## - Default is to skip 'test_module_security.py', this can not be removed but ## it can be replaced with another test or tests. ## pretty (optional): -## - Pretty formatting where each value is a line follwoed by a line feed, +## - Pretty formatting where each value is a line followed by a line feed, ## otherwise a list[str] format is returned. ## Example: ## $ ac --test-dep-find --branch main --skip "test_module_security.py,test_zos_apf_func.py" --pretty False @@ -916,7 +1179,6 @@ ac_test_config(){ ac_test_dep_finder(){ branch=$1 skip="$2" - gh_branch=`git branch |grep "*" | cut -d" " -f2` # Convert any comma separated strings to space delimited as needed by the tooling. first_entry=true @@ -938,7 +1200,7 @@ ac_test_dep_finder(){ message "Compiling a list functional and unit tests suites excluding skipped tests." . $VENV_BIN/activate && result=`$VENV_BIN/python ${VENV}/dependencyfinder.py -p ${CURRENT_DIR} -a -s "${skip}"` else - message "Compiling a list dependent tests cases to run based on the changes between local branch '$gh_branch' and target branch '$branch', excluding skipped tests." + message "Compiling a list dependent tests cases to run based on the changes between local branch '$GH_BRANCH' and target branch '$branch', excluding skipped tests." . $VENV_BIN/activate && result=`$VENV_BIN/python ${VENV}/dependencyfinder.py -p ${CURRENT_DIR} -b ${branch} -s "${skip}" -m` fi @@ -990,7 +1252,9 @@ ac_test_dep_finder(){ ac_test_pytest_finder(){ file=$1 skip="$2" - slience_messages="$4" # Undocumented internal interface option to disable INFO messages + # Uppercase value for --pretty + pretty=`echo $3 | tr '[:lower:]' '[:upper:]'` + slience_messages="$4" # This is an undocumented option to disable INFO messages first_entry=true file_tests="" @@ -1071,8 +1335,6 @@ ac_test_pytest_finder(){ . $VENV_BIN/activate && result=`export PYTHONPATH=$VENV;$VENV_BIN/python -c "from modules.utils import get_test_cases;from os import environ;\ tests = get_test_cases(paths=environ['env_file'], skip=environ['env_skip']);all=','.join(tests);print(all)"` - # Uppercase value for --pretty - pretty=`echo $3 | tr '[:lower:]' '[:upper:]'` if [ "$pretty" == "TRUE" ];then echo $result |tr ',' '\n'; else @@ -1086,14 +1348,49 @@ ac_test_pytest_finder(){ # Check the version of the ibm_zos_core collection installed # ------------------------------------------------------------------------------ #->ac-version: -## Obtain the version of the collection installed on the controller. -## Usage: ac [--ac-version] +## List the version of the collection installed in a managed venv or the controller. +## Usage: ac --ac-version [--name <str>] +## Options: +## name (optional) +## - The test configuration location which must be a managed venv name, e.g 'venv-2.16' +## - The default location is the latest managed venv. +## Options: +## name (optional) +## - The location the collection is installed. +## - The default is the latest venv. +## - If value 'local' is set, it will search localhost for the collection. ## Example: +## $ ac --ac-version --name venv-2.17 ## $ ac --ac-version ac_version(){ - message "Ansible collection version installed on this controller." - cat ~/.ansible/collections/ansible_collections/ibm/ibm_zos_core/MANIFEST.json \ - | grep version|cut -d ':' -f 2 | sed 's/,*$//g' | tr -d '"'; + option_name=$1 + collection_path="" # No value, installs to host default + base_name="" + + + if [ "$option_name" ]; then + if [ "$option_name" == "local" ];then + collection_path="$HOME/.ansible/collections/ansible_collections" + base_name=`hostname` + else + VENV=`validate_venv $option_name` + collection_path="${VENV}/ansible_collections" + base_name=`basename $VENV` + fi + else + collection_path="${VENV}/ansible_collections" + base_name=`basename $VENV` + fi + + manifest=$(cat ${collection_path}/ibm/ibm_zos_core/MANIFEST.json > /dev/null 2>&1) + rc=$(echo $?) + if [ $rc -eq 0 ]; then + message "Discovered ibm_zos_core collection installation in ${base_name}." + cat ${collection_path}/ibm/ibm_zos_core/MANIFEST.json \ + | grep version|cut -d ':' -f 2 | sed 's/,*$//g' | tr -d '"'; + else + message_error "Unable to find a collection at $collection_path, consider running 'ac --install --name $option_name'." + fi } # ------------------------------------------------------------------------------ @@ -1163,37 +1460,76 @@ file_decrypt(){ chmod 700 $option_out_file } - -# Cleanup and remove geneated doc for the collection if its not going to be -# checked in -# Example: -# $ make cleanDoc -clean(){ - echo Todo - # @. $(VENV_BIN)/activate && make -C docs clean -} - -clean_doc(){ - echo Todo - # cleanDoc - # @. $(VENV_BIN)/activate && make -C docs clean -} - #->host-auth: ## Copy your ssh key to a `host` or the default which is your username. -## Usage: ac [--host-auth] [--host <host>] +## Usage: ac --host-auth [--host <str,str>] ## Options: ## host - z/OS managed node, no selection defaults to ## a host registerd to your user id (`whoami`). +## Options: +## host (optional): +## - Space or comma delimited string of host names, eg 'ec1234a,ec4321a' +## - Defaults to a host registered to your local laptops username , eg `whoami` +## - If more than one host name is set, the command will concurrently connect +## to each host, and 'wait' for completion and print the results. ## Example: +## $ ac --host-auth --host ec33012a,ec03190a,ec01135a ## $ ac --host-auth --host ec33012a host_auth(){ option_host=$1 if [ ! "$option_host" ]; then host=`whoami` + else + count_delim=`echo $option_host | awk -F "," '{print NF-1}'` + + if [ $count_delim -gt 0 ]; then + pids="" + first_entry=true + # Parse comma delimited string, for each entry perfom an operaion. + for host in $(echo $option_host | sed "s/,/ /g"); do + message "Copy SSH keys to the managed node $host" + touch /tmp/auth.${host} + # Put command in background + $VENV/./venv.sh --cert $host >/tmp/auth.${host} 2>&1 & + # Capture the pid for identification later + pid=$! + # Sleep helps with target file does not exit + sleep .5 + + mv /tmp/auth.${host} /tmp/auth.${pid}.${host} + if [ "$first_entry" == "true" ]; then + first_entry=false + pids="${pid}" + else + pids="${pids},${pid}" + fi + done + + message "Processes peforming authenication [ $pids ]." + message "Waiting on processes to complete remote authentication." + # Pefrom a global wait here to force all the mounts to finish (can also use jobs command to show more) + wait + + # Wait is now done, run trhough the pids and evaluate + for p in $(echo $pids | sed "s/,/ /g"); do + # '#?' after 'ps -p' will return 0 if the process is running, otherwise 1 + auth_rc=$(ps -p $p >/dev/null;echo $?) + auth_output=`ls /tmp/auth.${p}*` + auth_output_pid=`echo $auth_output | cut -d'.' -f2` + auth_output_host=`echo $auth_output | cut -d'.' -f3` + if [ $auth_rc -eq 1 ]; then + message "Authentication for $auth_output_host completed." + else + message "Unable to determine authenciation status for ${auth_output_host}." + fi + cat $auth_output + rm -rf ${auth_output} + done + else + message "Copy SSH keys to the managed node $option_host" + $VENV/./venv.sh --cert $option_host + fi fi - message "Copy SSH keys to the managed node $option_host" - $VENV/./venv.sh --cert $option_host } # ------------------------------------------------------------------------------ @@ -1201,23 +1537,75 @@ host_auth(){ # mount script. Should automatically authenticate your ssh key. # ------------------------------------------------------------------------------ #->host-mount: -## Copy mount and profile scripts in users home directory and excute the mount. -## Usage: ac [--host-mount --host <host>] +## Copy helper scripts to remote machine and then perform mounts. +## Usage: ac --host-mount [--host <str,str>] ## Options: -## host - z/OS managed node id, no selection defaults to -## a host registerd to your user id (`whoami`). +## host (optional): +## - Space or comma delimited string of host names, eg 'ec1234a,ec4321a' +## - Defaults to a host registered to your local laptops username , eg `whoami` +## - If more than one host name is set, the command will concurrently connect +## to each host, and 'wait' for completion and print the results. ## Example: +## $ ac --host-mount --host ec33012a,ec03190a,ec01135a ## $ ac --host-mount --host ec33012a host_mount(){ option_host=$1 if [ ! "$option_host" ]; then option_host=`whoami` + else + count_delim=`echo $option_host | awk -F "," '{print NF-1}'` + + if [ $count_delim -gt 0 ]; then + pids="" + first_entry=true + # Parse comma delimited string, for each entry perfom an operaion. + for host in $(echo $option_host | sed "s/,/ /g"); do + host_auth $host + rc=$? + if [ ${rc} -eq 0 ]; then + message "Copying mount.env, mount.sh, profile.sh scripts to host $host and then performing mounts." + touch /tmp/mount.${host} + $VENV/./venv.sh --host-setup-files $host $VENV/"mounts.env" $VENV/"mounts.sh" $VENV/"profile.sh" >/tmp/mount.${host} 2>&1 & + pid=$! + # Sleep helps with target file does not exit + sleep .5 + mv /tmp/mount.${host} /tmp/mount.${pid}.${host} + + if [ "$first_entry" == "true" ];then + first_entry=false + pids="${pid}" + else + pids="${pids},${pid}" + fi + fi + done + + message "Processes peforming mounts [ $pids ]" + message "Waiting on processes to complete remote mounts." + # Pefrom a global wait here to force all the mounts to finish + wait + + # Wait is now done, run trhough the pids and evaluate + for p in $(echo $pids | sed "s/,/ /g"); do + # '#?' after 'ps -p' will return 0 if the process is running, otherwise 1 + mount_rc=$(ps -p $p >/dev/null;echo $?) + mount_output=`ls /tmp/mount.${p}*` + mount_output_pid=`echo $mount_output | cut -d'.' -f2` + mount_output_host=`echo $mount_output | cut -d'.' -f3` + if [ $mount_rc -eq 1 ]; then + message "Mounts for $mount_output_host completed." + else + message "Unable to determine status for ${mount_output_host}." + fi + cat ${mount_output} + rm -rf ${mount_output} + done + else + host_auth $option_host + message "Copying mount.env, mount.sh, profile.sh scripts to host $option_host and then performing mounts." + $VENV/./venv.sh --host-setup-files $option_host $VENV/"mounts.env" $VENV/"mounts.sh" $VENV/"profile.sh" + fi fi - host_auth $option_host - message "Copying mount.env, mount.sh, profile.sh scripts to host $option_host and then mounting shared drive." - #$VENV/./hosts.sh --cert $1 - $VENV/./venv.sh --host-setup-files $option_host $VENV/"mounts.env" $VENV/"mounts.sh" $VENV/"profile.sh" - #$VENV/./hosts.sh --mount $1 $VENV/"mounts.env" $VENV/"mounts.sh" $VENV/"shell-helper.sh" $VENV/"profile.sh" } # ------------------------------------------------------------------------------ @@ -1225,7 +1613,7 @@ host_mount(){ # ------------------------------------------------------------------------------ #->host-mounts: ## Print the ZOAU and Python mount tables used by this utility. -## Usage: ac [--host-mounts] +## Usage: ac --host-mounts ## Example: ## $ ac --host-mounts host_mounts(){ @@ -1234,28 +1622,48 @@ host_mounts(){ } # ------------------------------------------------------------------------------ -# Print the z/OS node IDs and hostnames +# Print the z/OS host IDs and hostnames # ------------------------------------------------------------------------------ #->host-nodes: -## Display the z/OS node IDs and hostnames -## Usage: ac [--host-nodes --all <boolean>] +## Print all managed nodes hostnames. +## Usage: ac --host-nodes [--all <bool>] [--pretty <bool>] +## Options: +## all - Print all managed nodes hostnames. ## Options: -## all - A list of all nodes, default is true. If all is set to false, -## only a list space delimited nodes are returned. +## all (optional): +## - Option will control what content is printed. +## - If true (default), both the ID and hostname are printed as space delimited. +## - If false, only the hostnames marked for production are printed as space delimited. +## pretty (optional): +## - Pretty formatting where each value is a line followed by a line feed, +## otherwise a list[str] format is returned. ## Example: +## $ ac --host-nodes [--all <bool>] [--pretty <bool>] ## $ ac --host-nodes ## $ ac --host-nodes --all false host_nodes(){ + # Uppercase value for --all + all=`echo $1 | tr '[:lower:]' '[:upper:]'` + pretty=`echo $2 | tr '[:lower:]' '[:upper:]'` - - if [ "$all" == "false" ]; then - message "Print z/OS production hostnames." + if [ "$all" == "FALSE" ]; then + message "Producution managed hosts." result=`$VENV/venv.sh --targets-production` + if [ "$pretty" == "TRUE" ];then + echo $result |tr ' ' '\n'; + else + echo [$result] |tr ' ' ', '; + fi else - message "Print z/OS node IDs and hostnames." + message "All managed hosts." result=`$VENV/venv.sh --targets` + if [ "$pretty" == "TRUE" ];then + echo $result |sed 's/ID/\n&/g' + else + result=`echo $result |sed 's/ID/,&/g' | cut -c2-` + echo [$result] + fi fi - echo $result } # ------------------------------------------------------------------------------ @@ -1649,36 +2057,36 @@ done if [ "$option_submitted" ] && [ "$option_submitted" = "--ac-bandit" ] ; then ac_bandit $level elif [ "$option_submitted" ] && [ "$option_submitted" = "--ac-build" ] ; then - ac_build + ac_build $name elif [ "$option_submitted" ] && [ "$option_submitted" = "--ac-galaxy-importer" ] ; then ac_galaxy_importer elif [ "$option_submitted" ] && [ "$option_submitted" = "--ac-changelog" ] ; then - ac_changelog $command + ac_changelog ${command:="lint"} elif [ "$option_submitted" ] && [ "$option_submitted" = "--ac-module-doc" ] ; then - ac_module_doc + ac_module_doc ${command:=""} elif [ "$option_submitted" ] && [ "$option_submitted" = "--ac-install" ] ; then - ac_install $version + ac_install $version $name elif [ "$option_submitted" ] && [ "$option_submitted" = "--ac-lint" ] ; then ac_ansible_lint elif [ "$option_submitted" ] && [ "$option_submitted" = "--ac-sanity" ] ; then - ac_sanity $version + ac_sanity $version $name elif [ "$option_submitted" ] && [ "$option_submitted" = "--ac-test" ] ; then - ac_test ${host:=""} ${python:=""} ${zoau:=""} ${file:=""} ${test:=""} ${debug:=""} + ac_test ${host:=""} ${python:=""} ${zoau:=""} ${file:=""} ${test:=""} ${debug:=""} ${name:=""} elif [ "$option_submitted" ] && [ "$option_submitted" = "--test-concurrent" ] ; then - test_concurrent ${host:=""} ${user:=""} ${python:=""} ${zoau:=""} ${pythonpath:=""}\ - ${volumes:="222222,000000"} ${file:="functional/*,unit/*"} "${skip:="test_module_security.py"}"\ - ${itr:="50"} ${replay:="5"} ${timeout:="300"} ${throttle:="True"} ${workers:="1"}\ - ${maxjob:="10"} ${maxnode:="30"} ${bal:="10"} ${verbose:="False"} ${verbosity:="0"}\ - ${debug:="False"} ${extra:="cd `pwd`"} ${returncode:="False"} + test_concurrent "${host:=""}" "${user:=""}" "${python:=""}" "${zoau:=""}" "${pythonpath:=""}"\ + "${volumes:="222222,000000"}" "${file:="functional/*,unit/*"}" "${skip:="test_module_security.py"}"\ + "${itr:="50"}" "${replay:="5"}" "${timeout:="300"}" "${throttle:="True"}" "${workers:="1"}"\ + "${maxjob:="10"}" "${maxnode:="30"}" "${bal:="10"}" "${verbose:="False"}" "${verbosity:="0"}"\ + "${debug:="False"}" "${extra:="cd `pwd`"}" "${returncode:="False"}" "${name:=""}" elif [ "$option_submitted" ] && [ "$option_submitted" = "--ac-test-config" ] ; then - ac_test_config + ac_test_config ${name:=""} elif [ "$option_submitted" ] && [ "$option_submitted" = "--test-dep-find" ] ; then - ac_test_dep_finder ${branch:=""} "${skip:="test_module_security.py"}" ${pretty:="true"} + ac_test_dep_finder "${branch:=""}" "${skip:="test_module_security.py"}" ${pretty:="true"} elif [ "$option_submitted" ] && [ "$option_submitted" = "--test-pytest-find" ] ; then ac_test_pytest_finder ${file:="functional/*,unit/*"} "${skip:="test_module_security.py"}"\ ${pretty:="true"} elif [ "$option_submitted" ] && [ "$option_submitted" = "--ac-version" ] ; then - ac_version + ac_version ${name:=""} elif [ "$option_submitted" ] && [ "$option_submitted" = "--file-encrypt" ] ; then file_encrypt $file $out_file $password elif [ "$option_submitted" ] && [ "$option_submitted" = "--file-decrypt" ] ; then @@ -1690,7 +2098,7 @@ elif [ "$option_submitted" ] && [ "$option_submitted" = "--host-mount" ] ; then elif [ "$option_submitted" ] && [ "$option_submitted" = "--host-mounts" ] ; then host_mounts elif [ "$option_submitted" ] && [ "$option_submitted" = "--host-nodes" ] ; then - host_nodes ${all} + host_nodes ${all:="true"} elif [ "$option_submitted" ] && [ "$option_submitted" = "--venv-setup" ] ; then venv_setup $password elif [ "$option_submitted" ] && [ "$option_submitted" = "--venv-start" ] ; then diff --git a/docs/Makefile b/docs/Makefile index 5f412c510..1a39ed456 100644 --- a/docs/Makefile +++ b/docs/Makefile @@ -141,32 +141,6 @@ module-doc: mkdir -p source/modules; \ fi - # @if ! test -d ../plugins/modules/rexx_module_doc; then \ - # echo "Make ../plugins/modules/rexx_module_doc directory to extract REXX doc into temporary file."; \ - # mkdir -p ../plugins/modules/rexx_module_doc; \ - # else \ - # echo "Delete ../plugins/modules/rexx_module_doc directory used to extract REXX doc into temporary file."; \ - # rm -rf ../plugins/modules/rexx_module_doc; \ - # echo "Make ../plugins/modules/rexx_module_doc directory to extract REXX doc into temporary file."; \ - # mkdir -p ../plugins/modules/rexx_module_doc; \ - # fi - - # @for rexx_module in `ls ../plugins/modules/*rexx`; do\ - # REXX_FILE=`basename $$rexx_module .rexx`; \ - # echo "Extracting documentation for module $$REXX_FILE into ../plugins/modules/rexx_module_doc/$$REXX_FILE.py"; \ - # touch ../plugins/modules/rexx_module_doc/$$REXX_FILE.py; \ - # sed -n "/DOCUMENTATION = '''/,/'''/p" ../plugins/modules/$$REXX_FILE.rexx >> ../plugins/modules/rexx_module_doc/$$REXX_FILE.py; \ - # sed -n "/EXAMPLES = '''/,/'''/p" ../plugins/modules/$$REXX_FILE.rexx >> ../plugins/modules/rexx_module_doc/$$REXX_FILE.py; \ - # sed -n "/RETURN = '''/,/'''/p" ../plugins/modules/$$REXX_FILE.rexx >> ../plugins/modules/rexx_module_doc/$$REXX_FILE.py; \ - # echo "Generating ReStructuredText for module $$REXX_FILE inot source/modules/$$REXX_FILE.rst"; \ - # ansible-doc-extractor --template templates/module.rst.j2 source/modules ../plugins/modules/rexx_module_doc/$$REXX_FILE.py; \ - # done - - # @if test -d ../plugins/modules/rexx_module_doc; then \ - # echo "Delete ../plugins/modules/rexx_module_doc directory used to extract REXX doc into temporary file."; \ - # rm -rf ../plugins/modules/rexx_module_doc; \ - # fi - @if test -e ../plugins/modules/__init__.py; then \ echo "Rename file '../plugins/modules/__init__.py' to ../plugins/modules/__init__.py.skip to avoid reading empty python file.'"; \ mv ../plugins/modules/__init__.py ../plugins/modules/__init__.py.skip; \ diff --git a/scripts/ce.py b/scripts/ce.py index 75bb142ca..8304b9fe4 100644 --- a/scripts/ce.py +++ b/scripts/ce.py @@ -444,7 +444,7 @@ def get_command(self) -> str: Example Return: pytest tests/functional/modules/test_zos_job_submit_func.py::test_job_submit_pds[location1]\ - --host-pattern=allNoneNone --zinventory-raw='{"host": "ec33025a.vmec.svl.ibm.com",\ + --host-pattern=all --zinventory-raw='{"host": "ec33025a.vmec.svl.ibm.com",\ "user": "omvsadm", "zoau": "/zoau/v1.3.1",\ "pyz": "/allpython/3.10/usr/lpp/IBM/cyp/v3r10/pyz",\ "pythonpath": "/zoau/v1.3.1/lib/3.10", "extra_args": {"volumes": ["222222", "000000"]}}' @@ -453,9 +453,7 @@ def get_command(self) -> str: node_temp = self._nodes.get(self.get_hostname()) node_inventory = node_temp.get_inventory_as_string() - return f"""pytest {self._testcase} --host-pattern={self._hostpattern} - {self._capture if self._capture else ""} - {self._verbose if self._verbose else ""} --zinventory-raw='{node_inventory}'""" + return f"""pytest {self._testcase} --host-pattern={self._hostpattern}{self._capture if self._capture else ""}{self._verbose if self._verbose else ""} --zinventory-raw='{node_inventory}'""" def get_hostnames(self) -> list[str]: diff --git a/scripts/configurations/requirements-2.12.env b/scripts/configurations/requirements-2.12.env index 4a0516fc4..193a269e2 100644 --- a/scripts/configurations/requirements-2.12.env +++ b/scripts/configurations/requirements-2.12.env @@ -25,7 +25,7 @@ requirements=( "ansible-core:2.12.10" "pylint" "rstcheck" -"ansible-lint:24.7.0" +"ansible-lint" ) python=( diff --git a/scripts/configurations/requirements-2.17.env b/scripts/configurations/requirements-2.17.env index c0a7373db..4d50053a0 100644 --- a/scripts/configurations/requirements-2.17.env +++ b/scripts/configurations/requirements-2.17.env @@ -22,7 +22,7 @@ # ============================================================================== requirements=( -"ansible-core:2.17.2" +"ansible-core:2.17.0" "pylint" "rstcheck" "ansible-lint:24.7.0" diff --git a/scripts/configurations/requirements-common.env b/scripts/configurations/requirements-common.env index 8c787701b..8abb01d53 100644 --- a/scripts/configurations/requirements-common.env +++ b/scripts/configurations/requirements-common.env @@ -124,6 +124,7 @@ requirements=( "zipp" "paramiko" "prettytable" +"setuptools" ) # This reduced list caused some issues with pytest seeing our conftest plugin diff --git a/scripts/configurations/requirements-doc.env b/scripts/configurations/requirements-doc.env new file mode 100644 index 000000000..a03704c09 --- /dev/null +++ b/scripts/configurations/requirements-doc.env @@ -0,0 +1,106 @@ +#!/bin/sh +# ============================================================================== +# Copyright (c) IBM Corporation 2024 +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== + +# ============================================================================== +# File name must adhere to reqs-<ansible-major-minor>.sh, supporting concurrent +# patches could come and to do that you would want to not use unique files for +# each ansible version but instead have this file provide the meta-data such +# the sourcing scrips know to create additional ansible venvs and maybe even +# clean up if they are removed from this file. +# eg venvs = [requirements, requirements2, requirments3] +# ============================================================================== + +requirements=( +"alabaster:0.7.12" +"ansible:4.0.0" +"ansible-core:2.11.1" +"ansible-doc-extractor:0.1.5" +"ansible-lint:5.0.7" +"appdirs:1.4.3" +"astroid:2.2.5" +"attrs:20.2.0" +"Babel:2.8.0" +"bleach:3.3.0" +"bleach-allowlist:1.0.3" +"bleach-whitelist:0.0.11" +"bracex:2.1.1" +"certifi:2020.4.5.1" +"cffi:1.16.0" +"chardet:3.0.4" +"click:7.1.2" +"colorama:0.4.3" +"colorclass:2.2.0" +"commonmark:0.9.1" +"cryptography:41.0.7" +"distlib:0.3.0" +"docutils:0.16" +"enrich:1.2.6" +"filelock:3.0.12" +"flake8:3.8.4" +"galaxy-importer:0.3.1" +"idna:2.9" +"imagesize:1.2.0" +"isort:4.3.15" +"Jinja2:2.11.3" +"jmespath:0.10.0" +"lazy-object-proxy:1.3.1" +"Markdown:3.3" +"MarkupSafe:1.1.1" +"mccabe:0.6.1" +"mdToRst:1.1.0" +"packaging:20.3" +"pathspec:0.7.0" +"pip:21.2.4" +"pycodestyle:2.6.0" +"pycparser:2.20" +"pyflakes:2.2.0" +"Pygments:2.6.1" +"pylint:2.3.1" +"pyparsing:2.4.7" +"pytz:2020.1" +"PyYAML:5.3.1" +"requests:2.23.0" +"resolvelib:0.5.4" +"rich:10.1.0" +"rstcheck:3.3.1" +"ruamel.yaml:0.16.12" +"semantic-version:2.8.5" +"setuptools:58.0.4" +"six:1.14.0" +"snowballstemmer:2.0.0" +"Sphinx:3.0.3" +"sphinx-jinja:1.1.1" +"sphinx-rtd-theme:0.4.3" +"sphinx-versions:1.0.0" +"sphinxcontrib-applehelp:1.0.2" +"sphinxcontrib-devhelp:1.0.2" +"sphinxcontrib-htmlhelp:1.0.3" +"sphinxcontrib-jsmath:1.0.1" +"sphinxcontrib-qthelp:1.0.3" +"sphinxcontrib-serializinghtml:1.1.4" +"tenacity:7.0.0" +"typed-ast:1.5.5" +"typing-extensions:3.7.4.3" +"urllib3:1.25.11" +"virtualenv:20.0.15" +"voluptuous:0.11.7" +"wcmatch:8.1.2" +"webencodings:0.5.1" +"wrapt:1.11.1" +"yamllint:1.21.0" +) + +python=( +"<=:python:3.9" +) diff --git a/scripts/hosts.env b/scripts/hosts.env index 0a5ccc70e..2745670db 100644 --- a/scripts/hosts.env +++ b/scripts/hosts.env @@ -28,25 +28,25 @@ host_list_str="ddimatos:ec33017a${HOST_SUFFIX}:${USER}:${PASS}:development "\ "iamorenosoto:ec01134a${HOST_SUFFIX}:${USER}:${PASS}:development "\ "fernando:ec01135a${HOST_SUFFIX}:${USER}:${PASS}:development "\ "ec01105a:ec01105a${HOST_SUFFIX}:${USER}:${PASS}:development "\ -"ec01129a:ec01129a${HOST_SUFFIX}:${USER}:${PASS}:development "\ +"ec01129a:ec01129a${HOST_SUFFIX}:${USER}:${PASS}:production "\ "ec01130a:ec01130a${HOST_SUFFIX}:${USER}:${PASS}:production "\ -"ec01131a:ec01131a${HOST_SUFFIX}:${USER}:${PASS}:development "\ +"ec01131a:ec01131a${HOST_SUFFIX}:${USER}:${PASS}:production "\ "ec01132a:ec01132a${HOST_SUFFIX}:${USER}:${PASS}:production "\ -"ec01133a:ec01133a${HOST_SUFFIX}:${USER}:${PASS}:development "\ -"ec01134a:ec01134a${HOST_SUFFIX}:${USER}:${PASS}:development "\ -"ec01135a:ec01135a${HOST_SUFFIX}:${USER}:${PASS}:development "\ -"ec01136a:ec01136a${HOST_SUFFIX}:${USER}:${PASS}:development "\ -"ec01137a:ec01137a${HOST_SUFFIX}:${USER}:${PASS}:development "\ -"ec01138a:ec01138a${HOST_SUFFIX}:${USER}:${PASS}:development "\ -"ec01139a:ec01139a${HOST_SUFFIX}:${USER}:${PASS}:development "\ -"ec01140a:ec01140a${HOST_SUFFIX}:${USER}:${PASS}:development "\ -"ec01145a:ec01145a${HOST_SUFFIX}:${USER}:${PASS}:development "\ -"ec01146a:ec01146a${HOST_SUFFIX}:${USER}:${PASS}:development "\ -"ec01147a:ec01147a${HOST_SUFFIX}:${USER}:${PASS}:development "\ -"ec01148a:ec01148a${HOST_SUFFIX}:${USER}:${PASS}:development "\ +"ec01133a:ec01133a${HOST_SUFFIX}:${USER}:${PASS}:production "\ +"ec01134a:ec01134a${HOST_SUFFIX}:${USER}:${PASS}:production "\ +"ec01135a:ec01135a${HOST_SUFFIX}:${USER}:${PASS}:production "\ +"ec01136a:ec01136a${HOST_SUFFIX}:${USER}:${PASS}:production "\ +"ec01137a:ec01137a${HOST_SUFFIX}:${USER}:${PASS}:production "\ +"ec01138a:ec01138a${HOST_SUFFIX}:${USER}:${PASS}:production "\ +"ec01139a:ec01139a${HOST_SUFFIX}:${USER}:${PASS}:production "\ +"ec01140a:ec01140a${HOST_SUFFIX}:${USER}:${PASS}:production "\ +"ec01145a:ec01145a${HOST_SUFFIX}:${USER}:${PASS}:production "\ +"ec01146a:ec01146a${HOST_SUFFIX}:${USER}:${PASS}:production "\ +"ec01147a:ec01147a${HOST_SUFFIX}:${USER}:${PASS}:production "\ +"ec01148a:ec01148a${HOST_SUFFIX}:${USER}:${PASS}:production "\ "ec01149a:ec01149a${HOST_SUFFIX}:${USER}:${PASS}:production "\ -"ec01150a:ec01150a${HOST_SUFFIX}:${USER}:${PASS}:development "\ -"ec01151a:ec01151a${HOST_SUFFIX}:${USER}:${PASS}:development "\ +"ec01150a:ec01150a${HOST_SUFFIX}:${USER}:${PASS}:production "\ +"ec01151a:ec01151a${HOST_SUFFIX}:${USER}:${PASS}:production "\ "ec01152a:ec01152a${HOST_SUFFIX}:${USER}:${PASS}:production "\ "ec01153a:ec01153a${HOST_SUFFIX}:${USER}:${PASS}:production "\ "ec01154a:ec01154a${HOST_SUFFIX}:${USER}:${PASS}:production "\ @@ -56,6 +56,21 @@ host_list_str="ddimatos:ec33017a${HOST_SUFFIX}:${USER}:${PASS}:development "\ "ec03129a:ec03129a${HOST_SUFFIX}:${USER}:${PASS}:development "\ "ec03173a:ec03173a${HOST_SUFFIX}:${USER}:${PASS}:development "\ "ec03175a:ec03175a${HOST_SUFFIX}:${USER}:${PASS}:development "\ +"ec03190a:ec03190a${HOST_SUFFIX}:${USER}:${PASS}:production "\ +"ec03191a:ec03191a${HOST_SUFFIX}:${USER}:${PASS}:production "\ +"ec03192a:ec03192a${HOST_SUFFIX}:${USER}:${PASS}:production "\ +"ec03193a:ec03193a${HOST_SUFFIX}:${USER}:${PASS}:production "\ +"ec03194a:ec03194a${HOST_SUFFIX}:${USER}:${PASS}:production "\ +"ec03195a:ec03195a${HOST_SUFFIX}:${USER}:${PASS}:production "\ +"ec03196a:ec03196a${HOST_SUFFIX}:${USER}:${PASS}:production "\ +"ec03197a:ec03197a${HOST_SUFFIX}:${USER}:${PASS}:production "\ +"ec03198a:ec03198a${HOST_SUFFIX}:${USER}:${PASS}:production "\ +"ec03199a:ec03199a${HOST_SUFFIX}:${USER}:${PASS}:production "\ +"ec03200a:ec03200a${HOST_SUFFIX}:${USER}:${PASS}:production "\ +"ec03201a:ec03201a${HOST_SUFFIX}:${USER}:${PASS}:production "\ +"ec03202a:ec03202a${HOST_SUFFIX}:${USER}:${PASS}:production "\ +"ec03203a:ec03203a${HOST_SUFFIX}:${USER}:${PASS}:production "\ +"ec03204a:ec03204a${HOST_SUFFIX}:${USER}:${PASS}:production "\ "ec32016a:ec32016a${HOST_SUFFIX}:${USER}:${PASS}:development "\ "ec32024a:ec32024a${HOST_SUFFIX}:${USER}:${PASS}:development "\ "ec32051a:ec32051a${HOST_SUFFIX}:${USER}:${PASS}:development "\ @@ -64,25 +79,23 @@ host_list_str="ddimatos:ec33017a${HOST_SUFFIX}:${USER}:${PASS}:development "\ "ec33004a:ec33004a${HOST_SUFFIX}:${USER}:${PASS}:development "\ "ec33005a:ec33005a${HOST_SUFFIX}:${USER}:${PASS}:development "\ "ec33006a:ec33006a${HOST_SUFFIX}:${USER}:${PASS}:development "\ -"ec33006a:ec33006a${HOST_SUFFIX}:${USER}:${PASS}:development "\ "ec33007a:ec33007a${HOST_SUFFIX}:${USER}:${PASS}:development "\ "ec33008a:ec33008a${HOST_SUFFIX}:${USER}:${PASS}:development "\ "ec33009a:ec33009a${HOST_SUFFIX}:${USER}:${PASS}:development "\ "ec33010a:ec33010a${HOST_SUFFIX}:${USER}:${PASS}:development "\ "ec33011a:ec33011a${HOST_SUFFIX}:${USER}:${PASS}:development "\ -"ec33012a:ec33012a${HOST_SUFFIX}:${USER}:${PASS}:development "\ -"ec33013a:ec33013a${HOST_SUFFIX}:${USER}:${PASS}:development "\ -"ec33013a:ec33013a${HOST_SUFFIX}:${USER}:${PASS}:development "\ +"ec33012a:ec33012a${HOST_SUFFIX}:${USER}:${PASS}:production "\ +"ec33013a:ec33013a${HOST_SUFFIX}:${USER}:${PASS}:production "\ "ec33014a:ec33014a${HOST_SUFFIX}:${USER}:${PASS}:development "\ "ec33015a:ec33015a${HOST_SUFFIX}:${USER}:${PASS}:development "\ "ec33016a:ec33016a${HOST_SUFFIX}:${USER}:${PASS}:development "\ -"ec33017a:ec33017a${HOST_SUFFIX}:${USER}:${PASS}:development "\ -"ec33018a:ec33018a${HOST_SUFFIX}:${USER}:${PASS}:development "\ +"ec33017a:ec33017a${HOST_SUFFIX}:${USER}:${PASS}:production "\ +"ec33018a:ec33018a${HOST_SUFFIX}:${USER}:${PASS}:production "\ "ec33019a:ec33019a${HOST_SUFFIX}:${USER}:${PASS}:development "\ "ec33020a:ec33020a${HOST_SUFFIX}:${USER}:${PASS}:development "\ "ec33021a:ec33021a${HOST_SUFFIX}:${USER}:${PASS}:development "\ "ec33022a:ec33022a${HOST_SUFFIX}:${USER}:${PASS}:production "\ -"ec33023a:ec33023a${HOST_SUFFIX}:${USER}:${PASS}:development "\ +"ec33023a:ec33023a${HOST_SUFFIX}:${USER}:${PASS}:production "\ "ec33024a:ec33024a${HOST_SUFFIX}:${USER}:${PASS}:production "\ "ec33025a:ec33025a${HOST_SUFFIX}:${USER}:${PASS}:production "\ "ec33026a:ec33026a${HOST_SUFFIX}:${USER}:${PASS}:production "\ diff --git a/scripts/mounts.env b/scripts/mounts.env index dbc32bef7..f8ae98a25 100644 --- a/scripts/mounts.env +++ b/scripts/mounts.env @@ -28,19 +28,19 @@ # data_set - the z/OS data set containing the binaries to mount # space - must be a space before the closing quote # ------------------------------------------------------------------------------ -zoau_mount_list_str="1:1.0.1-ga:/zoau/v1.0.1-ga:IMSTESTU.ZOAU.V101.GA.ZFS "\ -"2:1.0.3-ptf2:/zoau/v1.0.3-ptf2:IMSTESTU.ZOAU.V103.PTF2.ZFS "\ -"3:1.1.0-ga:/zoau/v1.1.0-ga:IMSTESTU.ZOAU.V110.GA.ZFS "\ -"4:1.1.1-ptf1:/zoau/v1.1.1-ptf1:IMSTESTU.ZOAU.V111.PTF1.ZFS "\ -"5:1.2.0:/zoau/v1.2.0:IMSTESTU.ZOAU.V120.ZFS "\ -"6:1.2.1:/zoau/v1.2.1:IMSTESTU.ZOAU.V121.ZFS "\ -"7:1.2.2:/zoau/v1.2.2:IMSTESTU.ZOAU.V122.ZFS "\ -"8:1.2.3:/zoau/v1.2.3:IMSTESTU.ZOAU.V123.ZFS "\ -"9:1.2.4:/zoau/v1.2.4:IMSTESTU.ZOAU.V124.ZFS "\ -"10:1.2.5:/zoau/v1.2.5:IMSTESTU.ZOAU.V102.GA.ZFS "\ -"11:1.3.0:/zoau/v1.3.0:IMSTESTU.ZOAU.V103.GA5.ZFS "\ -"12:1.3.1:/zoau/v1.3.1:IMSTESTU.ZOAU.V130.ZFS "\ -"13:1.3.2:/zoau/v1.3.2.0:IMSTESTU.ZOAU.V100.GA.ZFS "\ +zoau_mount_list_str="1:1.0.3-ptf2:/zoau/v1.0.3-ptf2:IMSTESTU.ZOAU.V103.PTF2.ZFS "\ +"2:1.1.0-ga:/zoau/v1.1.0-ga:IMSTESTU.ZOAU.V110.GA.ZFS "\ +"3:1.1.1-ptf1:/zoau/v1.1.1-ptf1:IMSTESTU.ZOAU.V111.PTF1.ZFS "\ +"4:1.2.0:/zoau/v1.2.0:IMSTESTU.ZOAU.V120.ZFS "\ +"5:1.2.1:/zoau/v1.2.1:IMSTESTU.ZOAU.V121.ZFS "\ +"6:1.2.2:/zoau/v1.2.2:IMSTESTU.ZOAU.V122.ZFS "\ +"7:1.2.3:/zoau/v1.2.3:IMSTESTU.ZOAU.V123.ZFS "\ +"8:1.2.4:/zoau/v1.2.4:IMSTESTU.ZOAU.V124.ZFS "\ +"9:1.2.5:/zoau/v1.2.5:IMSTESTU.ZOAU.V102.GA.ZFS "\ +"10:1.3.0:/zoau/v1.3.0:IMSTESTU.ZOAU.V103.GA5.ZFS "\ +"11:1.3.1:/zoau/v1.3.1:IMSTESTU.ZOAU.V130.ZFS "\ +"12:1.3.2:/zoau/v1.3.2.0:IMSTESTU.ZOAU.V100.GA.ZFS "\ +"13:1.3.3:/zoau/v1.3.3:IMSTESTU.ZOAU.V101.GA.ZFS "\ "14:latest:/zoau/latest:IMSTESTU.ZOAU.LATEST.ZFS " # ------------------------------------------------------------------------------ diff --git a/scripts/venv.sh b/scripts/venv.sh index 3b662536b..502fabf62 100755 --- a/scripts/venv.sh +++ b/scripts/venv.sh @@ -1,5 +1,5 @@ -#!/bin/sh +#!/bin/bash # ============================================================================== # Copyright (c) IBM Corporation 2022, 2024 # Licensed under the Apache License, Version 2.0 (the "License"); @@ -115,7 +115,8 @@ echo_requirements(){ echo "Unable to source file: $file." fi - if [[ "$file" =~ "latest" ]]; then + #if [[ "$file" =~ "latest" ]]; then + if echo "$file" | grep "latest" >/dev/null; then # eg extract 'latest' from configurations/requirements-latest file name ansible_version=`echo $file | cut -d"-" -f2|cut -d"." -f1` venv_name="venv"-$ansible_version @@ -139,7 +140,7 @@ echo_requirements(){ REQ=${REQ}"$key==$value;\\n" fi done - echo "${REQ}""${REQ_COMMON}" + echo -e "${REQ}""${REQ_COMMON}" py_req="0" for ver in "${python[@]}" ; do @@ -151,9 +152,31 @@ echo_requirements(){ done } +# Customized ansible.cfg for each managed venv, ./ac will know how to source this so its used during execution. +write_ansible_cfg(){ + ansible_cfg=${ansible_cfg}"[defaults]\\n" + ansible_cfg=${ansible_cfg}"forks = 25\\n" + ansible_cfg=${ansible_cfg}"action_plugins = ${VENV_HOME_MANAGED}/${venv_name}/ansible_collections/ibm/ibm_zos_core/plugins/action\\n" + ansible_cfg=${ansible_cfg}"library = ${VENV_HOME_MANAGED}/${venv_name}/ansible_collections/ibm/ibm_zos_core/plugins/modules\\n" + ansible_cfg=${ansible_cfg}"collections_path = ${VENV_HOME_MANAGED}/${venv_name}/ansible_collections\\n" + ansible_cfg=${ansible_cfg}"remote_tmp = /tmp/ibmz/ansible\\n" + ansible_cfg=${ansible_cfg}"remote_port = 22\n" + ansible_cfg=${ansible_cfg}"\\n" + ansible_cfg=${ansible_cfg}"[connection]\\n" + ansible_cfg=${ansible_cfg}"pipelining = True\\n" + ansible_cfg=${ansible_cfg}"\\n" + ansible_cfg=${ansible_cfg}"[colors]\\n" + ansible_cfg=${ansible_cfg}"verbose = blue\\n" + ansible_cfg=${ansible_cfg}"\\n" + ansible_cfg=${ansible_cfg}"[persistent_connection]\\n" + ansible_cfg=${ansible_cfg}"command_timeout = 60\\n" + + echo -e "${ansible_cfg}">"${VENV_HOME_MANAGED}"/"${venv_name}"/ansible.cfg + unset ansible_cfg +} # Lest normalize the version from 3.10.2 to 3010002000 -# Do we we need that 4th octet? +# Do we we need that 4th octet? normalize_version() { echo "$@" | awk -F. '{ printf("%d%03d%03d%03d\n", $1,$2,$3,$4); }'; } @@ -167,11 +190,16 @@ make_venv_dirs(){ # We should think about the idea of allowing: # --force, --synch, --update thus not sure we need this method and better to # manage this logic inline to write_req - for file in `ls configurations/*requirements-[0-9].[0-9]*.env* configurations/*requirements-latest* 2>/dev/null`; do - if [[ "$file" =~ "latest" ]]; then + for file in `ls configurations/*requirements-[0-9].[0-9]*.env* configurations/*requirements-latest* configurations/*requirements-doc* 2>/dev/null`; do + #if [[ "$file" =~ "latest" ]]; then + if echo "$file" | grep "latest" >/dev/null; then # eg extract 'latest' from configurations/requirements-latest file name ansible_version=`echo $file | cut -d"-" -f2|cut -d"." -f1` venv_name="venv"-$ansible_version + elif echo "$file" | grep "doc" >/dev/null; then + # eg extract 'doc' from configurations/requirements-doc file name + ansible_version=`echo $file | cut -d"-" -f2|cut -d"." -f1` + venv_name="venv"-$ansible_version else # eg extract 2.14 from configurations/requirements-2.14.sh file name ansible_version=`echo $file | cut -d"-" -f2|cut -d"." -f1,2` @@ -211,7 +239,7 @@ write_requirements(){ fi done - for file in `ls configurations/*requirements-[0-9].[0-9]*.env* configurations/*requirements-latest* 2>/dev/null`; do + for file in `ls configurations/*requirements-[0-9].[0-9]*.env* configurations/*requirements-latest* configurations/*requirements-doc* 2>/dev/null`; do # Unset the vars from any prior sourced files unset REQ unset requirements @@ -223,16 +251,21 @@ write_requirements(){ echo "Unable to source file: $file." fi - if [[ "$file" =~ "latest" ]]; then + # if [[ "$file" =~ "latest" ]]; then + if echo "$file" | grep "latest" >/dev/null; then # eg extract 'latest' from configurations/requirements-latest file name ansible_version=`echo $file | cut -d"-" -f2|cut -d"." -f1` venv_name="venv"-$ansible_version - echo $venv_name + elif echo "$file" | grep "doc" >/dev/null; then + # eg extract 'doc' from configurations/requirements-doc file name + ansible_version=`echo $file | cut -d"-" -f2|cut -d"." -f1` + venv_name="venv"-$ansible_version + # Don't usee the common requirements for the venv-doc, it is all defined in the venv's env file. + REQ_COMMON="" else # eg extract 2.14 from configurations/requirements-2.14.sh file name ansible_version=`echo $file | cut -d"-" -f2|cut -d"." -f1,2` venv_name="venv"-$ansible_version - echo $venv_name fi for pkg in "${requirements[@]}" ; do @@ -271,11 +304,12 @@ write_requirements(){ py_op="-ge" fi + echo "Venv $venv_name requires Python $py_op version $py_req." discover_python $py_op $py_req # Is the discoverd python >= what the requirements.txt requires? if [ $(normalize_version $VERSION_PYTHON) "$py_op" $(normalize_version $py_req) ]; then - echo "${REQ}${REQ_COMMON}">"${VENV_HOME_MANAGED}"/"${venv_name}"/requirements.txt + echo -e "${REQ}""${REQ_COMMON}">"${VENV_HOME_MANAGED}"/"${venv_name}"/requirements.txt cp mounts.env "${VENV_HOME_MANAGED}"/"${venv_name}"/ #cp info.env "${VENV_HOME_MANAGED}"/"${venv_name}"/ #cp info.env.axx "${VENV_HOME_MANAGED}"/"${venv_name}"/ @@ -286,6 +320,7 @@ write_requirements(){ cp ../tests/dependencyfinder.py "${VENV_HOME_MANAGED}"/"${venv_name}"/ cp ce.py "${VENV_HOME_MANAGED}"/"${venv_name}"/ cp -R modules "${VENV_HOME_MANAGED}"/"${venv_name}"/ + write_ansible_cfg # Decrypt file if [ "$option_pass" ]; then @@ -305,6 +340,9 @@ write_requirements(){ echo "SSH_KEY_PIPELINE=\"\"">>"${VENV_HOME_MANAGED}"/"${venv_name}"/info.env.changeme echo "No password was provided, a temporary 'info.env.changeme' file has been created for your convenience." fi + + # Call create_venv_and_pip_install_req here because calling in option '--vsetup' will lose the global python values and pick up the wrong python. + create_venv_and_pip_install_req $file else echo "Not able to create managed venv path: ${VENV_HOME_MANAGED}/${venv_name} , min python required is ${py_req}, found version $VERSION_PYTHON" echo "Consider installing another Python for your system, if on Mac 'brew install python@3.10', otherwise review your package manager" @@ -315,34 +353,69 @@ write_requirements(){ create_venv_and_pip_install_req(){ + unset venv + if echo "$file" | grep "latest" >/dev/null; then + # eg extract 'latest' from configurations/requirements-latest file name + ansible_version=`echo $file | cut -d"-" -f2|cut -d"." -f1` + venv_name="venv"-$ansible_version + elif echo "$file" | grep "doc" >/dev/null; then + # eg extract 'doc' from configurations/requirements-doc file name + ansible_version=`echo $file | cut -d"-" -f2|cut -d"." -f1` + venv_name="venv"-$ansible_version + else + # eg extract 2.14 from configurations/requirements-2.14.sh file name + ansible_version=`echo $file | cut -d"-" -f2|cut -d"." -f1,2` + venv_name="venv"-$ansible_version + #echo $venv_name + fi - for file in `ls configurations/*requirements-[0-9].[0-9]*.env* configurations/*requirements-latest* 2>/dev/null`; do - unset venv - - if [[ "$file" =~ "latest" ]]; then - # eg extract 'latest' from configurations/requirements-latest file name - ansible_version=`echo $file | cut -d"-" -f2|cut -d"." -f1` - venv_name="venv"-$ansible_version - else - # eg extract 2.14 from configurations/requirements-2.14.sh file name - ansible_version=`echo $file | cut -d"-" -f2|cut -d"." -f1,2` - venv_name="venv"-$ansible_version - #echo $venv_name + if [ -f $VENV_HOME_MANAGED/$venv_name/requirements.txt ]; then + echo ${DIVIDER} + echo "Creating python virtual environment: ${VENV_HOME_MANAGED}/${venv_name}." + echo ${DIVIDER} + # -------------------------------------------------------------------------- + # This OS pre-check is going beyond the intention of the script but + # without out, the tool will be unbale to run. + # -------------------------------------------------------------------------- + # There is only support for Ubuntu and MacOS. To support other + # distirbutions, grep for: 'Debian' , 'CentOS', 'Fedora'. + # On Ubuntu, , so we need to + # install the python3-venv package using the following command. + if echo "$OSTYPE" |grep 'linux-gnu' >/dev/null; then + DISTRO=$(cat /etc/*release | grep ^NAME) + # On Ubuntu ensurepip is not available in LTS 24.x, support is limited + if echo "$DISTRO" |grep 'Ubuntu' >/dev/null; then + # 'sshpass' will not be on the host, some additional work to add it. + codename=`cat /etc/os-release | grep UBUNTU_CODENAME | cut -d = -f 2` + add-apt-repository "deb http://us.archive.ubuntu.com/ubuntu/ $codename universe multiverse" -y + apt-get update -y + apt install sshpass -y + # This is a less repository driven approach to setting up the venv, for safe keeping, commenting it out. + ${VERSION_PYTHON_PATH} -m venv --without-pip "${VENV_HOME_MANAGED}"/"${venv_name}" + curl https://bootstrap.pypa.io/get-pip.py -o ${VENV_HOME_MANAGED}/${venv_name}/bin/get-pip.py + ${VENV_HOME_MANAGED}/${venv_name}/bin/python${VERSION_PYTHON} ${VENV_HOME_MANAGED}/${venv_name}/bin/get-pip.py + # Below 2 lines result in an architecture issue that continues to be broken, so we need to use '--without-pip'. + # Error: python3.12-venv : Depends: python3.12 (= 3.12.3-1) but 3.12.3-1ubuntu0.1 is to be installed + # apt install python3-pip -y + # apt install python$VERSION_PYTHON-venv -y + elif echo "$DISTRO" |grep 'Red Hat Enterprise Linux' >/dev/null; then + # Install the Python versions + dnf install python${VERSION_PYTHON} -y + dnf install python${VERSION_PYTHON}-pip -y + dnf install sshpass -y + fi + # elif echo "$OSTYPE" |grep 'darwin' >/dev/null; then + # Nothing to do here for now, we may want to ensure sshpass in present for MacOS fi - - if [ -f $VENV_HOME_MANAGED/$venv_name/requirements.txt ]; then - echo ${DIVIDER} - echo "Creating python virtual environment: ${VENV_HOME_MANAGED}/${venv_name}." - echo ${DIVIDER} - ${VERSION_PYTHON_PATH} -m venv "${VENV_HOME_MANAGED}"/"${venv_name}"/ - ${VENV_HOME_MANAGED}/${venv_name}/bin/pip3 install --upgrade pip - ${VENV_HOME_MANAGED}/${venv_name}/bin/pip install --upgrade pip - "${VENV_HOME_MANAGED}"/"${venv_name}"/bin/pip3 install -r "${VENV_HOME_MANAGED}"/"${venv_name}"/requirements.txt - else - echo "Virtual environment "${VENV_HOME_MANAGED}"/"${venv_name}" already exists, no changes made."; \ - fi - done + # Complete the VENV creation and installation of packages + ${VERSION_PYTHON_PATH} -m venv "${VENV_HOME_MANAGED}"/"${venv_name}" + ${VENV_HOME_MANAGED}/${venv_name}/bin/pip3 install --upgrade pip + ${VENV_HOME_MANAGED}/${venv_name}/bin/pip install --upgrade pip + "${VENV_HOME_MANAGED}"/"${venv_name}"/bin/pip3 install -r "${VENV_HOME_MANAGED}"/"${venv_name}"/requirements.txt + else + echo "Virtual environment "${VENV_HOME_MANAGED}"/"${venv_name}" already exists or uanble to create venv, no changes made."; \ + fi } @@ -378,18 +451,17 @@ discover_python(){ # Don't use which, it only will find first in path within the script # for python_found in `which python3 | cut -d" " -f3`; do # - # The 'pys' array will search for pythons in reverse order, once it finds one that matches - # the configurations/requirements-x.xx.env it does not continue searching. Reverse order is important to - # maintain. - pys=("python3.14" "python3.13" "python3.12" "python3.11" "python3.10" "python3.9" "python3.8") - rc=1 - for py in "${pys[@]}"; do + # 'pys' is in reverse order, once there is a match to configurationsrequirements-x.xx.env + # it does not continue searching. Reverse order is important to maintain. + rc=-1 + pys="python3.14 python3.13 python3.12 python3.11 python3.10 python3.9 python3.8" + for py in $(echo $pys| tr ' ' '\n');do for python_found in `find_in_path $py`; do ver=`${python_found} --version | cut -d" " -f2` rc=$? ver=`echo $ver |cut -d"." -f1,2` ver_path="$python_found" - echo "Found $ver_path" + echo "Found Python installation: $ver_path" done if [ $rc -eq 0 ];then @@ -401,9 +473,30 @@ discover_python(){ fi done + # Not posix compliant usage of arrays. + # pys=("python3.14" "python3.13" "python3.12" "python3.11" "python3.10" "python3.9" "python3.8") + # rc=1 + # for py in "${pys[@]}"; do + # for python_found in `find_in_path $py`; do + # ver=`${python_found} --version | cut -d" " -f2` + # rc=$? + # ver=`echo $ver |cut -d"." -f1,2` + # ver_path="$python_found" + # echo "Found $ver_path" + # done + + # if [ $rc -eq 0 ];then + # if [ $(normalize_version $ver) "$operator" $(normalize_version $VERSION_PYTHON) ]; then + # VERSION_PYTHON="$ver" + # VERSION_PYTHON_PATH="$ver_path" + # break + # fi + # fi + # done + echo ${DIVIDER} - echo "Discovered Python version: ${VERSION_PYTHON}." - echo "Discovered Python path: ${VERSION_PYTHON_PATH}." + echo "Requested Python version: ${VERSION_PYTHON}." + echo "Selected Python path: ${VERSION_PYTHON_PATH}." echo ${DIVIDER} } ################################################################################ @@ -613,31 +706,8 @@ ssh_copy_files_and_mount(){ } ################################################################################ -# Echo the configuration used by the ansible core python test framework +# Write the configuration used by the ansible core python test framework ################################################################################ -echo_config(){ -unset CONFIG - -CONFIG=${CONFIG}"host: ${host}\\\n" -CONFIG=${CONFIG}"user: ${user}\\\n" -CONFIG=${CONFIG}"python_path: ${PYZ_HOME}/bin/python3\\\n" -CONFIG=${CONFIG}"\\\n" -CONFIG=${CONFIG}"environment:\\\n" -CONFIG=${CONFIG}" _BPXK_AUTOCVT: \"ON\"\\\n" -CONFIG=${CONFIG}" _CEE_RUNOPTS: \"'FILETAG(AUTOCVT,AUTOTAG) POSIX(ON)'\"\\\n" -CONFIG=${CONFIG}" _TAG_REDIR_ERR: txt\\\n" -CONFIG=${CONFIG}" _TAG_REDIR_IN: txt\\\n" -CONFIG=${CONFIG}" _TAG_REDIR_OUT: txt\\\n" -CONFIG=${CONFIG}" LANG: C\\\n" -CONFIG=${CONFIG}" ZOAU_HOME: ${ZOAU_HOME}\\\n" -CONFIG=${CONFIG}" LIBPATH: ${ZOAU_HOME}/lib:${PYZ_HOME}/lib:/lib:/usr/lib:.\\\n" -CONFIG=${CONFIG}" PYTHONPATH: ${ZOAU_HOME}/lib\\\n" -CONFIG=${CONFIG}" PATH: ${ZOAU_HOME}/bin:${PYZ_HOME}/bin:/bin:/usr/sbin:/var/bin\\\n" -CONFIG=${CONFIG}" PYTHONSTDINENCODING: \"cp1047\"\\n" - -echo ${CONFIG} -} - write_test_config(){ unset CONFIG host_zvm=$1 @@ -645,6 +715,13 @@ pyz_version=$2 zoau_version=$3 managed_venv_path=$4 +zoau_pyz=`echo $pyz_version | cut -d "." -f1,2` + +# If the zoau version is less than 1.3 then set the wheel var to empty, else pass the py version. +if [ $(normalize_version $zoau_version) -lt $(normalize_version "1.3") ]; then + zoau_pyz="" +fi + ssh_host_credentials "$host_zvm" get_python_mount "$pyz_version" get_zoau_mount "$zoau_version" @@ -661,11 +738,11 @@ CONFIG=${CONFIG}" _TAG_REDIR_OUT: txt\\n" CONFIG=${CONFIG}" LANG: C\\n" CONFIG=${CONFIG}" ZOAU_HOME: ${ZOAU_HOME}\\n" CONFIG=${CONFIG}" LIBPATH: ${ZOAU_HOME}/lib:${PYZ_HOME}/lib:/lib:/usr/lib:.\\n" -CONFIG=${CONFIG}" PYTHONPATH: ${ZOAU_HOME}/lib\\n" +CONFIG=${CONFIG}" PYTHONPATH: ${ZOAU_HOME}/lib/$zoau_pyz\\n" CONFIG=${CONFIG}" PATH: ${ZOAU_HOME}/bin:${PYZ_HOME}/bin:/bin:/usr/sbin:/var/bin\\n" CONFIG=${CONFIG}" PYTHONSTDINENCODING: \"cp1047\"\\n" -echo $CONFIG>$managed_venv_path/config.yml +echo -e $CONFIG>$managed_venv_path/config.yml } ################################################################################ @@ -689,7 +766,7 @@ case "$1" in ssh_host_credentials $2 echo "$pass" ;; ---host-setup-files) #ec33017a "mounts.env" "mounts.sh" "shell-helper.sh" "profile.sh" +--host-setup-files) ssh_host_credentials $2 ssh_copy_files_and_mount $3 $4 $5 ;; @@ -706,20 +783,15 @@ case "$1" in discover_python ;; --vsetup) - #discover_python make_venv_dirs - #echo_requirements write_requirements $3 - create_venv_and_pip_install_req ;; --latest_venv) latest_venv ;; --perform-unit-test) discover_python - #make_venv_dirs echo_requirements - #write_requirements $3 ;; *) echo "ERROR: unknown parameter $1" diff --git a/tests/helpers/ztest.py b/tests/helpers/ztest.py index e471dfb26..426af244c 100644 --- a/tests/helpers/ztest.py +++ b/tests/helpers/ztest.py @@ -90,7 +90,7 @@ def from_args(cls, src): """ #TODO: add support for a positional string, eg "host,user,zoau,pyz" then convert it as needed - host, user, zoau, pyz, pythonpath, extra_args, extra = None, None, None, None, None, None, None + host, user, zoau, pyz, pythonpath, extra_args = None, None, None, None, None, None src = json.loads(src) # Traverse the src here , can we trow an exception? From c543e46d2e85e8986772820bcb0fe71fa4feb73e Mon Sep 17 00:00:00 2001 From: Demetri <dimatos@gmail.com> Date: Tue, 17 Sep 2024 14:55:51 -0700 Subject: [PATCH 466/495] Add non-utf8 printables test case full coverage. (#1689) * Add non-utf8 printables test case full coverage Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> --- .../fragments/1689-add-non-utf8-testcase.yml | 4 ++ .../modules/test_zos_job_submit_func.py | 37 +++++-------------- 2 files changed, 13 insertions(+), 28 deletions(-) create mode 100644 changelogs/fragments/1689-add-non-utf8-testcase.yml diff --git a/changelogs/fragments/1689-add-non-utf8-testcase.yml b/changelogs/fragments/1689-add-non-utf8-testcase.yml new file mode 100644 index 000000000..7ad1c8190 --- /dev/null +++ b/changelogs/fragments/1689-add-non-utf8-testcase.yml @@ -0,0 +1,4 @@ +trivial: + - zos_job_submit - Added a test case for non-printable UTF8 characters + to validate ZOAU changes. This covers the full EBCDIC range. + (https://github.com/ansible-collections/ibm_zos_core/pull/1689). \ No newline at end of file diff --git a/tests/functional/modules/test_zos_job_submit_func.py b/tests/functional/modules/test_zos_job_submit_func.py index 93844ed4c..0815745c4 100644 --- a/tests/functional/modules/test_zos_job_submit_func.py +++ b/tests/functional/modules/test_zos_job_submit_func.py @@ -344,34 +344,15 @@ C_SRC_INVALID_UTF8 = """#include <stdio.h> int main() { - unsigned char a=0x64; - unsigned char b=0x2A; - unsigned char c=0xB8; - unsigned char d=0xFF; - unsigned char e=0x81; - unsigned char f=0x82; - unsigned char g=0x83; - unsigned char h=0x00; - /* The following are non-printables from DBB. */ - unsigned char nl=0x15; - unsigned char cr=0x0D; - unsigned char lf=0x25; - unsigned char shiftOut=0x0E; - unsigned char shiftIn=0x0F; - - printf("Value of a: Hex: %X, character: %c",a,a); - printf("Value of b: Hex: %X, character: %c",b,b); - printf("Value of c: Hex: %X, character: %c",c,c); - printf("Value of d: Hex: %X, character: %c",d,d); - printf("Value of e: Hex: %X, character: %c",e,e); - printf("Value of f: Hex: %X, character: %c",f,f); - printf("Value of g: Hex: %X, character: %c",g,g); - printf("Value of h: Hex: %X, character: %c",h,h); - printf("Value of NL: Hex: %X, character: %c",nl,nl); - printf("Value of CR: Hex: %X, character: %c",cr,cr); - printf("Value of LF: Hex: %X, character: %c",lf,lf); - printf("Value of Shift-Out: Hex: %X, character: %c",shiftOut,shiftOut); - printf("Value of Shift-In: Hex: %X, character: %c",shiftIn,shiftIn); + /* Generate and print all EBCDIC characters to stdout to + * ensure non-printable chars can be handled by Python. + * This will included the non-printable hex from DBB docs: + * nl=0x15, cr=0x0D, lf=0x25, shiftOut=0x0E, shiftIn=0x0F + */ + + for (int i = 0; i <= 255; i++) { + printf("Hex 0x%X is character: (%c)\\\\n",i,(char)(i)); + } return 0; } From 5ad5ac861feb1589c691df0e7cdc926fbb1f786e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Tue, 24 Sep 2024 10:13:19 -0600 Subject: [PATCH 467/495] [bugfix][1678]Add validation for maker begin end (#1684) * Fix probable issue * Add fragment * Update changelogs/fragments/1684-Add_validation_for_marker_begin_end.yml Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> * Update plugins/modules/zos_blockinfile.py Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> * Update plugins/modules/zos_blockinfile.py Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> * Update plugins/modules/zos_blockinfile.py Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> * Fix blockinfile test case * Update plugins/modules/zos_blockinfile.py Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> * Adapt text * Update plugins/modules/zos_blockinfile.py --------- Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> --- .../fragments/1684-Add_validation_for_marker_begin_end.yml | 4 ++++ plugins/modules/zos_blockinfile.py | 5 ++++- 2 files changed, 8 insertions(+), 1 deletion(-) create mode 100644 changelogs/fragments/1684-Add_validation_for_marker_begin_end.yml diff --git a/changelogs/fragments/1684-Add_validation_for_marker_begin_end.yml b/changelogs/fragments/1684-Add_validation_for_marker_begin_end.yml new file mode 100644 index 000000000..067a17784 --- /dev/null +++ b/changelogs/fragments/1684-Add_validation_for_marker_begin_end.yml @@ -0,0 +1,4 @@ +bugfixes: + - zos_blockinfile - Previously module was not able to delete a block when 'marker_begin' and 'marker_end' + were set to the same value. Fix introduces a requirement for 'marker_begin' and 'marker_end' to have different values. + (https://github.com/ansible-collections/ibm_zos_core/pull/1684). \ No newline at end of file diff --git a/plugins/modules/zos_blockinfile.py b/plugins/modules/zos_blockinfile.py index ab6d2a0dd..2befe61ef 100644 --- a/plugins/modules/zos_blockinfile.py +++ b/plugins/modules/zos_blockinfile.py @@ -96,6 +96,7 @@ marker_begin: description: - This will be inserted at C({mark}) in the opening ansible block marker. + - Value needs to be different from marker_end. required: false type: str default: BEGIN @@ -103,6 +104,7 @@ required: false description: - This will be inserted at C({mark}) in the closing ansible block marker. + - Value must be different from marker_end. type: str default: END backup: @@ -759,7 +761,8 @@ def main(): marker_begin = 'BEGIN' if not marker_end: marker_end = 'END' - + if marker_begin == marker_end: + module.fail_json(msg='marker_begin and marker_end must be different.') marker = "{0}\\n{1}\\n{2}".format(marker_begin, marker_end, marker) block = transformBlock(block, ' ', indentation) # analysis the file type From 734f0c316f843ea1c470a031d14fcbba8f0c726d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Tue, 24 Sep 2024 10:13:39 -0600 Subject: [PATCH 468/495] [Enabler][1686]lineinfile_portability (#1687) * Fix portability * Add fragment * Fix positional --- .../fragments/1687-lineinfile_portability.yml | 3 + .../modules/test_zos_lineinfile_func.py | 153 +++++++++--------- 2 files changed, 84 insertions(+), 72 deletions(-) create mode 100644 changelogs/fragments/1687-lineinfile_portability.yml diff --git a/changelogs/fragments/1687-lineinfile_portability.yml b/changelogs/fragments/1687-lineinfile_portability.yml new file mode 100644 index 000000000..df00fc9a2 --- /dev/null +++ b/changelogs/fragments/1687-lineinfile_portability.yml @@ -0,0 +1,3 @@ +trivial: + - zos_lineinfile- Remove the use of hard coded dataset and files names. + (https://github.com/ansible-collections/ibm_zos_core/pull/1687). \ No newline at end of file diff --git a/tests/functional/modules/test_zos_lineinfile_func.py b/tests/functional/modules/test_zos_lineinfile_func.py index 6e83a2fed..742f91522 100644 --- a/tests/functional/modules/test_zos_lineinfile_func.py +++ b/tests/functional/modules/test_zos_lineinfile_func.py @@ -18,11 +18,14 @@ import pytest from shellescape import quote -from ibm_zos_core.tests.helpers.dataset import get_tmp_ds_name +from ibm_zos_core.tests.helpers.dataset import ( + get_tmp_ds_name, + get_random_q, +) -__metaclass__ = type +from ibm_zos_core.tests.helpers.utils import get_random_file_name -TEST_FOLDER_LINEINFILE = "/tmp/ansible-core-tests/zos_lineinfile/" +__metaclass__ = type c_pgm="""#include <stdio.h> #include <stdlib.h> @@ -41,7 +44,7 @@ call_c_jcl="""//PDSELOCK JOB MSGCLASS=A,MSGLEVEL=(1,1),NOTIFY=&SYSUID,REGION=0M //LOCKMEM EXEC PGM=BPXBATCH //STDPARM DD * -SH /tmp/disp_shr/pdse-lock '{0}({1})' +SH {0}pdse-lock '{1}({2})' //STDIN DD DUMMY //STDOUT DD SYSOUT=* //STDERR DD SYSOUT=* @@ -212,13 +215,12 @@ Insert this string""" def set_uss_environment(ansible_zos_module, content, file): hosts = ansible_zos_module - hosts.all.shell(cmd=f"mkdir -p {TEST_FOLDER_LINEINFILE}") hosts.all.file(path=file, state="touch") hosts.all.shell(cmd=f"echo \"{content}\" > {file}") -def remove_uss_environment(ansible_zos_module): +def remove_uss_environment(ansible_zos_module, file): hosts = ansible_zos_module - hosts.all.shell(cmd=f"rm -rf {TEST_FOLDER_LINEINFILE}") + hosts.all.shell(cmd="rm " + file) def set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content): hosts = ansible_zos_module @@ -246,6 +248,8 @@ def remove_ds_environment(ansible_zos_module, ds_name): # The encoding will be only use on a few test ENCODING = [ 'ISO8859-1', 'UTF-8'] +TMP_DIRECTORY = "/tmp/" + ######################### # USS test cases ######################### @@ -259,7 +263,7 @@ def test_uss_line_replace(ansible_zos_module): "line":"ZOAU_ROOT=/mvsutil-develop_dsed", "state":"present" } - full_path = TEST_FOLDER_LINEINFILE + inspect.stack()[0][3] + full_path = get_random_file_name(dir=TMP_DIRECTORY) content = TEST_CONTENT try: set_uss_environment(ansible_zos_module, content, full_path) @@ -271,7 +275,7 @@ def test_uss_line_replace(ansible_zos_module): for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_REPLACE finally: - remove_uss_environment(ansible_zos_module) + remove_uss_environment(ansible_zos_module, full_path) @pytest.mark.uss @@ -282,7 +286,7 @@ def test_uss_line_insertafter_regex(ansible_zos_module): "line":"ZOAU_ROOT=/mvsutil-develop_dsed", "state":"present" } - full_path = TEST_FOLDER_LINEINFILE + inspect.stack()[0][3] + full_path = get_random_file_name(dir=TMP_DIRECTORY) content = TEST_CONTENT try: set_uss_environment(ansible_zos_module, content, full_path) @@ -294,7 +298,7 @@ def test_uss_line_insertafter_regex(ansible_zos_module): for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_INSERTAFTER_REGEX finally: - remove_uss_environment(ansible_zos_module) + remove_uss_environment(ansible_zos_module, full_path) @pytest.mark.uss @@ -305,7 +309,7 @@ def test_uss_line_insertbefore_regex(ansible_zos_module): "line":"unset ZOAU_ROOT", "state":"present" } - full_path = TEST_FOLDER_LINEINFILE + inspect.stack()[0][3] + full_path = get_random_file_name(dir=TMP_DIRECTORY) content = TEST_CONTENT try: set_uss_environment(ansible_zos_module, content, full_path) @@ -317,7 +321,7 @@ def test_uss_line_insertbefore_regex(ansible_zos_module): for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_INSERTBEFORE_REGEX finally: - remove_uss_environment(ansible_zos_module) + remove_uss_environment(ansible_zos_module, full_path) @pytest.mark.uss @@ -328,7 +332,7 @@ def test_uss_line_insertafter_eof(ansible_zos_module): "line":"export 'ZOAU_ROOT'", "state":"present" } - full_path = TEST_FOLDER_LINEINFILE + inspect.stack()[0][3] + full_path = get_random_file_name(dir=TMP_DIRECTORY) content = TEST_CONTENT try: set_uss_environment(ansible_zos_module, content, full_path) @@ -340,7 +344,7 @@ def test_uss_line_insertafter_eof(ansible_zos_module): for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_INSERTAFTER_EOF finally: - remove_uss_environment(ansible_zos_module) + remove_uss_environment(ansible_zos_module, full_path) @pytest.mark.uss @@ -351,7 +355,7 @@ def test_uss_line_insertbefore_bof(ansible_zos_module): "line":"# this is file is for setting env vars", "state":"present" } - full_path = TEST_FOLDER_LINEINFILE + inspect.stack()[0][3] + full_path = get_random_file_name(dir=TMP_DIRECTORY) content = TEST_CONTENT try: set_uss_environment(ansible_zos_module, content, full_path) @@ -363,7 +367,7 @@ def test_uss_line_insertbefore_bof(ansible_zos_module): for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_INSERTBEFORE_BOF finally: - remove_uss_environment(ansible_zos_module) + remove_uss_environment(ansible_zos_module, full_path) @pytest.mark.uss @@ -375,7 +379,7 @@ def test_uss_line_replace_match_insertafter_ignore(ansible_zos_module): "line":"ZOAU_ROOT=/mvsutil-develop_dsed", "state":"present" } - full_path = TEST_FOLDER_LINEINFILE + inspect.stack()[0][3] + full_path = get_random_file_name(dir=TMP_DIRECTORY) content = TEST_CONTENT try: set_uss_environment(ansible_zos_module, content, full_path) @@ -387,7 +391,7 @@ def test_uss_line_replace_match_insertafter_ignore(ansible_zos_module): for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_REPLACE_INSERTAFTER_IGNORE finally: - remove_uss_environment(ansible_zos_module) + remove_uss_environment(ansible_zos_module, full_path) @pytest.mark.uss @@ -399,7 +403,7 @@ def test_uss_line_replace_match_insertbefore_ignore(ansible_zos_module): "line":"unset ZOAU_ROOT", "state":"present" } - full_path = TEST_FOLDER_LINEINFILE + inspect.stack()[0][3] + full_path = get_random_file_name(dir=TMP_DIRECTORY) content = TEST_CONTENT try: set_uss_environment(ansible_zos_module, content, full_path) @@ -411,7 +415,7 @@ def test_uss_line_replace_match_insertbefore_ignore(ansible_zos_module): for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_REPLACE_INSERTBEFORE_IGNORE finally: - remove_uss_environment(ansible_zos_module) + remove_uss_environment(ansible_zos_module, full_path) @pytest.mark.uss @@ -423,7 +427,7 @@ def test_uss_line_replace_nomatch_insertafter_match(ansible_zos_module): "line":"ZOAU_ROOT=/mvsutil-develop_dsed", "state":"present" } - full_path = TEST_FOLDER_LINEINFILE + inspect.stack()[0][3] + full_path = get_random_file_name(dir=TMP_DIRECTORY) content = TEST_CONTENT try: set_uss_environment(ansible_zos_module, content, full_path) @@ -435,7 +439,7 @@ def test_uss_line_replace_nomatch_insertafter_match(ansible_zos_module): for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_REPLACE_NOMATCH_INSERTAFTER finally: - remove_uss_environment(ansible_zos_module) + remove_uss_environment(ansible_zos_module, full_path) @pytest.mark.uss @@ -447,7 +451,7 @@ def test_uss_line_replace_nomatch_insertbefore_match(ansible_zos_module): "line":"unset ZOAU_ROOT", "state":"present" } - full_path = TEST_FOLDER_LINEINFILE + inspect.stack()[0][3] + full_path = get_random_file_name(dir=TMP_DIRECTORY) content = TEST_CONTENT try: set_uss_environment(ansible_zos_module, content, full_path) @@ -459,7 +463,7 @@ def test_uss_line_replace_nomatch_insertbefore_match(ansible_zos_module): for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_REPLACE_NOMATCH_INSERTBEFORE finally: - remove_uss_environment(ansible_zos_module) + remove_uss_environment(ansible_zos_module, full_path) @pytest.mark.uss @@ -471,7 +475,7 @@ def test_uss_line_replace_nomatch_insertafter_nomatch(ansible_zos_module): "line":"ZOAU_ROOT=/mvsutil-develop_dsed", "state":"present" } - full_path = TEST_FOLDER_LINEINFILE + inspect.stack()[0][3] + full_path = get_random_file_name(dir=TMP_DIRECTORY) content = TEST_CONTENT try: set_uss_environment(ansible_zos_module, content, full_path) @@ -483,7 +487,7 @@ def test_uss_line_replace_nomatch_insertafter_nomatch(ansible_zos_module): for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_REPLACE_NOMATCH_INSERTAFTER_NOMATCH finally: - remove_uss_environment(ansible_zos_module) + remove_uss_environment(ansible_zos_module, full_path) @pytest.mark.uss @@ -495,7 +499,7 @@ def test_uss_line_replace_nomatch_insertbefore_nomatch(ansible_zos_module): "line":"unset ZOAU_ROOT", "state":"present" } - full_path = TEST_FOLDER_LINEINFILE + inspect.stack()[0][3] + full_path = get_random_file_name(dir=TMP_DIRECTORY) content = TEST_CONTENT try: set_uss_environment(ansible_zos_module, content, full_path) @@ -507,7 +511,7 @@ def test_uss_line_replace_nomatch_insertbefore_nomatch(ansible_zos_module): for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_REPLACE_NOMATCH_INSERTBEFORE_NOMATCH finally: - remove_uss_environment(ansible_zos_module) + remove_uss_environment(ansible_zos_module, full_path) @pytest.mark.uss @@ -518,7 +522,7 @@ def test_uss_line_absent(ansible_zos_module): "line":"ZOAU_ROOT=/usr/lpp/zoautil/v100", "state":"absent" } - full_path = TEST_FOLDER_LINEINFILE + inspect.stack()[0][3] + full_path = get_random_file_name(dir=TMP_DIRECTORY) content = TEST_CONTENT try: set_uss_environment(ansible_zos_module, content, full_path) @@ -531,7 +535,7 @@ def test_uss_line_absent(ansible_zos_module): for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_ABSENT finally: - remove_uss_environment(ansible_zos_module) + remove_uss_environment(ansible_zos_module, full_path) @pytest.mark.uss @@ -543,7 +547,7 @@ def test_uss_line_replace_quoted_escaped(ansible_zos_module): "line":'ZOAU_ROOT=\"/mvsutil-develop_dsed\"', "state":"present" } - full_path = TEST_FOLDER_LINEINFILE + inspect.stack()[0][3] + full_path = get_random_file_name(dir=TMP_DIRECTORY) content = TEST_CONTENT try: set_uss_environment(ansible_zos_module, content, full_path) @@ -555,7 +559,7 @@ def test_uss_line_replace_quoted_escaped(ansible_zos_module): for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_QUOTED finally: - remove_uss_environment(ansible_zos_module) + remove_uss_environment(ansible_zos_module, full_path) @pytest.mark.uss @@ -567,7 +571,7 @@ def test_uss_line_replace_quoted_not_escaped(ansible_zos_module): "line":'ZOAU_ROOT="/mvsutil-develop_dsed"', "state":"present" } - full_path = TEST_FOLDER_LINEINFILE + inspect.stack()[0][3] + full_path = get_random_file_name(dir=TMP_DIRECTORY) content = TEST_CONTENT try: set_uss_environment(ansible_zos_module, content, full_path) @@ -579,7 +583,7 @@ def test_uss_line_replace_quoted_not_escaped(ansible_zos_module): for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_QUOTED finally: - remove_uss_environment(ansible_zos_module) + remove_uss_environment(ansible_zos_module, full_path) @pytest.mark.uss def test_uss_line_does_not_insert_repeated(ansible_zos_module): @@ -589,7 +593,7 @@ def test_uss_line_does_not_insert_repeated(ansible_zos_module): "line":'ZOAU_ROOT=/usr/lpp/zoautil/v100', "state":"present" } - full_path = TEST_FOLDER_LINEINFILE + inspect.stack()[0][3] + full_path = get_random_file_name(dir=TMP_DIRECTORY) content = TEST_CONTENT try: set_uss_environment(ansible_zos_module, content, full_path) @@ -606,7 +610,7 @@ def test_uss_line_does_not_insert_repeated(ansible_zos_module): for result in results.contacted.values(): assert result.get("stdout") == '1' finally: - remove_uss_environment(ansible_zos_module) + remove_uss_environment(ansible_zos_module, full_path) ######################### # Dataset test cases @@ -627,7 +631,7 @@ def test_ds_line_insertafter_regex(ansible_zos_module, dstype): "state":"present" } ds_name = get_tmp_ds_name() - temp_file = "/tmp/" + ds_name + temp_file = get_random_file_name(dir=TMP_DIRECTORY) content = TEST_CONTENT try: ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) @@ -653,7 +657,7 @@ def test_ds_line_insertbefore_regex(ansible_zos_module, dstype): "state":"present" } ds_name = get_tmp_ds_name() - temp_file = "/tmp/" + ds_name + temp_file = get_random_file_name(dir=TMP_DIRECTORY) content = TEST_CONTENT try: ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) @@ -679,7 +683,7 @@ def test_ds_line_insertafter_eof(ansible_zos_module, dstype): "state":"present" } ds_name = get_tmp_ds_name() - temp_file = "/tmp/" + ds_name + temp_file = get_random_file_name(dir=TMP_DIRECTORY) content = TEST_CONTENT try: ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) @@ -704,7 +708,7 @@ def test_ds_line_insertbefore_bof(ansible_zos_module, dstype): "state":"present" } ds_name = get_tmp_ds_name() - temp_file = "/tmp/" + ds_name + temp_file = get_random_file_name(dir=TMP_DIRECTORY) content = TEST_CONTENT try: ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) @@ -731,7 +735,7 @@ def test_ds_line_replace_match_insertafter_ignore(ansible_zos_module, dstype): "state":"present" } ds_name = get_tmp_ds_name() - temp_file = "/tmp/" + ds_name + temp_file = get_random_file_name(dir=TMP_DIRECTORY) content = TEST_CONTENT try: ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) @@ -758,7 +762,7 @@ def test_ds_line_replace_match_insertbefore_ignore(ansible_zos_module, dstype): "state":"present" } ds_name = get_tmp_ds_name() - temp_file = "/tmp/" + ds_name + temp_file = get_random_file_name(dir=TMP_DIRECTORY) content = TEST_CONTENT try: ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) @@ -872,7 +876,7 @@ def test_special_characters_ds_insert_line(ansible_zos_module): # regexp="abcxyz", insertafter="ZOAU_ROOT=", line="ZOAU_ROOT=/mvsutil-develop_dsed", # state="present") # ds_name = get_tmp_ds_name() -# temp_file = "/tmp/" + ds_name +# temp_file = get_random_file_name(dir=TMP_DIRECTORY) # content = TEST_CONTENT # try: # ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) @@ -896,7 +900,7 @@ def test_special_characters_ds_insert_line(ansible_zos_module): # params = dict(regexp="abcxyz", insertbefore="ZOAU_ROOT=", # line="unset ZOAU_ROOT", state="present") # ds_name = get_tmp_ds_name() -# temp_file = "/tmp/" + ds_name +# temp_file = get_random_file_name(dir=TMP_DIRECTORY) # content = TEST_CONTENT # try: # ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) @@ -920,7 +924,7 @@ def test_special_characters_ds_insert_line(ansible_zos_module): # params = dict(regexp="abcxyz", insertafter="xyzijk", # line="ZOAU_ROOT=/mvsutil-develop_dsed", state="present") # ds_name = get_tmp_ds_name() -# temp_file = "/tmp/" + ds_name +# temp_file = get_random_file_name(dir=TMP_DIRECTORY) # content = TEST_CONTENT # try: # ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) @@ -943,7 +947,7 @@ def test_special_characters_ds_insert_line(ansible_zos_module): # ds_type = dstype # params = dict(regexp="abcxyz", insertbefore="xyzijk", line="unset ZOAU_ROOT", state="present") # ds_name = get_tmp_ds_name() -# temp_file = "/tmp/" + ds_name +# temp_file = get_random_file_name(dir=TMP_DIRECTORY) # content = TEST_CONTENT # try: # ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) @@ -970,7 +974,7 @@ def test_ds_line_absent(ansible_zos_module, dstype): "state":"absent" } ds_name = get_tmp_ds_name() - temp_file = "/tmp/" + ds_name + temp_file = get_random_file_name(dir=TMP_DIRECTORY) content = TEST_CONTENT try: ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) @@ -990,20 +994,21 @@ def test_ds_tmp_hlq_option(ansible_zos_module): # This TMPHLQ only works with sequential datasets hosts = ansible_zos_module ds_type = "seq" + hlq = get_random_q() kwargs = { - "backup_name":r"TMPHLQ\.." + "backup_name": hlq } params = { "insertafter":"EOF", "line":"export ZOAU_ROOT", "state":"present", "backup":True, - "tmp_hlq":"TMPHLQ" + "tmp_hlq": hlq } content = TEST_CONTENT try: ds_full_name = get_tmp_ds_name() - temp_file = "/tmp/" + ds_full_name + temp_file = get_random_file_name(dir=TMP_DIRECTORY) hosts.all.zos_data_set(name=ds_full_name, type=ds_type, replace=True) hosts.all.shell(cmd=f"echo \"{content}\" > {temp_file}") cmd_str = f"cp {quote(temp_file)} \"//'{ds_full_name}'\" " @@ -1016,7 +1021,7 @@ def test_ds_tmp_hlq_option(ansible_zos_module): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): for key in kwargs: - assert re.match(kwargs.get(key), result.get(key)) + assert kwargs.get(key) in result.get(key) finally: hosts.all.zos_data_set(name=ds_full_name, state="absent") @@ -1061,7 +1066,7 @@ def test_ds_line_force(ansible_zos_module, dstype): "force":"True" } member_1, member_2 = "MEM1", "MEM2" - temp_file = f"/tmp/{member_2}" + temp_file = get_random_file_name(dir=TMP_DIRECTORY) content = TEST_CONTENT if ds_type == "seq": params["path"] = f"{default_data_set_name}.{member_2}" @@ -1094,12 +1099,15 @@ def test_ds_line_force(ansible_zos_module, dstype): for result in results.contacted.values(): assert int(result.get("stdout")) != 0 # copy/compile c program and copy jcl to hold data set lock for n seconds in background(&) - hosts.all.shell(cmd=f"echo \"{c_pgm}\" > /tmp/disp_shr/pdse-lock.c") - hosts.all.shell(cmd="echo \"{0}\" > /tmp/disp_shr/call_c_pgm.jcl".format(call_c_jcl.format( - default_data_set_name,member_1)) + path = get_random_file_name(suffix="/", dir=TMP_DIRECTORY) + hosts.all.file(path=path, state="directory") + hosts.all.shell(cmd=f"echo \"{c_pgm}\" > {path}pdse-lock.c") + hosts.all.shell( + cmd=f"echo \"{call_c_jcl.format(path, default_data_set_name, member_1)}\""+ + " > {0}call_c_pgm.jcl".format(path) ) - hosts.all.shell(cmd="xlc -o pdse-lock pdse-lock.c", chdir="/tmp/disp_shr/") - hosts.all.shell(cmd="submit call_c_pgm.jcl", chdir="/tmp/disp_shr/") + hosts.all.shell(cmd="xlc -o pdse-lock pdse-lock.c", chdir=path) + hosts.all.shell(cmd="submit call_c_pgm.jcl", chdir=path) time.sleep(5) # call lineinfile to see results results = hosts.all.zos_lineinfile(**params) @@ -1113,7 +1121,7 @@ def test_ds_line_force(ansible_zos_module, dstype): ps_list_res = hosts.all.shell(cmd="ps -e | grep -i 'pdse-lock'") pid = list(ps_list_res.contacted.values())[0].get('stdout').strip().split(' ')[0] hosts.all.shell(cmd=f"kill 9 {pid.strip()}") - hosts.all.shell(cmd='rm -r /tmp/disp_shr') + hosts.all.shell(cmd='rm -r {0}'.format(path)) hosts.all.zos_data_set(name=default_data_set_name, state="absent") @@ -1131,7 +1139,7 @@ def test_ds_line_force_fail(ansible_zos_module, dstype): "force":False } member_1, member_2 = "MEM1", "MEM2" - temp_file = f"/tmp/{member_2}" + temp_file = get_random_file_name(dir=TMP_DIRECTORY) params["path"] = f"{default_data_set_name}({member_2})" content = TEST_CONTENT try: @@ -1157,13 +1165,15 @@ def test_ds_line_force_fail(ansible_zos_module, dstype): for result in results.contacted.values(): assert int(result.get("stdout")) != 0 # copy/compile c program and copy jcl to hold data set lock for n seconds in background(&) - results = hosts.all.file(path="/tmp/disp_shr", state='directory') - hosts.all.shell(cmd=f"echo \"{c_pgm}\" > /tmp/disp_shr/pdse-lock.c") - hosts.all.shell(cmd="echo \"{0}\" > /tmp/disp_shr/call_c_pgm.jcl".format(call_c_jcl.format( - default_data_set_name,member_1)) + path = get_random_file_name(suffix="/", dir=TMP_DIRECTORY) + hosts.all.file(path=path, state="directory") + hosts.all.shell(cmd=f"echo \"{c_pgm}\" > {path}pdse-lock.c") + hosts.all.shell( + cmd=f"echo \"{call_c_jcl.format(path, default_data_set_name, member_1)}\""+ + " > {0}call_c_pgm.jcl".format(path) ) - hosts.all.shell(cmd="xlc -o pdse-lock pdse-lock.c", chdir="/tmp/disp_shr/") - hosts.all.shell(cmd="submit call_c_pgm.jcl", chdir="/tmp/disp_shr/") + hosts.all.shell(cmd="xlc -o pdse-lock pdse-lock.c", chdir=path) + hosts.all.shell(cmd="submit call_c_pgm.jcl", chdir=path) time.sleep(5) # call lineinfile to see results results = hosts.all.zos_lineinfile(**params) @@ -1174,7 +1184,7 @@ def test_ds_line_force_fail(ansible_zos_module, dstype): ps_list_res = hosts.all.shell(cmd="ps -e | grep -i 'pdse-lock'") pid = list(ps_list_res.contacted.values())[0].get('stdout').strip().split(' ')[0] hosts.all.shell(cmd=f"kill 9 {pid.strip()}") - hosts.all.shell(cmd='rm -r /tmp/disp_shr') + hosts.all.shell(cmd='rm -r {0}'.format(path)) hosts.all.zos_data_set(name=default_data_set_name, state="absent") @@ -1188,7 +1198,7 @@ def test_ds_line_does_not_insert_repeated(ansible_zos_module, dstype): "state":"present" } ds_name = get_tmp_ds_name() - temp_file = "/tmp/" + ds_name + temp_file = get_random_file_name(dir=TMP_DIRECTORY) content = TEST_CONTENT try: ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) @@ -1229,10 +1239,9 @@ def test_uss_encoding(ansible_zos_module, encoding): } } params["encoding"] = encoding - full_path = TEST_FOLDER_LINEINFILE + inspect.stack()[0][3] + full_path = get_random_file_name(dir=TMP_DIRECTORY) content = "SIMPLE LINE TO VERIFY" try: - hosts.all.shell(cmd=f"mkdir -p {TEST_FOLDER_LINEINFILE}") hosts.all.file(path=full_path, state="touch") hosts.all.shell(cmd=f"echo \"{content}\" > {full_path}") params["path"] = full_path @@ -1243,7 +1252,7 @@ def test_uss_encoding(ansible_zos_module, encoding): for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_ENCODING finally: - remove_uss_environment(ansible_zos_module) + remove_uss_environment(ansible_zos_module, full_path) @pytest.mark.ds @@ -1264,7 +1273,7 @@ def test_ds_encoding(ansible_zos_module, encoding, dstype): } params["encoding"] = encoding ds_name = get_tmp_ds_name() - temp_file = "/tmp/" + ds_name + temp_file = get_random_file_name(dir=TMP_DIRECTORY) content = "SIMPLE LINE TO VERIFY" try: hosts.all.shell(cmd=f"echo \"{content}\" > {temp_file}") From cbc7b3c4856e197ca48329d4efac26545571a4df Mon Sep 17 00:00:00 2001 From: Ketan Kelkar <ktnklkr@gmail.com> Date: Tue, 24 Sep 2024 10:37:47 -0700 Subject: [PATCH 469/495] Bugfix/1619/zos mvs raw base64 mode (#1691) * update run_command error algorithm - tsocmd Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update run_command error algorithm - mvsraw Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update calls to run_command Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove kwarg in wrapper for run_command Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * remove extra space char Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * switch from error algo from backslashreplace to replace Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update zos_mvs_raw func tests for new errors algo Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update zos mvs raw func tests for non utf8 char depr warning fixes Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * implement base64 when 'base64' is specified in return_content Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * rename internal variable from binary to base64 since that's what it indicates Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * update docs to reflect base64 option produces base64 encoded output Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * resolve leftover merge conflicts Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * resolve pep8 issues Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> * add changelog fragment Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> --------- Signed-off-by: Ketan Kelkar <ktnklkr@gmail.com> --- .../1691-zos-mvs-raw-base64-mode.yml | 4 ++ plugins/modules/zos_mvs_raw.py | 68 +++++++++++-------- .../modules/test_zos_mvs_raw_func.py | 16 +++-- 3 files changed, 54 insertions(+), 34 deletions(-) create mode 100644 changelogs/fragments/1691-zos-mvs-raw-base64-mode.yml diff --git a/changelogs/fragments/1691-zos-mvs-raw-base64-mode.yml b/changelogs/fragments/1691-zos-mvs-raw-base64-mode.yml new file mode 100644 index 000000000..7064e914a --- /dev/null +++ b/changelogs/fragments/1691-zos-mvs-raw-base64-mode.yml @@ -0,0 +1,4 @@ +bugfixes: + - zos_mvs_raw - base64 sub-option for return_content under option for retrieving + DD output did not return base64. Fix now returns the base64 encoded contents of the DD. + (https://github.com/ansible-collections/ibm_zos_core/pull/1691). \ No newline at end of file diff --git a/plugins/modules/zos_mvs_raw.py b/plugins/modules/zos_mvs_raw.py index 0a9394b67..dbd63c2f8 100644 --- a/plugins/modules/zos_mvs_raw.py +++ b/plugins/modules/zos_mvs_raw.py @@ -340,7 +340,7 @@ - The type of the content to be returned. - C(text) means return content in encoding specified by I(response_encoding). - I(src_encoding) and I(response_encoding) are only used when I(type=text). - - C(base64) means return content in binary mode. + - C(base64) means return content as base64 encoded in binary. type: str choices: - text @@ -520,7 +520,7 @@ - The type of the content to be returned. - C(text) means return content in encoding specified by I(response_encoding). - I(src_encoding) and I(response_encoding) are only used when I(type=text). - - C(base64) means return content in binary mode. + - C(base64) means return content as base64 encoded in binary. type: str choices: - text @@ -587,7 +587,7 @@ - The type of the content to be returned. - C(text) means return content in encoding specified by I(response_encoding). - I(src_encoding) and I(response_encoding) are only used when I(type=text). - - C(base64) means return content in binary mode. + - C(base64) means return content as base64 encoded in binary. type: str choices: - text @@ -628,7 +628,7 @@ - The type of the content to be returned. - C(text) means return content in encoding specified by I(response_encoding). - I(src_encoding) and I(response_encoding) are only used when I(type=text). - - C(base64) means return content in binary mode. + - C(base64) means return content as base64 encoded in binary. type: str choices: - text @@ -959,7 +959,7 @@ - The type of the content to be returned. - C(text) means return content in encoding specified by I(response_encoding). - I(src_encoding) and I(response_encoding) are only used when I(type=text). - - C(base64) means return content in binary mode. + - C(base64) means return content as base64 encoded in binary. type: str choices: - text @@ -1137,7 +1137,7 @@ - The type of the content to be returned. - C(text) means return content in encoding specified by I(response_encoding). - I(src_encoding) and I(response_encoding) are only used when I(type=text). - - C(base64) means return content in binary mode. + - C(base64) means return content as base64 encoded in binary. type: str choices: - text @@ -1199,7 +1199,7 @@ - The type of the content to be returned. - C(text) means return content in encoding specified by I(response_encoding). - I(src_encoding) and I(response_encoding) are only used when I(type=text). - - C(base64) means return content in binary mode. + - C(base64) means return content as base64 encoded in binary. type: str choices: - text @@ -1643,15 +1643,18 @@ from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.ansible_module import ( AnsibleModuleHelper, ) + +import base64 import re import traceback from shlex import quote try: - from zoautil_py import datasets + from zoautil_py import datasets, zoau_io except Exception: datasets = ZOAUImportError(traceback.format_exc()) + zoau_io = ZOAUImportError(traceback.format_exc()) ENCODING_ENVIRONMENT_VARS = {"_BPXK_AUTOCVT": "OFF"} @@ -2948,12 +2951,12 @@ def get_data_set_output(dd_statement): if dd_statement.definition.return_content.type == "text": contents = get_data_set_content( name=dd_statement.definition.name, - binary=False, + base64_encode=False, from_encoding=dd_statement.definition.return_content.src_encoding, to_encoding=dd_statement.definition.return_content.response_encoding, ) elif dd_statement.definition.return_content.type == "base64": - contents = get_data_set_content(name=dd_statement.definition.name, binary=True) + contents = get_data_set_content(name=dd_statement.definition.name, base64_encode=True) return build_dd_response(dd_statement.name, dd_statement.definition.name, contents) @@ -2974,12 +2977,12 @@ def get_unix_file_output(dd_statement): if dd_statement.definition.return_content.type == "text": contents = get_unix_content( name=dd_statement.definition.name, - binary=False, + base64_encode=False, from_encoding=dd_statement.definition.return_content.src_encoding, to_encoding=dd_statement.definition.return_content.response_encoding, ) elif dd_statement.definition.return_content.type == "base64": - contents = get_unix_content(name=dd_statement.definition.name, binary=True) + contents = get_unix_content(name=dd_statement.definition.name, base64_encode=True) return build_dd_response(dd_statement.name, dd_statement.definition.name, contents) @@ -3034,15 +3037,15 @@ def build_dd_response(dd_name, name, contents): return dd_response -def get_data_set_content(name, binary=False, from_encoding=None, to_encoding=None): +def get_data_set_content(name, base64_encode=False, from_encoding=None, to_encoding=None): """Retrieve the raw contents of a data set. Parameters ---------- name : str The name of the data set. - binary : bool, optional - Determines if contents are retrieved without encoding conversion. Defaults to False. + base64_encode : bool, optional + Determines if contents are retrieved as binary and base64 encoded. Defaults to False. from_encoding : str, optional The encoding of the data set on the z/OS system. Defaults to None. to_encoding : str, optional @@ -3056,20 +3059,24 @@ def get_data_set_content(name, binary=False, from_encoding=None, to_encoding=Non quoted_name = quote(name) if "'" not in quoted_name: quoted_name = "'{0}'".format(quoted_name) - return get_content( - '"//{0}"'.format(quoted_name), binary, from_encoding, to_encoding - ) + + if base64_encode: + with zoau_io.RecordIO("//{0}".format(quoted_name), "r") as records: + content = base64.b64encode(b''.join(records.readrecords())).decode() + else: + content = get_content('"//{0}"'.format(quoted_name), from_encoding, to_encoding) + return content -def get_unix_content(name, binary=False, from_encoding=None, to_encoding=None): +def get_unix_content(name, base64_encode=False, from_encoding=None, to_encoding=None): """Retrieve the raw contents of a UNIX file. Parameters ---------- name : str The name of the UNIX file. - binary : bool, optional - Determines if contents are retrieved without encoding conversion. Defaults to False. + base64_encode : bool, optional + Determines if contents are retrieved as binary and base64 encoded. Defaults to False. from_encoding : str, optional The encoding of the UNIX file on the z/OS system. Defaults to None. to_encoding : str, optional @@ -3080,18 +3087,21 @@ def get_unix_content(name, binary=False, from_encoding=None, to_encoding=None): stdout : str The raw content of the UNIX file. """ - return get_content("{0}".format(quote(name)), binary, from_encoding, to_encoding) + if base64_encode: + with open(name, "rb") as f: + content = base64.b64encode(f.read()).decode() + else: + content = get_content("{0}".format(quote(name)), from_encoding, to_encoding) + return content -def get_content(formatted_name, binary=False, from_encoding=None, to_encoding=None): +def get_content(formatted_name, from_encoding=None, to_encoding=None): """Retrieve raw contents of a data set or UNIXfile. Parameters ---------- name : str The name of the data set or UNIX file, formatted and quoted for proper usage in command. - binary : bool, optional - Determines if contents are retrieved without encoding conversion. Defaults to False. from_encoding : str, optional The encoding of the data set or UNIX file on the z/OS system. Defaults to None. to_encoding : str, optional @@ -3103,11 +3113,9 @@ def get_content(formatted_name, binary=False, from_encoding=None, to_encoding=No The raw content of the data set or UNIX file. If unsuccessful in retrieving data, returns empty string. """ module = AnsibleModuleHelper(argument_spec={}) - conversion_command = "" - if not binary: - conversion_command = " | iconv -f {0} -t {1}".format( - quote(from_encoding), quote(to_encoding) - ) + conversion_command = " | iconv -f {0} -t {1}".format( + quote(from_encoding), quote(to_encoding) + ) # * name argument should already be quoted by the time it reaches here # TODO: determine if response should be byte object rc, stdout, stderr = module.run_command( diff --git a/tests/functional/modules/test_zos_mvs_raw_func.py b/tests/functional/modules/test_zos_mvs_raw_func.py index 230367175..851b3fdb1 100644 --- a/tests/functional/modules/test_zos_mvs_raw_func.py +++ b/tests/functional/modules/test_zos_mvs_raw_func.py @@ -563,7 +563,10 @@ def test_record_formats(ansible_zos_module, record_format, volumes_on_systems): ("text", "IDCAMS SYSTEM"), ( "base64", - "������@@������", + "8cnEw8HU4kBA4uji48XUQOLF2eXJw8Xi" + # the above corresponds to the following bytes: + # f1 c9 c4 c3 c1 d4 e2 40 40 e2 e8 e2 e3 c5 d4 40 e2 c5 d9 e5 c9 c3 c5 e2 + # which translate in ebdic to: "1IDCAMS SYSTEM SERVICE" ), ], ) @@ -1096,7 +1099,10 @@ def test_input_provided_as_list(ansible_zos_module): ("text", "LISTCAT ENTRIES"), ( "base64", - "@�������@�������", + "QNPJ4uPDweNAxdXj2cnF4k1", + # the above corresponds to the following bytes: + # 40 d3 c9 e2 e3 c3 c1 e3 40 c5 d5 e3 d9 c9 c5 e2 + # which translate in ebdic to: " LISTCAT ENTRIES" ), ], ) @@ -1148,7 +1154,6 @@ def test_input_return_content_types(ansible_zos_module, return_content_type, exp "iso8859-1", "ibm-1047", "|�qBFfeF|g�F�qgB��", - ), ( "ibm-1047", @@ -1569,7 +1574,10 @@ def test_file_record_format(ansible_zos_module, record_format): ("text", "IDCAMS SYSTEM"), ( "base64", - "�������@@������@��������@", + "8cnEw8HU4kBA4uji48XUQOLF2eXJw8Xi", + # the above corresponds to the following bytes: + # f1 c9 c4 c3 c1 d4 e2 40 40 e2 e8 e2 e3 c5 d4 40 e2 c5 d9 e5 c9 c3 c5 e2 + # which translate in ebdic to: "1IDCAMS SYSTEM SERVICES" ), ], ) From 61808ca0889e07445944bcbd1bebc1da77b17825 Mon Sep 17 00:00:00 2001 From: Demetri <dimatos@gmail.com> Date: Mon, 30 Sep 2024 15:24:41 -0700 Subject: [PATCH 470/495] Staging v1.11.0 (#1726) (#1742) * Staging v1.11.0 (#1726) * Fixed zos_copy * Fixed encode test case * update version and remove author Signed-off-by: ddimatos <dimatos@gmail.com> * Python is EOS 9-30, updated meta Signed-off-by: ddimatos <dimatos@gmail.com> * Update README with new links and cleanup Signed-off-by: ddimatos <dimatos@gmail.com> * Update GA dates Signed-off-by: ddimatos <dimatos@gmail.com> * Update module docs with auto generations Signed-off-by: ddimatos <dimatos@gmail.com> * Add changelog summary Signed-off-by: ddimatos <dimatos@gmail.com> * Changelog updates Signed-off-by: ddimatos <dimatos@gmail.com> * remove stale changelog files Signed-off-by: ddimatos <dimatos@gmail.com> * update galaxy exclusions Signed-off-by: ddimatos <dimatos@gmail.com> * update release notes for 1.11.0 Signed-off-by: ddimatos <dimatos@gmail.com> * Remove unused imports from action plugin Signed-off-by: ddimatos <dimatos@gmail.com> * Corrections for flake8 Signed-off-by: ddimatos <dimatos@gmail.com> * Updated example to not escape the paren Signed-off-by: ddimatos <dimatos@gmail.com> * Update module doc to correct use of monospace Signed-off-by: ddimatos <dimatos@gmail.com> * Corretions to release notes Signed-off-by: ddimatos <dimatos@gmail.com> * Corrections Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> * Update test case match to more strict comparison Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> --- CHANGELOG.rst | 8 +- README.md | 13 +- changelogs/.plugin-cache.yaml | 2 +- changelogs/changelog.yaml | 26 ++- .../fragments/v1.11.0-beta.1_summary.yml | 6 - docs/source/modules/zos_blockinfile.rst | 2 +- docs/source/modules/zos_lineinfile.rst | 2 +- docs/source/modules/zos_unarchive.rst | 6 +- docs/source/release_notes.rst | 170 +++++++++++------- .../source/resources/releases_maintenance.rst | 2 +- galaxy.yml | 5 +- meta/ibm_zos_core_meta.yml | 4 +- plugins/modules/zos_blockinfile.py | 2 +- plugins/modules/zos_lineinfile.py | 2 +- plugins/modules/zos_unarchive.py | 6 +- 15 files changed, 152 insertions(+), 104 deletions(-) delete mode 100644 changelogs/fragments/v1.11.0-beta.1_summary.yml diff --git a/CHANGELOG.rst b/CHANGELOG.rst index d23ceb7ed..dab461fbf 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -4,13 +4,13 @@ ibm.ibm\_zos\_core Release Notes .. contents:: Topics -v1.11.0-beta.1 -============== +v1.11.0 +======= Release Summary --------------- -Release Date: '2024-08-05' +Release Date: '2024-10-01' This changelog describes all changes made to the modules and plugins included in this collection. The release date is the date the changelog is created. For additional details such as required dependencies and availability review @@ -32,11 +32,11 @@ Minor Changes - zos_job_submit - add support for generation data groups and generation data sets as sources for jobs. (https://github.com/ansible-collections/ibm_zos_core/pull/1497) - zos_lineinfile - Added support for GDG and GDS relative name notation to use a data set. And backup in new generations. Added support for data set names with special characters like $, /#, /- and @. (https://github.com/ansible-collections/ibm_zos_core/pull/1516). - zos_mount - Added support for data set names with special characters ($, /#, /- and @). This is for both src and backup data set names. (https://github.com/ansible-collections/ibm_zos_core/pull/1631). -- zos_tso_command - Added support for GDG and GDS relative name notation to use a data set name. Added support for data set names with special characters like $, /#, /- and @. (https://github.com/ansible-collections/ibm_zos_core/pull/1563). - zos_mvs_raw - Added support for GDG and GDS relative name notation to use a data set. Added support for data set names with special characters like $, /#, /- and @. (https://github.com/ansible-collections/ibm_zos_core/pull/1525). - zos_mvs_raw - Added support for GDG and GDS relative positive name notation to use a data set. (https://github.com/ansible-collections/ibm_zos_core/pull/1541). - zos_mvs_raw - Redesign the wrappers of dd clases to use properly the arguments. (https://github.com/ansible-collections/ibm_zos_core/pull/1470). - zos_script - Improved the copy to remote mechanic to avoid using deepcopy that could result in failure for some systems. (https://github.com/ansible-collections/ibm_zos_core/pull/1561). +- zos_tso_command - Added support for GDG and GDS relative name notation to use a data set name. Added support for data set names with special characters like $, /#, /- and @. (https://github.com/ansible-collections/ibm_zos_core/pull/1563). - zos_unarchive - Added support for data set names with special characters like $, /#, /- and @. (https://github.com/ansible-collections/ibm_zos_core/pull/1511). - zos_unarchive - Improved the copy to remote mechanic to avoid using deepcopy that could result in failure for some systems. (https://github.com/ansible-collections/ibm_zos_core/pull/1561). diff --git a/README.md b/README.md index e0d274bad..27878fbac 100644 --- a/README.md +++ b/README.md @@ -6,7 +6,6 @@ The **IBM z/OS core** collection enables Ansible to interact with z/OS Data Sets The **IBM z/OS core** collection is part of the **Red Hat® Ansible Certified Content for IBM Z®** offering that brings Ansible automation to IBM Z®. This collection brings forward the possibility to manage batch jobs, perform program authorizations, run operator operations, and execute both JES and MVS commands as well as execute shell, python, and REXX scripts. It supports data set creation, searching, copying, fetching, and encoding. It provides both archiving and unarchiving of data sets, initializing volumes, performing backups and supports Jinja templating. - <br/>System programmers can enable pipelines to setup, tear down and deploy applications while system administrators can automate time consuming repetitive tasks inevitably freeing up their time. New z/OS users can find comfort in Ansible's familiarity and expedite their proficiency in record time. ## Requirements @@ -64,7 +63,7 @@ after an update. ```sh PYZ: "path_to_python_installation_on_zos_target" ZOAU: "path_to_zoau_installation_on_zos_target" -ZOAU_PYTHONPATH: "path_to_zoau_wheel_installation_directory" +ZOAU_PYTHON_LIBRARY_PATH: "path_to_zoau_wheel_installation_directory" ansible_python_interpreter: "{{ PYZ }}/bin/python3" @@ -135,12 +134,10 @@ All releases will meet the following test criteria. * ansible-core v2.15.x * Python 3.11.x -* IBM Open Enterprise SDK for Python 3.11.x +* IBM Open Enterprise SDK for Python 3.12.x * IBM Z Open Automation Utilities (ZOAU) 1.3.1.x * z/OS V2R5 -This release introduces case sensitivity for option values and includes a porting guide in the [release notes](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html) to assist with which option values will need to be updated. - ## Contributing This community is not currently accepting contributions. However, we encourage you to open [git issues](https://github.com/ansible-collections/ibm_zos_core/issues) for bugs, comments or feature requests and check back periodically for when community contributions will be accepted in the near future. @@ -154,7 +151,7 @@ If you would like to communicate with this community, you can do so through the * GitHub [discussions](https://github.com/ansible-collections/ibm_zos_core/discussions). * GitHub [issues](https://github.com/ansible-collections/ibm_zos_core/issues/new/choose). * [Ansible Forum](https://forum.ansible.com/), please use the `zos` tag to ensure proper awareness. -* Discord [System Z Enthusiasts](https://discord.gg/Kmy5QaUGbB) room [ansible](https://discord.gg/nHrDdRTC). +* Discord [System Z Enthusiasts](https://discord.gg/sze) room `ansible`. * Matrix general usage questions [room](https://matrix.to/#/#users:ansible.com). ## Support @@ -178,8 +175,8 @@ For Galaxy and GitHub users, to see the supported ansible-core versions, review | Version | Status | Release notes | Changelogs | |----------|----------------|---------------|------------| | 1.12.x | In development | unreleased | unreleased | -| 1.11.x | In preview | [Release notes](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html#version-1-11-0-beta.1) | [Changelogs](https://github.com/ansible-collections/ibm_zos_core/blob/v1.11.0-beta.1/CHANGELOG.rst) | -| 1.10.x | Current | [Release notes](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html#version-1-10-0) | [Changelogs](https://github.com/ansible-collections/ibm_zos_core/blob/v1.10.0/CHANGELOG.rst) | +| 1.11.x | Current | [Release notes](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html#version-1-11-0) | [Changelogs](https://github.com/ansible-collections/ibm_zos_core/blob/v1.11.0/CHANGELOG.rst) | +| 1.10.x | Released | [Release notes](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html#version-1-10-0) | [Changelogs](https://github.com/ansible-collections/ibm_zos_core/blob/v1.10.0/CHANGELOG.rst) | | 1.9.x | Released | [Release notes](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html#version-1-9-2) | [Changelogs](https://github.com/ansible-collections/ibm_zos_core/blob/v1.9.2/CHANGELOG.rst) | | 1.8.x | Released | [Release notes](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html#version-1-8-0) | [Changelogs](https://github.com/ansible-collections/ibm_zos_core/blob/v1.8.0/CHANGELOG.rst) | | 1.7.x | Released | [Release notes](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html#version-1-7-0) | [Changelogs](https://github.com/ansible-collections/ibm_zos_core/blob/v1.7.0/CHANGELOG.rst) | diff --git a/changelogs/.plugin-cache.yaml b/changelogs/.plugin-cache.yaml index dcc631cd0..6aa86eff0 100644 --- a/changelogs/.plugin-cache.yaml +++ b/changelogs/.plugin-cache.yaml @@ -135,4 +135,4 @@ plugins: strategy: {} test: {} vars: {} -version: 1.11.0-beta.1 +version: 1.11.0 diff --git a/changelogs/changelog.yaml b/changelogs/changelog.yaml index 3c48425d7..064ab6d62 100644 --- a/changelogs/changelog.yaml +++ b/changelogs/changelog.yaml @@ -259,6 +259,20 @@ releases: - 992-fix-sanity4to6.yml - v1.10.0-beta.1_summary.yml release_date: '2024-05-08' + 1.11.0: + changes: + release_summary: 'Release Date: ''2024-10-01'' + + This changelog describes all changes made to the modules and plugins included + + in this collection. The release date is the date the changelog is created. + + For additional details such as required dependencies and availability review + + the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__' + fragments: + - v1.11.0_summary.yml + release_date: '2024-09-25' 1.11.0-beta.1: changes: bugfixes: @@ -283,9 +297,9 @@ releases: - zos_archive - Added support for GDG and GDS relative name notation to archive data sets. Added support for data set names with special characters like $, /#, /- and @. (https://github.com/ansible-collections/ibm_zos_core/pull/1511). - - zos_backup_restore - Added support for GDS relative name notation to include or - exclude data sets when operation is backup. Added support for data set names - with special characters like $, /#, and @. (https://github.com/ansible-collections/ibm_zos_core/pull/1527). + - zos_backup_restore - Added support for GDS relative name notation to include + or exclude data sets when operation is backup. Added support for data set + names with special characters like $, /#, and @. (https://github.com/ansible-collections/ibm_zos_core/pull/1527). - zos_blockinfile - Added support for GDG and GDS relative name notation to use a data set. And backup in new generations. Added support for data set names with special characters like $, /#, /- and @. (https://github.com/ansible-collections/ibm_zos_core/pull/1516). @@ -315,11 +329,11 @@ releases: to use a data set. (https://github.com/ansible-collections/ibm_zos_core/pull/1541). - zos_mvs_raw - Redesign the wrappers of dd clases to use properly the arguments. (https://github.com/ansible-collections/ibm_zos_core/pull/1470). - - zos_tso_command - Added support for GDG and GDS relative name notation to use - a data set name. Added support for data set names with special characters - like $, /#, /- and @. (https://github.com/ansible-collections/ibm_zos_core/pull/1563). - zos_script - Improved the copy to remote mechanic to avoid using deepcopy that could result in failure for some systems. (https://github.com/ansible-collections/ibm_zos_core/pull/1561). + - zos_tso_command - Added support for GDG and GDS relative name notation to + use a data set name. Added support for data set names with special characters + like $, /#, /- and @. (https://github.com/ansible-collections/ibm_zos_core/pull/1563). - zos_unarchive - Added support for data set names with special characters like $, /#, /- and @. (https://github.com/ansible-collections/ibm_zos_core/pull/1511). - zos_unarchive - Improved the copy to remote mechanic to avoid using deepcopy diff --git a/changelogs/fragments/v1.11.0-beta.1_summary.yml b/changelogs/fragments/v1.11.0-beta.1_summary.yml deleted file mode 100644 index 5c1d60f94..000000000 --- a/changelogs/fragments/v1.11.0-beta.1_summary.yml +++ /dev/null @@ -1,6 +0,0 @@ -release_summary: | - Release Date: '2024-08-05' - This changelog describes all changes made to the modules and plugins included - in this collection. The release date is the date the changelog is created. - For additional details such as required dependencies and availability review - the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__ \ No newline at end of file diff --git a/docs/source/modules/zos_blockinfile.rst b/docs/source/modules/zos_blockinfile.rst index fdd98d0f8..4a61287f8 100644 --- a/docs/source/modules/zos_blockinfile.rst +++ b/docs/source/modules/zos_blockinfile.rst @@ -33,7 +33,7 @@ src The USS file must be an absolute pathname. - Generation data set (GDS) relative name of generation already created. ``e.g. SOME.CREATION(-1).`` + Generation data set (GDS) relative name of generation already created. e.g. *SOME.CREATION(-1*). | **required**: True | **type**: str diff --git a/docs/source/modules/zos_lineinfile.rst b/docs/source/modules/zos_lineinfile.rst index 1db6545c5..a71621277 100644 --- a/docs/source/modules/zos_lineinfile.rst +++ b/docs/source/modules/zos_lineinfile.rst @@ -33,7 +33,7 @@ src The USS file must be an absolute pathname. - Generation data set (GDS) relative name of generation already created. ``e.g. SOME.CREATION(-1).`` + Generation data set (GDS) relative name of generation already created. e.g. *SOME.CREATION(-1*). | **required**: True | **type**: str diff --git a/docs/source/modules/zos_unarchive.rst b/docs/source/modules/zos_unarchive.rst index 89b4b065c..07988a47e 100644 --- a/docs/source/modules/zos_unarchive.rst +++ b/docs/source/modules/zos_unarchive.rst @@ -39,7 +39,7 @@ src MVS data sets supported types are ``SEQ``, ``PDS``, ``PDSE``. - GDS relative names are supported ``e.g. USER.GDG(-1)``. + GDS relative names are supported. e.g. *USER.GDG(-1*). | **required**: True | **type**: str @@ -151,7 +151,7 @@ owner include A list of directories, files or data set names to extract from the archive. - GDS relative names are supported ``e.g. USER.GDG(-1)``. + GDS relative names are supported. e.g. *USER.GDG(-1*). When ``include`` is set, only those files will we be extracted leaving the remaining files in the archive. @@ -165,7 +165,7 @@ include exclude List the directory and file or data set names that you would like to exclude from the unarchive action. - GDS relative names are supported ``e.g. USER.GDG(-1)``. + GDS relative names are supported. e.g. *USER.GDG(-1*). Mutually exclusive with include. diff --git a/docs/source/release_notes.rst b/docs/source/release_notes.rst index 45f3f100a..e2ee60586 100644 --- a/docs/source/release_notes.rst +++ b/docs/source/release_notes.rst @@ -6,75 +6,92 @@ Releases ======== -Version 1.11.0-beta.1 -===================== +Version 1.11.0 +============== Minor Changes ------------- -- ``zos_apf`` - Added support that auto-escapes 'library' names containing symbols. -- ``zos_archive`` - Added support for GDG and GDS relative name notation to archive data sets. Added support for data set names with special characters like $, /#, /- and @. -- ``zos_backup_restore`` - Added support for GDS relative name notation to include or exclude data sets when operation is backup. Added support for data set names with special characters like $, /#, and @. -- ``zos_blockinfile`` - Added support for GDG and GDS relative name notation to specify a data set. And backup in new generations. Added support for data set names with special characters like $, /#, /- and @. -- ``zos_copy`` - Added support for copying from and copying to generation data sets (GDS) and generation data groups (GDG) including using a GDS for backup. -- ``zos_data_set`` - Added support for GDG and GDS relative name notation to create, delete, catalog and uncatalog a data set. Added support for data set names with special characters like $, /#, /- and @. -- ``zos_encode`` - Added support for converting the encodings of generation data sets (GDS). Also added support to backup into GDS. -- ``zos_fetch`` - Added support for fetching generation data groups (GDG) and generation data sets (GDS). Added support for specifying data set names with special characters like $, /#, /- and @. -- ``zos_find`` - Added support for finding generation data groups (GDG) and generation data sets (GDS). Added support for specifying data set names with special characters like $, /#, /- and @. -- ``zos_job_submit`` +- ``zos_apf`` - Added support for data set names (libraries) with special characters ($, /#, /- and @). +- ``zos_archive`` - - Improved the mechanism for copying to remote systems by removing the use of deepcopy, which had previously resulted in the module failing on some systems. - - Added support for running JCL stored in generation data groups (GDG) and generation data sets (GDS). + - Added support for GDG and GDS relative name notation to archive data sets. + - Added support for data set names with special characters ($, /#, /- and @). -- ``zos_lineinfile`` - Added support for GDG and GDS relative name notation to specify the target data set and to backup into new generations. Added support for data set names with special characters like $, /#, /- and @. -- ``zos_mount`` - Added support for data set names with special characters ($, /#, /- and @). -- ``zos_mvs_raw`` - Added support for GDG and GDS relative name notation to specify data set names. Added support for data set names with special characters like $, /#, /- and @. -- ``zos_script`` - Improved the mechanism for copying to remote systems by removing the use of deepcopy, which had previously resulted in the module failing on some systems. -- ``zos_tso_command`` - Added support for using GDG and GDS relative name notation in running TSO commands. Added support for data set names with special characters like $, /#, /- and @. -- ``zos_unarchive`` +- ``zos_backup_restore`` - - Added support for data set names with special characters like $, /#, /- and @. - - Improved the mechanism for copying to remote systems by removing the use of deepcopy, which had previously resulted in the module failing on some systems. + - Added support for GDS relative name notation to include or exclude data sets when operation is backup. + - Added support for data set names with special characters ($, /#, /- and @). -Bugfixes --------- +- ``zos_blockinfile`` + + - Added support for GDG and GDS relative name notation to specify a data set. And backup in new generations. + - Added support for data set names with special characters ($, /#, /- and @). - ``zos_copy`` - - a regression in version 1.4.0 made the module stop automatically computing member names when copying a single file into a PDS/E. Fix now lets a user copy a single file into a PDS/E without adding a member in the dest option. - - module would use opercmd to check if a non existent destination data set is locked. Fix now only checks if the destination is already present. + - Added support for copying from and to generation data sets (GDS) and generation data groups (GDG) including using a GDS for backup. + - Added support for data set names with special characters ($, /#, /- and @). -- ``zos_data_set`` - When checking if a data set is cataloged, module failed to account for exceptions which occurred during the LISTCAT. The fix now raises an MVSCmdExecError if the return code from LISTCAT is too high. -- ``zos_job_submit`` - The module was not propagating any error types including UnicodeDecodeError, JSONDecodeError, TypeError, KeyError when encountered. The fix now shares the type error in the error message. -- ``zos_mvs_raw`` - The first character of each line in dd_output was missing. The fix now includes the first character of each line. +- ``zos_data_set`` -Availability ------------- + - Added support for GDG and GDS relative name notation to create, delete, catalog and uncatalog a data set. + - Added support for data set names with special characters ($, /#, /- and @). -* `Galaxy`_ -* `GitHub`_ +- ``zos_encode`` -Requirements ------------- + - Added support for converting the encodings of generation data sets (GDS). + - Added support for data set names with special characters ($, /#, /- and @). -The IBM z/OS core collection has several dependencies, please review the `z/OS core support matrix`_ to understand both the -controller and z/OS managed node dependencies. +- ``zos_fetch`` -Known Issues ------------- -- ``zos_job_submit`` - when setting 'location' to 'local' and not specifying the from and to encoding, the modules defaults are not read leaving the file in its original encoding; explicitly set the encodings instead of relying on the default. -- ``zos_job_submit`` - when submitting JCL, the response value returned for **byte_count** is incorrect. -- ``zos_apf`` - When trying to remove a library that contains the '$' character in the name from APF(authorized program facility), operation will fail. -- In the past, choices could be defined in either lower or upper case. Now, only the case that is identified in the docs can be set, this is so that the collection can continue to maintain certified status. + - Added support for fetching generation data groups (GDG) and generation data sets (GDS). + - Added support for data set names with special characters ($, /#, /- and @). +- ``zos_find`` -Version 1.9.2 -============= + - Added support for finding generation data groups (GDG) and generation data sets (GDS). + - Added support for data set names with special characters ($, /#, /- and @). + +- ``zos_job_submit`` + + - Improved the mechanism for copying to remote systems by removing the use of deepcopy, which had previously resulted in the module failing on some systems. + - Added support for running JCL stored in generation data groups (GDG) and generation data sets (GDS). + - Added support for data set names with special characters ($, /#, /- and @). + +- ``zos_lineinfile`` + + - Added support for GDG and GDS relative name notation to specify the target data set and to backup into new generations. + - Added support for data set names with special characters ($, /#, /- and @). + +- ``zos_mount`` - Added support for data set names with special characters ($, /#, /- and @). +- ``zos_mvs_raw`` + + - Added support for GDG and GDS relative name notation to specify data set names. + - Added support for data set names with special characters ($, /#, /- and @). + +- ``zos_script`` - Improved the mechanism for copying to remote systems by removing the use of deepcopy, which had previously resulted in the module failing on some systems. +- ``zos_tso_command`` + + - Added support for using GDG and GDS relative name notation in running TSO commands. + - Added support for data set names with special characters ($, /#, /- and @). + +- ``zos_unarchive`` + + - Improved the mechanism for copying to remote systems by removing the use of deepcopy, which had previously resulted in the module failing on some systems. + - Added support for data set names with special characters ($, /#, /- and @). Bugfixes -------- -- ``zos_copy`` - when creating the destination data set, the module would unnecessarily check if a data set is locked by another process. The module no longer performs this check when it creates the data set. +- ``zos_copy`` + + - Fixes the issue that prevents the module from automatically computing member names when copying a file into a PDS/E. The module now computes the member name when copying into a PDS/E. + - Fixes an issue that would perform an unnecessary check if a destination data set is locked for data sets the module created. The module only performs this check for destinations that are present. + +- ``zos_data_set`` - When checking if a data set is cataloged, module failed to account for exceptions which occurred during the LISTCAT. The module now raises an MVSCmdExecError if the return code from LISTCAT exceeds the determined threshold. +- ``zos_job_submit`` - Was not propagating any error types including UnicodeDecodeError, JSONDecodeError, TypeError, KeyError when encountered. The module now shares the error type (UnicodeDecodeError, JSONDecodeError, TypeError, KeyError) in the error message. +- ``zos_mvs_raw`` - The first character of each line in dd_output was missing. The module now includes the first character of each line. Availability ------------ @@ -87,28 +104,13 @@ Requirements ------------ The IBM z/OS core collection has several dependencies, please review the `z/OS core support matrix`_ to understand both the -controller and z/OS managed node dependencies. +control node and z/OS managed node dependencies. Known Issues ------------ - -- ``zos_job_submit`` - when setting 'location' to 'LOCAL' and not specifying the from and to encoding, the modules defaults are not read leaving the file in its original encoding; explicitly set the encodings instead of relying on the default. +- ``zos_job_submit`` - when setting 'location' to 'local' and not specifying the from and to encoding, the modules defaults are not read leaving the file in its original encoding; explicitly set the encodings instead of relying on the default. - ``zos_job_submit`` - when submitting JCL, the response value returned for **byte_count** is incorrect. - -- ``zos_job_submit``, ``zos_job_output``, ``zos_operator_action_query`` - encounters UTF-8 decoding errors when interacting with results that contain non-printable UTF-8 characters in the response. This has been addressed in this release and corrected with **ZOAU version 1.2.5.6** or later. - - - If the appropriate level of ZOAU can not be installed, some options are to: - - - Specify that the ASA assembler option be enabled to instruct the assembler to use ANSI control characters instead of machine code control characters. - - Ignore module errors by using **ignore_errors:true** for a specific playbook task. - - If the error is resulting from a batch job, add **ignore_errors:true** to the task and capture the output into a registered variable to extract the - job ID with a regular expression. Then use ``zos_job_output`` to display the DD without the non-printable character such as the DD **JESMSGLG**. - - If the error is the result of a batch job, set option **return_output** to false so that no DDs are read which could contain the non-printable UTF-8 characters. - -- ``zos_data_set`` - An undocumented option **size** was defined in module **zos_data_set**, this has been removed to satisfy collection certification, use the intended and documented **space_primary** option. - -- In the past, choices could be defined in either lower or upper case. Now, only the case that is identified in the docs can be set, this is so that the collection can continue to maintain certified status. - +- ``zos_apf`` - When trying to remove a library that contains the '$' character in the name from APF(authorized program facility), operation will fail. Version 1.10.0 ============== @@ -228,6 +230,46 @@ Known Issues - In the past, choices could be defined in either lower or upper case. Now, only the case that is identified in the docs can be set, this is so that the collection can continue to maintain certified status. - Use of special characters (#, @, $, \- ) in different options like data set names and commands is not fully supported, some modules support them but is the user responsibility to escape them. Read each module documentation for further details. +Version 1.9.2 +============= + +Bugfixes +-------- + +- ``zos_copy`` - when creating the destination data set, the module would unnecessarily check if a data set is locked by another process. The module no longer performs this check when it creates the data set. + +Availability +------------ + +* `Automation Hub`_ +* `Galaxy`_ +* `GitHub`_ + +Requirements +------------ + +The IBM z/OS core collection has several dependencies, please review the `z/OS core support matrix`_ to understand both the +controller and z/OS managed node dependencies. + +Known Issues +------------ + +- ``zos_job_submit`` - when setting 'location' to 'LOCAL' and not specifying the from and to encoding, the modules defaults are not read leaving the file in its original encoding; explicitly set the encodings instead of relying on the default. +- ``zos_job_submit`` - when submitting JCL, the response value returned for **byte_count** is incorrect. + +- ``zos_job_submit``, ``zos_job_output``, ``zos_operator_action_query`` - encounters UTF-8 decoding errors when interacting with results that contain non-printable UTF-8 characters in the response. This has been addressed in this release and corrected with **ZOAU version 1.2.5.6** or later. + + - If the appropriate level of ZOAU can not be installed, some options are to: + + - Specify that the ASA assembler option be enabled to instruct the assembler to use ANSI control characters instead of machine code control characters. + - Ignore module errors by using **ignore_errors:true** for a specific playbook task. + - If the error is resulting from a batch job, add **ignore_errors:true** to the task and capture the output into a registered variable to extract the + job ID with a regular expression. Then use ``zos_job_output`` to display the DD without the non-printable character such as the DD **JESMSGLG**. + - If the error is the result of a batch job, set option **return_output** to false so that no DDs are read which could contain the non-printable UTF-8 characters. + +- ``zos_data_set`` - An undocumented option **size** was defined in module **zos_data_set**, this has been removed to satisfy collection certification, use the intended and documented **space_primary** option. + +- In the past, choices could be defined in either lower or upper case. Now, only the case that is identified in the docs can be set, this is so that the collection can continue to maintain certified status. Version 1.9.1 ============= diff --git a/docs/source/resources/releases_maintenance.rst b/docs/source/resources/releases_maintenance.rst index 9a5adbce8..a9d30942a 100644 --- a/docs/source/resources/releases_maintenance.rst +++ b/docs/source/resources/releases_maintenance.rst @@ -89,7 +89,7 @@ The z/OS managed node includes several shells, currently the only supported shel +---------+----------------------------+---------------------------------------------------+---------------+---------------+ | Version | Controller | Managed Node | GA | End of Life | +=========+============================+===================================================+===============+===============+ -| 1.11.x |- `ansible-core`_ >=2.15.x |- `z/OS`_ V2R4 - V3Rx | In preview | TBD | +| 1.11.x |- `ansible-core`_ >=2.15.x |- `z/OS`_ V2R4 - V2Rx | 1 Oct 2024 | 1 Oct 2026 | | |- `Ansible`_ >=8.0.x |- `z/OS shell`_ | | | | |- `AAP`_ >=2.4 |- IBM `Open Enterprise SDK for Python`_ | | | | | |- IBM `Z Open Automation Utilities`_ >=1.3.1 | | | diff --git a/galaxy.yml b/galaxy.yml index 910442ef8..deee9a6e1 100644 --- a/galaxy.yml +++ b/galaxy.yml @@ -6,7 +6,7 @@ namespace: ibm name: ibm_zos_core # The collection version -version: "1.11.0-beta.1" +version: "1.11.0" # Collection README file readme: README.md @@ -18,7 +18,6 @@ authors: - Ketan Kelkar <ketan.kelkar@ibm.com> - Ivan Moreno <ivan.moreno.soto@ibm.com> - Oscar Fernando Flores Garcia <fernando.flores@ibm.com> - - Jenny Huang <jennyhuang@ibm.com> - Marcel Gutierrez <andre.marcel.gutierrez@ibm.com> # Description @@ -98,3 +97,5 @@ build_ignore: - tests/sanity/ignore-2.13.txt - tests/sanity/ignore-2.14.txt - venv* + - ansible_collections + - '*.log' diff --git a/meta/ibm_zos_core_meta.yml b/meta/ibm_zos_core_meta.yml index 16ee31ca9..b130f6a5a 100644 --- a/meta/ibm_zos_core_meta.yml +++ b/meta/ibm_zos_core_meta.yml @@ -1,9 +1,9 @@ name: ibm_zos_core -version: "1.11.0-beta.1" +version: "1.11.0" managed_requirements: - name: "IBM Open Enterprise SDK for Python" - version: ">=3.10" + version: ">=3.11" - name: "Z Open Automation Utilities" version: diff --git a/plugins/modules/zos_blockinfile.py b/plugins/modules/zos_blockinfile.py index 2befe61ef..dd388bc8e 100644 --- a/plugins/modules/zos_blockinfile.py +++ b/plugins/modules/zos_blockinfile.py @@ -39,7 +39,7 @@ PS (sequential data set), member of a PDS or PDSE, PDS, PDSE. - The USS file must be an absolute pathname. - Generation data set (GDS) relative name of generation already - created. ``e.g. SOME.CREATION(-1).`` + created. e.g. I(SOME.CREATION(-1)). type: str aliases: [ path, destfile, name ] required: true diff --git a/plugins/modules/zos_lineinfile.py b/plugins/modules/zos_lineinfile.py index c5f262fe0..d299f9cf3 100644 --- a/plugins/modules/zos_lineinfile.py +++ b/plugins/modules/zos_lineinfile.py @@ -37,7 +37,7 @@ PS (sequential data set), member of a PDS or PDSE, PDS, PDSE. - The USS file must be an absolute pathname. - Generation data set (GDS) relative name of generation already - created. ``e.g. SOME.CREATION(-1).`` + created. e.g. I(SOME.CREATION(-1)). type: str aliases: [ path, destfile, name ] required: true diff --git a/plugins/modules/zos_unarchive.py b/plugins/modules/zos_unarchive.py index f5febbf90..014d204a6 100644 --- a/plugins/modules/zos_unarchive.py +++ b/plugins/modules/zos_unarchive.py @@ -36,7 +36,7 @@ - I(src) can be a USS file or MVS data set name. - USS file paths should be absolute paths. - MVS data sets supported types are C(SEQ), C(PDS), C(PDSE). - - GDS relative names are supported ``e.g. USER.GDG(-1)``. + - GDS relative names are supported. e.g. I(USER.GDG(-1)). type: str required: true format: @@ -146,7 +146,7 @@ description: - A list of directories, files or data set names to extract from the archive. - - GDS relative names are supported ``e.g. USER.GDG(-1)``. + - GDS relative names are supported. e.g. I(USER.GDG(-1)). - When C(include) is set, only those files will we be extracted leaving the remaining files in the archive. - Mutually exclusive with exclude. @@ -157,7 +157,7 @@ description: - List the directory and file or data set names that you would like to exclude from the unarchive action. - - GDS relative names are supported ``e.g. USER.GDG(-1)``. + - GDS relative names are supported. e.g. I(USER.GDG(-1)). - Mutually exclusive with include. type: list elements: str From 45084827211ba84dafa88f4af8856b38f43910f1 Mon Sep 17 00:00:00 2001 From: Ivan Moreno <iamorenosoto@gmail.com> Date: Wed, 2 Oct 2024 09:00:56 -0600 Subject: [PATCH 471/495] [Bugfix] [zos_script] Fix missing args when splitting command (#1698) * Fix missing args when splitting command * Update args test * Add changelog fragment --- .../fragments/1698-multiple-args-zos_script.yml | 5 +++++ plugins/action/zos_script.py | 2 +- tests/functional/modules/test_zos_script_func.py | 12 +++++++++--- 3 files changed, 15 insertions(+), 4 deletions(-) create mode 100644 changelogs/fragments/1698-multiple-args-zos_script.yml diff --git a/changelogs/fragments/1698-multiple-args-zos_script.yml b/changelogs/fragments/1698-multiple-args-zos_script.yml new file mode 100644 index 000000000..636c882f6 --- /dev/null +++ b/changelogs/fragments/1698-multiple-args-zos_script.yml @@ -0,0 +1,5 @@ +bugfixes: + - zos_script - The module would discard command line arguments in a command, + except for the first one. Fix now makes sure that all arguments are + passed to the remote command that gets executed. + (https://github.com/ansible-collections/ibm_zos_core/pull/1698). \ No newline at end of file diff --git a/plugins/action/zos_script.py b/plugins/action/zos_script.py index d51c48ddf..53cc8b2c0 100644 --- a/plugins/action/zos_script.py +++ b/plugins/action/zos_script.py @@ -46,7 +46,7 @@ def run(self, tmp=None, task_vars=None): return result script_path = cmd_parts[0] - script_args = cmd_parts[1] if len(cmd_parts) > 1 else "" + script_args = ' '.join(cmd_parts[1:]) if len(cmd_parts) > 1 else "" remote_src = self._process_boolean(module_args.get('remote_src')) user_cmd = tempfile_path = None diff --git a/tests/functional/modules/test_zos_script_func.py b/tests/functional/modules/test_zos_script_func.py index ee213bdf0..e7c83e9ed 100644 --- a/tests/functional/modules/test_zos_script_func.py +++ b/tests/functional/modules/test_zos_script_func.py @@ -23,7 +23,7 @@ # Using || to concatenate strings without extra spaces. REXX_SCRIPT_ARGS = """/* REXX */ -parse arg A ',' B +parse arg 'FIRST=' A ' SECOND=' B say 'args are ' || A || ',' || B return 0 @@ -180,7 +180,9 @@ def test_rexx_script_with_args(ansible_zos_module): rexx_script = REXX_SCRIPT_ARGS script_path = create_local_file(rexx_script, 'rexx') - args = '1,2' + first_arg = 'one' + second_arg = 'two' + args = f'FIRST={first_arg} SECOND={second_arg}' cmd = f"{script_path} '{args}'" zos_script_result = hosts.all.zos_script( @@ -191,7 +193,11 @@ def test_rexx_script_with_args(ansible_zos_module): assert result.get('changed') is True assert result.get('failed', False) is False assert result.get('rc') == 0 - assert result.get('stdout', '').strip() == f'args are {args}' + assert first_arg in result.get('stdout', '') + assert second_arg in result.get('stdout', '') + # Making sure the action plugin passed every argument to the module. + assert args in result.get('invocation').get('module_args').get('cmd') + assert args in result.get('remote_cmd') assert result.get('stderr', '') == '' finally: if os.path.exists(script_path): From 5c05a2504916611a0be261345cb1dcd5d251ff73 Mon Sep 17 00:00:00 2001 From: Ivan Moreno <iamorenosoto@gmail.com> Date: Fri, 4 Oct 2024 15:16:32 -0600 Subject: [PATCH 472/495] [Bugfix] [module_utils] [multiple modules] tmp_hlq use in utils whenever mvscmd gets called (#1695) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * First iteration * Fix blockinfile test * Fix remove * Add fragment * Remove remaining * Fix change * Fix blockinfile * Add use of tmphlq to dataset utils class * Update tmphlq in modules * Remove all tmp occurrences * Fix tmp * Fix tmp * Update tmphlq use in zos_encode * Add tmphlq to data_set_exists * Update module utils * Update modules * Fix tmp_hlq in zos_copy * Fix ensure_present for members * Fix wrong keyword arg in dataset util * Update zos_archive and zos_unarchive * Fix tmphlq error in zos_archive * Add changelog fragment * Update changelog fragment * Update dataset function calls * Fix function calls in zos_data_set * Fix missing import --------- Co-authored-by: André Marcel Gutiérrez Benítez <amgutierrezbenitez@hotmail.com> --- .../1695-tmp_hlq_when_calling_mvscmd.yml | 31 ++ plugins/module_utils/backup.py | 29 +- plugins/module_utils/copy.py | 26 +- plugins/module_utils/data_set.py | 328 ++++++++++++------ plugins/module_utils/encode.py | 20 +- plugins/module_utils/mvs_cmd.py | 12 +- plugins/module_utils/vtoc.py | 26 +- plugins/modules/zos_apf.py | 2 +- plugins/modules/zos_archive.py | 8 +- plugins/modules/zos_blockinfile.py | 2 +- plugins/modules/zos_copy.py | 81 +++-- plugins/modules/zos_data_set.py | 10 +- plugins/modules/zos_encode.py | 35 +- plugins/modules/zos_fetch.py | 9 +- plugins/modules/zos_lineinfile.py | 2 +- plugins/modules/zos_mount.py | 6 +- plugins/modules/zos_unarchive.py | 2 +- .../modules/test_zos_backup_restore.py | 1 + 18 files changed, 432 insertions(+), 198 deletions(-) create mode 100644 changelogs/fragments/1695-tmp_hlq_when_calling_mvscmd.yml diff --git a/changelogs/fragments/1695-tmp_hlq_when_calling_mvscmd.yml b/changelogs/fragments/1695-tmp_hlq_when_calling_mvscmd.yml new file mode 100644 index 000000000..2539501ea --- /dev/null +++ b/changelogs/fragments/1695-tmp_hlq_when_calling_mvscmd.yml @@ -0,0 +1,31 @@ +bugfixes: + - zos_apf - The ``tmp_hlq`` option was previously ignored and default + values were used instead. Fix now honors the value set in the module option. + (https://github.com/ansible-collections/ibm_zos_core/pull/1695). + - zos_archive - The ``tmp_hlq`` option was previously ignored and default + values were used instead. Fix now honors the value set in the module option. + (https://github.com/ansible-collections/ibm_zos_core/pull/1695). + - zos_blockinfile - The ``tmp_hlq`` option was previously ignored and default + values were used instead. Fix now honors the value set in the module option. + (https://github.com/ansible-collections/ibm_zos_core/pull/1695). + - zos_copy - The ``tmp_hlq`` option was previously ignored and default + values were used instead. Fix now honors the value set in the module option. + (https://github.com/ansible-collections/ibm_zos_core/pull/1695). + - zos_data_set - The ``tmp_hlq`` option was previously ignored and default + values were used instead. Fix now honors the value set in the module option. + (https://github.com/ansible-collections/ibm_zos_core/pull/1695). + - zos_encode - The ``tmp_hlq`` option was previously ignored and default + values were used instead. Fix now honors the value set in the module option. + (https://github.com/ansible-collections/ibm_zos_core/pull/1695). + - zos_fetch - The ``tmp_hlq`` option was previously ignored and default + values were used instead. Fix now honors the value set in the module option. + (https://github.com/ansible-collections/ibm_zos_core/pull/1695). + - zos_lineinfile - The ``tmp_hlq`` option was previously ignored and default + values were used instead. Fix now honors the value set in the module option. + (https://github.com/ansible-collections/ibm_zos_core/pull/1695). + - zos_mount - The ``tmp_hlq`` option was previously ignored and default + values were used instead. Fix now honors the value set in the module option. + (https://github.com/ansible-collections/ibm_zos_core/pull/1695). + - zos_unarchive - The ``tmp_hlq`` option was previously ignored and default + values were used instead. Fix now honors the value set in the module option. + (https://github.com/ansible-collections/ibm_zos_core/pull/1695). \ No newline at end of file diff --git a/plugins/module_utils/backup.py b/plugins/module_utils/backup.py index 716e0d3b2..eb2a22004 100644 --- a/plugins/module_utils/backup.py +++ b/plugins/module_utils/backup.py @@ -76,6 +76,8 @@ def mvs_file_backup(dsn, bk_dsn=None, tmphlq=None): It could be an MVS PS/PDS/PDSE/VSAM(KSDS), etc. bk_dsn : str The name of the backup data set. + tmphlq : str + High Level Qualifier for temporary datasets. Returns ------- @@ -128,14 +130,14 @@ def mvs_file_backup(dsn, bk_dsn=None, tmphlq=None): if DataSet.is_gds_positive_relative_name(bk_dsn): cp_rc = datasets.copy(dsn, bk_dsn) else: - cp_rc = _copy_ds(dsn, bk_dsn) + cp_rc = _copy_ds(dsn, bk_dsn, tmphlq=tmphlq) if cp_rc == 12: # The data set is probably a PDS or PDSE # Delete allocated backup that was created when attempting to use _copy_ds() # Safe to delete because _copy_ds() would have raised an exception if it did # not successfully create the backup data set, so no risk of it predating module invocation datasets.delete(bk_dsn) - _allocate_model(bk_dsn, dsn) + _allocate_model(bk_dsn, dsn, tmphlq=tmphlq) rc, out, err = _copy_pds(dsn, bk_dsn) if rc != 0: raise BackupError( @@ -222,7 +224,7 @@ def uss_file_backup(path, backup_name=None, compress=False): return backup_name -def _copy_ds(ds, bk_ds): +def _copy_ds(ds, bk_ds, tmphlq=None): """Copy the contents of a data set to another. Parameters @@ -231,6 +233,8 @@ def _copy_ds(ds, bk_ds): The source data set to be copied from. Should be SEQ or VSAM. bk_dsn : str The destination data set to copy to. + tmphlq : str + High Level Qualifier for temporary datasets. Returns ------- @@ -243,14 +247,19 @@ def _copy_ds(ds, bk_ds): When copying data fails. """ module = AnsibleModuleHelper(argument_spec={}) - _allocate_model(bk_ds, ds) + _allocate_model(bk_ds, ds, tmphlq=tmphlq) repro_cmd = """ REPRO - INDATASET('{0}') - OUTDATASET('{1}')""".format( ds, bk_ds ) + + cmd = "mvscmdauth --pgm=idcams --sysprint=* --sysin=stdin" + if tmphlq: + cmd = "{0} -Q={1}".format(cmd, tmphlq) + rc, out, err = module.run_command( - "mvscmdauth --pgm=idcams --sysprint=* --sysin=stdin", data=repro_cmd, errors='replace' + cmd, data=repro_cmd, errors='replace' ) if rc != 0 and rc != 12: datasets.delete(bk_ds) @@ -259,12 +268,12 @@ def _copy_ds(ds, bk_ds): ds, out, err ) ) - if rc != 0 and DataSet.is_empty(ds): + if rc != 0 and DataSet.is_empty(ds, tmphlq=tmphlq): rc = 0 return rc -def _allocate_model(ds, model): +def _allocate_model(ds, model, tmphlq=None): """Allocate a data set using allocation information of a model data set. Parameters @@ -273,6 +282,8 @@ def _allocate_model(ds, model): The name of the data set to be allocated. model : str The name of the data set whose allocation parameters should be used. + tmphlq : str + High Level Qualifier for temporary datasets. Returns ------- @@ -290,7 +301,11 @@ def _allocate_model(ds, model): LIKE('{1}')""".format( ds, model ) + cmd = "mvscmdauth --pgm=ikjeft01 --systsprt=* --systsin=stdin" + if tmphlq: + cmd = "{0} -Q={1}".format(cmd, tmphlq) + rc, out, err = module.run_command(cmd, data=alloc_cmd, errors='replace') if rc != 0: raise BackupError( diff --git a/plugins/module_utils/copy.py b/plugins/module_utils/copy.py index 499aecbd9..f8b37b514 100644 --- a/plugins/module_utils/copy.py +++ b/plugins/module_utils/copy.py @@ -339,7 +339,7 @@ def copy_mvs2mvs(src, dest, is_binary=False): return rc, out, err -def copy_vsam_ps(src, dest): +def copy_vsam_ps(src, dest, tmphlq=None): """Copy a VSAM(KSDS) data set to a PS data set vise versa. Parameters @@ -348,6 +348,8 @@ def copy_vsam_ps(src, dest): The VSAM(KSDS) or PS data set to be copied. dest : str The PS or VSAM(KSDS) data set. + tmphlq : str + High Level Qualifier for temporary datasets. Returns ------- @@ -367,14 +369,18 @@ def copy_vsam_ps(src, dest): src = _validate_data_set_name(src) dest = _validate_data_set_name(dest) repro_cmd = REPRO.format(src, dest) + cmd = "mvscmdauth --pgm=idcams --sysprint=stdout --sysin=stdin" + if tmphlq: + cmd = "{0} -Q={1}".format(cmd, tmphlq) + rc, out, err = module.run_command(cmd, data=repro_cmd, errors='replace') if rc: raise USSCmdExecError(cmd, rc, out, err) return rc, out, err -def copy_asa_uss2mvs(src, dest): +def copy_asa_uss2mvs(src, dest, tmphlq=None): """Copy a file from USS to an ASA sequential data set or PDS/E member. Parameters @@ -383,6 +389,8 @@ def copy_asa_uss2mvs(src, dest): Path of the USS file. dest : str The MVS destination data set or member. + tmphlq : str + High Level Qualifier for temporary datasets. Returns ------- @@ -394,12 +402,12 @@ def copy_asa_uss2mvs(src, dest): The stderr after the copy command executed successfully. """ oget_cmd = "OGET '{0}' '{1}'".format(src, dest) - rc, out, err = ikjeft01(oget_cmd, authorized=True) + rc, out, err = ikjeft01(oget_cmd, authorized=True, tmphlq=tmphlq) return TSOCmdResponse(rc, out, err) -def copy_asa_mvs2uss(src, dest): +def copy_asa_mvs2uss(src, dest, tmphlq=None): """Copy an ASA sequential data set or member to USS. Parameters @@ -408,6 +416,8 @@ def copy_asa_mvs2uss(src, dest): The MVS data set to be copied. dest : str Destination path in USS. + tmphlq : str + High Level Qualifier for temporary datasets. Returns ------- @@ -422,12 +432,12 @@ def copy_asa_mvs2uss(src, dest): dest = _validate_path(dest) oput_cmd = "OPUT '{0}' '{1}'".format(src, dest) - rc, out, err = ikjeft01(oput_cmd, authorized=True) + rc, out, err = ikjeft01(oput_cmd, authorized=True, tmphlq=tmphlq) return TSOCmdResponse(rc, out, err) -def copy_asa_pds2uss(src, dest): +def copy_asa_pds2uss(src, dest, tmphlq=None): """Copy all members from an ASA PDS/E to USS. Parameters @@ -436,6 +446,8 @@ def copy_asa_pds2uss(src, dest): The MVS data set to be copied. dest : str Destination path in USS (must be a directory). + tmphlq : str + High Level Qualifier for temporary datasets. Returns ------- @@ -465,7 +477,7 @@ def copy_asa_pds2uss(src, dest): dest_path = path.join(dest, member) oput_cmd = "OPUT '{0}' '{1}'".format(src_member, dest_path) - rc, out, err = ikjeft01(oput_cmd, authorized=True) + rc, out, err = ikjeft01(oput_cmd, authorized=True, tmphlq=tmphlq) if rc != 0: return TSOCmdResponse(rc, out, err) diff --git a/plugins/module_utils/data_set.py b/plugins/module_utils/data_set.py index 7b81fe2d1..c1740b5a4 100644 --- a/plugins/module_utils/data_set.py +++ b/plugins/module_utils/data_set.py @@ -174,7 +174,7 @@ def ensure_present( arguments.pop("replace", None) present = False changed = False - if DataSet.data_set_cataloged(name): + if DataSet.data_set_cataloged(name, tmphlq=tmp_hlq): present = True if not present: @@ -185,7 +185,7 @@ def ensure_present( # data set exists on volume if "Error Code: 0x4704" in e.msg: present, changed = DataSet.attempt_catalog_if_necessary( - name, volumes + name, volumes, tmphlq=tmp_hlq ) if present and changed: raise_error = False @@ -200,26 +200,28 @@ def ensure_present( return True @staticmethod - def ensure_absent(name, volumes=None): + def ensure_absent(name, volumes=None, tmphlq=None): """Deletes provided data set if it exists. Arguments: name (str) -- The name of the data set to ensure is absent. volumes (list[str]) -- The volumes the data set may reside on. + tmphlq (str) -- High Level Qualifier for temporary datasets. Returns: changed (bool) -- Indicates if changes were made. """ - changed, present = DataSet.attempt_catalog_if_necessary_and_delete(name, volumes) + changed, present = DataSet.attempt_catalog_if_necessary_and_delete(name, volumes, tmphlq=tmphlq) return changed # ? should we do additional check to ensure member was actually created? @staticmethod - def ensure_member_present(name, replace=False): + def ensure_member_present(name, replace=False, tmphlq=None): """Creates data set member if it does not already exist. Arguments: name (str) -- The name of the data set to ensure is present. replace (bool) -- Used to determine behavior when data set already + tmphlq (str) -- High Level Qualifier for temporary datasets. exists. Returns: @@ -229,7 +231,7 @@ def ensure_member_present(name, replace=False): if not replace: return False DataSet.delete_member(name) - DataSet.create_member(name) + DataSet.create_member(name, tmphlq=tmphlq) return True @staticmethod @@ -242,21 +244,22 @@ def ensure_member_absent(name, force=False): return False @staticmethod - def ensure_cataloged(name, volumes): + def ensure_cataloged(name, volumes, tmphlq=None): """Ensure a data set is cataloged. Data set can initially be in cataloged or uncataloged state when this function is called. Arguments: name (str) -- The data set name to ensure is cataloged. volume (str) -- The volume on which the data set should exist. + tmphlq (str) -- High Level Qualifier for temporary datasets. Returns: bool -- If changes were made. """ - if DataSet.data_set_cataloged(name, None): + if DataSet.data_set_cataloged(name, None, tmphlq=tmphlq): return False try: - DataSet.catalog(name, volumes) + DataSet.catalog(name, volumes, tmphlq=tmphlq) except DatasetCatalogError: raise DatasetCatalogError( name, volumes, "-1", "Data set was not found. Unable to catalog." @@ -264,23 +267,24 @@ def ensure_cataloged(name, volumes): return True @staticmethod - def ensure_uncataloged(name): + def ensure_uncataloged(name, tmphlq=None): """Ensure a data set is uncataloged. Data set can initially be in cataloged or uncataloged state when this function is called. Arguments: name (str) -- The data set name to ensure is uncataloged. + tmphlq (str) -- High Level Qualifier for temporary datasets. Returns: bool -- If changes were made. """ - if DataSet.data_set_cataloged(name): - DataSet.uncatalog(name) + if DataSet.data_set_cataloged(name, tmphlq=tmphlq): + DataSet.uncatalog(name, tmphlq=tmphlq) return True return False @staticmethod - def allocate_model_data_set(ds_name, model, executable=False, asa_text=False, vol=None): + def allocate_model_data_set(ds_name, model, executable=False, asa_text=False, vol=None, tmphlq=None): """Allocates a data set based on the attributes of a 'model' data set. Useful when a data set needs to be created identical to another. Supported model(s) are Physical Sequential (PS), Partitioned Data Sets (PDS/PDSE), @@ -297,17 +301,18 @@ def allocate_model_data_set(ds_name, model, executable=False, asa_text=False, vo asa_text {bool} -- Whether the new data set should support ASA control characters (have record format FBA) vol {str} -- The volume where data set should be allocated + tmphlq {str} -- High Level Qualifier for temporary datasets. Raise: NonExistentSourceError: When the model data set does not exist. MVSCmdExecError: When the call to IKJEFT01 to allocate the data set fails. """ - if not DataSet.data_set_exists(model): + if not DataSet.data_set_exists(model, tmphlq=tmphlq): raise DatasetNotFoundError(model) ds_name = extract_dsname(ds_name) - model_type = DataSet.data_set_type(model) + model_type = DataSet.data_set_type(model, tmphlq=tmphlq) # The break lines are absolutely necessary, a JCL code line can't # be longer than 72 characters. The following JCL is compatible with @@ -339,12 +344,12 @@ def allocate_model_data_set(ds_name, model, executable=False, asa_text=False, vo RECFM(U) - DSNTYPE(LIBRARY)""".format(alloc_cmd) - rc, out, err = mvs_cmd.ikjeft01(alloc_cmd, authorized=True) + rc, out, err = mvs_cmd.ikjeft01(alloc_cmd, authorized=True, tmphlq=tmphlq) if rc != 0: raise MVSCmdExecError(rc, out, err) @staticmethod - def allocate_gds_model_data_set(ds_name, model, executable=False, asa_text=False, vol=None): + def allocate_gds_model_data_set(ds_name, model, executable=False, asa_text=False, vol=None, tmphlq=None): """ Allocates a new current generation of a generation data group using a model data set to set its attributes. @@ -364,6 +369,8 @@ def allocate_gds_model_data_set(ds_name, model, executable=False, asa_text=False characters (have record format FBA). vol : str, optional The volume where the new data set should be allocated. + tmphlq : str + High Level Qualifier for temporary datasets. Returns ------- @@ -403,7 +410,7 @@ def allocate_gds_model_data_set(ds_name, model, executable=False, asa_text=False space_type='' ) - success = data_set_object.ensure_present() + success = data_set_object.ensure_present(tmp_hlq=tmphlq) if not success: raise DatasetCreateError( data_set=ds_name, @@ -411,11 +418,12 @@ def allocate_gds_model_data_set(ds_name, model, executable=False, asa_text=False ) @staticmethod - def data_set_cataloged(name, volumes=None): + def data_set_cataloged(name, volumes=None, tmphlq=None): """Determine if a data set is in catalog. Arguments: name (str) -- The data set name to check if cataloged. + tmphlq (str) -- High Level Qualifier for temporary datasets. Returns: bool -- If data is is cataloged. @@ -438,8 +446,15 @@ def data_set_cataloged(name, volumes=None): module = AnsibleModuleHelper(argument_spec={}) stdin = " LISTCAT ENTRIES('{0}')".format(name) + + cmd = "mvscmdauth --pgm=idcams --sysprint=* --sysin=stdin" + if tmphlq: + cmd = "{0} -Q={1}".format(cmd, tmphlq) + rc, stdout, stderr = module.run_command( - "mvscmdauth --pgm=idcams --sysprint=* --sysin=stdin", data=stdin, errors='replace' + cmd, + data=stdin, + errors='replace' ) # The above 'listcat entries' command to idcams returns: @@ -450,7 +465,7 @@ def data_set_cataloged(name, volumes=None): raise MVSCmdExecError(rc, stdout, stderr) if volumes: - cataloged_volume_list = DataSet.data_set_cataloged_volume_list(name) or [] + cataloged_volume_list = DataSet.data_set_cataloged_volume_list(name, tmphlq=tmphlq) or [] if bool(set(volumes) & set(cataloged_volume_list)): return True else: @@ -460,10 +475,11 @@ def data_set_cataloged(name, volumes=None): return False @staticmethod - def data_set_cataloged_volume_list(name): + def data_set_cataloged_volume_list(name, tmphlq=None): """Get the volume list for a cataloged dataset name. Arguments: name (str) -- The data set name to check if cataloged. + tmphlq (str) -- High Level Qualifier for temporary datasets. Returns: list{str} -- A list of volumes where the dataset is cataloged. Raise: @@ -472,8 +488,15 @@ def data_set_cataloged_volume_list(name): name = name.upper() module = AnsibleModuleHelper(argument_spec={}) stdin = " LISTCAT ENTRIES('{0}') ALL".format(name) + + cmd = "mvscmdauth --pgm=idcams --sysprint=* --sysin=stdin" + if tmphlq: + cmd = "{0} -Q={1}".format(cmd, tmphlq) + rc, stdout, stderr = module.run_command( - "mvscmdauth --pgm=idcams --sysprint=* --sysin=stdin", data=stdin, errors='replace' + cmd, + data=stdin, + errors='replace' ) # The above 'listcat entries all' command to idcams returns: # rc=0 if data set found in catalog @@ -494,7 +517,7 @@ def data_set_cataloged_volume_list(name): return volume_list @staticmethod - def data_set_exists(name, volume=None): + def data_set_exists(name, volume=None, tmphlq=None): """Determine if a data set exists. This will check the catalog in addition to the volume table of contents. @@ -502,14 +525,15 @@ def data_set_exists(name, volume=None): Arguments: name (str) -- The data set name to check if exists. volume (str) -- The volume the data set may reside on. + tmphlq (str) -- High Level Qualifier for temporary datasets. Returns: bool -- If data is found. """ - if DataSet.data_set_cataloged(name): + if DataSet.data_set_cataloged(name, tmphlq=tmphlq): return True elif volume is not None: - return DataSet._is_in_vtoc(name, volume) + return DataSet._is_in_vtoc(name, volume, tmphlq=tmphlq) return False @staticmethod @@ -594,11 +618,12 @@ def files_in_data_set_members(src, dest): return False @staticmethod - def data_set_volume(name): + def data_set_volume(name, tmphlq=None): """Checks the volume where a data set is located. Arguments: name (str) -- The name of the data set. + tmphlq (str) -- High Level Qualifier for temporary datasets. Returns: str -- Name of the volume where the data set is. @@ -614,7 +639,7 @@ def data_set_volume(name): return data_set_information[0].volume # If listing failed to return a data set, then it's probably a VSAM. - output = DataSet._get_listcat_data(name) + output = DataSet._get_listcat_data(name, tmphlq=tmphlq) if re.findall(r"NOT FOUND|NOT LISTED", output): raise DatasetNotFoundError(name) @@ -627,12 +652,13 @@ def data_set_volume(name): raise DatasetVolumeError(name) @staticmethod - def data_set_type(name, volume=None): + def data_set_type(name, volume=None, tmphlq=None): """Checks the type of a data set, data sets must be cataloged. Arguments: name (str) -- The name of the data set. volume (str) -- The volume the data set may reside on. + tmphlq (str) -- High Level Qualifier for temporary datasets. Returns: str -- The type of the data set (one of "PS", "PO", "DA", "GDG", @@ -640,7 +666,7 @@ def data_set_type(name, volume=None): None -- If the data set does not exist or ZOAU is not able to determine the type. """ - if not DataSet.data_set_exists(name, volume): + if not DataSet.data_set_exists(name, volume, tmphlq=tmphlq): return None data_sets_found = datasets.list_datasets(name) @@ -657,7 +683,7 @@ def data_set_type(name, volume=None): # Next, trying to get the DATA information of a VSAM through # LISTCAT. - output = DataSet._get_listcat_data(name) + output = DataSet._get_listcat_data(name, tmphlq=tmphlq) # Filtering all the DATA information to only get the ATTRIBUTES block. data_set_attributes = re.findall( @@ -677,11 +703,12 @@ def data_set_type(name, volume=None): return None @staticmethod - def _get_listcat_data(name): + def _get_listcat_data(name, tmphlq=None): """Runs IDCAMS to get the DATA information associated with a data set. Arguments: name (str) -- Name of the data set. + tmphlq (str) -- High Level Qualifier for temporary datasets. Returns: str -- Standard output from IDCAMS. @@ -689,8 +716,13 @@ def _get_listcat_data(name): name = name.upper() module = AnsibleModuleHelper(argument_spec={}) stdin = " LISTCAT ENT('{0}') DATA ALL".format(name) + + cmd = "mvscmdauth --pgm=idcams --sysprint=* --sysin=stdin" + if tmphlq: + cmd = "{0} -Q={1}".format(cmd, tmphlq) + rc, stdout, stderr = module.run_command( - "mvscmdauth --pgm=idcams --sysprint=* --sysin=stdin", data=stdin, errors='replace' + cmd, data=stdin, errors='replace' ) if rc != 0: @@ -699,20 +731,21 @@ def _get_listcat_data(name): return stdout @staticmethod - def is_empty(name, volume=None): + def is_empty(name, volume=None, tmphlq=None): """Determines whether a data set is empty. Arguments: name (str) -- The name of the data set. volume (str) -- The volume where the data set resides. + tmphlq (str) -- High Level Qualifier for temporary datasets. Returns: bool -- Whether the data set is empty or not. """ - if not DataSet.data_set_exists(name, volume): + if not DataSet.data_set_exists(name, volume, tmphlq=tmphlq): raise DatasetNotFoundError(name) - ds_type = DataSet.data_set_type(name, volume) + ds_type = DataSet.data_set_type(name, volume, tmphlq=tmphlq) if ds_type in DataSet.MVS_PARTITIONED: return DataSet._pds_empty(name) @@ -721,7 +754,7 @@ def is_empty(name, volume=None): rc, stdout, stderr = module.run_command("head \"//'{0}'\"".format(name), errors='replace') return rc == 0 and len(stdout.strip()) == 0 elif ds_type in DataSet.MVS_VSAM: - return DataSet._vsam_empty(name) + return DataSet._vsam_empty(name, tmphlq=tmphlq) @staticmethod def _pds_empty(name): @@ -741,11 +774,12 @@ def _pds_empty(name): return rc == 2 @staticmethod - def _vsam_empty(name): + def _vsam_empty(name, tmphlq=None): """Determines if a VSAM data set is empty. Arguments: name (str) -- The name of the VSAM data set. + tmphlq (str) -- High Level Qualifier for temporary datasets. Returns: bool - If VSAM data set is empty. @@ -756,23 +790,30 @@ def _vsam_empty(name): empty_cmd = """ PRINT - INFILE(MYDSET) - COUNT(1)""" + + cmd = "mvscmdauth --pgm=idcams --sysprint=* --sysin=stdin --mydset={0}".format( + name + ) + if tmphlq: + cmd = "{0} -Q={1}".format(cmd, tmphlq) + rc, out, err = module.run_command( - "mvscmdauth --pgm=idcams --sysprint=* --sysin=stdin --mydset={0}".format( - name), - data=empty_cmd, errors='replace' + cmd, data=empty_cmd, errors='replace' ) + if rc == 4 or "VSAM OPEN RETURN CODE IS 160" in out: return True elif rc != 0: return False @staticmethod - def attempt_catalog_if_necessary(name, volumes): + def attempt_catalog_if_necessary(name, volumes, tmphlq=None): """Attempts to catalog a data set if not already cataloged. Arguments: name (str) -- The name of the data set. volumes (list[str]) -- The volumes the data set may reside on. + tmphlq (str) -- High Level Qualifier for temporary datasets. Returns: bool -- Whether the data set is now present. @@ -780,12 +821,12 @@ def attempt_catalog_if_necessary(name, volumes): """ changed = False present = False - if DataSet.data_set_cataloged(name): + if DataSet.data_set_cataloged(name, tmphlq=tmphlq): present = True elif volumes is not None: errors = False try: - DataSet.catalog(name, volumes) + DataSet.catalog(name, volumes, tmphlq=tmphlq) except DatasetCatalogError: errors = True if not errors: @@ -794,7 +835,7 @@ def attempt_catalog_if_necessary(name, volumes): return present, changed @staticmethod - def attempt_catalog_if_necessary_and_delete(name, volumes): + def attempt_catalog_if_necessary_and_delete(name, volumes, tmphlq=None): """Attempts to catalog a data set if not already cataloged, then deletes the data set. This is helpful when a data set currently cataloged is not the data @@ -805,6 +846,7 @@ def attempt_catalog_if_necessary_and_delete(name, volumes): Arguments: name (str) -- The name of the data set. volumes (list[str]) -- The volumes the data set may reside on. + tmphlq (str) -- High Level Qualifier for temporary datasets. Returns: changed (bool) -- Whether changes were made. @@ -816,12 +858,12 @@ def attempt_catalog_if_necessary_and_delete(name, volumes): if volumes: # Check if the data set is cataloged - present = DataSet.data_set_cataloged(name) + present = DataSet.data_set_cataloged(name, tmphlq=tmphlq) if present: # Data set is cataloged, now check it its cataloged on the provided volumes # If it is, we just delete because the DS is the right one wanting deletion. - present = DataSet.data_set_cataloged(name, volumes) + present = DataSet.data_set_cataloged(name, volumes, tmphlq=tmphlq) if present: DataSet.delete(name) @@ -836,41 +878,41 @@ def attempt_catalog_if_necessary_and_delete(name, volumes): # We need to identify the volumes where the current cataloged data set # is located for use later when we recatalog. Code is strategically # placed before the uncatalog. - cataloged_volume_list_original = DataSet.data_set_cataloged_volume_list(name) + cataloged_volume_list_original = DataSet.data_set_cataloged_volume_list(name, tmphlq=tmphlq) try: - DataSet.uncatalog(name) + DataSet.uncatalog(name, tmphlq=tmphlq) except DatasetUncatalogError: return changed, present # Catalog the data set for the provided volumes try: - DataSet.catalog(name, volumes) + DataSet.catalog(name, volumes, tmphlq=tmphlq) except DatasetCatalogError: try: # A failure, so recatalog the original data set on the original volumes - DataSet.catalog(name, cataloged_volume_list_original) + DataSet.catalog(name, cataloged_volume_list_original, tmphlq=tmphlq) except DatasetCatalogError: pass return changed, present # Check the recatalog, ensure it cataloged before we try to remove - present = DataSet.data_set_cataloged(name, volumes) + present = DataSet.data_set_cataloged(name, volumes, tmphlq=tmphlq) if present: try: DataSet.delete(name) except DatasetDeleteError: try: - DataSet.uncatalog(name) + DataSet.uncatalog(name, tmphlq=tmphlq) except DatasetUncatalogError: try: - DataSet.catalog(name, cataloged_volume_list_original) + DataSet.catalog(name, cataloged_volume_list_original, tmphlq=tmphlq) except DatasetCatalogError: pass return changed, present try: - DataSet.catalog(name, cataloged_volume_list_original) + DataSet.catalog(name, cataloged_volume_list_original, tmphlq=tmphlq) changed = True present = False except DatasetCatalogError: @@ -879,18 +921,18 @@ def attempt_catalog_if_necessary_and_delete(name, volumes): return changed, present else: try: - DataSet.catalog(name, volumes) + DataSet.catalog(name, volumes, tmphlq=tmphlq) except DatasetCatalogError: return changed, present - present = DataSet.data_set_cataloged(name, volumes) + present = DataSet.data_set_cataloged(name, volumes, tmphlq=tmphlq) if present: DataSet.delete(name) changed = True present = False else: - present = DataSet.data_set_cataloged(name, None) + present = DataSet.data_set_cataloged(name, None, tmphlq=tmphlq) if present: try: DataSet.delete(name) @@ -902,17 +944,18 @@ def attempt_catalog_if_necessary_and_delete(name, volumes): return changed, present @staticmethod - def _is_in_vtoc(name, volume): + def _is_in_vtoc(name, volume, tmphlq=None): """Determines if data set is in a volume's table of contents. Arguments: name (str) -- The name of the data set to search for. volume (str) -- The volume to search the table of contents of. + tmphlq (str) -- High Level Qualifier for temporary datasets. Returns: bool -- If data set was found in table of contents for volume. """ - data_sets = vtoc.get_volume_entry(volume) + data_sets = vtoc.get_volume_entry(volume, tmphlq=tmphlq) data_set = vtoc.find_data_set_in_volume_output(name, data_sets) if data_set is not None: return True @@ -1135,7 +1178,7 @@ def create( except exceptions.DatasetVerificationError: # verification of a data set spanning multiple volumes is currently broken in ZOAU v.1.3.0 if volumes and len(volumes) > 1: - if DataSet.data_set_cataloged(name, volumes): + if DataSet.data_set_cataloged(name, volumes, tmphlq=tmp_hlq): return 0 raise DatasetCreateError( raw_name if raw_name else name, @@ -1161,12 +1204,13 @@ def delete(name): @staticmethod # TODO: verify that this method works for all lengths etc - def create_member(name): + def create_member(name, tmphlq=None): """Create a data set member if the partitioned data set exists. Also used to overwrite a data set member if empty replacement is desired. Arguments: name (str) -- The data set name, including member name, to create. + tmphlq (str) -- High Level Qualifier for temporary datasets. Raises: DatasetNotFoundError: If data set cannot be found. @@ -1174,7 +1218,7 @@ def create_member(name): """ module = AnsibleModuleHelper(argument_spec={}) base_dsname = name.split("(")[0] - if not base_dsname or not DataSet.data_set_cataloged(base_dsname): + if not base_dsname or not DataSet.data_set_cataloged(base_dsname, tmphlq=tmphlq): raise DatasetNotFoundError(name) tmp_file = tempfile.NamedTemporaryFile(delete=True) rc, stdout, stderr = module.run_command( @@ -1199,26 +1243,28 @@ def delete_member(name, force=False): raise DatasetMemberDeleteError(name, rc) @staticmethod - def catalog(name, volumes): + def catalog(name, volumes, tmphlq=None): """Catalog an uncataloged data set Arguments: name (str) -- The name of the data set to catalog. volumes (list[str]) -- The volume(s) the data set resides on. + tmphlq (str) -- High Level Qualifier for temporary datasets. """ - if DataSet.is_vsam(name, volumes): - DataSet._catalog_vsam(name, volumes) + if DataSet.is_vsam(name, volumes, tmphlq=tmphlq): + DataSet._catalog_vsam(name, volumes, tmphlq=tmphlq) else: - DataSet._catalog_non_vsam(name, volumes) + DataSet._catalog_non_vsam(name, volumes, tmphlq=tmphlq) @staticmethod # TODO: extend for multi volume data sets - def _catalog_non_vsam(name, volumes): + def _catalog_non_vsam(name, volumes, tmphlq=None): """Catalog a non-VSAM data set. Arguments: name (str) -- The data set to catalog. volumes (str) -- The volume(s) the data set resides on. + tmphlq (str) -- High Level Qualifier for temporary datasets. Raises: DatasetCatalogError: When attempt at catalog fails. @@ -1227,21 +1273,27 @@ def _catalog_non_vsam(name, volumes): iehprogm_input = DataSet._build_non_vsam_catalog_command( name.upper(), volumes) + cmd = "mvscmdauth --pgm=iehprogm --sysprint=* --sysin=stdin" + if tmphlq: + cmd = "{0} -Q={1}".format(cmd, tmphlq) + rc, stdout, stderr = module.run_command( - "mvscmdauth --pgm=iehprogm --sysprint=* --sysin=stdin", data=iehprogm_input, errors='replace' + cmd, data=iehprogm_input, errors='replace' ) + if rc != 0 or "NORMAL END OF TASK RETURNED" not in stdout: raise DatasetCatalogError(name, volumes, rc) return @staticmethod # TODO: extend for multi volume data sets - def _catalog_vsam(name, volumes): + def _catalog_vsam(name, volumes, tmphlq=None): """Catalog a VSAM data set. Arguments: name (str) -- The data set to catalog. volumes (str) -- The volume(s) the data set resides on. + tmphlq (str) -- High Level Qualifier for temporary datasets. Raises: DatasetCatalogError: When attempt at catalog fails. @@ -1255,8 +1307,8 @@ def _catalog_vsam(name, volumes): # In order to catalog a uncataloged data set, we can't rely on LISTCAT # so using the VTOC entries we can make some assumptions of if the data set # is indexed, linear etc. - ds_vtoc_data_entry = vtoc.get_data_set_entry(name + ".DATA", volumes[0]) - ds_vtoc_index_entry = vtoc.get_data_set_entry(name + ".INDEX", volumes[0]) + ds_vtoc_data_entry = vtoc.get_data_set_entry(name + ".DATA", volumes[0], tmphlq=tmphlq) + ds_vtoc_index_entry = vtoc.get_data_set_entry(name + ".INDEX", volumes[0], tmphlq=tmphlq) if ds_vtoc_data_entry and ds_vtoc_index_entry: data_set_type_vsam = "INDEXED" @@ -1276,8 +1328,10 @@ def _catalog_vsam(name, volumes): data_set_type_vsam, ) - command_rc, stdout, stderr = module.run_command( - "mvscmdauth --pgm=idcams --sysprint=* --sysin=stdin", data=command, errors='replace') + cmd = "mvscmdauth --pgm=idcams --sysprint=* --sysin=stdin" + if tmphlq: + cmd = "{0} -Q={1}".format(cmd, tmphlq) + command_rc, stdout, stderr = module.run_command(cmd, data=command, errors='replace') if command_rc == 0: success = True @@ -1291,8 +1345,11 @@ def _catalog_vsam(name, volumes): "LINEAR", ) - command_rc, stdout, stderr = module.run_command( - "mvscmdauth --pgm=idcams --sysprint=* --sysin=stdin", data=command, errors='replace') + cmd = "mvscmdauth --pgm=idcams --sysprint=* --sysin=stdin" + if tmphlq: + cmd = "{0} -Q={1}".format(cmd, tmphlq) + + command_rc, stdout, stderr = module.run_command(cmd, data=command, errors='replace') if command_rc == 0: success = True @@ -1307,23 +1364,25 @@ def _catalog_vsam(name, volumes): return @staticmethod - def uncatalog(name): + def uncatalog(name, tmphlq=None): """Uncatalog a data set. Arguments: name (str) -- The name of the data set to uncatalog. + tmphlq (str) -- High Level Qualifier for temporary datasets. """ - if DataSet.is_vsam(name): - DataSet._uncatalog_vsam(name) + if DataSet.is_vsam(name, tmphlq=tmphlq): + DataSet._uncatalog_vsam(name, tmphlq=tmphlq) else: - DataSet._uncatalog_non_vsam(name) + DataSet._uncatalog_non_vsam(name, tmphlq=tmphlq) @staticmethod - def _uncatalog_non_vsam(name): + def _uncatalog_non_vsam(name, tmphlq=None): """Uncatalog a non-VSAM data set. Arguments: name (str) -- The name of the data set to uncatalog. + tmphlq (str) -- High Level Qualifier for temporary datasets. Raises: DatasetUncatalogError: When uncataloging fails. @@ -1334,10 +1393,13 @@ def _uncatalog_non_vsam(name): try: temp_name = DataSet.create_temp(name.split(".")[0]) DataSet.write(temp_name, iehprogm_input) - rc, stdout, stderr = module.run_command( - "mvscmdauth --pgm=iehprogm --sysprint=* --sysin={0}".format( - temp_name), errors='replace' - ) + + cmd = "mvscmdauth --pgm=iehprogm --sysprint=* --sysin={0}".format(temp_name) + if tmphlq: + cmd = "{0} -Q={1}".format(cmd, tmphlq) + + rc, stdout, stderr = module.run_command(cmd, errors='replace') + if rc != 0 or "NORMAL END OF TASK RETURNED" not in stdout: raise DatasetUncatalogError(name, rc) finally: @@ -1346,11 +1408,12 @@ def _uncatalog_non_vsam(name): return @staticmethod - def _uncatalog_vsam(name): + def _uncatalog_vsam(name, tmphlq=None): """Uncatalog a VSAM data set. Arguments: name (str) -- The name of the data set to uncatalog. + tmphlq (str) -- High Level Qualifier for temporary datasets. Raises: DatasetUncatalogError: When uncatalog fails. @@ -1358,15 +1421,17 @@ def _uncatalog_vsam(name): module = AnsibleModuleHelper(argument_spec={}) idcams_input = DataSet._VSAM_UNCATALOG_COMMAND.format(name) - rc, stdout, stderr = module.run_command( - "mvscmdauth --pgm=idcams --sysprint=* --sysin=stdin", data=idcams_input, errors='replace' - ) + cmd = "mvscmdauth --pgm=idcams --sysprint=* --sysin=stdin" + if tmphlq: + cmd = "{0} -Q={1}".format(cmd, tmphlq) + + rc, stdout, stderr = module.run_command(cmd, data=idcams_input, errors='replace') if rc != 0: raise DatasetUncatalogError(name, rc) @staticmethod - def is_vsam(name, volumes=None): + def is_vsam(name, volumes=None, tmphlq=None): """Determine a given data set is VSAM. If volume is not provided, then LISTCAT will be used to check data set info. If volume is provided, then VTOC will be used to check data set info. If not in VTOC @@ -1377,27 +1442,29 @@ def is_vsam(name, volumes=None): Keyword Arguments: volumes (list[str]) -- The name(s) of the volume(s). (default: (None)) + tmphlq (str) -- High Level Qualifier for temporary datasets. Returns: bool -- If the data set is VSAM. """ if not volumes: - return DataSet._is_vsam_from_listcat(name) + return DataSet._is_vsam_from_listcat(name, tmphlq=tmphlq) # ? will multivolume data set have vtoc info for each volume? - return DataSet._is_vsam_from_vtoc(name, volumes[0]) + return DataSet._is_vsam_from_vtoc(name, volumes[0], tmphlq=tmphlq) @staticmethod - def _is_vsam_from_vtoc(name, volume): + def _is_vsam_from_vtoc(name, volume, tmphlq=None): """Use VTOC to determine if a given data set is VSAM. Arguments: name (str) -- The name of the data set. volume (str) -- The volume name whose table of contents will be searched. + tmphlq (str) -- High Level Qualifier for temporary datasets. Returns: bool -- If the data set is VSAM. """ - data_sets = vtoc.get_volume_entry(volume) + data_sets = vtoc.get_volume_entry(volume, tmphlq=tmphlq) vsam_name = name + ".DATA" data_set = vtoc.find_data_set_in_volume_output(vsam_name, data_sets) if data_set is None: @@ -1407,20 +1474,24 @@ def _is_vsam_from_vtoc(name, volume): return False @staticmethod - def _is_vsam_from_listcat(name): + def _is_vsam_from_listcat(name, tmphlq=None): """Use LISTCAT command to determine if a given data set is VSAM. Arguments: name (str) -- The name of the data set. + tmphlq (str) -- High Level Qualifier for temporary datasets. Returns: bool -- If the data set is VSAM. """ module = AnsibleModuleHelper(argument_spec={}) stdin = " LISTCAT ENTRIES('{0}')".format(name.upper()) - rc, stdout, stderr = module.run_command( - "mvscmdauth --pgm=idcams --sysprint=* --sysin=stdin", data=stdin, errors='replace' - ) + + cmd = "mvscmdauth --pgm=idcams --sysprint=* --sysin=stdin" + if tmphlq: + cmd = "{0} -Q={1}".format(cmd, tmphlq) + + rc, stdout, stderr = module.run_command(cmd, data=stdin, errors='replace') if re.search(r"^0CLUSTER[ ]+-+[ ]+" + name + r"[ ]*$", stdout, re.MULTILINE): return True return False @@ -1702,19 +1773,21 @@ def _build_volume_string_iehprogm(volumes): class DataSetUtils(object): - def __init__(self, data_set): + def __init__(self, data_set, tmphlq=None): """A standard utility to gather information about a particular data set. Note that the input data set is assumed to be cataloged. Arguments: data_set {str} -- Name of the input data set + tmphlq {str} -- High Level Qualifier for temporary datasets. """ self.module = AnsibleModuleHelper(argument_spec={}) self.data_set = data_set.upper() self.path = data_set self.is_uss_path = "/" in data_set self.ds_info = dict() + self.tmphlq = tmphlq if not self.is_uss_path: self.ds_info.update(self._gather_data_set_info()) @@ -1845,7 +1918,9 @@ def _gather_data_set_info(self): result = dict() self.data_set = self.data_set.upper().replace("\\", '') listds_rc, listds_out, listds_err = mvs_cmd.ikjeft01( - " LISTDS '{0}'".format(self.data_set), authorized=True + " LISTDS '{0}'".format(self.data_set), + authorized=True, + tmphlq=self.tmphlq ) if listds_rc == 0: @@ -2020,7 +2095,7 @@ def create(self, tmp_hlq=None, replace=True, force=False): } formatted_args = DataSet._build_zoau_args(**arguments) changed = False - if DataSet.data_set_exists(self.name): + if DataSet.data_set_exists(self.name, tmphlq=tmp_hlq): DataSet.delete(self.name) changed = True zoau_data_set = datasets.create(**formatted_args) @@ -2072,15 +2147,20 @@ def ensure_present(self, tmp_hlq=None, replace=False, force=False): self.set_state("present") return rc - def ensure_absent(self): + def ensure_absent(self, tmp_hlq=None): """Removes the data set. + Parameters + ---------- + tmp_hlq : str + High level qualifier for temporary datasets. + Returns ------- int Indicates if changes were made. """ - rc = DataSet.ensure_absent(self.name, self.volumes) + rc = DataSet.ensure_absent(self.name, self.volumes, tmphlq=tmp_hlq) if rc == 0: self.set_state("absent") return rc @@ -2096,53 +2176,73 @@ def delete(self): DataSet.ensure_absent(self.name, self.volumes) self.set_state("absent") - def ensure_cataloged(self): + def ensure_cataloged(self, tmp_hlq=None): """ Ensures the data set is cataloged, if not catalogs it. + Parameters + ---------- + tmp_hlq : str + High level qualifier for temporary datasets. + Returns ------- int Indicates if changes were made. """ - rc = DataSet.ensure_cataloged(name=self.name, volumes=self.volumes) + rc = DataSet.ensure_cataloged(name=self.name, volumes=self.volumes, tmphlq=tmp_hlq) self.is_cataloged = True return rc - def catalog(self): + def catalog(self, tmp_hlq=None): """Catalog the data set in question. + Parameters + ---------- + tmp_hlq : str + High level qualifier for temporary datasets. + Returns ------- int Indicates if changes were made. """ - rc = DataSet.catalog(self.name, self.volumes) + rc = DataSet.catalog(self.name, self.volumes, tmphlq=tmp_hlq) self.is_cataloged = True return rc - def ensure_uncataloged(self): + def ensure_uncataloged(self, tmp_hlq=None): """ Ensures the data set is uncataloged, if not catalogs it. + Parameters + ---------- + tmp_hlq : str + High level qualifier for temporary datasets. + Returns ------- int Indicates if changes were made. """ - rc = DataSet.ensure_uncataloged(self.name) + rc = DataSet.ensure_uncataloged(self.name, tmphlq=tmp_hlq) self.is_cataloged = False return rc - def uncatalog(self): + def uncatalog(self, tmp_hlq=None): """Uncatalog the data set in question. + Parameters + ---------- + tmp_hlq : str + High level qualifier for temporary datasets. + Returns ------- int Indicates if changes were made. """ - rc = DataSet.uncatalog(self.name) + rc = DataSet.uncatalog(self.name, tmphlq=tmp_hlq) self.is_cataloged = False return rc @@ -2201,20 +2301,22 @@ def ensure_absent(self, force): rc = DataSet.ensure_member_absent(self.name, force) return rc - def ensure_present(self, replace=None): + def ensure_present(self, replace=None, tmphlq=None): """ Make sure that the member is created or fail creating it. Parameters ---------- replace : bool Used to determine behavior when member already exists. + tmphlq : str + High Level Qualifier for temporary datasets. Returns ------- int Indicates if changes were made. """ - rc = DataSet.ensure_member_present(self.name, replace) + rc = DataSet.ensure_member_present(self.name, replace, tmphlq=tmphlq) return rc diff --git a/plugins/module_utils/encode.py b/plugins/module_utils/encode.py index 606a2a189..9bdac056a 100644 --- a/plugins/module_utils/encode.py +++ b/plugins/module_utils/encode.py @@ -162,7 +162,7 @@ def _validate_encoding(self, encoding): parsed_args = parser.parse_args({"encoding": encoding}) return parsed_args.get("encoding") - def listdsi_data_set(self, ds): + def listdsi_data_set(self, ds, tmphlq=None): """Invoke IDCAMS LISTCAT command to get the record length and space used to estimate the space used by the VSAM data set. @@ -170,6 +170,8 @@ def listdsi_data_set(self, ds): ---------- ds : str The VSAM data set to be checked. + tmphlq : str + High Level Qualifier for temporary datasets. Returns ------- @@ -187,7 +189,11 @@ def listdsi_data_set(self, ds): reclen = 80 space_u = 1024 listcat_cmd = " LISTCAT ENT('{0}') ALL".format(ds) + cmd = "mvscmdauth --pgm=ikjeft01 --systsprt=stdout --systsin=stdin" + if tmphlq: + cmd = "{0} -Q={1}".format(cmd, tmphlq) + rc, out, err = self.module.run_command(cmd, data=listcat_cmd, errors='replace') if rc: raise EncodeError(err) @@ -460,7 +466,7 @@ def uss_convert_encoding_prev(self, src, dest, from_code, to_code): return convert_rc def mvs_convert_encoding( - self, src, dest, from_code, to_code, src_type=None, dest_type=None + self, src, dest, from_code, to_code, src_type=None, dest_type=None, tmphlq=None ): """Convert the encoding of the data from 1) USS to MVS(PS, PDS/E VSAM) @@ -484,6 +490,8 @@ def mvs_convert_encoding( The input MVS data set or type: PS, PDS, PDSE, VSAM(KSDS). dest_type : str The output MVS data set type. + tmphlq : str + High Level Qualifier for temporary datasets. Returns ------- @@ -507,11 +515,11 @@ def mvs_convert_encoding( temp_src = mkdtemp() rc, out, err = copy.copy_pds2uss(src, temp_src) if src_type == "KSDS": - reclen, space_u = self.listdsi_data_set(src.upper()) + reclen, space_u = self.listdsi_data_set(src.upper(), tmphlq=tmphlq) # RDW takes the first 4 bytes in the VB format, hence we need to add an extra buffer to the vsam max recl. reclen += 4 temp_ps = self.temp_data_set(reclen, space_u) - rc, out, err = copy.copy_vsam_ps(src.upper(), temp_ps) + rc, out, err = copy.copy_vsam_ps(src.upper(), temp_ps, tmphlq=tmphlq) temp_src_fo = NamedTemporaryFile() temp_src = temp_src_fo.name rc, out, err = copy.copy_ps2uss(temp_ps, temp_src) @@ -526,12 +534,12 @@ def mvs_convert_encoding( convert_rc = True else: if dest_type == "KSDS": - reclen, space_u = self.listdsi_data_set(dest.upper()) + reclen, space_u = self.listdsi_data_set(dest.upper(), tmphlq=tmphlq) # RDW takes the first 4 bytes or records in the VB format, hence we need to add an extra buffer to the vsam max recl. reclen += 4 temp_ps = self.temp_data_set(reclen, space_u) rc, out, err = copy.copy_uss2mvs(temp_dest, temp_ps, "PS") - rc, out, err = copy.copy_vsam_ps(temp_ps, dest.upper()) + rc, out, err = copy.copy_vsam_ps(temp_ps, dest.upper(), tmphlq=tmphlq) convert_rc = True elif dest_type == "PO": for (dir, subdir, files) in walk(temp_dest): diff --git a/plugins/module_utils/mvs_cmd.py b/plugins/module_utils/mvs_cmd.py index 560184477..cd27d83ec 100644 --- a/plugins/module_utils/mvs_cmd.py +++ b/plugins/module_utils/mvs_cmd.py @@ -174,7 +174,7 @@ def idcams(cmd, dds=None, authorized=False): return _run_mvs_command("IDCAMS", cmd.upper(), dds, authorized) -def ikjeft01(cmd, dds=None, authorized=False): +def ikjeft01(cmd, dds=None, authorized=False, tmphlq=None): """IKJEFT01 is the TSO/E program. You can use it whenever you wish to perform a TSO function within a batch job. It allows you to perform any TSO function. For a general list of all TSO functions, type TSO HELP. Additionally, @@ -191,13 +191,15 @@ def ikjeft01(cmd, dds=None, authorized=False): authorized : bool Whether the command should be run in authorized mode. + tmphlq : str + High Level Qualifier for temporary datasets. Returns ------- tuple(int, str, str) A tuple of return code, stdout and stderr. """ - return _run_mvs_command("IKJEFT01", cmd, dds, authorized) + return _run_mvs_command("IKJEFT01", cmd, dds, authorized, tmphlq=tmphlq) def iehlist(cmd, dds=None, authorized=False): @@ -262,7 +264,7 @@ def adrdssu(cmd, dds=None, authorized=False): return _run_mvs_command("ADRDSSU", cmd, dds, authorized) -def _run_mvs_command(pgm, cmd, dd=None, authorized=False): +def _run_mvs_command(pgm, cmd, dd=None, authorized=False, tmphlq=None): """Run a particular MVS command. Parameters @@ -279,6 +281,8 @@ def _run_mvs_command(pgm, cmd, dd=None, authorized=False): authorized : bool Indicates whether the MVS program should run as authorized. (Default {False}) + tmphlq : str + High Level Qualifier for temporary datasets. Returns ------- @@ -296,6 +300,8 @@ def _run_mvs_command(pgm, cmd, dd=None, authorized=False): mvscmd = "mvscmd" if authorized: mvscmd += "auth" + if tmphlq: + mvscmd += " -Q={0}".format(tmphlq) mvscmd += " --pgm={0} --{1}=* --{2}=stdin".format(pgm, sysprint, sysin) if dd: for k, v in dd.items(): diff --git a/plugins/module_utils/vtoc.py b/plugins/module_utils/vtoc.py index 3cae4fd92..fbe457388 100644 --- a/plugins/module_utils/vtoc.py +++ b/plugins/module_utils/vtoc.py @@ -20,7 +20,7 @@ ) -def get_volume_entry(volume): +def get_volume_entry(volume, tmphlq=None): """Retrieve VTOC information for all data sets with entries on the volume. @@ -28,6 +28,8 @@ def get_volume_entry(volume): ---------- volume : str The name of the volume. + tmphlq : str + High Level Qualifier for temporary datasets. Returns ------- @@ -43,7 +45,7 @@ def get_volume_entry(volume): stdin = " LISTVTOC FORMAT,VOL=3390={0}".format(volume.upper()) # dd = "SYS1.VVDS.V{0}".format(volume.upper()) dd = "{0},vol".format(volume.upper()) - stdout = _iehlist(dd, stdin) + stdout = _iehlist(dd, stdin, tmphlq=tmphlq) if stdout is None: return None data_sets = _process_output(stdout) @@ -52,7 +54,7 @@ def get_volume_entry(volume): return data_sets -def get_data_set_entry(data_set_name, volume): +def get_data_set_entry(data_set_name, volume, tmphlq=None): """Retrieve VTOC information for a single data set on a volume. @@ -62,6 +64,8 @@ def get_data_set_entry(data_set_name, volume): The name of the data set to retrieve information for. volume : str The name of the volume. + tmphlq : str + High Level Qualifier for temporary datasets. Returns ------- @@ -69,7 +73,7 @@ def get_data_set_entry(data_set_name, volume): The information for the data set found in VTOC. """ data_set = None - data_sets = get_volume_entry(volume) + data_sets = get_volume_entry(volume, tmphlq=tmphlq) for ds in data_sets: if ds.get("data_set_name") == data_set_name.upper(): data_set = ds @@ -102,7 +106,7 @@ def find_data_set_in_volume_output(data_set_name, data_sets): return None -def _iehlist(dd, stdin): +def _iehlist(dd, stdin, tmphlq=None): """Calls IEHLIST program. Parameters @@ -111,6 +115,8 @@ def _iehlist(dd, stdin): Volume information to pass as DD statement. stdin : str Input to stdin. + tmphlq : str + High Level Qualifier for temporary datasets. Returns ------- @@ -119,9 +125,15 @@ def _iehlist(dd, stdin): """ module = AnsibleModuleHelper(argument_spec={}) response = None + + cmd = "mvscmd --pgm=iehlist --sysprint=* --dd={0} --sysin=stdin ".format(dd) + if tmphlq: + cmd = "{0} -Q={1}".format(cmd, tmphlq) + rc, stdout, stderr = module.run_command( - "mvscmd --pgm=iehlist --sysprint=* --dd={0} --sysin=stdin ".format(dd), - data=stdin, errors='replace' + cmd, + data=stdin, + errors='replace' ) if rc == 0: response = stdout diff --git a/plugins/modules/zos_apf.py b/plugins/modules/zos_apf.py index ceeea04de..19b81e0d3 100644 --- a/plugins/modules/zos_apf.py +++ b/plugins/modules/zos_apf.py @@ -343,7 +343,7 @@ def backupOper(module, src, backup, tmphlq=None): """ file_type = None if data_set.is_data_set(src): - file_type = data_set.DataSet.data_set_type(src) + file_type = data_set.DataSet.data_set_type(src, tmphlq=tmphlq) else: if os.path.exists(src): file_type = 'USS' diff --git a/plugins/modules/zos_archive.py b/plugins/modules/zos_archive.py index 52fdd9585..026c56188 100644 --- a/plugins/modules/zos_archive.py +++ b/plugins/modules/zos_archive.py @@ -1018,6 +1018,7 @@ def __init__(self, module): High level qualifier for temporary datasets. """ super(MVSArchive, self).__init__(module) + self.tmphlq = module.params.get("tmp_hlq") self.original_checksums = self.dest_checksums() self.use_adrdssu = module.params.get("format").get("format_options").get("use_adrdssu") self.expanded_sources = self.expand_mvs_paths(self.sources) @@ -1026,7 +1027,6 @@ def __init__(self, module): self.tmp_data_sets = list() self.dest_data_set = module.params.get("dest_data_set") self.dest_data_set = dict() if self.dest_data_set is None else self.dest_data_set - self.tmphlq = module.params.get("tmp_hlq") def open(self): pass @@ -1038,7 +1038,7 @@ def find_targets(self): """Finds target datasets in host. """ for path in self.sources: - if data_set.DataSet.data_set_exists(path): + if data_set.DataSet.data_set_exists(path, tmphlq=self.tmphlq): self.targets.append(path) else: self.not_found.append(path) @@ -1148,7 +1148,7 @@ def create_dest_ds(self, name): Name of the newly created data set. """ record_length = XMIT_RECORD_LENGTH if self.format == "xmit" else AMATERSE_RECORD_LENGTH - data_set.DataSet.ensure_present(name=name, replace=True, type='seq', record_format='fb', record_length=record_length) + data_set.DataSet.ensure_present(name=name, replace=True, type='seq', record_format='fb', record_length=record_length, tmphlq=self.tmphlq) # changed = data_set.DataSet.ensure_present(name=name, replace=True, type='seq', record_format='fb', record_length=record_length) # cmd = "dtouch -rfb -tseq -l{0} {1}".format(record_length, name) # rc, out, err = self.module.run_command(cmd) @@ -1266,7 +1266,7 @@ def dest_exists(self): bool If destination path exists. """ - return data_set.DataSet.data_set_exists(self.dest) + return data_set.DataSet.data_set_exists(self.dest, tmphlq=self.tmphlq) def remove_targets(self): """Removes the archived targets and changes the state accordingly. diff --git a/plugins/modules/zos_blockinfile.py b/plugins/modules/zos_blockinfile.py index dd388bc8e..ef4abd68f 100644 --- a/plugins/modules/zos_blockinfile.py +++ b/plugins/modules/zos_blockinfile.py @@ -775,7 +775,7 @@ def main(): if data_set.DataSet.is_gds_relative_name(src): module.fail_json(msg="{0} does not exist".format(src)) - ds_utils = data_set.DataSetUtils(src) + ds_utils = data_set.DataSetUtils(src, tmphlq=tmphlq) if not ds_utils.exists(): message = "{0} does NOT exist".format(str(src)) module.fail_json(msg=message) diff --git a/plugins/modules/zos_copy.py b/plugins/modules/zos_copy.py index 3c61e40c7..dd2e724cc 100644 --- a/plugins/modules/zos_copy.py +++ b/plugins/modules/zos_copy.py @@ -967,6 +967,7 @@ def __init__( asa_text=False, backup_name=None, force_lock=False, + tmphlq=None ): """Utility class to handle copying data between two targets. @@ -995,6 +996,8 @@ def __init__( Whether the dest data set should be copied into using disp=shr when is opened by another process. + tmphlq : str + High Level Qualifier for temporary datasets. Attributes ---------- @@ -1018,6 +1021,8 @@ def __init__( Whether the dest data set should be copied into using disp=shr when is opened by another process. + tmphlq : str + High Level Qualifier for temporary datasets. """ self.module = module self.is_binary = is_binary @@ -1026,6 +1031,7 @@ def __init__( self.aliases = aliases self.backup_name = backup_name self.force_lock = force_lock + self.tmphlq = tmphlq def run_command(self, cmd, **kwargs): """Wrapper for AnsibleModule.run_command. @@ -1077,7 +1083,7 @@ def copy_to_seq( copy_args["options"] = "" if src_type == 'USS' and self.asa_text: - response = copy.copy_asa_uss2mvs(new_src, dest) + response = copy.copy_asa_uss2mvs(new_src, dest, tmphlq=self.tmphlq) if response.rc != 0: raise CopyOperationError( @@ -1487,6 +1493,7 @@ def __init__( aliases=False, common_file_args=None, backup_name=None, + tmphlq=None ): """Utility class to handle copying files or data sets to USS target. @@ -1505,6 +1512,8 @@ def __init__( Whether the file to be copied contains binary data. backup_name : str The USS path or data set name of destination backup. + tmphlq : str + High Level Qualifier for temporary datasets. Attributes ---------- @@ -1518,7 +1527,8 @@ def __init__( executable=executable, asa_text=asa_text, aliases=aliases, - backup_name=backup_name + backup_name=backup_name, + tmphlq=tmphlq ) self.common_file_args = common_file_args @@ -1865,7 +1875,7 @@ def _mvs_copy_to_uss( try: if src_member or src_ds_type in data_set.DataSet.MVS_SEQ: if self.asa_text: - response = copy.copy_asa_mvs2uss(src, dest) + response = copy.copy_asa_mvs2uss(src, dest, tmphlq=self.tmphlq) rc = response.rc elif self.executable: try: @@ -1911,7 +1921,7 @@ def _mvs_copy_to_uss( stderr=copy_exception.response.stderr_response ) elif self.asa_text: - response = copy.copy_asa_pds2uss(src, dest) + response = copy.copy_asa_pds2uss(src, dest, tmphlq=self.tmphlq) if response.rc != 0: raise CopyOperationError( @@ -1943,6 +1953,7 @@ def __init__( asa_text=False, backup_name=None, force_lock=False, + tmphlq=None ): """ Utility class to handle copying to partitioned data sets or partitioned data set members. @@ -1960,6 +1971,8 @@ def __init__( binary data. backup_name : str The USS path or data set name of destination backup. + tmphlq : str + High Level Qualifier for temporary datasets. """ super().__init__( module, @@ -1968,7 +1981,8 @@ def __init__( aliases=aliases, asa_text=asa_text, backup_name=backup_name, - force_lock=force_lock + force_lock=force_lock, + tmphlq=tmphlq ) def copy_to_pdse( @@ -2140,7 +2154,7 @@ def copy_to_member( opts["options"] = "" if src_type == 'USS' and self.asa_text: - response = copy.copy_asa_uss2mvs(src, dest) + response = copy.copy_asa_uss2mvs(src, dest, tmphlq=self.tmphlq) rc, out, err = response.rc, response.stdout_response, response.stderr_response else: # While ASA files are just text files, we do a binary copy @@ -2334,7 +2348,8 @@ def create_seq_dataset_from_file( is_binary, asa_text, record_length=None, - volume=None + volume=None, + tmphlq=None ): """Creates a new sequential dataset with attributes suitable to copy the contents of a file into it. @@ -2353,6 +2368,8 @@ def create_seq_dataset_from_file( Whether the file has ASA control characters. volume : str, optional Volume where the data set should be. + tmphlq : str + High Level Qualifier for temporary datasets. """ src_size = os.stat(file).st_size # record_format = record_length = None @@ -2387,7 +2404,7 @@ def create_seq_dataset_from_file( volume=volume ) - data_set.DataSet.ensure_present(replace=force, **dest_params) + data_set.DataSet.ensure_present(replace=force, tmp_hlq=tmphlq, **dest_params) def backup_data(ds_name, ds_type, backup_name, tmphlq=None): @@ -2601,7 +2618,8 @@ def does_destination_allow_copy( dest_type, is_uss, force, - volume=None + volume=None, + tmphlq=None ): """Checks whether or not the module can copy into the destination specified. @@ -2626,6 +2644,8 @@ def does_destination_allow_copy( Whether or not the module can replace existing destinations. volume : str, optional Volume where the destination should be. + tmphlq : str + High Level Qualifier for temporary datasets. Returns ------- @@ -2644,7 +2664,7 @@ def does_destination_allow_copy( # If the destination is a sequential or VSAM data set and is empty, the module will try to use it, # otherwise, force needs to be True to continue and replace it. if (dest_type in data_set.DataSet.MVS_SEQ or dest_type in data_set.DataSet.MVS_VSAM) and dest_exists: - is_dest_empty = data_set.DataSet.is_empty(dest, volume) + is_dest_empty = data_set.DataSet.is_empty(dest, volume, tmphlq=tmphlq) if not (is_dest_empty or force): return False @@ -2832,7 +2852,8 @@ def allocate_destination_data_set( is_gds, is_active_gds, dest_data_set=None, - volume=None + volume=None, + tmphlq=None ): """ Allocates a new destination data set to copy into, erasing a preexistent one if @@ -2867,6 +2888,8 @@ def allocate_destination_data_set( of the new data set; they will take precedence over any other allocation logic. volume : str, optional Volume where the data set should be allocated into. + tmphlq : str + High Level Qualifier for temporary datasets. Returns ------- @@ -2920,20 +2943,20 @@ def allocate_destination_data_set( del dest_params["purge"] del dest_params["extended"] del dest_params["fifo"] - data_set.DataSet.ensure_present(replace=force, **dest_params) + data_set.DataSet.ensure_present(replace=force, tmp_hlq=tmphlq, **dest_params) elif dest_ds_type in data_set.DataSet.MVS_SEQ: volumes = [volume] if volume else None data_set.DataSet.ensure_absent(dest, volumes=volumes) if src_ds_type == "USS": # Taking the temp file when a local file was copied with sftp. - create_seq_dataset_from_file(src, dest, force, is_binary, asa_text, volume=volume) + create_seq_dataset_from_file(src, dest, force, is_binary, asa_text, volume=volume, tmphlq=tmphlq) elif src_ds_type in data_set.DataSet.MVS_SEQ: # Only applying the GDS special case when we don't have an absolute name. if is_gds and not is_active_gds: data_set.DataSet.allocate_gds_model_data_set(ds_name=dest, model=src_name, asa_text=asa_text, vol=volume) else: - data_set.DataSet.allocate_model_data_set(ds_name=dest, model=src_name, asa_text=asa_text, vol=volume) + data_set.DataSet.allocate_model_data_set(ds_name=dest, model=src_name, asa_text=asa_text, vol=volume, tmphlq=tmphlq) else: temp_dump = None try: @@ -2949,7 +2972,8 @@ def allocate_destination_data_set( is_binary, asa_text, record_length=record_length, - volume=volume + volume=volume, + tmphlq=tmphlq ) finally: if temp_dump: @@ -2961,7 +2985,7 @@ def allocate_destination_data_set( if is_gds and not is_active_gds: data_set.DataSet.allocate_gds_model_data_set(ds_name=dest, model=src_name, asa_text=asa_text, vol=volume) else: - data_set.DataSet.allocate_model_data_set(ds_name=dest, model=src_name, executable=executable, asa_text=asa_text, vol=volume) + data_set.DataSet.allocate_model_data_set(ds_name=dest, model=src_name, executable=executable, asa_text=asa_text, vol=volume, tmphlq=tmphlq) elif src_ds_type in data_set.DataSet.MVS_SEQ: src_attributes = datasets.list_datasets(src_name)[0] # The size returned by listing is in bytes. @@ -2978,7 +3002,7 @@ def allocate_destination_data_set( type="PDSE", volume=volume ) - data_set.DataSet.ensure_present(replace=force, **dest_params) + data_set.DataSet.ensure_present(replace=force, tmp_hlq=tmphlq, **dest_params) elif src_ds_type == "USS": if os.path.isfile(src): # This is almost the same as allocating a sequential dataset. @@ -3036,13 +3060,13 @@ def allocate_destination_data_set( volume=volume ) - data_set.DataSet.ensure_present(replace=force, **dest_params) + data_set.DataSet.ensure_present(replace=force, tmp_hlq=tmphlq, **dest_params) elif dest_ds_type in data_set.DataSet.MVS_VSAM: # If dest_data_set is not available, always create the destination using the src VSAM # as a model. volumes = [volume] if volume else None data_set.DataSet.ensure_absent(dest, volumes=volumes) - data_set.DataSet.allocate_model_data_set(ds_name=dest, model=src_name, vol=volume) + data_set.DataSet.allocate_model_data_set(ds_name=dest, model=src_name, vol=volume, tmphlq=tmphlq) elif dest_ds_type == "GDG": src_view = gdgs.GenerationDataGroupView(src) @@ -3360,11 +3384,11 @@ def run_module(module, arg_def): copy_handler = CopyHandler(module, is_binary=is_binary) copy_handler._tag_file_encoding(converted_src, "UTF-8") else: - if (is_src_gds and data_set.DataSet.data_set_exists(src)) or ( - not is_src_gds and data_set.DataSet.data_set_exists(src_name)): + if (is_src_gds and data_set.DataSet.data_set_exists(src, tmphlq=tmphlq)) or ( + not is_src_gds and data_set.DataSet.data_set_exists(src_name, tmphlq=tmphlq)): if src_member and not data_set.DataSet.data_set_member_exists(src): raise NonExistentSourceError(src) - src_ds_type = data_set.DataSet.data_set_type(src_name) + src_ds_type = data_set.DataSet.data_set_type(src_name, tmphlq=tmphlq) if src_ds_type not in data_set.DataSet.MVS_VSAM and src_ds_type != "GDG": src_attributes = datasets.list_datasets(src_name)[0] @@ -3403,8 +3427,8 @@ def run_module(module, arg_def): if dest_exists and not os.access(dest, os.W_OK): module.fail_json(msg="Destination {0} is not writable".format(raw_dest)) else: - dest_exists = data_set.DataSet.data_set_exists(dest_name, volume) - dest_ds_type = data_set.DataSet.data_set_type(dest_name, volume) + dest_exists = data_set.DataSet.data_set_exists(dest_name, volume, tmphlq=tmphlq) + dest_ds_type = data_set.DataSet.data_set_type(dest_name, volume, tmphlq=tmphlq) # When dealing with a new generation, we'll override its type to None # so it will be the same type as the source (or whatever dest_data_set has) @@ -3597,7 +3621,8 @@ def run_module(module, arg_def): dest_ds_type, is_uss, force, - volume + volume, + tmphlq ): module.fail_json( msg="{0} already exists on the system, unable to overwrite unless force=True is specified.".format(raw_dest), @@ -3627,7 +3652,8 @@ def run_module(module, arg_def): is_dest_gds, is_dest_gds_active, dest_data_set=dest_data_set, - volume=volume + volume=volume, + tmphlq=tmphlq ) except Exception as err: if converted_src: @@ -3655,6 +3681,7 @@ def run_module(module, arg_def): asa_text=asa_text, backup_name=backup_name, force_lock=force_lock, + tmphlq=tmphlq ) try: @@ -3677,6 +3704,7 @@ def run_module(module, arg_def): aliases=aliases, common_file_args=dict(mode=mode, group=group, owner=owner), backup_name=backup_name, + tmphlq=tmphlq ) original_checksum = None @@ -3744,6 +3772,7 @@ def run_module(module, arg_def): aliases=aliases, backup_name=backup_name, force_lock=force_lock, + tmphlq=tmphlq ) pdse_copy_handler.copy_to_pdse( diff --git a/plugins/modules/zos_data_set.py b/plugins/modules/zos_data_set.py index 97bc107fd..79a85dac3 100644 --- a/plugins/modules/zos_data_set.py +++ b/plugins/modules/zos_data_set.py @@ -1404,7 +1404,9 @@ def perform_data_set_operations(data_set, state, replace, tmp_hlq, force): If changes were made. """ changed = False - if state == "present" and data_set.data_set_type in ["member", "gdg"]: + if state == "present" and data_set.data_set_type == "member": + changed = data_set.ensure_present(replace=replace, tmphlq=tmp_hlq) + elif state == "present" and data_set.data_set_type == "gdg": changed = data_set.ensure_present(replace=replace) elif state == "present": changed = data_set.ensure_present(replace=replace, tmp_hlq=tmp_hlq, force=force) @@ -1413,11 +1415,11 @@ def perform_data_set_operations(data_set, state, replace, tmp_hlq, force): elif state == "absent" and data_set.data_set_type == "gdg": changed = data_set.ensure_absent(force=force) elif state == "absent": - changed = data_set.ensure_absent() + changed = data_set.ensure_absent(tmp_hlq=tmp_hlq) elif state == "cataloged": - changed = data_set.ensure_cataloged() + changed = data_set.ensure_cataloged(tmp_hlq=tmp_hlq) elif state == "uncataloged": - changed = data_set.ensure_uncataloged() + changed = data_set.ensure_uncataloged(tmp_hlq=tmp_hlq) return changed diff --git a/plugins/modules/zos_encode.py b/plugins/modules/zos_encode.py index a17fcb7ed..fb6fdfac5 100644 --- a/plugins/modules/zos_encode.py +++ b/plugins/modules/zos_encode.py @@ -343,13 +343,15 @@ def check_pds_member(ds, mem): return check_rc -def check_mvs_dataset(ds): +def check_mvs_dataset(ds, tmphlq=None): """To call data_set utils to check if the MVS data set exists or not. Parameters ---------- ds : str Data set name. + tmphlq : str + High Level Qualifier for temporary datasets. Returns ------- @@ -365,26 +367,28 @@ def check_mvs_dataset(ds): """ check_rc = False ds_type = None - if not data_set.DataSet.data_set_exists(ds): + if not data_set.DataSet.data_set_exists(ds, tmphlq=tmphlq): raise EncodeError( "Data set {0} is not cataloged, please check data set provided in" "the src option.".format(ds) ) else: check_rc = True - ds_type = data_set.DataSetUtils(ds).ds_type() + ds_type = data_set.DataSetUtils(ds, tmphlq=tmphlq).ds_type() if not ds_type: raise EncodeError("Unable to determine data set type of {0}".format(ds)) return check_rc, ds_type -def check_file(file): +def check_file(file, tmphlq=None): """Check file is a USS file or an MVS data set. Parameters ---------- file : str File to check. + tmphlq : str + High Level Qualifier for temporary datasets. Returns ------- @@ -406,7 +410,7 @@ def check_file(file): if "(" in ds: dsn = ds[: ds.rfind("(", 1)] mem = "".join(re.findall(r"[(](.*?)[)]", ds)) - rc, ds_type = check_mvs_dataset(dsn) + rc, ds_type = check_mvs_dataset(dsn, tmphlq=tmphlq) if rc: if ds_type == "PO": is_mvs = check_pds_member(dsn, mem) @@ -416,7 +420,7 @@ def check_file(file): "Data set {0} is not a partitioned data set".format(dsn) ) else: - is_mvs, ds_type = check_mvs_dataset(ds) + is_mvs, ds_type = check_mvs_dataset(ds, tmphlq=tmphlq) return is_uss, is_mvs, ds_type @@ -540,9 +544,12 @@ def run_module(): dest_exists = False if not is_name_member: - dest_exists = data_set.DataSet.data_set_exists(src_data_set.name) + dest_exists = data_set.DataSet.data_set_exists(src_data_set.name, tmphlq=tmphlq) else: - dest_exists = data_set.DataSet.data_set_exists(data_set.extract_dsname(src_data_set.name)) + dest_exists = data_set.DataSet.data_set_exists( + data_set.extract_dsname(src_data_set.name), + tmphlq=tmphlq + ) if not dest_exists: raise EncodeError( @@ -558,7 +565,7 @@ def run_module(): )) ds_type_src = "PS" else: - ds_type_src = data_set.DataSet.data_set_type(src_data_set.name) + ds_type_src = data_set.DataSet.data_set_type(src_data_set.name, tmphlq=tmphlq) if not ds_type_src: raise EncodeError("Unable to determine data set type of {0}".format(src_data_set.raw_name)) @@ -585,9 +592,12 @@ def run_module(): is_name_member = data_set.is_member(dest_data_set.name) if not is_name_member: - dest_exists = data_set.DataSet.data_set_exists(dest_data_set.name) + dest_exists = data_set.DataSet.data_set_exists(dest_data_set.name, tmphlq=tmphlq) else: - dest_exists = data_set.DataSet.data_set_exists(data_set.extract_dsname(dest_data_set.name)) + dest_exists = data_set.DataSet.data_set_exists( + data_set.extract_dsname(dest_data_set.name), + tmphlq=tmphlq + ) if not dest_exists: raise EncodeError( @@ -598,7 +608,7 @@ def run_module(): if is_name_member: ds_type_dest = "PS" else: - ds_type_dest = data_set.DataSet.data_set_type(dest_data_set.name) + ds_type_dest = data_set.DataSet.data_set_type(dest_data_set.name, tmphlq=tmphlq) if (not is_uss_dest) and (path.sep in dest): try: @@ -671,6 +681,7 @@ def run_module(): to_encoding, src_type=ds_type_src, dest_type=ds_type_dest, + tmphlq=tmphlq ) if convert_rc: diff --git a/plugins/modules/zos_fetch.py b/plugins/modules/zos_fetch.py index 92f1086fd..cd00b4216 100644 --- a/plugins/modules/zos_fetch.py +++ b/plugins/modules/zos_fetch.py @@ -885,6 +885,7 @@ def run_module(): fail_on_missing = boolean(parsed_args.get("fail_on_missing")) is_binary = boolean(parsed_args.get("is_binary")) encoding = module.params.get("encoding") + tmphlq = module.params.get("tmp_hlq") # ********************************************************** # # Check for data set existence and determine its type # @@ -906,7 +907,8 @@ def run_module(): src_exists = data_set.DataSet.data_set_member_exists(src_data_set.name) else: src_exists = data_set.DataSet.data_set_exists( - src_data_set.name + src_data_set.name, + tmphlq=tmphlq ) if not src_exists: @@ -936,7 +938,10 @@ def run_module(): if "/" in src: ds_type = "USS" else: - ds_type = data_set.DataSet.data_set_type(data_set.extract_dsname(src_data_set.name)) + ds_type = data_set.DataSet.data_set_type( + data_set.extract_dsname(src_data_set.name), + tmphlq=tmphlq + ) if not ds_type: module.fail_json(msg="Unable to determine source type. No data was fetched.") diff --git a/plugins/modules/zos_lineinfile.py b/plugins/modules/zos_lineinfile.py index d299f9cf3..ca9972e13 100644 --- a/plugins/modules/zos_lineinfile.py +++ b/plugins/modules/zos_lineinfile.py @@ -674,7 +674,7 @@ def main(): if data_set.DataSet.is_gds_relative_name(src) and is_gds is False: module.fail_json(msg="{0} does not exist".format(src)) - ds_utils = data_set.DataSetUtils(src) + ds_utils = data_set.DataSetUtils(src, tmphlq=tmphlq) # Check if dest/src exists if not ds_utils.exists(): diff --git a/plugins/modules/zos_mount.py b/plugins/modules/zos_mount.py index 85f4638aa..0b1377d31 100644 --- a/plugins/modules/zos_mount.py +++ b/plugins/modules/zos_mount.py @@ -587,7 +587,7 @@ def mt_backupOper(module, src, backup, tmphlq=None): Data set type is NOT supported. """ # analysis the file type - ds_utils = data_set.DataSetUtils(src) + ds_utils = data_set.DataSetUtils(src, tmphlq=tmphlq) file_type = ds_utils.ds_type() if file_type != "USS" and file_type not in mt_DS_TYPE: message = "{0} data set type is NOT supported".format(str(file_type)) @@ -818,7 +818,7 @@ def run_module(module, arg_def): ) # data set to be mounted/unmounted must exist - fs_du = data_set.DataSetUtils(src) + fs_du = data_set.DataSetUtils(src, tmphlq=tmphlq) fs_exists = fs_du.exists() if fs_exists is False: module.fail_json( @@ -1033,7 +1033,7 @@ def run_module(module, arg_def): stderr = "Mount called on data set that is already mounted.\n" if write_persistent and module.check_mode is False: - fst_du = data_set.DataSetUtils(data_store) + fst_du = data_set.DataSetUtils(data_store, tmphlq=tmphlq) fst_exists = fst_du.exists() if fst_exists is False: module.fail_json( diff --git a/plugins/modules/zos_unarchive.py b/plugins/modules/zos_unarchive.py index 014d204a6..d1d9952ad 100644 --- a/plugins/modules/zos_unarchive.py +++ b/plugins/modules/zos_unarchive.py @@ -783,7 +783,7 @@ def _restore(self, source): return rc def src_exists(self): - return data_set.DataSet.data_set_exists(self.src) + return data_set.DataSet.data_set_exists(self.src, tmphlq=self.tmphlq) def _get_restored_datasets(self, output): ds_list = list() diff --git a/tests/functional/modules/test_zos_backup_restore.py b/tests/functional/modules/test_zos_backup_restore.py index 32b721cfb..63f110ee7 100644 --- a/tests/functional/modules/test_zos_backup_restore.py +++ b/tests/functional/modules/test_zos_backup_restore.py @@ -23,6 +23,7 @@ from re import search, IGNORECASE, MULTILINE import string import random +import time from ibm_zos_core.tests.helpers.utils import get_random_file_name DATA_SET_CONTENTS = "HELLO WORLD" From fcd8a1ada341a52d6484a8d036a32fdc70943f99 Mon Sep 17 00:00:00 2001 From: Rich Parker <richp405@gmail.com> Date: Mon, 7 Oct 2024 12:09:39 -0400 Subject: [PATCH 473/495] Update mounts.env (#1697) * Update mounts.env re-aligned mounts, added 1.2.5.10, corrected 1.2.5.8 naming. * Update mounts.env forgot to remove the old 1.0.3-ptf2 mount. --- scripts/mounts.env | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/scripts/mounts.env b/scripts/mounts.env index f8ae98a25..fb3cfbbe2 100644 --- a/scripts/mounts.env +++ b/scripts/mounts.env @@ -28,15 +28,15 @@ # data_set - the z/OS data set containing the binaries to mount # space - must be a space before the closing quote # ------------------------------------------------------------------------------ -zoau_mount_list_str="1:1.0.3-ptf2:/zoau/v1.0.3-ptf2:IMSTESTU.ZOAU.V103.PTF2.ZFS "\ -"2:1.1.0-ga:/zoau/v1.1.0-ga:IMSTESTU.ZOAU.V110.GA.ZFS "\ -"3:1.1.1-ptf1:/zoau/v1.1.1-ptf1:IMSTESTU.ZOAU.V111.PTF1.ZFS "\ -"4:1.2.0:/zoau/v1.2.0:IMSTESTU.ZOAU.V120.ZFS "\ -"5:1.2.1:/zoau/v1.2.1:IMSTESTU.ZOAU.V121.ZFS "\ -"6:1.2.2:/zoau/v1.2.2:IMSTESTU.ZOAU.V122.ZFS "\ -"7:1.2.3:/zoau/v1.2.3:IMSTESTU.ZOAU.V123.ZFS "\ -"8:1.2.4:/zoau/v1.2.4:IMSTESTU.ZOAU.V124.ZFS "\ -"9:1.2.5:/zoau/v1.2.5:IMSTESTU.ZOAU.V102.GA.ZFS "\ +zoau_mount_list_str="1:1.1.0-ga:/zoau/v1.1.0-ga:IMSTESTU.ZOAU.V110.GA.ZFS "\ +"2:1.1.1-ptf1:/zoau/v1.1.1-ptf1:IMSTESTU.ZOAU.V111.PTF1.ZFS "\ +"3:1.2.0:/zoau/v1.2.0:IMSTESTU.ZOAU.V120.ZFS "\ +"4:1.2.1:/zoau/v1.2.1:IMSTESTU.ZOAU.V121.ZFS "\ +"5:1.2.2:/zoau/v1.2.2:IMSTESTU.ZOAU.V122.ZFS "\ +"6:1.2.3:/zoau/v1.2.3:IMSTESTU.ZOAU.V123.ZFS "\ +"7:1.2.4:/zoau/v1.2.4:IMSTESTU.ZOAU.V124.ZFS "\ +"8:1.2.5.8:/zoau/v1.2.5.8:IMSTESTU.ZOAU.V102.GA.ZFS "\ +"9:1.2.5.10:/zoau/v1.2.5.10:IMSTESTU.ZOAU.V103.PTF2.ZFS "\ "10:1.3.0:/zoau/v1.3.0:IMSTESTU.ZOAU.V103.GA5.ZFS "\ "11:1.3.1:/zoau/v1.3.1:IMSTESTU.ZOAU.V130.ZFS "\ "12:1.3.2:/zoau/v1.3.2.0:IMSTESTU.ZOAU.V100.GA.ZFS "\ From 5bb38fc06c48c120300291ecc99fd762cc3bafe3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Tue, 15 Oct 2024 10:42:20 -0600 Subject: [PATCH 474/495] [Bugfix][zos_copy] Honor remote_tmp variable from ansible.cfg when creating temporary files (#1739) * Fix tmp to .ansible/tmp * Fix copy use tmp better for .ansible/tmp * Add fragment * Added new implementation of temporary directory update on copying to remote * Make action plugin and module remove their temporary directories * Remove a misleading comment * Added tmp_dir attribute definition to class * Fixed error for when copying from src that is more than 80 lenght and dest is asa would fail * Replaced environment from TMP to TMPDIR as this is the tmpdir used in USS * Update 1739-tmp_files_not_use_tmp_folder.yml * Remove use of expanduser in favor of realpath * Removed commented line, changed TMP env var to TMPDIR to comply with uss temporary directories and used os.path.join in favor of just concatenating strings * Update 1739-tmp_files_not_use_tmp_folder.yml --------- Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> --- .../1739-tmp_files_not_use_tmp_folder.yml | 5 +++++ plugins/action/zos_copy.py | 17 ++++++++++++++--- plugins/module_utils/copy.py | 6 ++++-- plugins/modules/zos_copy.py | 19 +++++++++++-------- 4 files changed, 34 insertions(+), 13 deletions(-) create mode 100644 changelogs/fragments/1739-tmp_files_not_use_tmp_folder.yml diff --git a/changelogs/fragments/1739-tmp_files_not_use_tmp_folder.yml b/changelogs/fragments/1739-tmp_files_not_use_tmp_folder.yml new file mode 100644 index 000000000..6cf07266b --- /dev/null +++ b/changelogs/fragments/1739-tmp_files_not_use_tmp_folder.yml @@ -0,0 +1,5 @@ +bugfixes: + - zos_copy - Previously, the module ignored the value of ``remote_tmp`` set in Ansible configuration file + and used the ``/tmp/`` directory. Fix now uses the value of ``remote_tmp`` or the default value ``~/.ansible/tmp`` + if none is given. + (https://github.com/ansible-collections/ibm_zos_core/pull/1739). diff --git a/plugins/action/zos_copy.py b/plugins/action/zos_copy.py index 8561045e5..a276ce651 100644 --- a/plugins/action/zos_copy.py +++ b/plugins/action/zos_copy.py @@ -18,7 +18,7 @@ import time import shutil -from tempfile import mkstemp, gettempprefix +from tempfile import mkstemp from ansible.errors import AnsibleError from ansible.module_utils._text import to_text @@ -71,6 +71,8 @@ def run(self, tmp=None, task_vars=None): is_src_dir = False temp_path = is_uss = None + self.tmp_dir = None + if dest: if not isinstance(dest, string_types): msg = "Invalid type supplied for 'dest' option, it must be a string" @@ -267,6 +269,10 @@ def run(self, tmp=None, task_vars=None): # Erasing all rendered Jinja2 templates from the controller. if template_dir: shutil.rmtree(template_dir, ignore_errors=True) + # Remove temporary directory from remote + if self.tmp_dir is not None: + path = os.path.normpath(f"{self.tmp_dir}/ansible-zos-copy") + self._connection.exec_command(f"rm -rf {path}*") if copy_res.get("note") and not force: result["note"] = copy_res.get("note") @@ -293,8 +299,13 @@ def run(self, tmp=None, task_vars=None): def _copy_to_remote(self, src, is_dir=False, ignore_stderr=False): """Copy a file or directory to the remote z/OS system """ - - temp_path = "/{0}/{1}/{2}".format(gettempprefix(), _create_temp_path_name(), os.path.basename(src)) + self.tmp_dir = self._connection._shell._options.get("remote_tmp") + rc, stdout, stderr = self._connection.exec_command("cd {0} && pwd".format(self.tmp_dir)) + if rc > 0: + msg = f"Failed to resolve remote temporary directory {self.tmp_dir}. Ensure that the directory exists and user has proper access." + return self._exit_action({}, msg, failed=True) + self.tmp_dir = stdout.decode("utf-8").replace("\r", "").replace("\n", "") + temp_path = os.path.join(self.tmp_dir, _create_temp_path_name(), os.path.basename(src)) self._connection.exec_command("mkdir -p {0}".format(os.path.dirname(temp_path))) _src = src.replace("#", "\\#") _sftp_action = 'put' diff --git a/plugins/module_utils/copy.py b/plugins/module_utils/copy.py index f8b37b514..5336a90d7 100644 --- a/plugins/module_utils/copy.py +++ b/plugins/module_utils/copy.py @@ -401,8 +401,10 @@ def copy_asa_uss2mvs(src, dest, tmphlq=None): str The stderr after the copy command executed successfully. """ - oget_cmd = "OGET '{0}' '{1}'".format(src, dest) - rc, out, err = ikjeft01(oget_cmd, authorized=True, tmphlq=tmphlq) + + module = AnsibleModuleHelper(argument_spec={}) + oget_cmd = f"tsocmd \" OGET '{src}' '{dest}' \"" + rc, out, err = module.run_command(oget_cmd) return TSOCmdResponse(rc, out, err) diff --git a/plugins/modules/zos_copy.py b/plugins/modules/zos_copy.py index dd2e724cc..3629962fd 100644 --- a/plugins/modules/zos_copy.py +++ b/plugins/modules/zos_copy.py @@ -1283,7 +1283,7 @@ def convert_encoding(self, src, encoding, remote_src): if os.path.isdir(new_src): try: if remote_src: - temp_dir = tempfile.mkdtemp() + temp_dir = tempfile.mkdtemp(prefix=os.environ['TMPDIR']) shutil.copytree(new_src, temp_dir, dirs_exist_ok=True) new_src = temp_dir @@ -1301,7 +1301,7 @@ def convert_encoding(self, src, encoding, remote_src): else: try: if remote_src: - fd, temp_src = tempfile.mkstemp() + fd, temp_src = tempfile.mkstemp(dir=os.environ['TMPDIR']) os.close(fd) shutil.copy(new_src, temp_src) new_src = temp_src @@ -1464,7 +1464,7 @@ def create_temp_with_lf_endings(self, src): If the conversion fails. """ try: - fd, converted_src = tempfile.mkstemp() + fd, converted_src = tempfile.mkstemp(dir=os.environ['TMPDIR']) os.close(fd) with open(converted_src, "wb") as converted_file: @@ -2230,7 +2230,7 @@ def dump_data_set_member_to_file(data_set_member, is_binary): DataSetMemberAttributeError When the call to dcp fails. """ - fd, temp_path = tempfile.mkstemp() + fd, temp_path = tempfile.mkstemp(dir=os.environ['TMPDIR']) os.close(fd) copy_args = dict() @@ -2719,7 +2719,7 @@ def get_file_checksum(src): def cleanup(src_list): """Remove all files or directories listed in src_list. Also perform - additional cleanup of the /tmp directory. + additional cleanup of the tmp directory. Parameters ---------- @@ -2727,7 +2727,7 @@ def cleanup(src_list): A list of file paths. """ module = AnsibleModuleHelper(argument_spec={}) - tmp_prefix = tempfile.gettempprefix() + tmp_prefix = os.environ['TMPDIR'] tmp_dir = os.path.realpath("/" + tmp_prefix) dir_list = glob.glob(tmp_dir + "/ansible-zos-copy-payload*") conv_list = glob.glob(tmp_dir + "/converted*") @@ -3140,7 +3140,7 @@ def normalize_line_endings(src, encoding=None): src_tag = encoding["from"] if src_tag != "IBM-037": - fd, converted_src = tempfile.mkstemp() + fd, converted_src = tempfile.mkstemp(dir=os.environ['TMPDIR']) os.close(fd) enc_utils.uss_convert_encoding( @@ -3268,6 +3268,9 @@ def run_module(module, arg_def): force_lock = module.params.get('force_lock') content = module.params.get('content') + # Set temporary directory at os environment level + os.environ['TMPDIR'] = f"{os.path.realpath(module.tmpdir)}/" + dest_data_set = module.params.get('dest_data_set') if dest_data_set: if volume: @@ -3371,7 +3374,7 @@ def run_module(module, arg_def): src_tag = encode.Defaults.get_default_system_charset() # Converting the original src to a temporary one in UTF-8. - fd, converted_src = tempfile.mkstemp() + fd, converted_src = tempfile.mkstemp(dir=os.environ['TMPDIR']) os.close(fd) encode_utils.uss_convert_encoding( new_src, From 8f823ffcd923eaf365b558b23ef1a6f13ccd2685 Mon Sep 17 00:00:00 2001 From: Fernando Flores <fernandofloresdev@gmail.com> Date: Wed, 23 Oct 2024 07:56:17 -0600 Subject: [PATCH 475/495] [v1.12.0][Bug]Return system and subsystem in zos_job_query (#1761) * Return subsystem and system in values * Added changelog --- .../fragments/1761-system-subsystem-job_query.yml | 3 +++ plugins/module_utils/job.py | 2 -- plugins/modules/zos_job_query.py | 10 ++++++++++ tests/functional/modules/test_zos_job_query_func.py | 2 ++ 4 files changed, 15 insertions(+), 2 deletions(-) create mode 100644 changelogs/fragments/1761-system-subsystem-job_query.yml diff --git a/changelogs/fragments/1761-system-subsystem-job_query.yml b/changelogs/fragments/1761-system-subsystem-job_query.yml new file mode 100644 index 000000000..36d8abb02 --- /dev/null +++ b/changelogs/fragments/1761-system-subsystem-job_query.yml @@ -0,0 +1,3 @@ +bugfixes: + - zos_job_query - Module was not returning values for system and subsystem. Fix now returns these values. + (https://github.com/ansible-collections/ibm_zos_core/pull/1761). diff --git a/plugins/module_utils/job.py b/plugins/module_utils/job.py index d9444947f..da2027e48 100644 --- a/plugins/module_utils/job.py +++ b/plugins/module_utils/job.py @@ -240,7 +240,6 @@ def job_status(job_id=None, owner=None, job_name=None, dd_name=None): job_id=job_id, owner=owner, job_name=job_name, - dd_scan=False ) if len(job_status_result) == 0: @@ -252,7 +251,6 @@ def job_status(job_id=None, owner=None, job_name=None, dd_name=None): job_id=job_id, owner=owner, job_name=job_name, - dd_scan=False ) return job_status_result diff --git a/plugins/modules/zos_job_query.py b/plugins/modules/zos_job_query.py index 328426ada..110c3554e 100644 --- a/plugins/modules/zos_job_query.py +++ b/plugins/modules/zos_job_query.py @@ -124,6 +124,16 @@ Type of address space used by the job. type: str sample: STC + system: + description: + The job entry system that MVS uses to do work. + type: str + sample: STL1 + subsystem: + description: + The job entry subsystem that MVS uses to do work. + type: str + sample: STL1 ret_code: description: Return code output collected from job log. diff --git a/tests/functional/modules/test_zos_job_query_func.py b/tests/functional/modules/test_zos_job_query_func.py index d34aeda4d..a7c813fad 100644 --- a/tests/functional/modules/test_zos_job_query_func.py +++ b/tests/functional/modules/test_zos_job_query_func.py @@ -93,6 +93,8 @@ def test_zos_job_id_query_multi_wildcards_func(ansible_zos_module): qresults = hosts.all.zos_job_query(job_id=jobmask) for qresult in qresults.contacted.values(): assert qresult.get("jobs") is not None + assert qresult.get("jobs")[0].get("system") is not None + assert qresult.get("jobs")[0].get("subsystem") is not None finally: hosts.all.file(path=temp_path, state="absent") From 70e4e986c1390fe014fdbfd9231b905336558016 Mon Sep 17 00:00:00 2001 From: Demetri <dimatos@gmail.com> Date: Wed, 23 Oct 2024 08:52:59 -0700 Subject: [PATCH 476/495] =?UTF-8?q?Correct=20zos=5Fcopy=20handling=20of=20?= =?UTF-8?q?a=20zoauresponse=20during=20opercmd=20usage=20for=20=E2=80=A6?= =?UTF-8?q?=20(#1766)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Correct zos_copy handling of a zoauresponse during opercmd usage for locked data sets. (#1744) * Test case updates to test opercmd authentication Signed-off-by: ddimatos <dimatos@gmail.com> * Mock test to use a enum class to selected a managed user Signed-off-by: ddimatos <dimatos@gmail.com> * add a users.py helper class with racf commands and users Signed-off-by: ddimatos <dimatos@gmail.com> * RACF updates Signed-off-by: ddimatos <dimatos@gmail.com> * Updates to create user Signed-off-by: ddimatos <dimatos@gmail.com> * Updates to fix creating racf user Signed-off-by: ddimatos <dimatos@gmail.com> * Added support for other user types Signed-off-by: ddimatos <dimatos@gmail.com> * Update user.py to controll user access Signed-off-by: ddimatos <dimatos@gmail.com> * Updated user.py with delete function Signed-off-by: ddimatos <dimatos@gmail.com> * Upudates to change the original get new user design Signed-off-by: ddimatos <dimatos@gmail.com> * Updated doc and exceptions Signed-off-by: ddimatos <dimatos@gmail.com> * Update logic Signed-off-by: ddimatos <dimatos@gmail.com> * Update logic Signed-off-by: ddimatos <dimatos@gmail.com> * Updates to support managed users Signed-off-by: ddimatos <dimatos@gmail.com> * debug stmts Signed-off-by: ddimatos <dimatos@gmail.com> * fix test case Signed-off-by: ddimatos <dimatos@gmail.com> * fix test case Signed-off-by: ddimatos <dimatos@gmail.com> * Debug stmt Signed-off-by: ddimatos <dimatos@gmail.com> * Debug stmt Signed-off-by: ddimatos <dimatos@gmail.com> * Debug stmt Signed-off-by: ddimatos <dimatos@gmail.com> * Debug stmt Signed-off-by: ddimatos <dimatos@gmail.com> * Debug stmt Signed-off-by: ddimatos <dimatos@gmail.com> * debug Signed-off-by: ddimatos <dimatos@gmail.com> * debug Signed-off-by: ddimatos <dimatos@gmail.com> * debug Signed-off-by: ddimatos <dimatos@gmail.com> * debug Signed-off-by: ddimatos <dimatos@gmail.com> * debug Signed-off-by: ddimatos <dimatos@gmail.com> * bug Signed-off-by: ddimatos <dimatos@gmail.com> * debug Signed-off-by: ddimatos <dimatos@gmail.com> * debug Signed-off-by: ddimatos <dimatos@gmail.com> * Added ssh config append Signed-off-by: ddimatos <dimatos@gmail.com> * Update zos_copy and pipeline framework to support dynaic users Signed-off-by: ddimatos <dimatos@gmail.com> * Fixed bug that mixed up dir and files Signed-off-by: ddimatos <dimatos@gmail.com> * Added a todo comment for AC Signed-off-by: ddimatos <dimatos@gmail.com> * Update users py to add new execute function Signed-off-by: ddimatos <dimatos@gmail.com> * updates to complete the ability to use a managed user Signed-off-by: ddimatos <dimatos@gmail.com> * Fixes E123: closing bracket does not match indentation Signed-off-by: ddimatos <dimatos@gmail.com> * Correct name of ManagedUseeType to ManagedUserType Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> * Update tests/functional/modules/test_zos_copy_func.py Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> * PR review comments addressed Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> * update changelog fragment Signed-off-by: ddimatos <dimatos@gmail.com> --------- Signed-off-by: ddimatos <dimatos@gmail.com> Co-authored-by: Fernando Flores <fernandofloresdev@gmail.com> --- ac | 1 + .../1766-zos_copy-racf-uacc-updates.yml | 12 + plugins/modules/zos_copy.py | 38 +- .../functional/modules/test_zos_copy_func.py | 156 ++- tests/helpers/users.py | 930 ++++++++++++++++++ 5 files changed, 1107 insertions(+), 30 deletions(-) create mode 100644 changelogs/fragments/1766-zos_copy-racf-uacc-updates.yml create mode 100644 tests/helpers/users.py diff --git a/ac b/ac index 46bd21ef4..b4dbf9a0c 100755 --- a/ac +++ b/ac @@ -684,6 +684,7 @@ ac_test(){ message_error "Unable to find test configration in ${VENV}/config.yml." fi + # TODO: Consider adding the -vvvv like so `$CURR_DIR/${file} -vvvv --ignore="${skip}"` so that you can access the verbosity feature of pytest. if [ "$file" ]; then . ${VENV_BIN}/activate && export ANSIBLE_LIBRARY=$VENV/ansible_collections/ibm/ibm_zos_core/plugins/modules;export ANSIBLE_CONFIG=$VENV/ansible.cfg;${VENV_BIN}/pytest $CURR_DIR/${file} --ignore="${skip}" --host-pattern=all --zinventory=${VENV}/config.yml ${debug} >&2 ; echo $? >&1 else diff --git a/changelogs/fragments/1766-zos_copy-racf-uacc-updates.yml b/changelogs/fragments/1766-zos_copy-racf-uacc-updates.yml new file mode 100644 index 000000000..fba927f0d --- /dev/null +++ b/changelogs/fragments/1766-zos_copy-racf-uacc-updates.yml @@ -0,0 +1,12 @@ +bugfixes: + - zos_copy - Improve module zos_copy error handling when the user does not have + universal access authority set to UACC(READ) for SAF Profile + 'MVS.MCSOPER.ZOAU' and SAF Class OPERCMDS. The module now handles the exception + and returns an informative message. + (https://github.com/ansible-collections/ibm_zos_core/pull/1766). +trivial: + - pipeline - Deliver a new users.py framework that allows functional test cases to + request a managed user type where this user can have limited access to some SAF + profile, or saf class as well as user id's with specific patterns such as including + supported special characters such as '@', '#', etc. + (https://github.com/ansible-collections/ibm_zos_core/pull/1766). diff --git a/plugins/modules/zos_copy.py b/plugins/modules/zos_copy.py index 3629962fd..a14f26661 100644 --- a/plugins/modules/zos_copy.py +++ b/plugins/modules/zos_copy.py @@ -3165,13 +3165,19 @@ def data_set_locked(dataset_name): Parameters ---------- - dataset_name : str + dataset_name (str): The data set name used to check if there is a lock. Returns ------- bool True if the data set is locked, or False if the data set is not locked. + + Raises + ------ + CopyOperationError + When the user does not have Universal Access Authority to + ZOAU SAF Profile 'MVS.MCSOPER.ZOAU' and SAF Class OPERCMDS. """ # Using operator command "D GRS,RES=(*,{dataset_name})" to detect if a data set # is in use, when a data set is in use it will have "EXC/SHR and SHARE" @@ -3179,18 +3185,26 @@ def data_set_locked(dataset_name): result = dict() result["stdout"] = [] command_dgrs = "D GRS,RES=(*,{0})".format(dataset_name) - response = opercmd.execute(command=command_dgrs) - stdout = response.stdout_response - if stdout is not None: - for out in stdout.split("\n"): - if out: - result["stdout"].append(out) - if len(result["stdout"]) > 4 and "EXC/SHR" in stdout and "SHARE" in stdout: + + try: + response = opercmd.execute(command=command_dgrs) + stdout = response.stdout_response + + if stdout is not None: + for out in stdout.split("\n"): + if out: + result["stdout"].append(out) + if len(result["stdout"]) <= 4 and "NO REQUESTORS FOR RESOURCE" in stdout: + return False + return True - elif len(result["stdout"]) <= 4 and "NO REQUESTORS FOR RESOURCE" in stdout: - return False - else: - return False + except zoau_exceptions.ZOAUException as copy_exception: + raise CopyOperationError( + msg="Unable to determine if the dest {0} is in use.".format(dataset_name), + rc=copy_exception.response.rc, + stdout=copy_exception.response.stdout_response, + stderr=copy_exception.response.stderr_response + ) def run_module(module, arg_def): diff --git a/tests/functional/modules/test_zos_copy_func.py b/tests/functional/modules/test_zos_copy_func.py index 61ba9982d..e20e48b48 100644 --- a/tests/functional/modules/test_zos_copy_func.py +++ b/tests/functional/modules/test_zos_copy_func.py @@ -13,6 +13,7 @@ from __future__ import absolute_import, division, print_function +from ibm_zos_core.tests.helpers.users import ManagedUserType, ManagedUser import pytest import os import shutil @@ -365,6 +366,7 @@ def link_loadlib_from_cobol(hosts, cobol_src_pds, cobol_src_mem, loadlib_pds, lo wait_time_s=60 ) for result in job_result.contacted.values(): + print(result) rc = result.get("jobs")[0].get("ret_code").get("code") finally: hosts.all.file(path=temp_jcl_uss_path, state="absent") @@ -1960,8 +1962,15 @@ def test_ensure_copy_file_does_not_change_permission_on_dest(ansible_zos_module, @pytest.mark.seq -@pytest.mark.parametrize("ds_type", [ "pds", "pdse", "seq"]) -def test_copy_dest_lock(ansible_zos_module, ds_type): +@pytest.mark.parametrize("ds_type, f_lock",[ + ( "pds", True), # Success path, pds locked, force_lock enabled and user authorized + ( "pdse", True), # Success path, pdse locked, force_lock enabled and user authorized + ( "seq", True), # Success path, seq locked, force_lock enabled and user authorized + ( "pds", False), # Module exits with: Unable to write to dest '{0}' because a task is accessing the data set." + ( "pdse", False), # Module exits with: Unable to write to dest '{0}' because a task is accessing the data set." + ( "seq", False), # Module exits with: Unable to write to dest '{0}' because a task is accessing the data set." +]) +def test_copy_dest_lock(ansible_zos_module, ds_type, f_lock ): hosts = ansible_zos_module data_set_1 = get_tmp_ds_name() data_set_2 = get_tmp_ds_name() @@ -1973,7 +1982,6 @@ def test_copy_dest_lock(ansible_zos_module, ds_type): src_data_set = data_set_1 dest_data_set = data_set_2 try: - hosts = ansible_zos_module hosts.all.zos_data_set(name=data_set_1, state="present", type=ds_type, replace=True) hosts.all.zos_data_set(name=data_set_2, state="present", type=ds_type, replace=True) if ds_type == "pds" or ds_type == "pdse": @@ -1999,27 +2007,139 @@ def test_copy_dest_lock(ansible_zos_module, ds_type): dest = dest_data_set, remote_src = True, force=True, - force_lock=True, + force_lock=f_lock, ) for result in results.contacted.values(): print(result) - assert result.get("changed") == True - assert result.get("msg") is None - # verify that the content is the same - verify_copy = hosts.all.shell( - cmd="dcat \"{0}\"".format(dest_data_set), - executable=SHELL_EXECUTABLE, - ) - for vp_result in verify_copy.contacted.values(): - print(vp_result) - verify_copy_2 = hosts.all.shell( - cmd="dcat \"{0}\"".format(src_data_set), + if f_lock: #and apf_auth_user: + assert result.get("changed") == True + assert result.get("msg") is None + # verify that the content is the same + verify_copy = hosts.all.shell( + cmd="dcat \"{0}\"".format(dest_data_set), executable=SHELL_EXECUTABLE, ) - for vp_result_2 in verify_copy_2.contacted.values(): - print(vp_result_2) - assert vp_result_2.get("stdout") == vp_result.get("stdout") + for vp_result in verify_copy.contacted.values(): + print(vp_result) + verify_copy_2 = hosts.all.shell( + cmd="dcat \"{0}\"".format(src_data_set), + executable=SHELL_EXECUTABLE, + ) + for vp_result_2 in verify_copy_2.contacted.values(): + print(vp_result_2) + assert vp_result_2.get("stdout") == vp_result.get("stdout") + elif not f_lock: + assert result.get("failed") is True + assert result.get("changed") == False + assert "because a task is accessing the data set" in result.get("msg") + assert result.get("rc") is None + finally: + # extract pid + ps_list_res = hosts.all.shell(cmd="ps -e | grep -i 'pdse-lock'") + # kill process - release lock - this also seems to end the job + pid = list(ps_list_res.contacted.values())[0].get('stdout').strip().split(' ')[0] + hosts.all.shell(cmd="kill 9 {0}".format(pid.strip())) + # clean up c code/object/executable files, jcl + hosts.all.shell(cmd=f'rm -r {temp_dir}') + # remove pdse + hosts.all.zos_data_set(name=data_set_1, state="absent") + hosts.all.zos_data_set(name=data_set_2, state="absent") + + +def test_copy_dest_lock_test_with_no_opercmd_access_pds_without_force_lock(ansible_zos_module): + """ + This tests the module exeception raised 'msg="Unable to determine if the source {0} is in use.".format(dataset_name)'. + This this a wrapper for the actual test case `managed_user_copy_dest_lock_test_with_no_opercmd_access`. + """ + managed_user = None + managed_user_test_case_name = "managed_user_copy_dest_lock_test_with_no_opercmd_access" + try: + # Initialize the Managed user API from the pytest fixture. + managed_user = ManagedUser.from_fixture(ansible_zos_module) + + # Important: Execute the test case with the managed users execution utility. + managed_user.execute_managed_user_test( + managed_user_test_case = managed_user_test_case_name,debug = True, + verbose = False, managed_user_type=ManagedUserType.ZOAU_LIMITED_ACCESS_OPERCMD) + + finally: + # Delete the managed user on the remote host to avoid proliferation of users. + managed_user.delete_managed_user() + +@pytest.mark.parametrize("ds_type, f_lock",[ + ( "pds", False), # Module exception raised msg="Unable to determine if the source {0} is in use.".format(dataset_name) + ( "pdse", False), # Module exception raised msg="Unable to determine if the source {0} is in use.".format(dataset_name) + ( "seq", False), # Module exception raised msg="Unable to determine if the source {0} is in use.".format(dataset_name) + ( "seq", True), # Opercmd is not called so a user with limited UACC will not matter and will succeed +]) +def managed_user_copy_dest_lock_test_with_no_opercmd_access(ansible_zos_module, ds_type, f_lock ): + """ + When force_lock option is false, it exercises the opercmd call which requires RACF universal access. + This negative test will ensure that if the user does not have RACF universal access that the module + not halt execution and instead bubble up the ZOAU exception. + """ + hosts = ansible_zos_module + data_set_1 = get_tmp_ds_name() + data_set_2 = get_tmp_ds_name() + member_1 = "MEM1" + + if ds_type == "pds" or ds_type == "pdse": + src_data_set = data_set_1 + "({0})".format(member_1) + dest_data_set = data_set_2 + "({0})".format(member_1) + else: + src_data_set = data_set_1 + dest_data_set = data_set_2 + try: + hosts.all.zos_data_set(name=data_set_1, state="present", type=ds_type, replace=True) + hosts.all.zos_data_set(name=data_set_2, state="present", type=ds_type, replace=True) + if ds_type == "pds" or ds_type == "pdse": + hosts.all.zos_data_set(name=src_data_set, state="present", type="member", replace=True) + hosts.all.zos_data_set(name=dest_data_set, state="present", type="member", replace=True) + # copy text_in source + hosts.all.shell(cmd="decho \"{0}\" \"{1}\"".format(DUMMY_DATA, src_data_set)) + # copy/compile c program and copy jcl to hold data set lock for n seconds in background(&) + temp_dir = get_random_file_name(dir=TMP_DIRECTORY) + hosts.all.zos_copy(content=c_pgm, dest=f'{temp_dir}/pdse-lock.c', force=True) + hosts.all.zos_copy( + content=call_c_jcl.format(temp_dir, dest_data_set), + dest=f'{temp_dir}/call_c_pgm.jcl', + force=True + ) + hosts.all.shell(cmd="xlc -o pdse-lock pdse-lock.c", chdir=f"{temp_dir}/") + # submit jcl + hosts.all.shell(cmd="submit call_c_pgm.jcl", chdir=f"{temp_dir}/") + # pause to ensure c code acquires lock + time.sleep(10) + results = hosts.all.zos_copy( + src = src_data_set, + dest = dest_data_set, + remote_src = True, + force=True, + force_lock=f_lock, + ) + for result in results.contacted.values(): + if f_lock: + assert result.get("changed") == True + assert result.get("msg") is None + # verify that the content is the same + verify_copy = hosts.all.shell( + cmd="dcat \"{0}\"".format(dest_data_set), + executable=SHELL_EXECUTABLE, + ) + for vp_result in verify_copy.contacted.values(): + verify_copy_2 = hosts.all.shell( + cmd="dcat \"{0}\"".format(src_data_set), + executable=SHELL_EXECUTABLE, + ) + for vp_result_2 in verify_copy_2.contacted.values(): + assert vp_result_2.get("stdout") == vp_result.get("stdout") + elif not f_lock: + assert result.get("failed") is True + assert result.get("changed") == False + assert "Unable to determine if the dest" in result.get("msg") + assert "BGYSC0819E Insufficient security authorization for resource MVS.MCSOPER.ZOAU in class OPERCMDS" in result.get("stderr") + assert result.get("rc") == 6 finally: # extract pid ps_list_res = hosts.all.shell(cmd="ps -e | grep -i 'pdse-lock'") diff --git a/tests/helpers/users.py b/tests/helpers/users.py new file mode 100644 index 000000000..1dd341572 --- /dev/null +++ b/tests/helpers/users.py @@ -0,0 +1,930 @@ +# -*- coding: utf-8 -*- + +# Copyright (c) IBM Corporation 2024 +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# #################################################################### +# The users.py file contains various utility functions that +# will support functional test cases. For example, request a randomly +# generated user with specific limitations. +# #################################################################### + +import inspect +from io import StringIO +from enum import Enum +import json +import random +import shutil +import string +import subprocess +import os +import glob +import re +import subprocess +from typing import List, Tuple + +class ManagedUserType (Enum): + """ + Represents the z/OS users available for functional testing. + + Managed user types + ------------------ + - ZOAU_LIMITED_ACCESS_OPERCMD + - ZOS_BEGIN_WITH_AT_SIGN + - ZOS_BEGIN_WITH_POUND + - ZOS_RANDOM_SYMBOLS + - ZOS_LIMITED_HLQ + - ZOS_LIMITED_TMP_HLQ + """ + + ZOAU_LIMITED_ACCESS_OPERCMD=("zoau_limited_access_opercmd") + """ + A z/OS managed user with restricted access to ZOAU + SAF Profile 'MVS.MCSOPER.ZOAU' and SAF Class OPERCMDS + with universal access authority set to UACC(NONE). With + UACC as NONE, this user will be refused access to the + ZOAU opercmd utility. + """ + + ZOS_BEGIN_WITH_AT_SIGN=("zos_begin_with_at_sign") + """ + A z/OS managed user ID that begins with the '@' symbol. + User retains the universal access of the original user (model user). + """ + + ZOS_BEGIN_WITH_POUND=("zos_begin_with_pound") + """ + A z/OS managed user ID that begins with the '#' symbol. + User retains the universal access of the original user (model user). + """ + + ZOS_RANDOM_SYMBOLS=("zos_random_symbols") + """ + A z/OS managed user ID that is randomly assigned symbols, '#', '$', '@'. + User retains the universal access of the original user (model user). + """ + # TODO: Implement this, use the recommended HLQ + ZOS_LIMITED_HLQ=("zos_limited_hlq") + """ + A z/OS managed user with restricted access to High Level + Qualifiers (HLQ): (RESTRICT, NOPERMIT, ....). + """ + + # TODO: Implement this, use the recommended tmp HLQ + ZOS_LIMITED_TMP_HLQ=("zos_limited_tmp_hlq") + """ + A z/OS managed user with restricted access to temporary + High Level Qualifiers (HLQ): (TSTRICT, TNOPERM, ....). + """ + + def __str__(self) -> str: + """ + Return the ManagedUserType name as upper case. + """ + return self.name.upper() + + def string(self) -> str: + """ + Returns the ManagedUserType value as uppercase. + """ + return self.value.upper() + +class ManagedUser: + """ + This class provides methods in which can provide a user and password for a requested + ManagedUserType. + + Usage + ----- + The pytest fixture (ansible_zos_module) is a generator object done so by 'yield'ing; + where a yield essentially pauses (streaming) a function and the state and control is + goes to the function that called it. Thus the fixture can't be passed to this API to + be updated with the new user or use the fixture to perform managed node inquiries, + thus SSH is used for those managed node commands. + + Another important note is when using this API, you can't parametrize test cases, because + once the pytest fixture (ansible_zos_module) is updated with a new user and performs a + remote operation on a managed node, the fixture's user can not be changed because control + is passed back and all attempts to change the user for reuse will fail, unless your goal + is to use the same managedUserType in the parametrization this is not recommended. + + + Example + ------- + from ibm_zos_core.tests.helpers.users import ManagedUserType + + def test_demo_how_to_use_managed_user(ansible_zos_module): + # This demonstrates a user who has specific requirements + hosts = ansible_zos_module + managed_user = None + + who = hosts.all.shell(cmd = "whoami") + for person in who.contacted.values(): + print(f"Who am I BEFORE asking for a managed user = {person.get("stdout")}") + + try: + # Initialize the Managed user API from the pytest fixture. + managed_user = ManagedUser.from_fixture(ansible_zos_module) + + # Important: Execute the test case with the managed users execution utility. + managed_user.execute_managed_user_test( + managed_user_test_case = "managed_user_test_demo_how_to_use_managed_user", + debug = True, verbose = False, managed_user_type=ManagedUserType.ZOAU_LIMITED_ACCESS_OPERCMD) + + finally: + # Delete the managed user on the remote host to avoid proliferation of users. + managed_user.delete_managed_user() + + def managed_user_test_demo_how_to_use_managed_user(ansible_zos_module): + hosts = ansible_zos_module + who = hosts.all.shell(cmd = "whoami") + for person in who.contacted.values(): + print(f"Who am I AFTER asking for a managed user = {person.get("stdout")}") + + + Example Output + -------------- + Who am I BEFORE asking for a managed user = BPXROOT + Who am I AFTER asking for a managed user = LJBXMONV + """ + + def __init__(self, model_user: str = None, remote_host: str = None, zoau_path: str = None, pyz_path: str = None, pythonpath: str = None, volumes: str = None, hostpattern: str = None) -> None: + """ + Initialize class with necessary parameters. + + Parameters + ---------- + model_user (str): + The user that will connect to the managed node and execute RACF commands + and serve as a model for other users attributes. + This user should have enough authority to perform RACF operations. + remote_host (str): + The z/OS managed node (host) to connect to to create the managed user. + """ + self._model_user = model_user + self._remote_host = remote_host + self._zoau_path = zoau_path + self._pyz_path = pyz_path + self._pythonpath = pythonpath + self._volumes = volumes + self._hostpattern = "all" # can also get it from options host_pattern + self._managed_racf_user = None + self._managed_user_group = None + self._ssh_config_file_size = 0 + self._ssh_directory_present = True + self._create_ssh_config_and_directory() + + @classmethod + def from_fixture(cls, pytest_fixture): + + remote_host = pytest_fixture["options"]["inventory"].replace(",", "") + model_user = pytest_fixture["options"]["user"] + inventory_hosts = pytest_fixture["options"]["inventory_manager"]._inventory.hosts + inventory_list = list(inventory_hosts.values())[0].vars.get('ansible_python_interpreter').split(";") + zoau_path = [v for v in inventory_list if f"ZOAU_HOME=" in v][0].split('=')[1].strip() or None + pythonpath = [v for v in inventory_list if f"PYTHONPATH=" in v][0].split('=')[1].strip() or None + pyz_path = [v for v in inventory_list if f"bin/python" in v][0].split('/bin')[0].strip() or None + # TODO: To make this dynamic, we need to update AC and then also test with the new fixture because + # the legacy fixture is using a VOLUMES keyword while raw fixture uses extra_args. Best to move + # volumes to extra_args. + volumes = "000000,222222" + hostpattern = pytest_fixture["options"]["host_pattern"] + return cls(model_user, remote_host, zoau_path, pyz_path, pythonpath, volumes, hostpattern) + + + def _connect(self, remote_host:str , model_user: str, command: str) -> List[str]: + """ + Connect to the remote managed node and execute requested command. + + Parameters + ---------- + remote_host (str) + The z/OS managed node (host) to connect to to create the managed user. + model_user (str) + The user that will connect to the managed node and execute RACF commands + and serve as a model for other users attributes. + This user should have enough authority to perform RACF operations. + command (str) + Command to run on the managed node. + + Returns + ------- + List[str] + Command result as a list of strings. + """ + ssh_command = ["ssh", f"{model_user}@{remote_host}", command] + result = subprocess.run(ssh_command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True) + + # If the RC=20 (command failure) - let this fall through for now, will be caught by command processors. + # If the RC=255, raise exection for the connection + if result.returncode == 255: + raise Exception(f"Unable to connect remote user [{model_user}] to remote host [{remote_host}], RC [{result.returncode}], stdout [{result.stdout}], stderr [{result.stderr}].") + + return [line.strip() for line in result.stdout.split('\n')] + + def _create_ssh_keys(self, directory:str) -> None: + """ + Create SSH keys for the new managed user to be used for password-less authentication. + Creates both RSA and ED25519 because some of the systems only take one or the other + so both are generated and shared now. + + Parameters + ---------- + directory (str) + The directory where to create the ssh keys. + + Raise + ----- + Exception - if unable to create or run ssh-keygen. + """ + escaped_user = re.escape(self._managed_racf_user) + key_command = ["mkdir", "-p", f"{directory}/{escaped_user}"] + result = subprocess.run(key_command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True) + if result.returncode != 0: + raise Exception(f"Unable to create keys {result.stdout}, {result.stdout}") + + key_command = ["ssh-keygen", "-q","-t", "rsa", "-N", "", "-f", f"{directory}/{escaped_user}/id_rsa"] + result = subprocess.run(key_command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True) + if result.returncode != 0: + raise Exception(f"Unable to create keys {result.stdout}, {result.stdout}") + + key_command = ["ssh-keygen", "-q", "-t", "ed25519", "-N", "", "-f", f"{directory}/{escaped_user}/id_ed25519"] + result = subprocess.run(key_command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True) + if result.returncode != 0: + raise Exception(f"Unable to create keys {result.stdout}, {result.stdout}") + + + def _ssh_config_append(self): + """ + Appends necessary configurations needed to use the managed user in a containerized environment but also + works for non-containerized. This will append the custom ssh key paths to '~/.ssh/config', typically /tmp/{user}/*. + + Notes + ----- + Although this logic of creating temporary keys is not needed when managed VENVs are used via `.ac/` , because + those IDs have access to the controller's keys are are passed to the managed node as the new users id. For now, + this logic is applied to both venv's and containers. I don't forsee any concurrent issues with updating + the config at this time. + + See Also + -------- + :py:member:`delete_managed_user()` for cleaning up and restoring the '~/.ssh/config' which calls + `_ssh_config_remove_host()`. + :py:member:`_create_ssh_keys` for creating the ssh keys for the new managed user. + """ + escaped_user = re.escape(self._managed_racf_user) + config_file = os.path.expanduser("~/.ssh/config") + + with open(config_file, "a") as f: + f.write(f"\nHost {self._remote_host}\n") + f.write(f" Hostname {self._remote_host}\n") + f.write(f" IdentityFile ~/.ssh/id_rsa\n") + f.write(f" IdentityFile ~/.ssh/id_ed25519\n") + f.write(f" IdentityFile /tmp/{escaped_user}/id_ed25519\n") + f.write(f" IdentityFile /tmp/{escaped_user}/id_rsa\n") + + # If you need to debug, uncomment this to see what is put in to the ssh/config. + # with open(config_file, 'r') as f: + # print(f"Config file {config_file} contents, f.read()") + + def _create_ssh_config_and_directory(self) -> None: + """ + This method will create as well as track the prior state of the ssh config and .ssh directory. + During class initialization this is called to determine the ssh config state, eg does the + 'ssh/' dir exit, does the 'ssh/config' exist, is the 'ssh/config', empty, etc. This is done + so that on deletion of the user, the config file or directory can be properly restored. + + Notes: + Class variable '_ssh_config_file_size' can have values: + - `-1` - if the file does not exist + - `0` - if the file exists and has no content + - `> 0` - if the file existed prior to updates. + Default '_ssh_config_file_size = True' + + Class variable '_ssh_directory_present' can have values: + - `True` if the '~/.ssh' directory was present at the time this class was instantiated + - `False` if the '~/.ssh' directory was not present at the time this class was instantiated + Default '_ssh_directory_present = 0' + """ + ssh_config_dir = os.path.expanduser("~/.ssh") + ssh_config_file = os.path.expanduser("~/.ssh/config") + + if not os.path.exists(ssh_config_dir): + # Set class variable indicators + self._ssh_directory_present = False + self._ssh_config_file_size = -1 + + # Create the empty directory + os.makedirs(ssh_config_dir) + + # Create the empty file + open(ssh_config_dir, 'a').close() + else: + try: + self._ssh_config_file_size = os.stat(ssh_config_file).st_size + except FileNotFoundError: + # If the config does not exist, set it to -1 so we know to completely remove the config. + self._ssh_config_file_size = -1 + # Create the empty file + open(ssh_config_file, 'a').close() + + + def _ssh_config_remove_host(self) -> None: + """ + This method reads the '~/.ssh/config' and will remove any added entries that match to the newly + created managed user, ensuring that the original filed be restored to its previous state. + + This method uses class variable '_ssh_directory_present' which can have values: + - `True` if the '~/.ssh' directory was present at the time this class was instantiated + - `False` if the '~/.ssh' directory was not present at the time this class was instantiated + - Default '_ssh_directory_present = 0' + """ + + # Delete entry from shell (useful for AC): sed 's/^Host/\n&/' ~/.ssh/config | sed '/^Host '"$host"'$/,/^$/d;/^$/d' + # Optionally Python: cmd = f"sed 's/^Host/\\n&/' {file} | sed '/^Host '\"{host}\"'$/,/^$/d;/^$/d'" + # subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) + + ssh_config_dir = os.path.expanduser("~/.ssh") + ssh_config_file = os.path.expanduser("~/.ssh/config") + + # No .ssh/ dir existed to begin with, delete it all. + if not self._ssh_directory_present: + shutil.rmtree(ssh_config_dir) + # .ssh/ dir exists but no config existed, remove config + elif self._ssh_config_file_size == -1: + os.remove(ssh_config_file) + # File previously existed so remove only the updated portions, restoring the original. + elif self._ssh_config_file_size >= 0: + is_host = False + with open(ssh_config_file, "r") as fr: + lines = fr.readlines() + + with open(ssh_config_file, "w") as fw: + for line in lines: + if len(line.split()) > 0 and line.split()[0] == "Host": + if self._remote_host in line: + is_host = True + else: + is_host = False + if not is_host: + fw.write(line) + + + def execute_managed_user_test(self, managed_user_test_case: str, debug: bool = False, verbose: bool = False, managed_user_type: ManagedUserType = None) -> None: + """ + Executes the test case using articulated pytest options when the test case needs a managed user. This is required + to execute any test that needs a manage user, a wrapper test case should call this method, the 'managed_user_test_case' + must begin with 'managed_user_' as opposed to 'test_', this because the pytest command built will override the ini + with this value. + + Parameters + ---------- + managed_user_test_case (str) + The managed user test case that begins with 'managed_user_' + debug (str) + Enable verbose output for pytest, the equivalent command line option of '-s'. + verbose (str) + Enables pytest verbosity level 4 (-vvvv) + + Raises + ------ + Exception - if the test case fails (non-zero RC from pytest/subprocess), the stdout and stderr are returned for evaluation. + ValueError - if the managed user is not created, you must call `self._managed_racf_user()`. + + See Also + -------- + :py:member:`_create_managed_user` required before this function can be used as a managed user needs to exist. + """ + + if managed_user_test_case is None or not managed_user_test_case.startswith("managed_user_"): + raise ValueError("Test cases using a managed user must begin with 'managed_user_' to be collected for execution.") + + # if not self._managed_racf_user: + # raise ValueError("No managed user has been created, please ensure that the method `self._managed_racf_user()` has been called prior.") + + self._create_managed_user(managed_user_type) + + # Get the file path of the caller function + calling_test_path = inspect.getfile(inspect.currentframe().f_back) + + # Get the test case name that this code is being run from, this is not an function arg. + # managed_user_test_case = inspect.stack()[1][3] + + testcase = f"{calling_test_path}::{managed_user_test_case}" + # hostpattern = "all" # can also get it from options host_pattern + capture = " -s" + verbosity = " -vvvv" + + inventory: dict [str, str] = {} + inventory.update({'host': self._remote_host}) + inventory.update({'user': self._managed_racf_user}) + inventory.update({'zoau': self._zoau_path}) # get this from fixture + inventory.update({'pyz': self._pyz_path}) # get this from fixture + inventory.update({'pythonpath': self._pythonpath}) # get this from fixture + extra_args = {} + extra_args.update({'extra_args':{'volumes':self._volumes.split(",")}}) # get this from fixture + inventory.update(extra_args) + + node_inventory = json.dumps(inventory) + + # Carefully crafted 'pytest' command to be allow for it to be called from anther test driven by pytest and uses the zinventory-raw fixture. + pytest_cmd = f"""pytest {testcase} --override-ini "python_functions=managed_user_" --host-pattern={self._hostpattern}{capture if debug else ""}{verbosity if verbose else ""} --zinventory-raw='{node_inventory}'""" + result = subprocess.run(pytest_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True, shell=True) + if result.returncode != 0: + raise Exception(result.stdout + result.stderr) + else: + print(result.stdout) + + + def delete_managed_user(self) -> None: + """ + Delete managed user from z/OS managed node. Performs clean up of the remote + system removing the RACF user, RACF Group, TSO ID and home directory and its + contents. + + Raises + ------ + Exception + If any of the remote commands return codes are out of range an exception + and the stdout and stderr is returned. + """ + # Clean up the ~/.ssh/config file + self._ssh_config_remove_host() + + # Remove the OMVS segment from the remote hoste + escaped_user = re.escape(self._managed_racf_user) + command = StringIO() + command.write(f"echo Deleting USER '{self._managed_racf_user}';") + command.write(f"tsocmd DELUSER {escaped_user};") + command.write(f"echo DELUSER '{escaped_user}' RC=$?;") + command.write(f"tsocmd DELGROUP {self._managed_user_group};") + command.write(f"echo DELGROUP '{self._managed_user_group}' RC=$?;") + + # Access additional module user attributes for use in a new user. + cmd=f"{command.getvalue()}" + results_stdout_lines = self._connect(self._remote_host, self._model_user,cmd) + + deluser_rc = [v for v in results_stdout_lines if f"DELUSER {escaped_user} RC=" in v][0].split('=')[1].strip() or None + if not deluser_rc or int(deluser_rc[0]) > 0: + raise Exception(f"Unable to delete user {escaped_user}, please review the command output {results_stdout_lines}.") + + delgroup_rc = [v for v in results_stdout_lines if f"DELGROUP {self._managed_user_group} RC=" in v][0].split('=')[1].strip() or None + if not delgroup_rc or int(delgroup_rc[0]) > 0: + raise Exception(f"Unable to delete user {escaped_user}, please review the command output {results_stdout_lines}.") + + + def _get_random_passwd(self) -> str: + """ + Generate a random password of length 8 adhering a supported common password + pattern. + + Returns + ------- + str + Password string with pattern [CCCNCCCC]. + - (C) Random characters A - Z + - (N) Random integers 1 - 9 + """ + letters = string.ascii_uppercase + start = ''.join(random.choice(letters) for i in range(3)) + middle = ''.join(str(random.randint(1,9))) + end = ''.join(random.choice(letters) for i in range(4)) + return f"{start}{middle}{end}" + + def _get_random_user(self, managed_user: ManagedUserType = None) -> str: + """ + Generate a random user of length 8 adhering the ManagedUserType + requested. + + Parameters + ---------- + managed_user (ManagedUserType) + The requested managed user type that correlates to how the user name will be created. + Default is a user id consistent with random A - Z. + + See Also + -------- + :py:class:`ManagedUserType` + + Returns + ------- + str + A user id suitable for RACF and based on the ManagedUserType. + A user id can contain any of the supported symbols A-Z, 0-9, #, $, or @. + """ + letters = string.ascii_uppercase + if managed_user is not None: + if managed_user.name == ManagedUserType.ZOS_BEGIN_WITH_AT_SIGN.name: + return "@" + ''.join(random.choice(letters) for i in range(7)) + elif managed_user.name == ManagedUserType.ZOS_BEGIN_WITH_POUND.name: + return "#" + ''.join(random.choice(letters) for i in range(7)) + elif managed_user.name == ManagedUserType.ZOS_RANDOM_SYMBOLS.name: + letters = string.ascii_uppercase + numbers = ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9'] + symbols = ['#', '$', '@',] + + prefix = random.sample(letters,1) + alphas = random.sample(letters,2) + num = random.sample(numbers,2) + sym = random.sample(symbols,3) + alphas.extend(num) + alphas.extend(sym) + random.shuffle(alphas) + return str(prefix[0]) + ''.join([str(entry) for entry in alphas]) + + # All other cases can use any formatted user name, so random 8 letters is fine. + return ''.join(random.choice(letters) for i in range(8)) + + def _read_files_in_directory(self, directory: str = "~/.ssh", pattern: str = "*.pub") -> List[str]: + """ + Reads files in a directory that match the pattern and return file names + as a list of strings, each list index is a file name. + + Parameters + ---------- + directory (str): + The directory to search for files matching a pattern. Default, '~/.ssh'. + pattern (str): + The pattern to match file names. Default, '*.pub'. + + Returns + ------- + List[str] + A list of file names, each list index is a file name matching the pattern. + """ + file_contents_as_list = [] + + # Expand the tilde to the home directory + expanded_directory = os.path.expanduser(directory) + + for filename in glob.glob(os.path.join(expanded_directory, pattern)): + with open(filename, 'r') as f: + file_contents_as_list.append(f.read().rstrip('\n')) + return file_contents_as_list + + + def _copy_ssh_key(model_user: str, passwd: str, remote_host: str, key_path: str): + """ + Copy SSH key to a remote host using subprocess. + + Note + ---- + This requires that the host have 'sshpass' installed. + + Parameters + ---------- + remote_host (str) + The z/OS managed node (host) to connect to to create the managed user. + model_user (str) + The user that will connect to the managed node and execute RACF commands + and serve as a model for other users attributes. This user should have + enough authority to perform RACF operations. + command (str) + Command to run on the managed node. + + Returns + ------- + List[str] + A list of file names, each list index is a file name matching the pattern. + + See Also + -------- + :py:func:`_read_files_in_directory` to copy the public keys. + """ + command = ["sshpass", "-p", f"{passwd}", "ssh-copy-id", "-o", "StrictHostKeyChecking=no", "-i", f"{key_path}", f"{model_user}@{remote_host}"] #, "&>/dev/null"] + result = subprocess.run(args=command, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL, check=False) + + if result.returncode != 0: + raise Exception("Unable to copy public keys to remote host, check that sshpass is installed.") + + + def _create_managed_user(self, managed_user: ManagedUserType) -> Tuple[str, str]: + """ + Generate a managed user for the remote node according to the ManagedUserType selected. + + Parameters + ---------- + managed_user (ManagedUserType) + The requested managed user type that correlates to how the user name will be created. + Default is a user id consistent with random A - Z. + + See Also + -------- + :py:class:`ManagedUserType` + + Returns + ------- + Tuple[str, str] + A managed users ID and password as a tuple. eg (user, passwd) + + Raises + ------ + Exception + If any of the remote commands return codes are out of range an exception + and the stdout and stderr is returned. + """ + # Create random user based on ManagedUserType requirements + self._managed_racf_user = self._get_random_user(managed_user) + + # Generate the password to establish a password-less connection and use with RACF. + passwd = self._get_random_passwd() + + try: + # Access module user's TSO attributes for reuse + cmd=f"tsocmd LISTUSER {self._model_user} TSO NORACF" + model_listuser_tso_attributes = self._connect(self._remote_host, self._model_user,cmd) + + # Match the tso list results and place in variables (not all are used) + model_tso_acctnum = [v for v in model_listuser_tso_attributes if "ACCTNUM" in v][0].split('=')[1].strip() or None + if not model_tso_acctnum: + err_details = "TSO account number" + err_msg = f"Unable access the model user [{self._managed_racf_user}] {err_details}, this is required to create user [{self._managed_racf_user}]." + raise Exception(err_msg) + + model_tso_proc = [v for v in model_listuser_tso_attributes if "PROC" in v][0].split('=')[1].strip() or None + if not model_tso_proc: + err_details = "TSO Proc" + err_msg = f"Unable access the model user [{self._managed_racf_user}] {err_details}, this is required to create user [{self._managed_racf_user}]." + raise Exception(err_msg) + except IndexError as err: + err_msg = f"Unable access the model user [{self._managed_racf_user}] results, this is required to create user [{self._managed_racf_user}]." + raise Exception(f"{err_msg}, exception [{err}].") + except Exception as err: + raise Exception(f"Unable to LISTUSER TSO NORACF for [{self._model_user}], exception [{err}]") + + # TODO: These are currently not used when creating the user, consider minimally adding SIZE and MAXXSIZE to dynamic users + # model_tso_size = [v for v in model_listuser_tso_attributes if "SIZE" in v][0].split('=')[1].strip() or None + # model_tso_maxsize = [v for v in model_listuser_tso_attributes if "MAXSIZE" in v][0].split('=')[1].strip() or None + # model_tso_unit = [v for v in model_listuser_tso_attributes if "UNIT" in v][0].split('=')[1].strip() or "" + # model_tso_userdata = [v for v in model_listuser_tso_attributes if "USERDATA" in v][0].split('=')[1].strip() or "" + # model_tso_command = [v for v in model_listuser_tso_attributes if "COMMAND" in v][0].split('=')[1].strip() or "" + + try: + # Access additional module user attributes for use in a new user. + cmd=f"tsocmd LISTUSER {self._model_user}" + model_listuser_attributes = self._connect(self._remote_host, self._model_user,cmd) + + # Match the list user results and place in variables (not all are used) + model_owner = [v for v in model_listuser_attributes if "OWNER" in v][0].split('OWNER=')[1].split()[0].strip() or None + + if not model_owner: + err_details = "OWNER" + err_msg = f"Unable access the model user [{self._managed_racf_user}] {err_details}, this is required to create user [{self._managed_racf_user}]." + raise Exception(err_msg) + except IndexError as err: + err_msg = f"Unable access the results, this is required to create user [{self._managed_racf_user}]." + raise Exception(f"{err_msg}, exception [{err}].") + except Exception as err: + raise Exception(f"Unable to LISTUSER for {self._model_user}, exception [{err}]") + + # Collect the various public keys for use with the z/OS managed node, some accept only RSA and others ED25519 + # Since this code could run in a container and not connected to the host via a venv, we must create new keys + # for the managed user and share their location with ssh. + + # Create ssh keys for the new managed user, `/tmp/{user}/*.pub` + self._create_ssh_keys("/tmp") + esc_user = re.escape(self._managed_racf_user) + public_keys = self._read_files_in_directory(f"/tmp/{esc_user}", "*.pub") + + # Append the new users key paths to the ssh/config so that ansible can find the private key for password-less connections + self._ssh_config_append() + + # The command consisting of shell and tso operations to create a user. + add_user_cmd = StringIO() + + # (1) Create group ANSIGRP for general identification of users and auto assign the UID + # Success of the command yields 'Group ANSIGRP was assigned an OMVS GID value of...' + # Failure of the command yields 'INVALID GROUP, ANSIGRP' with usually a RC 8 if the group exists. + self._managed_user_group = self._get_random_user() + + add_user_cmd.write(f"tsocmd 'ADDGROUP {self._managed_user_group} OMVS(AUTOGID)';") + add_user_cmd.write(f"echo Create {self._managed_user_group} RC=$?;") + + # (2) Add a user owned by the model_owner. Expect an error when a new TSO userid is defined since the ID can't + # receive deferred messages until the id is defined in SYS1.BRODCAST, thus the SEND message fails. + # BROADCAST DATA SET NOT USABLE+ + # I/O SYNAD ERROR + escaped_user = re.escape(self._managed_racf_user) + add_user_cmd.write(f"Creating USER '{escaped_user}' with PASSWORD {passwd} for remote host {self._remote_host};") + add_user_cmd.write(f"tsocmd ADDUSER {escaped_user} DFLTGRP\\({self._managed_user_group}\\) OWNER\\({model_owner}\\) NOPASSWORD TSO\\(ACCTNUM\\({model_tso_acctnum}\\) PROC\\({model_tso_proc}\\)\\) OMVS\\(HOME\\(/u/{escaped_user}\\) PROGRAM\\('/bin/sh'\\) AUTOUID;") + add_user_cmd.write(f"echo ADDUSER '{escaped_user}' RC=$?;") + add_user_cmd.write(f"mkdir -p /u/{escaped_user}/.ssh;") + add_user_cmd.write(f"echo mkdir '{escaped_user}' RC=$?;") + add_user_cmd.write(f"umask 0022;") + add_user_cmd.write(f"touch /u/{escaped_user}/.ssh/authorized_keys;") + add_user_cmd.write(f"echo touch authorized_keys RC=$?;") + add_user_cmd.write(f"chown -R {escaped_user} /u/{escaped_user};") + add_user_cmd.write(f"echo chown '{escaped_user}' RC=$?;") + for pub_key in public_keys: + add_user_cmd.write(f"echo {pub_key} >> /u/{escaped_user}/.ssh/authorized_keys;") + add_user_cmd.write(f"echo PUB_KEY RC=$?;") + add_user_cmd.write(f"tsocmd ALTUSER {escaped_user} PASSWORD\\({passwd}\\) NOEXPIRED;") + add_user_cmd.write(f"echo ALTUSER '{escaped_user}' RC=$?;") + + try: + cmd=f"{add_user_cmd.getvalue()}" + # need to connect with ssh -i /tmp/UPGLSFLH/id_rsa UPGLSFLH@ec01136a.vmec.svl.ibm.com + add_user_attributes = self._connect(self._remote_host, self._model_user,cmd) + + # Because this is a tsocmd run through shell, any user with a $ will be expanded and thus truncated, you can't change + # that behavior since its happening on the managed node, solution is to match a shorter pattern without the user. + is_assigned_omvs_uid = True if [v for v in add_user_attributes if f"was assigned an OMVS UID value" in v] else False + if not is_assigned_omvs_uid: + err_details = "create OMVS UID" + err_msg = f"Unable to {err_details}, this is required to create user [{self._managed_racf_user}, review output {add_user_attributes}." + raise Exception(err_msg) + + create_group_rc = [v for v in add_user_attributes if f"Create {self._managed_user_group} RC=" in v][0].split('=')[1].strip() or None + if not create_group_rc or int(create_group_rc[0]) > 8: + err_details = "create user group" + err_msg = f"Unable to {err_details}, this is required to create user [{self._managed_racf_user}, review output {add_user_attributes}." + raise Exception(err_msg) + + add_user_rc = [v for v in add_user_attributes if f"ADDUSER {escaped_user} RC=" in v][0].split('=')[1].strip() or None + if not add_user_rc or int(add_user_rc[0]) > 0: + err_details = "ADDUSER" + err_msg = f"Unable to {err_details}, this is required to create user [{self._managed_racf_user}, review output {add_user_attributes}." + raise Exception(err_msg) + + mkdir_rc = [v for v in add_user_attributes if f"mkdir {escaped_user} RC=" in v][0].split('=')[1].strip() or None + if not mkdir_rc or int(mkdir_rc[0]) > 0: + err_details = "create home directory for {escaped_user}" + err_msg = f"Unable to {err_details}, this is required to create user [{self._managed_racf_user}, review output {add_user_attributes}." + raise Exception(err_msg) + + authorized_keys_rc = [v for v in add_user_attributes if f"touch authorized_keys RC=" in v][0].split('=')[1].strip() or None + if not authorized_keys_rc or int(authorized_keys_rc[0]) > 0: + err_details = "create authorized_keys file for {escaped_user}" + err_msg = f"Unable to {err_details}, this is required to create user [{self._managed_racf_user}, review output {add_user_attributes}." + raise Exception(err_msg) + + chown_rc = [v for v in add_user_attributes if f"chown {escaped_user} RC=" in v][0].split('=')[1].strip() or None + if not chown_rc or int(chown_rc[0]) > 0: + err_details = "change ownership of home directory for {escaped_user}" + err_msg = f"Unable to {err_details}, this is required to create user [{self._managed_racf_user}, review output {add_user_attributes}." + raise Exception(err_msg) + + altuser_rc = [v for v in add_user_attributes if f"ALTUSER {escaped_user} RC=" in v][0].split('=')[1].strip() or None + if not altuser_rc or int(altuser_rc[0]) > 0: + err_details = "update password for {escaped_user}" + err_msg = f"Unable to {err_details}, this is required to create user [{self._managed_racf_user}, review output {add_user_attributes}." + raise Exception(err_msg) + except IndexError as err: + err_msg = f"Unable access the results, this is required to create user [{self._managed_racf_user}]." + raise Exception(f"{err_msg}, exception [{err}].") + except Exception as err: + raise Exception(f"The model user {self._model_user} is unable to create managed RACF user {escaped_user}, exception [{err}]") + + # Update the user according to the ManagedUserType type by invoking the mapped function pointer + self.operations[managed_user.name](self) + + return (self._managed_racf_user, passwd) + + + def _create_user_zoau_limited_access_opercmd(self) -> None: + """ + Update a managed user id for the remote node with restricted access to ZOAU + SAF Profile 'MVS.MCSOPER.ZOAU' and SAF Class OPERCMDS with universal access + authority set to UACC(NONE). With UACC as NONE, this user will be refused + access to the ZOAU opercmd utility. + + Parameters + ---------- + managed_racf_user (str) + The managed user created that will we updated according tho the ManagedUseeType selected. + + See Also + -------- + :py:class:`ManagedUserType` + :py:func:`_create_managed_user` + + Raises + ------ + Exception + If any of the remote commands return codes are out of range an exception + and the stdout and stderr is returned. + """ + saf_profile="MVS.MCSOPER.ZOAU" + saf_class="OPERCMDS" + command = StringIO() + + command.write(f"Redefining USER '{self._managed_racf_user}';") + command.write(f"tsocmd RDEFINE {saf_class} {saf_profile} UACC\\(NONE\\) AUDIT\\(ALL\\);") + command.write(f"echo RDEFINE RC=$?;") + command.write(f"tsocmd PERMIT {saf_profile} CLASS\\({saf_class}\\) ID\\({self._managed_racf_user}\\) ACCESS\\(NONE\\);") + command.write(f"echo PERMIT RC=$?;") + command.write(f"tsocmd SETROPTS RACLIST\\({saf_class}\\) REFRESH;") + command.write(f"echo SETROPTS RC=$?;") + + cmd=f"{command.getvalue()}" + results_stdout_lines = self._connect(self._remote_host, self._model_user,cmd) + + try: + # Evaluate the results + rdefine_rc = [v for v in results_stdout_lines if f"RDEFINE RC=" in v][0].split('=')[1].strip() or None + if not rdefine_rc or int(rdefine_rc[0]) > 4: + err_details = f"rdefine {saf_class} {saf_profile}" + err_msg = f"Unable to {err_details} for managed user [{self._managed_racf_user}, review output {results_stdout_lines}." + raise Exception(err_msg) + + permit_rc = [v for v in results_stdout_lines if f"PERMIT RC=" in v][0].split('=')[1].strip() or None + if not permit_rc or int(permit_rc[0]) > 4: + err_details = f"permit {saf_profile} class {saf_class}" + err_msg = f"Unable to {err_details} for managed user [{self._managed_racf_user}, review output {results_stdout_lines}." + raise Exception(err_msg) + + setropts_rc = [v for v in results_stdout_lines if f"SETROPTS RC=" in v][0].split('=')[1].strip() or None + if not setropts_rc or int(setropts_rc[0]) > 4: + err_details = f"setropts raclist {saf_class} refresh" + err_msg = f"Unable to {err_details} for managed user [{self._managed_racf_user}, review output {results_stdout_lines}." + raise Exception(err_msg) + except IndexError as err: + err_msg = f"Unable access the results, this is required to reduce permissions for user [{self._managed_racf_user}]." + raise Exception(f"{err_msg}, exception [{err}].") + except Exception as err: + raise Exception(f"The model user {self._model_user} is unable to reduce permissions RACF user {self._managed_racf_user}, exception [{err}]") + + # TODO: Implement this method in the future + def _create_user_zos_limited_hlq(self) -> None: + """ + Update a managed user id for the remote node with restricted access to + High LevelQualifiers: + - RESTRICT + - NOPERMIT + Any attempt for this user to access the HLQ will be rejected. + + Parameters + ---------- + managed_racf_user (str) + The managed user created that will we updated according tho the ManagedUseeType selected. + + See Also + -------- + :py:class:`ManagedUserType` + :py:func:`_create_managed_user` + + Raises + ------ + Exception + If any of the remote commands return codes are out of range an exception + and the stdout and stderr is returned. + """ + print("Needs to be implemented") + + + # TODO: Implement this method in the future + def _create_user_zos_limited_tmp_hlq(self) -> None: + """ + Update a managed user id for the remote node with restricted access to + temporary data set High LevelQualifiers: + - TSTRICT + - TNOPERM + Any attempt for this user to access the HLQ will be rejected. + + Parameters + ---------- + managed_racf_user (str) + The managed user created that will we updated according tho the ManagedUseeType selected. + + See Also + -------- + :py:class:`ManagedUserType` + :py:func:`_create_managed_user` + + Raises + ------ + Exception + If any of the remote commands return codes are out of range an exception + and the stdout and stderr is returned. + """ + print("Needs to be implemented") + + def _noop(self) -> None: + """ + Method intentionally takes any number of args and does nothing. + It is meant to be a NOOP function to be used as a stub with several + of the ManagedUserTypes. + """ + pass + + + """ + Function pointer mapping of ManagedUserType to functions that complete the requested + access for for the user. Simple lookup table to reduce the if/else proliferation. + """ + operations = { + ManagedUserType.ZOAU_LIMITED_ACCESS_OPERCMD.name: _create_user_zoau_limited_access_opercmd, + ManagedUserType.ZOS_LIMITED_HLQ.name: _create_user_zos_limited_hlq, + ManagedUserType.ZOS_LIMITED_TMP_HLQ.name: _create_user_zos_limited_tmp_hlq, + ManagedUserType.ZOS_BEGIN_WITH_AT_SIGN.name: _noop, + ManagedUserType.ZOS_BEGIN_WITH_POUND.name: _noop, + ManagedUserType.ZOS_RANDOM_SYMBOLS.name: _noop + } From 13076648407cab9650e77b52430c31e0ef05d0cb Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Thu, 24 Oct 2024 15:13:21 -0700 Subject: [PATCH 477/495] Release updates to README Signed-off-by: ddimatos <dimatos@gmail.com> --- README.md | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index 27878fbac..efb0d767a 100644 --- a/README.md +++ b/README.md @@ -135,7 +135,7 @@ All releases will meet the following test criteria. * ansible-core v2.15.x * Python 3.11.x * IBM Open Enterprise SDK for Python 3.12.x -* IBM Z Open Automation Utilities (ZOAU) 1.3.1.x +* IBM Z Open Automation Utilities (ZOAU) 1.3.2.x * z/OS V2R5 ## Contributing @@ -174,8 +174,9 @@ For Galaxy and GitHub users, to see the supported ansible-core versions, review | Version | Status | Release notes | Changelogs | |----------|----------------|---------------|------------| -| 1.12.x | In development | unreleased | unreleased | -| 1.11.x | Current | [Release notes](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html#version-1-11-0) | [Changelogs](https://github.com/ansible-collections/ibm_zos_core/blob/v1.11.0/CHANGELOG.rst) | +| 1.13.x | In development | unreleased | unreleased | +| 1.12.x | Current | [Release notes](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html#version-1-11-0) | [Changelogs](https://github.com/ansible-collections/ibm_zos_core/blob/v1.12.0-beta.1/CHANGELOG.rst) | +| 1.11.x | Released | [Release notes](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html#version-1-11-0) | [Changelogs](https://github.com/ansible-collections/ibm_zos_core/blob/v1.11.0/CHANGELOG.rst) | | 1.10.x | Released | [Release notes](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html#version-1-10-0) | [Changelogs](https://github.com/ansible-collections/ibm_zos_core/blob/v1.10.0/CHANGELOG.rst) | | 1.9.x | Released | [Release notes](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html#version-1-9-2) | [Changelogs](https://github.com/ansible-collections/ibm_zos_core/blob/v1.9.2/CHANGELOG.rst) | | 1.8.x | Released | [Release notes](https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html#version-1-8-0) | [Changelogs](https://github.com/ansible-collections/ibm_zos_core/blob/v1.8.0/CHANGELOG.rst) | From 17367cfab5e0ee450a7c773d2f0b736fbff0a1e9 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Thu, 24 Oct 2024 15:14:13 -0700 Subject: [PATCH 478/495] Release updates to galaxy Signed-off-by: ddimatos <dimatos@gmail.com> --- galaxy.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/galaxy.yml b/galaxy.yml index deee9a6e1..074ebd8e2 100644 --- a/galaxy.yml +++ b/galaxy.yml @@ -6,7 +6,7 @@ namespace: ibm name: ibm_zos_core # The collection version -version: "1.11.0" +version: "1.12.0-beta.1" # Collection README file readme: README.md From 7685c08ea6a4487c20fb10cd4adda9aa946435f9 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Thu, 24 Oct 2024 15:14:56 -0700 Subject: [PATCH 479/495] Release updates to releases_maintenance Signed-off-by: ddimatos <dimatos@gmail.com> --- docs/source/resources/releases_maintenance.rst | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/docs/source/resources/releases_maintenance.rst b/docs/source/resources/releases_maintenance.rst index a9d30942a..3978c53ab 100644 --- a/docs/source/resources/releases_maintenance.rst +++ b/docs/source/resources/releases_maintenance.rst @@ -89,6 +89,11 @@ The z/OS managed node includes several shells, currently the only supported shel +---------+----------------------------+---------------------------------------------------+---------------+---------------+ | Version | Controller | Managed Node | GA | End of Life | +=========+============================+===================================================+===============+===============+ +| 1.12.x |- `ansible-core`_ >=2.15.x |- `z/OS`_ V2R4 - V3Rx | TBD | TBD | +| |- `Ansible`_ >=8.0.x |- `z/OS shell`_ | | | +| |- `AAP`_ >=2.4 |- IBM `Open Enterprise SDK for Python`_ | | | +| | |- IBM `Z Open Automation Utilities`_ >=1.3.2 | | | ++---------+----------------------------+---------------------------------------------------+---------------+---------------+ | 1.11.x |- `ansible-core`_ >=2.15.x |- `z/OS`_ V2R4 - V2Rx | 1 Oct 2024 | 1 Oct 2026 | | |- `Ansible`_ >=8.0.x |- `z/OS shell`_ | | | | |- `AAP`_ >=2.4 |- IBM `Open Enterprise SDK for Python`_ | | | From d9f6196505c8b9db8f79d87c386fb1bb39e4521e Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Thu, 24 Oct 2024 15:15:18 -0700 Subject: [PATCH 480/495] Release updates to meta/ibm_zos_core_meta.yml Signed-off-by: ddimatos <dimatos@gmail.com> --- meta/ibm_zos_core_meta.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/meta/ibm_zos_core_meta.yml b/meta/ibm_zos_core_meta.yml index b130f6a5a..fd842636a 100644 --- a/meta/ibm_zos_core_meta.yml +++ b/meta/ibm_zos_core_meta.yml @@ -1,5 +1,5 @@ name: ibm_zos_core -version: "1.11.0" +version: "1.12.0-beta.1" managed_requirements: - name: "IBM Open Enterprise SDK for Python" @@ -7,4 +7,4 @@ managed_requirements: - name: "Z Open Automation Utilities" version: - - ">=1.3.1" + - ">=1.3.2" From 44063ff768ea3f080d11dc6e81a138a4863be19a Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Thu, 24 Oct 2024 15:17:42 -0700 Subject: [PATCH 481/495] Release updates to module docs Signed-off-by: ddimatos <dimatos@gmail.com> --- docs/source/modules/zos_backup_restore.rst | 12 ++++++------ docs/source/modules/zos_blockinfile.rst | 4 ++++ docs/source/modules/zos_job_query.rst | 12 ++++++++++++ docs/source/modules/zos_mvs_raw.rst | 14 +++++++------- docs/source/modules/zos_operator.rst | 6 +++--- 5 files changed, 32 insertions(+), 16 deletions(-) diff --git a/docs/source/modules/zos_backup_restore.rst b/docs/source/modules/zos_backup_restore.rst index 9d6656ac3..25174e8a5 100644 --- a/docs/source/modules/zos_backup_restore.rst +++ b/docs/source/modules/zos_backup_restore.rst @@ -133,7 +133,7 @@ backup_name recover - Specifies if potentially recoverable errors should be ignored. + When *recover=true* and *operation=backup* then potentially recoverable errors will be ignored. | **required**: False | **type**: bool @@ -200,16 +200,16 @@ space_type hlq Specifies the new HLQ to use for the data sets being restored. - Defaults to running user's username. + If no value is provided, the data sets will be restored with their original HLQs. | **required**: False | **type**: str tmp_hlq - Override the default high level qualifier (HLQ) for temporary and backup data sets. + Override the default high level qualifier (HLQ) for temporary data sets. - The default HLQ is the Ansible user that executes the module and if that is not available, then the value of ``TMPHLQ`` is used. + If original HLQ is not available, then the value of ``TMPHLQ`` is used. | **required**: False | **type**: str @@ -290,8 +290,8 @@ Examples space: 1 space_type: g - - name: Restore data sets from backup stored in the UNIX file /tmp/temp_backup.dzp. - Use z/OS username as new HLQ. + - name: Restore data sets from a backup stored in the UNIX file /tmp/temp_backup.dzp. + Restore the data sets with the original high level qualifiers. zos_backup_restore: operation: restore backup_name: /tmp/temp_backup.dzp diff --git a/docs/source/modules/zos_blockinfile.rst b/docs/source/modules/zos_blockinfile.rst index 4a61287f8..e9cd6f472 100644 --- a/docs/source/modules/zos_blockinfile.rst +++ b/docs/source/modules/zos_blockinfile.rst @@ -104,6 +104,8 @@ insertbefore marker_begin This will be inserted at ``{mark}`` in the opening ansible block marker. + Value needs to be different from marker_end. + | **required**: False | **type**: str | **default**: BEGIN @@ -112,6 +114,8 @@ marker_begin marker_end This will be inserted at ``{mark}`` in the closing ansible block marker. + Value must be different from marker_end. + | **required**: False | **type**: str | **default**: END diff --git a/docs/source/modules/zos_job_query.rst b/docs/source/modules/zos_job_query.rst index 4b72dddf5..6c520b42a 100644 --- a/docs/source/modules/zos_job_query.rst +++ b/docs/source/modules/zos_job_query.rst @@ -189,6 +189,18 @@ jobs | **type**: str | **sample**: STC + system + The job entry system that MVS uses to do work. + + | **type**: str + | **sample**: STL1 + + subsystem + The job entry subsystem that MVS uses to do work. + + | **type**: str + | **sample**: STL1 + ret_code Return code output collected from job log. diff --git a/docs/source/modules/zos_mvs_raw.rst b/docs/source/modules/zos_mvs_raw.rst index 817951fe3..94ce37bc4 100644 --- a/docs/source/modules/zos_mvs_raw.rst +++ b/docs/source/modules/zos_mvs_raw.rst @@ -425,7 +425,7 @@ dds *src_encoding* and *response_encoding* are only used when *type=text*. - ``base64`` means return content in binary mode. + ``base64`` means return content as base64 encoded in binary. | **required**: True | **type**: str @@ -616,7 +616,7 @@ dds *src_encoding* and *response_encoding* are only used when *type=text*. - ``base64`` means return content in binary mode. + ``base64`` means return content as base64 encoded in binary. | **required**: True | **type**: str @@ -690,7 +690,7 @@ dds *src_encoding* and *response_encoding* are only used when *type=text*. - ``base64`` means return content in binary mode. + ``base64`` means return content as base64 encoded in binary. | **required**: True | **type**: str @@ -747,7 +747,7 @@ dds *src_encoding* and *response_encoding* are only used when *type=text*. - ``base64`` means return content in binary mode. + ``base64`` means return content as base64 encoded in binary. | **required**: True | **type**: str @@ -1164,7 +1164,7 @@ dds *src_encoding* and *response_encoding* are only used when *type=text*. - ``base64`` means return content in binary mode. + ``base64`` means return content as base64 encoded in binary. | **required**: True | **type**: str @@ -1348,7 +1348,7 @@ dds *src_encoding* and *response_encoding* are only used when *type=text*. - ``base64`` means return content in binary mode. + ``base64`` means return content as base64 encoded in binary. | **required**: True | **type**: str @@ -1415,7 +1415,7 @@ dds *src_encoding* and *response_encoding* are only used when *type=text*. - ``base64`` means return content in binary mode. + ``base64`` means return content as base64 encoded in binary. | **required**: True | **type**: str diff --git a/docs/source/modules/zos_operator.rst b/docs/source/modules/zos_operator.rst index 5bc803962..83e430899 100644 --- a/docs/source/modules/zos_operator.rst +++ b/docs/source/modules/zos_operator.rst @@ -33,11 +33,11 @@ cmd For example, change the command "...,P='DSN3EPX,-DBC1,S'" to "...,P=''DSN3EPX,-DBC1,S'' ". - If the command contains any special characters ($, &, etc), they must be escaped using double backslashes like \\\\$. + If the command contains any special characters ($, &, etc), they must be escaped using double backslashes like \\\\\\$. For example, to display job by job name the command would be ``cmd:"\\$dj''HELLO''"`` - By default, the command will be converted to uppercase before execution, to control this behavior, see the \ :emphasis:`case\_sensitive`\ option below. + By default, the command will be converted to uppercase before execution, to control this behavior, see the *case_sensitive* option below. | **required**: True | **type**: str @@ -66,7 +66,7 @@ wait_time_s case_sensitive - If \ :literal:`true`\ , the command will not be converted to uppercase before execution. Instead, the casing will be preserved just as it was written in a task. + If ``true``, the command will not be converted to uppercase before execution. Instead, the casing will be preserved just as it was written in a task. | **required**: False | **type**: bool From 7ee4a36ddfa3564ab60e037624b960b3463fab39 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Thu, 24 Oct 2024 16:32:53 -0700 Subject: [PATCH 482/495] Changelog generated for release Signed-off-by: ddimatos <dimatos@gmail.com> --- CHANGELOG.rst | 47 ++++++++++++++ changelogs/.plugin-cache.yaml | 2 +- changelogs/changelog.yaml | 113 ++++++++++++++++++++++++++++++++++ 3 files changed, 161 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index dab461fbf..329fd5001 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -4,6 +4,53 @@ ibm.ibm\_zos\_core Release Notes .. contents:: Topics +v1.12.0-beta.1 +============== + +Release Summary +--------------- + +Release Date: '2024-10-31' +This changelog describes all changes made to the modules and plugins included +in this collection. The release date is the date the changelog is created. +For additional details such as required dependencies and availability review +the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__ + +Minor Changes +------------- + +- zos_backup_restore - Redefines the default behavior of module option `hlq`. When option `operation` is set to `restore` and the `hlq` is not provided, the original high level qualifiers in a backup will be used for a restore. (https://github.com/ansible-collections/ibm_zos_core/pull/1632). +- zos_job_output - Added address space type used by jobs in return JSON as `content_type`. (https://github.com/ansible-collections/ibm_zos_core/pull/1673). +- zos_job_query - Added address space type used by jobs in return JSON as `content_type`. (https://github.com/ansible-collections/ibm_zos_core/pull/1673). +- zos_job_submit - Added address space type used by jobs in return JSON as `content_type`. (https://github.com/ansible-collections/ibm_zos_core/pull/1673). +- zos_mvs_raw - Un-mappable chars in stdout/stderr streams are now replaced with the replacement character. (https://github.com/ansible-collections/ibm_zos_core/pull/1634). +- zos_operator - Added new option ``case_sensitive`` to module, allowing users to control how case in a command is handled by it. (https://github.com/ansible-collections/ibm_zos_core/pull/1641) +- zos_script - Un-mappable chars in stdout/stderr streams are now replaced with the replacement character. (https://github.com/ansible-collections/ibm_zos_core/pull/1634). +- zos_tso_command - Un-mappable chars in stdout/stderr streams are now replaced with the replacement character. (https://github.com/ansible-collections/ibm_zos_core/pull/1634). + +Bugfixes +-------- + +- zos_apf - The ``tmp_hlq`` option was previously ignored and default values were used instead. Fix now honors the value set in the module option. (https://github.com/ansible-collections/ibm_zos_core/pull/1695). +- zos_archive - The ``tmp_hlq`` option was previously ignored and default values were used instead. Fix now honors the value set in the module option. (https://github.com/ansible-collections/ibm_zos_core/pull/1695). +- zos_backup_restore - When a recoverable error was encountered and ``recover=True``, the module would ignore the option and fail. Fix now does not fail when a recoverable error is raised when ``recover=True``. (https://github.com/ansible-collections/ibm_zos_core/pull/1643). +- zos_blockinfile - Previously module was not able to delete a block when 'marker_begin' and 'marker_end' were set to the same value. Fix introduces a requirement for 'marker_begin' and 'marker_end' to have different values. (https://github.com/ansible-collections/ibm_zos_core/pull/1684). +- zos_blockinfile - The ``tmp_hlq`` option was previously ignored and default values were used instead. Fix now honors the value set in the module option. (https://github.com/ansible-collections/ibm_zos_core/pull/1695). +- zos_copy - Improve module zos_copy error handling when the user does not have universal access authority set to UACC(READ) for SAF Profile 'MVS.MCSOPER.ZOAU' and SAF Class OPERCMDS. The module now handles the exception and returns an informative message. (https://github.com/ansible-collections/ibm_zos_core/pull/1766). +- zos_copy - Previously, the module ignored the value of ``remote_tmp`` set in Ansible configuration file and used the ``/tmp/`` directory. Fix now uses the value of ``remote_tmp`` or the default value ``~/.ansible/tmp`` if none is given. (https://github.com/ansible-collections/ibm_zos_core/pull/1739). +- zos_copy - The ``tmp_hlq`` option was previously ignored and default values were used instead. Fix now honors the value set in the module option. (https://github.com/ansible-collections/ibm_zos_core/pull/1695). +- zos_data_set - The ``tmp_hlq`` option was previously ignored and default values were used instead. Fix now honors the value set in the module option. (https://github.com/ansible-collections/ibm_zos_core/pull/1695). +- zos_encode - The ``tmp_hlq`` option was previously ignored and default values were used instead. Fix now honors the value set in the module option. (https://github.com/ansible-collections/ibm_zos_core/pull/1695). +- zos_fetch - The ``tmp_hlq`` option was previously ignored and default values were used instead. Fix now honors the value set in the module option. (https://github.com/ansible-collections/ibm_zos_core/pull/1695). +- zos_job_output - RACF user names containing a ``@``, ``$``, or ``#`` raised an invalid argument error. Fix now allows the use of all valid characters for a RACF user. (https://github.com/ansible-collections/ibm_zos_core/pull/1661). +- zos_job_query - Module was not returning values for system and subsystem. Fix now returns these values. (https://github.com/ansible-collections/ibm_zos_core/pull/1761). +- zos_job_query - RACF user names containing a ``@``, ``$``, or ``#`` raised an invalid argument error. Fix now allows the use of all valid characters for a RACF user. (https://github.com/ansible-collections/ibm_zos_core/pull/1661). +- zos_lineinfile - The ``tmp_hlq`` option was previously ignored and default values were used instead. Fix now honors the value set in the module option. (https://github.com/ansible-collections/ibm_zos_core/pull/1695). +- zos_mount - The ``tmp_hlq`` option was previously ignored and default values were used instead. Fix now honors the value set in the module option. (https://github.com/ansible-collections/ibm_zos_core/pull/1695). +- zos_mvs_raw - base64 sub-option for return_content under option for retrieving DD output did not return base64. Fix now returns the base64 encoded contents of the DD. (https://github.com/ansible-collections/ibm_zos_core/pull/1691). +- zos_script - The module would discard command line arguments in a command, except for the first one. Fix now makes sure that all arguments are passed to the remote command that gets executed. (https://github.com/ansible-collections/ibm_zos_core/pull/1698). +- zos_unarchive - The ``tmp_hlq`` option was previously ignored and default values were used instead. Fix now honors the value set in the module option. (https://github.com/ansible-collections/ibm_zos_core/pull/1695). + v1.11.0 ======= diff --git a/changelogs/.plugin-cache.yaml b/changelogs/.plugin-cache.yaml index 6aa86eff0..6e8462a00 100644 --- a/changelogs/.plugin-cache.yaml +++ b/changelogs/.plugin-cache.yaml @@ -135,4 +135,4 @@ plugins: strategy: {} test: {} vars: {} -version: 1.11.0 +version: 1.12.0-beta.1 diff --git a/changelogs/changelog.yaml b/changelogs/changelog.yaml index 064ab6d62..57f5a3928 100644 --- a/changelogs/changelog.yaml +++ b/changelogs/changelog.yaml @@ -398,6 +398,119 @@ releases: - 1631-enabler-zos_mount-special-character-support.yml - v1.11.0-beta.1_summary.yml release_date: '2024-08-05' + 1.12.0-beta.1: + changes: + bugfixes: + - zos_apf - The ``tmp_hlq`` option was previously ignored and default values + were used instead. Fix now honors the value set in the module option. (https://github.com/ansible-collections/ibm_zos_core/pull/1695). + - zos_archive - The ``tmp_hlq`` option was previously ignored and default values + were used instead. Fix now honors the value set in the module option. (https://github.com/ansible-collections/ibm_zos_core/pull/1695). + - zos_backup_restore - When a recoverable error was encountered and ``recover=True``, + the module would ignore the option and fail. Fix now does not fail when a + recoverable error is raised when ``recover=True``. (https://github.com/ansible-collections/ibm_zos_core/pull/1643). + - zos_blockinfile - Previously module was not able to delete a block when 'marker_begin' + and 'marker_end' were set to the same value. Fix introduces a requirement + for 'marker_begin' and 'marker_end' to have different values. (https://github.com/ansible-collections/ibm_zos_core/pull/1684). + - zos_blockinfile - The ``tmp_hlq`` option was previously ignored and default + values were used instead. Fix now honors the value set in the module option. + (https://github.com/ansible-collections/ibm_zos_core/pull/1695). + - zos_copy - Improve module zos_copy error handling when the user does not have + universal access authority set to UACC(READ) for SAF Profile 'MVS.MCSOPER.ZOAU' + and SAF Class OPERCMDS. The module now handles the exception and returns an + informative message. (https://github.com/ansible-collections/ibm_zos_core/pull/1766). + - zos_copy - Previously, the module ignored the value of ``remote_tmp`` set + in Ansible configuration file and used the ``/tmp/`` directory. Fix now uses + the value of ``remote_tmp`` or the default value ``~/.ansible/tmp`` if none + is given. (https://github.com/ansible-collections/ibm_zos_core/pull/1739). + - zos_copy - The ``tmp_hlq`` option was previously ignored and default values + were used instead. Fix now honors the value set in the module option. (https://github.com/ansible-collections/ibm_zos_core/pull/1695). + - zos_data_set - The ``tmp_hlq`` option was previously ignored and default values + were used instead. Fix now honors the value set in the module option. (https://github.com/ansible-collections/ibm_zos_core/pull/1695). + - zos_encode - The ``tmp_hlq`` option was previously ignored and default values + were used instead. Fix now honors the value set in the module option. (https://github.com/ansible-collections/ibm_zos_core/pull/1695). + - zos_fetch - The ``tmp_hlq`` option was previously ignored and default values + were used instead. Fix now honors the value set in the module option. (https://github.com/ansible-collections/ibm_zos_core/pull/1695). + - zos_job_output - RACF user names containing a ``@``, ``$``, or ``#`` raised + an invalid argument error. Fix now allows the use of all valid characters + for a RACF user. (https://github.com/ansible-collections/ibm_zos_core/pull/1661). + - zos_job_query - Module was not returning values for system and subsystem. + Fix now returns these values. (https://github.com/ansible-collections/ibm_zos_core/pull/1761). + - zos_job_query - RACF user names containing a ``@``, ``$``, or ``#`` raised + an invalid argument error. Fix now allows the use of all valid characters + for a RACF user. (https://github.com/ansible-collections/ibm_zos_core/pull/1661). + - zos_lineinfile - The ``tmp_hlq`` option was previously ignored and default + values were used instead. Fix now honors the value set in the module option. + (https://github.com/ansible-collections/ibm_zos_core/pull/1695). + - zos_mount - The ``tmp_hlq`` option was previously ignored and default values + were used instead. Fix now honors the value set in the module option. (https://github.com/ansible-collections/ibm_zos_core/pull/1695). + - zos_mvs_raw - base64 sub-option for return_content under option for retrieving + DD output did not return base64. Fix now returns the base64 encoded contents + of the DD. (https://github.com/ansible-collections/ibm_zos_core/pull/1691). + - zos_script - The module would discard command line arguments in a command, + except for the first one. Fix now makes sure that all arguments are passed + to the remote command that gets executed. (https://github.com/ansible-collections/ibm_zos_core/pull/1698). + - zos_unarchive - The ``tmp_hlq`` option was previously ignored and default + values were used instead. Fix now honors the value set in the module option. + (https://github.com/ansible-collections/ibm_zos_core/pull/1695). + minor_changes: + - zos_backup_restore - Redefines the default behavior of module option `hlq`. + When option `operation` is set to `restore` and the `hlq` is not provided, + the original high level qualifiers in a backup will be used for a restore. + (https://github.com/ansible-collections/ibm_zos_core/pull/1632). + - zos_job_output - Added address space type used by jobs in return JSON as `content_type`. + (https://github.com/ansible-collections/ibm_zos_core/pull/1673). + - zos_job_query - Added address space type used by jobs in return JSON as `content_type`. + (https://github.com/ansible-collections/ibm_zos_core/pull/1673). + - zos_job_submit - Added address space type used by jobs in return JSON as `content_type`. + (https://github.com/ansible-collections/ibm_zos_core/pull/1673). + - zos_mvs_raw - Un-mappable chars in stdout/stderr streams are now replaced + with the replacement character. (https://github.com/ansible-collections/ibm_zos_core/pull/1634). + - zos_operator - Added new option ``case_sensitive`` to module, allowing users + to control how case in a command is handled by it. (https://github.com/ansible-collections/ibm_zos_core/pull/1641) + - zos_script - Un-mappable chars in stdout/stderr streams are now replaced with + the replacement character. (https://github.com/ansible-collections/ibm_zos_core/pull/1634). + - zos_tso_command - Un-mappable chars in stdout/stderr streams are now replaced + with the replacement character. (https://github.com/ansible-collections/ibm_zos_core/pull/1634). + release_summary: 'Release Date: ''2024-10-31'' + + This changelog describes all changes made to the modules and plugins included + + in this collection. The release date is the date the changelog is created. + + For additional details such as required dependencies and availability review + + the collections `release notes <https://ibm.github.io/z_ansible_collections_doc/ibm_zos_core/docs/source/release_notes.html>`__' + fragments: + - 1632-Validate_to_restore_keep_orginial_hlq.yml + - 1633-zos_mvs_raw_tests_portability.yml + - 1634-updates-for-non-utf8-depr-warning.yml + - 1635-backup_restore_portability.yml + - 1639-zos_tso_command_portability.yml + - 1641-case-sensitivity-zos_operator.yml + - 1642-Ensure_portability_zos_encode.yml + - 1643-Validate_parameter_recover_to_tolerate_enqueue.yml + - 1647-doc-backup-restore-racf-class.yml + - 1654-zos_apf_tests_change_temphlq.yml + - 1656-zos_find_portability.yml + - 1657-test_fetch_portability.yml + - 1658-job_submit_portability.yml + - 1661-job-owner-valid-characters.yml + - 1664-portability-zos_copy.yml + - 1673-return-job-type.yml + - 1676-portability_zos_blockinfile.yml + - 1677-zos_job_query_portability.yaml + - 1684-Add_validation_for_marker_begin_end.yml + - 1687-lineinfile_portability.yml + - 1689-add-non-utf8-testcase.yml + - 1691-zos-mvs-raw-base64-mode.yml + - 1695-tmp_hlq_when_calling_mvscmd.yml + - 1698-multiple-args-zos_script.yml + - 1739-tmp_files_not_use_tmp_folder.yml + - 1761-system-subsystem-job_query.yml + - 1766-zos_copy-racf-uacc-updates.yml + - 828-adds-concurrent-executor.yml + - v1.12.0-beta.1_summary.yml + release_date: '2024-10-24' 1.2.1: changes: bugfixes: From f1482d340dbef7861f35eced2562584613790483 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Thu, 24 Oct 2024 22:33:37 -0700 Subject: [PATCH 483/495] Update release notes Signed-off-by: ddimatos <dimatos@gmail.com> --- docs/source/release_notes.rst | 148 +++++++++++++++++++++++++++++++--- 1 file changed, 137 insertions(+), 11 deletions(-) diff --git a/docs/source/release_notes.rst b/docs/source/release_notes.rst index e2ee60586..01df036c5 100644 --- a/docs/source/release_notes.rst +++ b/docs/source/release_notes.rst @@ -6,6 +6,86 @@ Releases ======== +Version 1.12.0-beta.1 +===================== + +Minor Changes +------------- + +- ``zos_backup_restore`` - default behavior for module option **hlq** changed. When option **operation** is set to **restore** and the **hlq** is not provided, the original high level qualifiers in a backup will be used for a restore. + +- ``zos_job_output`` - has added the address space type for a job returned as **content_type** in the module response. + +- ``zos_job_query`` - has added the address space type for a job returned as **content_type** in the module response. + +- ``zos_job_submit`` - has added the address space type for a job returned as **content_type** in the module response. + +- ``zos_mvs_raw`` - updates the stdout and stderr when an unknown, unrecognized, or unrepresentable characters with the 'replacement character' (�), found in the Unicode standard at code point U+FFFD. + +- ``zos_operator`` - has added the option **case_sensitive**, allowing the module to control the commands case. + +- ``zos_script`` - updates the stdout and stderr when an unknown, unrecognized, or unrepresentable characters with the 'replacement character' (�), found in the Unicode standard at code point U+FFFD. + +- ``zos_tso_command`` - updates the stdout and stderr when an unknown, unrecognized, or unrepresentable characters with the 'replacement character' (�), found in the Unicode standard at code point U+FFFD. + +Bugfixes +-------- + +- ``zos_apf`` - module option **tmp_hlq** was previously ignored and default values were used. Now the module uses the value set in the option. + +- ``zos_archive`` - module option **tmp_hlq** was previously ignored and default values were used. Now the module uses the value set in the option. + +- ``zos_backup_restore`` - when a recoverable error was encountered and **recover = True**, the module would fail. The change now allows the module to recover. + +- ``zos_blockinfile`` - when the modules **marker_begin** and **marker_end** were set to the same value, the module would not delete the block. Now the module requires the **marker_begin** and **marker_end** to have different values. + +- ``zos_blockinfile`` - module option **tmp_hlq** was previously ignored and default values were used. Now the module uses the value set in the option.. + +- ``zos_copy`` + + - module option **tmp_hlq** was previously ignored and default values were used. Now the module uses the value set in the option. + - module would fail if the user did not have Universal Access Authority for SAF Profile **MVS.MCSOPER.ZOAU** and SAF Class **OPERCMDS**. Now the module handles the exception and returns an informative message. + - module would ignore the value set for **remote_tmp** in the Ansible configuration file. Now the module uses the value of **remote_tmp** or the default value **~/.ansible/tmp** if none is given. + +- ``zos_data_set`` - module option **tmp_hlq** was previously ignored and default values were used. Now the module uses the value set in the option. + +- ``zos_encode`` - module option **tmp_hlq** was previously ignored and default values were used. Now the module uses the value set in the option. + +- ``zos_fetch`` - module option **tmp_hlq** was previously ignored and default values were used. Now the module uses the value set in the option. + +- ``zos_job_output`` - module would raise an invalid argument error for a user ID that contained **@**, **$**, or **#**. Now the module supports RACF user naming conventions. + +- ``zos_job_query`` - module did not return values for properties **system** and **subsystem**. Now the module returns these values. +- ``zos_job_query`` - module would raise an invalid argument error for a user ID that contained **@**, **$**, or **#**. Now the module supports RACF user naming conventions. + +- ``zos_lineinfile`` - module option **tmp_hlq** was previously ignored and default values were used. Now the module uses the value set in the option. + +- ``zos_mount`` - module option **tmp_hlq** was previously ignored and default values were used. Now the module uses the value set in the option. + +- ``zos_mvs_raw`` - module sub-option **base64** for **return_content** did not retrieve DD output as Base64. Now the module returns Base64 encoded contents for the DD. + +- ``zos_script`` - module would only read the first command line argument if more than one was used. Now the module passes all arguments to the remote command. + +- ``zos_unarchive`` - module option **tmp_hlq** was previously ignored and default values were used. Now the module uses the value set in the option. + +Availability +------------ + +* `Galaxy`_ +* `GitHub`_ + +Requirements +------------ + +The IBM z/OS core collection has several dependencies, please review the `z/OS core support matrix`_ to understand both the +control node and z/OS managed node dependencies. + +Known Issues +------------ +- ``zos_job_submit`` - when setting 'location' to 'local' and not specifying the from and to encoding, the modules defaults are not read leaving the file in its original encoding; explicitly set the encodings instead of relying on the default. +- ``zos_job_submit`` - when submitting JCL, the response value returned for **byte_count** is incorrect. +- ``zos_apf`` - When trying to remove a library that contains the '$' character in the name for an APF(authorized program facility), the operation will fail. + Version 1.11.0 ============== @@ -96,7 +176,7 @@ Bugfixes Availability ------------ -* `Automation Hub`_ +* `Ansible Automation Platform`_ * `Galaxy`_ * `GitHub`_ @@ -110,7 +190,7 @@ Known Issues ------------ - ``zos_job_submit`` - when setting 'location' to 'local' and not specifying the from and to encoding, the modules defaults are not read leaving the file in its original encoding; explicitly set the encodings instead of relying on the default. - ``zos_job_submit`` - when submitting JCL, the response value returned for **byte_count** is incorrect. -- ``zos_apf`` - When trying to remove a library that contains the '$' character in the name from APF(authorized program facility), operation will fail. +- ``zos_apf`` - When trying to remove a library that contains the '$' character in the name for an APF(authorized program facility), the operation will fail. Version 1.10.0 ============== @@ -201,7 +281,7 @@ It is intended to assist in updating your playbooks so this collection will cont Availability ------------ -* `Automation Hub`_ +* `Ansible Automation Platform`_ * `Galaxy`_ * `GitHub`_ @@ -230,6 +310,52 @@ Known Issues - In the past, choices could be defined in either lower or upper case. Now, only the case that is identified in the docs can be set, this is so that the collection can continue to maintain certified status. - Use of special characters (#, @, $, \- ) in different options like data set names and commands is not fully supported, some modules support them but is the user responsibility to escape them. Read each module documentation for further details. +Version 1.9.3 +============= + +Bugfixes +-------- + +- ``zos_job_submit`` - module did not return values for properties **system** and **subsystem**. Now the module returns these values. +- ``zos_mvs_raw`` + + - If a program failed with a non-zero return code and verbose was false, the module would succeed. Whereas, if the program failed and verbose was true the module would fail. Fix now has a consistent behavior and fails in both cases. + - Module would obfuscate the return code from the program when failing returning 8 instead. Fix now returns the proper return code from the program. + +Availability +------------ + +* `Ansible Automation Platform`_ +* `Galaxy`_ +* `GitHub`_ + +Requirements +------------ + +The IBM z/OS core collection has several dependencies, please review the `z/OS core support matrix`_ to understand both the +controller and z/OS managed node dependencies. + +Known Issues +------------ + +- ``zos_job_submit`` - when setting 'location' to 'LOCAL' and not specifying the from and to encoding, the modules defaults are not read leaving the file in its original encoding; explicitly set the encodings instead of relying on the default. +- ``zos_job_submit`` - when submitting JCL, the response value returned for **byte_count** is incorrect. + +- ``zos_job_submit``, ``zos_job_output``, ``zos_operator_action_query`` - encounters UTF-8 decoding errors when interacting with results that contain non-printable UTF-8 characters in the response. This has been addressed in this release and corrected with **ZOAU version 1.2.5.6** or later. + + - If the appropriate level of ZOAU can not be installed, some options are to: + + - Specify that the ASA assembler option be enabled to instruct the assembler to use ANSI control characters instead of machine code control characters. + - Ignore module errors by using **ignore_errors:true** for a specific playbook task. + - If the error is resulting from a batch job, add **ignore_errors:true** to the task and capture the output into a registered variable to extract the + job ID with a regular expression. Then use ``zos_job_output`` to display the DD without the non-printable character such as the DD **JESMSGLG**. + - If the error is the result of a batch job, set option **return_output** to false so that no DDs are read which could contain the non-printable UTF-8 characters. + +- ``zos_data_set`` - An undocumented option **size** was defined in module **zos_data_set**, this has been removed to satisfy collection certification, use the intended and documented **space_primary** option. + +- In the past, choices could be defined in either lower or upper case. Now, only the case that is identified in the docs can be set, this is so that the collection can continue to maintain certified status. + + Version 1.9.2 ============= @@ -241,7 +367,7 @@ Bugfixes Availability ------------ -* `Automation Hub`_ +* `Ansible Automation Platform`_ * `Galaxy`_ * `GitHub`_ @@ -301,7 +427,7 @@ Known Issues Availability ------------ -* `Automation Hub`_ +* `Ansible Automation Platform`_ * `Galaxy`_ * `GitHub`_ @@ -423,7 +549,7 @@ and documented **space_primary** option. Availability ------------ -* `Automation Hub`_ +* `Ansible Automation Platform`_ * `Galaxy`_ * `GitHub`_ @@ -507,7 +633,7 @@ unique, some options to work around the error are below. Availability ------------ -* `Automation Hub`_ +* `Ansible Automation Platform`_ * `Galaxy`_ * `GitHub`_ @@ -572,7 +698,7 @@ Bugfixes Availability ------------ -* `Automation Hub`_ +* `Ansible Automation Platform`_ * `Galaxy`_ * `GitHub`_ @@ -633,7 +759,7 @@ Bugfixes Availability ------------ -* `Automation Hub`_ +* `Ansible Automation Platform`_ * `Galaxy`_ * `GitHub`_ @@ -747,7 +873,7 @@ Deprecated Features Availability ------------ -* `Automation Hub`_ +* `Ansible Automation Platform`_ * `Galaxy`_ * `GitHub`_ @@ -764,7 +890,7 @@ controller and z/OS managed node dependencies. https://github.com/ansible-collections/ibm_zos_core .. _Galaxy: https://galaxy.ansible.com/ibm/ibm_zos_core -.. _Automation Hub: +.. _Ansible Automation Platform: https://www.ansible.com/products/automation-hub .. _IBM Open Enterprise SDK for Python: https://www.ibm.com/products/open-enterprise-python-zos From 5792d5c1d5d9690b6eaeb8a14cca675b93a4c1fa Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Thu, 24 Oct 2024 22:44:50 -0700 Subject: [PATCH 484/495] update release notes formatting Signed-off-by: ddimatos <dimatos@gmail.com> --- docs/source/release_notes.rst | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/docs/source/release_notes.rst b/docs/source/release_notes.rst index 01df036c5..74916ee33 100644 --- a/docs/source/release_notes.rst +++ b/docs/source/release_notes.rst @@ -55,8 +55,9 @@ Bugfixes - ``zos_job_output`` - module would raise an invalid argument error for a user ID that contained **@**, **$**, or **#**. Now the module supports RACF user naming conventions. -- ``zos_job_query`` - module did not return values for properties **system** and **subsystem**. Now the module returns these values. -- ``zos_job_query`` - module would raise an invalid argument error for a user ID that contained **@**, **$**, or **#**. Now the module supports RACF user naming conventions. +- ``zos_job_query`` + - module did not return values for properties **system** and **subsystem**. Now the module returns these values. + - module would raise an invalid argument error for a user ID that contained **@**, **$**, or **#**. Now the module supports RACF user naming conventions. - ``zos_lineinfile`` - module option **tmp_hlq** was previously ignored and default values were used. Now the module uses the value set in the option. From e15832de2a10918f8502b57d0d4d615a802392a1 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Thu, 24 Oct 2024 22:46:24 -0700 Subject: [PATCH 485/495] Update modules content_type to share address space types Signed-off-by: ddimatos <dimatos@gmail.com> --- plugins/modules/zos_job_output.py | 8 +++++++- plugins/modules/zos_job_query.py | 8 +++++++- plugins/modules/zos_job_submit.py | 8 +++++++- 3 files changed, 21 insertions(+), 3 deletions(-) diff --git a/plugins/modules/zos_job_output.py b/plugins/modules/zos_job_output.py index 986578f81..986d9f1da 100644 --- a/plugins/modules/zos_job_output.py +++ b/plugins/modules/zos_job_output.py @@ -113,7 +113,13 @@ sample: content_type: description: - Type of address space used by the job. + - Type of address space used by the job, can be one of the following types. + - APPC for a APPC Initiator. + - JGRP for a JOBGROUP. + - JOB for a Batch job. + - STC for a Started task. + - TSU for a Time sharing user. + - ? for an unknown or pending. type: str sample: JOB creation_date: diff --git a/plugins/modules/zos_job_query.py b/plugins/modules/zos_job_query.py index 110c3554e..a4bc5c45c 100644 --- a/plugins/modules/zos_job_query.py +++ b/plugins/modules/zos_job_query.py @@ -121,7 +121,13 @@ sample: JOB01427 content_type: description: - Type of address space used by the job. + - Type of address space used by the job, can be one of the following types. + - APPC for a APPC Initiator. + - JGRP for a JOBGROUP. + - JOB for a Batch job. + - STC for a Started task. + - TSU for a Time sharing user. + - ? for an unknown or pending. type: str sample: STC system: diff --git a/plugins/modules/zos_job_submit.py b/plugins/modules/zos_job_submit.py index ce472e266..f6850a8ac 100644 --- a/plugins/modules/zos_job_submit.py +++ b/plugins/modules/zos_job_submit.py @@ -160,7 +160,13 @@ sample: HELLO content_type: description: - Type of address space used by the job. + - Type of address space used by the job, can be one of the following types. + - APPC for a APPC Initiator. + - JGRP for a JOBGROUP. + - JOB for a Batch job. + - STC for a Started task. + - TSU for a Time sharing user. + - ? for an unknown or pending. type: str sample: STC duration: From 341cc747db84cc98852c54623669f17f70098fb8 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Thu, 24 Oct 2024 23:05:25 -0700 Subject: [PATCH 486/495] Update and escape the content_type Signed-off-by: ddimatos <dimatos@gmail.com> --- plugins/modules/zos_job_output.py | 4 ++-- plugins/modules/zos_job_query.py | 2 +- plugins/modules/zos_job_submit.py | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/plugins/modules/zos_job_output.py b/plugins/modules/zos_job_output.py index 986d9f1da..09a54380b 100644 --- a/plugins/modules/zos_job_output.py +++ b/plugins/modules/zos_job_output.py @@ -113,13 +113,13 @@ sample: content_type: description: - - Type of address space used by the job, can be one of the following types. + Type of address space used by the job, can be one of the following types. - APPC for a APPC Initiator. - JGRP for a JOBGROUP. - JOB for a Batch job. - STC for a Started task. - TSU for a Time sharing user. - - ? for an unknown or pending. + - \? for an unknown or pending. type: str sample: JOB creation_date: diff --git a/plugins/modules/zos_job_query.py b/plugins/modules/zos_job_query.py index a4bc5c45c..5337ad9cb 100644 --- a/plugins/modules/zos_job_query.py +++ b/plugins/modules/zos_job_query.py @@ -127,7 +127,7 @@ - JOB for a Batch job. - STC for a Started task. - TSU for a Time sharing user. - - ? for an unknown or pending. + - \? for an unknown or pending. type: str sample: STC system: diff --git a/plugins/modules/zos_job_submit.py b/plugins/modules/zos_job_submit.py index f6850a8ac..a56a0bc58 100644 --- a/plugins/modules/zos_job_submit.py +++ b/plugins/modules/zos_job_submit.py @@ -166,7 +166,7 @@ - JOB for a Batch job. - STC for a Started task. - TSU for a Time sharing user. - - ? for an unknown or pending. + - \? for an unknown or pending. type: str sample: STC duration: From bd857e1582d405bba1454690e3ff149eac775d95 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Thu, 24 Oct 2024 23:07:02 -0700 Subject: [PATCH 487/495] udpate RST based on module doc changes Signed-off-by: ddimatos <dimatos@gmail.com> --- docs/source/modules/zos_job_output.rst | 2 +- docs/source/modules/zos_job_query.rst | 14 +++++++++++++- docs/source/modules/zos_job_submit.rst | 14 +++++++++++++- 3 files changed, 27 insertions(+), 3 deletions(-) diff --git a/docs/source/modules/zos_job_output.rst b/docs/source/modules/zos_job_output.rst index f31ff8362..bd4b66369 100644 --- a/docs/source/modules/zos_job_output.rst +++ b/docs/source/modules/zos_job_output.rst @@ -265,7 +265,7 @@ jobs | **type**: str content_type - Type of address space used by the job. + Type of address space used by the job, can be one of the following types. - APPC for a APPC Initiator. - JGRP for a JOBGROUP. - JOB for a Batch job. - STC for a Started task. - TSU for a Time sharing user. - \? for an unknown or pending. | **type**: str | **sample**: JOB diff --git a/docs/source/modules/zos_job_query.rst b/docs/source/modules/zos_job_query.rst index 6c520b42a..7046818a1 100644 --- a/docs/source/modules/zos_job_query.rst +++ b/docs/source/modules/zos_job_query.rst @@ -184,7 +184,19 @@ jobs | **sample**: JOB01427 content_type - Type of address space used by the job. + Type of address space used by the job, can be one of the following types. + + APPC for a APPC Initiator. + + JGRP for a JOBGROUP. + + JOB for a Batch job. + + STC for a Started task. + + TSU for a Time sharing user. + + \? for an unknown or pending. | **type**: str | **sample**: STC diff --git a/docs/source/modules/zos_job_submit.rst b/docs/source/modules/zos_job_submit.rst index 4244b78da..ee79cdc58 100644 --- a/docs/source/modules/zos_job_submit.rst +++ b/docs/source/modules/zos_job_submit.rst @@ -593,7 +593,19 @@ jobs | **sample**: HELLO content_type - Type of address space used by the job. + Type of address space used by the job, can be one of the following types. + + APPC for a APPC Initiator. + + JGRP for a JOBGROUP. + + JOB for a Batch job. + + STC for a Started task. + + TSU for a Time sharing user. + + \? for an unknown or pending. | **type**: str | **sample**: STC From be08989b9807c7fa9b6e800460a6c9f67d724b9a Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Sun, 27 Oct 2024 14:58:24 -0700 Subject: [PATCH 488/495] Remove fragments before releasing Signed-off-by: ddimatos <dimatos@gmail.com> --- .../fragments/1032-clean-job_submit-test.yml | 3 - ...-lineinfile-remove-zos_copy-dependency.yml | 3 - .../1156-zos_archive-remove-zos_copy_dep.yml | 3 - ...-remove-zos-copy-from-zos-encode-tests.yml | 3 - ...165-remove-zos-copy-dep-from-zos-fetch.yml | 3 - ...ve-zos-copy-from-zos-blockinfile-tests.yml | 3 - .../1169-util-job-zoau-migration.yml | 3 - ...hancememt-make-pipeline-217-compatible.yml | 3 - ...e-zos_encode-from_zos_lineinfile-tests.yml | 3 - .../1181-zoau-migration-zos_operator.yml | 4 - .../1182-migrate-module-utils-data-set.yml | 3 - changelogs/fragments/1183-copy-members.yml | 3 - ...184-remove-zos-fetch-dep-from-zos-copy.yml | 3 - .../1187-migrate-module-utils-copy.yml | 3 - .../1188-migrate-module_utils-backup.yml | 3 - .../1189-migrate-module_utils-encode.yml | 3 - ...1190-migrate-module_utils-dd_statement.yml | 3 - .../1196-zoau-migration-zos_gather_facts.yml | 4 - .../1202-doc-gen-script-portability.yml | 4 - changelogs/fragments/1204-migrate-zos_apf.yml | 12 --- .../1209-zoau-migration-zos_job_submit.yml | 3 - ...1215-Migrate_zos_operator_action_query.yml | 4 - ...lidate_module_zos_job_output_migration.yml | 3 - .../fragments/1217-validate-job-query.yml | 3 - .../fragments/1218-migrate-zos_encode.yml | 3 - ...20-bugfix-zos_job_submit-default_value.yml | 4 - .../1222-zoau-migration-zos_copy.yml | 3 - .../fragments/1227-migrate-zos_archive.yml | 3 - ...228-zos_find-remove-zos_lineinfile_dep.yml | 3 - .../fragments/1229-migrate-zos_fetch.yml | 3 - .../fragments/1237-migrate-zos_mount.yml | 4 - .../fragments/1238-migrate-zos_unarchive.yml | 3 - .../1242-zoau-migration-zos_data_set.yml | 3 - ...Migrate_zos_blockinfile_and_lineinfile.yml | 4 - .../fragments/1257-zoau-import-zos_apf.yml | 3 - .../1261-job-submit-non-utf8-chars.yml | 9 -- .../1265_Migrate_zos_backup_restore.yml | 7 -- ...0-quick-fix-len-of-volumes-work-around.yml | 5 -- ...-update-zos_archive-zos_unarchive-docs.yml | 5 -- .../1292-doc-zos_tso_command-example.yml | 4 - .../fragments/1295-doc-zos_ping-scp.yml | 7 -- ...98-Remove_local_charset_from_zos_fetch.yml | 3 - .../fragments/1307-update-sanity-zos_copy.yml | 10 --- .../1320-Zos_mvs_raw_ignores_tmp_hlq.yml | 5 -- .../1322-update-docstring-encode.yml | 3 - .../1323-Update_docstring-dd_statement.yml | 3 - .../1331-update-docstring-ickdsf.yml | 3 - .../1332-update-docstring-import_handler.yml | 3 - .../fragments/1333-update-docstring-job.yml | 3 - .../1334-update-docstring-mcs_cmd.yml | 3 - .../1335-update-docstring-template.yml | 3 - .../1336-update-docstring-validation.yml | 3 - .../fragments/1337-update-docstring-vtoc.yml | 3 - ...-update-docstring-zoau_version_checker.yml | 3 - .../1340-Work_around_fix_false_positive.yml | 4 - ...42-update-docstring-zos_backup_restore.yml | 3 - .../1343-update-docstring-zos_blockinline.yml | 3 - .../1344-update-docstring-zos_copy.yml | 3 - ...re_than_0_doesn_not_put_change_as_true.yml | 5 -- .../1347-update-docstring-zos_data_set.yml | 3 - .../1348-update-docstring-zos_encode.yml | 3 - .../1349-update-docstring-zos_fetch.yml | 3 - .../1350-update-docstring-zos_find.yml | 3 - ...1351-update-docstring-zos_gather_facts.yml | 3 - .../1352-update-docstring-zos_job_output.yml | 3 - .../1353-update-docstring-zos_job_query.yml | 3 - .../1354-update-docstring-zos_job_submit.yml | 3 - .../1355-update-docstring-zos_lineinfile.yml | 3 - .../1356-update-docstring-zos_mount.yml | 3 - .../1361-update-docstring-zos_operator.yml | 3 - .../fragments/1362-update-docstring-file.yml | 3 - .../1363-update-docstring-system.yml | 3 - ...ncement-zos-find-gdg-gds-special-chars.yml | 3 - .../1380-enhancement-add-sybols-zos_apf.yml | 3 - .../1384-update-docstring-backup.yml | 3 - ...385-update-docstring-better_arg_parser.yml | 3 - .../fragments/1386-gdg-symbols-support.yml | 3 - .../fragments/1387-update-docstring-copy.yml | 3 - .../fragments/1388-lowercase-choices.yml | 87 ------------------- .../1390-update-docstring-zos_script.yml | 3 - .../1391-update-docstring-zos_tso_command.yml | 3 - .../1392-update-docstring-zos_volume_init.yml | 3 - .../1393-update-docstring-zos_apf.yml | 3 - ...te_docstring-zos_operator_action_query.yml | 3 - .../1415-Update_docstring-zos_archive.yml | 3 - .../fragments/1443-zos_find-filter-size.yml | 4 - .../fragments/1470-redesign_mvs_raw.yml | 3 - .../1484-update-ac-tool-ansible-lint.yml | 4 - .../1488-zos_copy-refactor-force.yml | 4 - .../1495-default-values-data-set-class.yml | 4 - changelogs/fragments/1496-fix-gds-resolve.yml | 4 - .../1497-gdg-support-zos-job-submit.yml | 4 - .../1504-zos_data_set-gdg-support.yml | 5 -- .../fragments/1507-zos_operator-docs.yml | 3 - ...1511-zos_archive_unarchive-gdg-support.yml | 7 -- .../1512-bugfix-zos_job_submit-error-type.yml | 5 -- .../fragments/1515-gdg_batch_creation.yml | 4 - ..._gdgsgds_and_special_character_support.yml | 9 -- .../fragments/1519-zos_fetch-gdg-support.yml | 4 - ..._raw_support_gdg_gds_special_character.yml | 4 - changelogs/fragments/1527-zos_backup-gdg.yml | 5 -- .../fragments/1531-zos_encode_gdg_support.yml | 4 - ...l-data_set-function-data_set_cataloged.yml | 5 -- ...vs_raw_gds_positive_was_false_positive.yml | 3 - ...vs_raw_fix_verbose_and_first_character.yml | 4 - .../1550-lower_case_idcams_utility.yml | 3 - .../fragments/1552-readme-support-updates.yml | 10 --- .../fragments/1553-Console_parallel.yml | 3 - .../fragments/1561-remove_deep_copy.yml | 10 --- .../1563-zos_tso_command-gdg-support.yml | 4 - .../fragments/1564-zos_copy_gdg_support.yml | 4 - .../1565-remove-deprecated-pipes-library.yml | 11 --- .../1570-compute-member-name-zos_copy.yml | 6 -- .../fragments/1623-zos_copy-avoid-opercmd.yml | 5 -- ...er-zos_mount-special-character-support.yml | 4 - ...-Validate_to_restore_keep_orginial_hlq.yml | 5 -- .../1633-zos_mvs_raw_tests_portability.yml | 4 - ...1634-updates-for-non-utf8-depr-warning.yml | 25 ------ .../1635-backup_restore_portability.yml | 3 - .../1639-zos_tso_command_portability.yml | 3 - ...uick-fix-ansible-core:2.17-verbosity-issue | 7 -- .../1641-case-sensitivity-zos_operator.yml | 4 - .../1642-Ensure_portability_zos_encode.yml | 4 - ..._parameter_recover_to_tolerate_enqueue.yml | 5 -- .../1647-doc-backup-restore-racf-class.yml | 5 -- .../1654-zos_apf_tests_change_temphlq.yml | 4 - .../fragments/1656-zos_find_portability.yml | 3 - .../fragments/1657-test_fetch_portability.yml | 3 - .../fragments/1658-job_submit_portability.yml | 3 - .../1661-job-owner-valid-characters.yml | 9 -- .../fragments/1664-portability-zos_copy.yml | 11 --- changelogs/fragments/1673-return-job-type.yml | 10 --- .../1676-portability_zos_blockinfile.yml | 3 - .../1677-zos_job_query_portability.yaml | 3 - ...84-Add_validation_for_marker_begin_end.yml | 4 - .../fragments/1687-lineinfile_portability.yml | 3 - .../fragments/1689-add-non-utf8-testcase.yml | 4 - .../1691-zos-mvs-raw-base64-mode.yml | 4 - .../1695-tmp_hlq_when_calling_mvscmd.yml | 31 ------- .../1698-multiple-args-zos_script.yml | 5 -- .../1739-tmp_files_not_use_tmp_folder.yml | 5 -- .../1761-system-subsystem-job_query.yml | 3 - .../1766-zos_copy-racf-uacc-updates.yml | 12 --- .../fragments/692-changelog-lint-ac-tool.yml | 8 -- .../828-adds-concurrent-executor.yml | 7 -- .../971-bug-job_submit-can-stacktrace.yml | 6 -- changelogs/fragments/992-fix-sanity4to6.yml | 7 -- 147 files changed, 748 deletions(-) delete mode 100644 changelogs/fragments/1032-clean-job_submit-test.yml delete mode 100644 changelogs/fragments/1152-zos-lineinfile-remove-zos_copy-dependency.yml delete mode 100644 changelogs/fragments/1156-zos_archive-remove-zos_copy_dep.yml delete mode 100644 changelogs/fragments/1157-remove-zos-copy-from-zos-encode-tests.yml delete mode 100644 changelogs/fragments/1165-remove-zos-copy-dep-from-zos-fetch.yml delete mode 100644 changelogs/fragments/1167-remove-zos-copy-from-zos-blockinfile-tests.yml delete mode 100644 changelogs/fragments/1169-util-job-zoau-migration.yml delete mode 100644 changelogs/fragments/1170-enhancememt-make-pipeline-217-compatible.yml delete mode 100644 changelogs/fragments/1179-remove-zos_encode-from_zos_lineinfile-tests.yml delete mode 100644 changelogs/fragments/1181-zoau-migration-zos_operator.yml delete mode 100644 changelogs/fragments/1182-migrate-module-utils-data-set.yml delete mode 100644 changelogs/fragments/1183-copy-members.yml delete mode 100644 changelogs/fragments/1184-remove-zos-fetch-dep-from-zos-copy.yml delete mode 100644 changelogs/fragments/1187-migrate-module-utils-copy.yml delete mode 100644 changelogs/fragments/1188-migrate-module_utils-backup.yml delete mode 100644 changelogs/fragments/1189-migrate-module_utils-encode.yml delete mode 100644 changelogs/fragments/1190-migrate-module_utils-dd_statement.yml delete mode 100644 changelogs/fragments/1196-zoau-migration-zos_gather_facts.yml delete mode 100644 changelogs/fragments/1202-doc-gen-script-portability.yml delete mode 100644 changelogs/fragments/1204-migrate-zos_apf.yml delete mode 100644 changelogs/fragments/1209-zoau-migration-zos_job_submit.yml delete mode 100644 changelogs/fragments/1215-Migrate_zos_operator_action_query.yml delete mode 100644 changelogs/fragments/1216-Validate_module_zos_job_output_migration.yml delete mode 100644 changelogs/fragments/1217-validate-job-query.yml delete mode 100644 changelogs/fragments/1218-migrate-zos_encode.yml delete mode 100644 changelogs/fragments/1220-bugfix-zos_job_submit-default_value.yml delete mode 100644 changelogs/fragments/1222-zoau-migration-zos_copy.yml delete mode 100644 changelogs/fragments/1227-migrate-zos_archive.yml delete mode 100644 changelogs/fragments/1228-zos_find-remove-zos_lineinfile_dep.yml delete mode 100644 changelogs/fragments/1229-migrate-zos_fetch.yml delete mode 100644 changelogs/fragments/1237-migrate-zos_mount.yml delete mode 100644 changelogs/fragments/1238-migrate-zos_unarchive.yml delete mode 100644 changelogs/fragments/1242-zoau-migration-zos_data_set.yml delete mode 100644 changelogs/fragments/1256_Migrate_zos_blockinfile_and_lineinfile.yml delete mode 100644 changelogs/fragments/1257-zoau-import-zos_apf.yml delete mode 100644 changelogs/fragments/1261-job-submit-non-utf8-chars.yml delete mode 100644 changelogs/fragments/1265_Migrate_zos_backup_restore.yml delete mode 100644 changelogs/fragments/1270-quick-fix-len-of-volumes-work-around.yml delete mode 100644 changelogs/fragments/1286-update-zos_archive-zos_unarchive-docs.yml delete mode 100644 changelogs/fragments/1292-doc-zos_tso_command-example.yml delete mode 100644 changelogs/fragments/1295-doc-zos_ping-scp.yml delete mode 100644 changelogs/fragments/1298-Remove_local_charset_from_zos_fetch.yml delete mode 100644 changelogs/fragments/1307-update-sanity-zos_copy.yml delete mode 100644 changelogs/fragments/1320-Zos_mvs_raw_ignores_tmp_hlq.yml delete mode 100644 changelogs/fragments/1322-update-docstring-encode.yml delete mode 100644 changelogs/fragments/1323-Update_docstring-dd_statement.yml delete mode 100644 changelogs/fragments/1331-update-docstring-ickdsf.yml delete mode 100644 changelogs/fragments/1332-update-docstring-import_handler.yml delete mode 100644 changelogs/fragments/1333-update-docstring-job.yml delete mode 100644 changelogs/fragments/1334-update-docstring-mcs_cmd.yml delete mode 100644 changelogs/fragments/1335-update-docstring-template.yml delete mode 100644 changelogs/fragments/1336-update-docstring-validation.yml delete mode 100644 changelogs/fragments/1337-update-docstring-vtoc.yml delete mode 100644 changelogs/fragments/1338-update-docstring-zoau_version_checker.yml delete mode 100644 changelogs/fragments/1340-Work_around_fix_false_positive.yml delete mode 100644 changelogs/fragments/1342-update-docstring-zos_backup_restore.yml delete mode 100644 changelogs/fragments/1343-update-docstring-zos_blockinline.yml delete mode 100644 changelogs/fragments/1344-update-docstring-zos_copy.yml delete mode 100644 changelogs/fragments/1345-max_rc_more_than_0_doesn_not_put_change_as_true.yml delete mode 100644 changelogs/fragments/1347-update-docstring-zos_data_set.yml delete mode 100644 changelogs/fragments/1348-update-docstring-zos_encode.yml delete mode 100644 changelogs/fragments/1349-update-docstring-zos_fetch.yml delete mode 100644 changelogs/fragments/1350-update-docstring-zos_find.yml delete mode 100644 changelogs/fragments/1351-update-docstring-zos_gather_facts.yml delete mode 100644 changelogs/fragments/1352-update-docstring-zos_job_output.yml delete mode 100644 changelogs/fragments/1353-update-docstring-zos_job_query.yml delete mode 100644 changelogs/fragments/1354-update-docstring-zos_job_submit.yml delete mode 100644 changelogs/fragments/1355-update-docstring-zos_lineinfile.yml delete mode 100644 changelogs/fragments/1356-update-docstring-zos_mount.yml delete mode 100644 changelogs/fragments/1361-update-docstring-zos_operator.yml delete mode 100644 changelogs/fragments/1362-update-docstring-file.yml delete mode 100644 changelogs/fragments/1363-update-docstring-system.yml delete mode 100644 changelogs/fragments/1374-enhancement-zos-find-gdg-gds-special-chars.yml delete mode 100644 changelogs/fragments/1380-enhancement-add-sybols-zos_apf.yml delete mode 100644 changelogs/fragments/1384-update-docstring-backup.yml delete mode 100644 changelogs/fragments/1385-update-docstring-better_arg_parser.yml delete mode 100644 changelogs/fragments/1386-gdg-symbols-support.yml delete mode 100644 changelogs/fragments/1387-update-docstring-copy.yml delete mode 100644 changelogs/fragments/1388-lowercase-choices.yml delete mode 100644 changelogs/fragments/1390-update-docstring-zos_script.yml delete mode 100644 changelogs/fragments/1391-update-docstring-zos_tso_command.yml delete mode 100644 changelogs/fragments/1392-update-docstring-zos_volume_init.yml delete mode 100644 changelogs/fragments/1393-update-docstring-zos_apf.yml delete mode 100644 changelogs/fragments/1394-Update_docstring-zos_operator_action_query.yml delete mode 100644 changelogs/fragments/1415-Update_docstring-zos_archive.yml delete mode 100644 changelogs/fragments/1443-zos_find-filter-size.yml delete mode 100644 changelogs/fragments/1470-redesign_mvs_raw.yml delete mode 100644 changelogs/fragments/1484-update-ac-tool-ansible-lint.yml delete mode 100644 changelogs/fragments/1488-zos_copy-refactor-force.yml delete mode 100644 changelogs/fragments/1495-default-values-data-set-class.yml delete mode 100644 changelogs/fragments/1496-fix-gds-resolve.yml delete mode 100644 changelogs/fragments/1497-gdg-support-zos-job-submit.yml delete mode 100644 changelogs/fragments/1504-zos_data_set-gdg-support.yml delete mode 100644 changelogs/fragments/1507-zos_operator-docs.yml delete mode 100644 changelogs/fragments/1511-zos_archive_unarchive-gdg-support.yml delete mode 100644 changelogs/fragments/1512-bugfix-zos_job_submit-error-type.yml delete mode 100644 changelogs/fragments/1515-gdg_batch_creation.yml delete mode 100644 changelogs/fragments/1516-lineinfile_blockinfile_gdgsgds_and_special_character_support.yml delete mode 100644 changelogs/fragments/1519-zos_fetch-gdg-support.yml delete mode 100644 changelogs/fragments/1525-mvs_raw_support_gdg_gds_special_character.yml delete mode 100644 changelogs/fragments/1527-zos_backup-gdg.yml delete mode 100644 changelogs/fragments/1531-zos_encode_gdg_support.yml delete mode 100644 changelogs/fragments/1535-raise-error-in-module-util-data_set-function-data_set_cataloged.yml delete mode 100644 changelogs/fragments/1541-output_mvs_raw_gds_positive_was_false_positive.yml delete mode 100644 changelogs/fragments/1543-mvs_raw_fix_verbose_and_first_character.yml delete mode 100644 changelogs/fragments/1550-lower_case_idcams_utility.yml delete mode 100644 changelogs/fragments/1552-readme-support-updates.yml delete mode 100644 changelogs/fragments/1553-Console_parallel.yml delete mode 100644 changelogs/fragments/1561-remove_deep_copy.yml delete mode 100644 changelogs/fragments/1563-zos_tso_command-gdg-support.yml delete mode 100644 changelogs/fragments/1564-zos_copy_gdg_support.yml delete mode 100644 changelogs/fragments/1565-remove-deprecated-pipes-library.yml delete mode 100644 changelogs/fragments/1570-compute-member-name-zos_copy.yml delete mode 100644 changelogs/fragments/1623-zos_copy-avoid-opercmd.yml delete mode 100644 changelogs/fragments/1631-enabler-zos_mount-special-character-support.yml delete mode 100644 changelogs/fragments/1632-Validate_to_restore_keep_orginial_hlq.yml delete mode 100644 changelogs/fragments/1633-zos_mvs_raw_tests_portability.yml delete mode 100644 changelogs/fragments/1634-updates-for-non-utf8-depr-warning.yml delete mode 100644 changelogs/fragments/1635-backup_restore_portability.yml delete mode 100644 changelogs/fragments/1639-zos_tso_command_portability.yml delete mode 100644 changelogs/fragments/1640-quick-fix-ansible-core:2.17-verbosity-issue delete mode 100644 changelogs/fragments/1641-case-sensitivity-zos_operator.yml delete mode 100644 changelogs/fragments/1642-Ensure_portability_zos_encode.yml delete mode 100644 changelogs/fragments/1643-Validate_parameter_recover_to_tolerate_enqueue.yml delete mode 100644 changelogs/fragments/1647-doc-backup-restore-racf-class.yml delete mode 100644 changelogs/fragments/1654-zos_apf_tests_change_temphlq.yml delete mode 100644 changelogs/fragments/1656-zos_find_portability.yml delete mode 100644 changelogs/fragments/1657-test_fetch_portability.yml delete mode 100644 changelogs/fragments/1658-job_submit_portability.yml delete mode 100644 changelogs/fragments/1661-job-owner-valid-characters.yml delete mode 100644 changelogs/fragments/1664-portability-zos_copy.yml delete mode 100644 changelogs/fragments/1673-return-job-type.yml delete mode 100644 changelogs/fragments/1676-portability_zos_blockinfile.yml delete mode 100644 changelogs/fragments/1677-zos_job_query_portability.yaml delete mode 100644 changelogs/fragments/1684-Add_validation_for_marker_begin_end.yml delete mode 100644 changelogs/fragments/1687-lineinfile_portability.yml delete mode 100644 changelogs/fragments/1689-add-non-utf8-testcase.yml delete mode 100644 changelogs/fragments/1691-zos-mvs-raw-base64-mode.yml delete mode 100644 changelogs/fragments/1695-tmp_hlq_when_calling_mvscmd.yml delete mode 100644 changelogs/fragments/1698-multiple-args-zos_script.yml delete mode 100644 changelogs/fragments/1739-tmp_files_not_use_tmp_folder.yml delete mode 100644 changelogs/fragments/1761-system-subsystem-job_query.yml delete mode 100644 changelogs/fragments/1766-zos_copy-racf-uacc-updates.yml delete mode 100644 changelogs/fragments/692-changelog-lint-ac-tool.yml delete mode 100644 changelogs/fragments/828-adds-concurrent-executor.yml delete mode 100644 changelogs/fragments/971-bug-job_submit-can-stacktrace.yml delete mode 100644 changelogs/fragments/992-fix-sanity4to6.yml diff --git a/changelogs/fragments/1032-clean-job_submit-test.yml b/changelogs/fragments/1032-clean-job_submit-test.yml deleted file mode 100644 index bb4248aec..000000000 --- a/changelogs/fragments/1032-clean-job_submit-test.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - test_zos_job_submit_func.py - Removed test setting that was covering a missing duration value. - (https://github.com/ansible-collections/ibm_zos_core/pull/1364). diff --git a/changelogs/fragments/1152-zos-lineinfile-remove-zos_copy-dependency.yml b/changelogs/fragments/1152-zos-lineinfile-remove-zos_copy-dependency.yml deleted file mode 100644 index 44015bbd9..000000000 --- a/changelogs/fragments/1152-zos-lineinfile-remove-zos_copy-dependency.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_lineinfile - remove zos_copy calls from test cases. - (https://github.com/ansible-collections/ibm_zos_core/pull/1152). diff --git a/changelogs/fragments/1156-zos_archive-remove-zos_copy_dep.yml b/changelogs/fragments/1156-zos_archive-remove-zos_copy_dep.yml deleted file mode 100644 index ea8aacee9..000000000 --- a/changelogs/fragments/1156-zos_archive-remove-zos_copy_dep.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_archive - Remove zos_copy dependency from zos_archive test cases. - (https://github.com/ansible-collections/ibm_zos_core/pull/1156). \ No newline at end of file diff --git a/changelogs/fragments/1157-remove-zos-copy-from-zos-encode-tests.yml b/changelogs/fragments/1157-remove-zos-copy-from-zos-encode-tests.yml deleted file mode 100644 index 24f2802d5..000000000 --- a/changelogs/fragments/1157-remove-zos-copy-from-zos-encode-tests.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_encode - Remove zos_copy dependency from zos_encode test cases. - (https://github.com/ansible-collections/ibm_zos_core/pull/1157). diff --git a/changelogs/fragments/1165-remove-zos-copy-dep-from-zos-fetch.yml b/changelogs/fragments/1165-remove-zos-copy-dep-from-zos-fetch.yml deleted file mode 100644 index 9c8593c1a..000000000 --- a/changelogs/fragments/1165-remove-zos-copy-dep-from-zos-fetch.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_fetch - Remove zos_copy dependency from zos_fetch test cases. - (https://github.com/ansible-collections/ibm_zos_core/pull/1165). diff --git a/changelogs/fragments/1167-remove-zos-copy-from-zos-blockinfile-tests.yml b/changelogs/fragments/1167-remove-zos-copy-from-zos-blockinfile-tests.yml deleted file mode 100644 index d7fb725af..000000000 --- a/changelogs/fragments/1167-remove-zos-copy-from-zos-blockinfile-tests.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_blockinfile - Remove zos_copy dependency from zos_blockinfile test cases. - (https://github.com/ansible-collections/ibm_zos_core/pull/1167). \ No newline at end of file diff --git a/changelogs/fragments/1169-util-job-zoau-migration.yml b/changelogs/fragments/1169-util-job-zoau-migration.yml deleted file mode 100644 index 568aa9a4e..000000000 --- a/changelogs/fragments/1169-util-job-zoau-migration.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - module_utils/job.py - migrate code to use ZOAU v1.3.0. - (https://github.com/ansible-collections/ibm_zos_core/pull/1169). diff --git a/changelogs/fragments/1170-enhancememt-make-pipeline-217-compatible.yml b/changelogs/fragments/1170-enhancememt-make-pipeline-217-compatible.yml deleted file mode 100644 index 92f2d99b5..000000000 --- a/changelogs/fragments/1170-enhancememt-make-pipeline-217-compatible.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - test system - added ignore to sanity - (https://github.com/ansible-collections/ibm_zos_core/pull/1452). diff --git a/changelogs/fragments/1179-remove-zos_encode-from_zos_lineinfile-tests.yml b/changelogs/fragments/1179-remove-zos_encode-from_zos_lineinfile-tests.yml deleted file mode 100644 index a95e1c7e2..000000000 --- a/changelogs/fragments/1179-remove-zos_encode-from_zos_lineinfile-tests.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_lineinfile - Remove zos_encode dependency from zos_lineinfile test cases. - (https://github.com/ansible-collections/ibm_zos_core/pull/1179). \ No newline at end of file diff --git a/changelogs/fragments/1181-zoau-migration-zos_operator.yml b/changelogs/fragments/1181-zoau-migration-zos_operator.yml deleted file mode 100644 index 7c107de88..000000000 --- a/changelogs/fragments/1181-zoau-migration-zos_operator.yml +++ /dev/null @@ -1,4 +0,0 @@ -trivial: - - zos_operator - Update internal functions to account for the change to the - unit of measurement of `timeout` now in centiseconds. - (https://github.com/ansible-collections/ibm_zos_core/pull/1181). \ No newline at end of file diff --git a/changelogs/fragments/1182-migrate-module-utils-data-set.yml b/changelogs/fragments/1182-migrate-module-utils-data-set.yml deleted file mode 100644 index 857327254..000000000 --- a/changelogs/fragments/1182-migrate-module-utils-data-set.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - module_utils/data_set.py - migrate code to use ZOAU v1.3.0. - (https://github.com/ansible-collections/ibm_zos_core/pull/1182). diff --git a/changelogs/fragments/1183-copy-members.yml b/changelogs/fragments/1183-copy-members.yml deleted file mode 100644 index b0b0c7896..000000000 --- a/changelogs/fragments/1183-copy-members.yml +++ /dev/null @@ -1,3 +0,0 @@ -minor_changes: - - zos_copy - Improve zos_copy performance when copying multiple members from one PDS/E to another. - (https://github.com/ansible-collections/ibm_zos_core/pull/1183). diff --git a/changelogs/fragments/1184-remove-zos-fetch-dep-from-zos-copy.yml b/changelogs/fragments/1184-remove-zos-fetch-dep-from-zos-copy.yml deleted file mode 100644 index 9085743d9..000000000 --- a/changelogs/fragments/1184-remove-zos-fetch-dep-from-zos-copy.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_copy - Remove zos_fetch dependency from zos_copy test cases. - (https://github.com/ansible-collections/ibm_zos_core/pull/1184). diff --git a/changelogs/fragments/1187-migrate-module-utils-copy.yml b/changelogs/fragments/1187-migrate-module-utils-copy.yml deleted file mode 100644 index 26157f9fc..000000000 --- a/changelogs/fragments/1187-migrate-module-utils-copy.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - module_utils/copy.py - migrate code to use ZOAU v1.3.0. - (https://github.com/ansible-collections/ibm_zos_core/pull/1187). diff --git a/changelogs/fragments/1188-migrate-module_utils-backup.yml b/changelogs/fragments/1188-migrate-module_utils-backup.yml deleted file mode 100644 index 65945d06b..000000000 --- a/changelogs/fragments/1188-migrate-module_utils-backup.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - module_utils/backup.py - migrate code to use ZOAU v1.3.0. - (https://github.com/ansible-collections/ibm_zos_core/pull/1188). \ No newline at end of file diff --git a/changelogs/fragments/1189-migrate-module_utils-encode.yml b/changelogs/fragments/1189-migrate-module_utils-encode.yml deleted file mode 100644 index d7f471847..000000000 --- a/changelogs/fragments/1189-migrate-module_utils-encode.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - module_utils/encode.py - migrate code to use ZOAU v1.3.0. - (https://github.com/ansible-collections/ibm_zos_core/pull/1189). diff --git a/changelogs/fragments/1190-migrate-module_utils-dd_statement.yml b/changelogs/fragments/1190-migrate-module_utils-dd_statement.yml deleted file mode 100644 index 4bb3a582d..000000000 --- a/changelogs/fragments/1190-migrate-module_utils-dd_statement.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - module_utils/dd_statement.py - migrate code to use ZOAU v1.3.0. - (https://github.com/ansible-collections/ibm_zos_core/pull/1190). diff --git a/changelogs/fragments/1196-zoau-migration-zos_gather_facts.yml b/changelogs/fragments/1196-zoau-migration-zos_gather_facts.yml deleted file mode 100644 index 03f39b535..000000000 --- a/changelogs/fragments/1196-zoau-migration-zos_gather_facts.yml +++ /dev/null @@ -1,4 +0,0 @@ -trivial: - - zos_gather_facts - Update module internally to leverage ZOAU python API - for zinfo. - (https://github.com/ansible-collections/ibm_zos_core/pull/1196). \ No newline at end of file diff --git a/changelogs/fragments/1202-doc-gen-script-portability.yml b/changelogs/fragments/1202-doc-gen-script-portability.yml deleted file mode 100644 index 3c2e6ddbb..000000000 --- a/changelogs/fragments/1202-doc-gen-script-portability.yml +++ /dev/null @@ -1,4 +0,0 @@ -trivial: - - docs/scripts - Change to sed "-i" in place option which ensures compatibility between MacOS - and GNU versions of sed command. - (https://github.com/ansible-collections/ibm_zos_core/pull/1202). diff --git a/changelogs/fragments/1204-migrate-zos_apf.yml b/changelogs/fragments/1204-migrate-zos_apf.yml deleted file mode 100644 index 89db1abd2..000000000 --- a/changelogs/fragments/1204-migrate-zos_apf.yml +++ /dev/null @@ -1,12 +0,0 @@ -bugfixes: - - zos_apf - List option only returned one data set. Fix now returns - the list of retrieved data sets. - (https://github.com/ansible-collections/ibm_zos_core/pull/1204). - -minor_changes: - - zos_apf - Enhanced error messages when an exception is caught. - (https://github.com/ansible-collections/ibm_zos_core/pull/1204). - -trivial: - - zos_apf - Migrated the module to use ZOAU v1.3.0 json schema. - (https://github.com/ansible-collections/ibm_zos_core/pull/1204). \ No newline at end of file diff --git a/changelogs/fragments/1209-zoau-migration-zos_job_submit.yml b/changelogs/fragments/1209-zoau-migration-zos_job_submit.yml deleted file mode 100644 index 6f58e2713..000000000 --- a/changelogs/fragments/1209-zoau-migration-zos_job_submit.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_job_submit - Migrated the module to use ZOAU v1.3.0. - (https://github.com/ansible-collections/ibm_zos_core/pull/1209). \ No newline at end of file diff --git a/changelogs/fragments/1215-Migrate_zos_operator_action_query.yml b/changelogs/fragments/1215-Migrate_zos_operator_action_query.yml deleted file mode 100644 index be18056b3..000000000 --- a/changelogs/fragments/1215-Migrate_zos_operator_action_query.yml +++ /dev/null @@ -1,4 +0,0 @@ -trivial: - - zos_operator_action_query - Update internal functions to account for the change to the - unit of measurement of `timeout` now in centiseconds. - (https://github.com/ansible-collections/ibm_zos_core/pull/1215). \ No newline at end of file diff --git a/changelogs/fragments/1216-Validate_module_zos_job_output_migration.yml b/changelogs/fragments/1216-Validate_module_zos_job_output_migration.yml deleted file mode 100644 index 65d3d3c08..000000000 --- a/changelogs/fragments/1216-Validate_module_zos_job_output_migration.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_job_output - migrate code to use ZOAU v1.3.0. - (https://github.com/ansible-collections/ibm_zos_core/pull/1216). diff --git a/changelogs/fragments/1217-validate-job-query.yml b/changelogs/fragments/1217-validate-job-query.yml deleted file mode 100644 index df97c3ca6..000000000 --- a/changelogs/fragments/1217-validate-job-query.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_job_query - Removed zos_job_submit wait argument from tests. - (https://github.com/ansible-collections/ibm_zos_core/pull/1217). \ No newline at end of file diff --git a/changelogs/fragments/1218-migrate-zos_encode.yml b/changelogs/fragments/1218-migrate-zos_encode.yml deleted file mode 100644 index 3d712b749..000000000 --- a/changelogs/fragments/1218-migrate-zos_encode.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_encode - migrate code to use ZOAU v1.3.0. - (https://github.com/ansible-collections/ibm_zos_core/pull/1218). diff --git a/changelogs/fragments/1220-bugfix-zos_job_submit-default_value.yml b/changelogs/fragments/1220-bugfix-zos_job_submit-default_value.yml deleted file mode 100644 index 83d2391ba..000000000 --- a/changelogs/fragments/1220-bugfix-zos_job_submit-default_value.yml +++ /dev/null @@ -1,4 +0,0 @@ -bugfixes: - - zos_job_submit - Was ignoring the default value for location=DATA_SET, now - when location is not specified it will default to DATA_SET. - (https://github.com/ansible-collections/ibm_zos_core/pull/1220). \ No newline at end of file diff --git a/changelogs/fragments/1222-zoau-migration-zos_copy.yml b/changelogs/fragments/1222-zoau-migration-zos_copy.yml deleted file mode 100644 index edc6eec06..000000000 --- a/changelogs/fragments/1222-zoau-migration-zos_copy.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_copy - Migrated the module to use ZOAU v1.3.0. - (https://github.com/ansible-collections/ibm_zos_core/pull/1222). \ No newline at end of file diff --git a/changelogs/fragments/1227-migrate-zos_archive.yml b/changelogs/fragments/1227-migrate-zos_archive.yml deleted file mode 100644 index 820593c95..000000000 --- a/changelogs/fragments/1227-migrate-zos_archive.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_archive - migrate code to use ZOAU v1.3.0. - (https://github.com/ansible-collections/ibm_zos_core/pull/1227). diff --git a/changelogs/fragments/1228-zos_find-remove-zos_lineinfile_dep.yml b/changelogs/fragments/1228-zos_find-remove-zos_lineinfile_dep.yml deleted file mode 100644 index 67642d563..000000000 --- a/changelogs/fragments/1228-zos_find-remove-zos_lineinfile_dep.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_find - Removed zos_lineinfile dependency from test cases. - (https://github.com/ansible-collections/ibm_zos_core/pull/1228). diff --git a/changelogs/fragments/1229-migrate-zos_fetch.yml b/changelogs/fragments/1229-migrate-zos_fetch.yml deleted file mode 100644 index 07f9a26b4..000000000 --- a/changelogs/fragments/1229-migrate-zos_fetch.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_fetch - migrate code to use ZOAU v1.3.0. - (https://github.com/ansible-collections/ibm_zos_core/pull/1229). diff --git a/changelogs/fragments/1237-migrate-zos_mount.yml b/changelogs/fragments/1237-migrate-zos_mount.yml deleted file mode 100644 index d4787d42d..000000000 --- a/changelogs/fragments/1237-migrate-zos_mount.yml +++ /dev/null @@ -1,4 +0,0 @@ -trivial: - - tests/functional/modules/test_zos_mount_func.py - migrate code to use - ZOAU v1.3.0. - (https://github.com/ansible-collections/ibm_zos_core/pull/1237). diff --git a/changelogs/fragments/1238-migrate-zos_unarchive.yml b/changelogs/fragments/1238-migrate-zos_unarchive.yml deleted file mode 100644 index 8afe97d29..000000000 --- a/changelogs/fragments/1238-migrate-zos_unarchive.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_archive - migrate code to use ZOAU v1.3.0. - (https://github.com/ansible-collections/ibm_zos_core/pull/1238). \ No newline at end of file diff --git a/changelogs/fragments/1242-zoau-migration-zos_data_set.yml b/changelogs/fragments/1242-zoau-migration-zos_data_set.yml deleted file mode 100644 index 851783900..000000000 --- a/changelogs/fragments/1242-zoau-migration-zos_data_set.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_data_set - Refactor data_set module_util and functional tests for ZOAU v1.3.0. - (https://github.com/ansible-collections/ibm_zos_core/pull/1242). \ No newline at end of file diff --git a/changelogs/fragments/1256_Migrate_zos_blockinfile_and_lineinfile.yml b/changelogs/fragments/1256_Migrate_zos_blockinfile_and_lineinfile.yml deleted file mode 100644 index e2e841e9c..000000000 --- a/changelogs/fragments/1256_Migrate_zos_blockinfile_and_lineinfile.yml +++ /dev/null @@ -1,4 +0,0 @@ -trivial: - - zos_lineinfile - migrate code to use ZOAU v1.3.0. - - zos_blockinfile - migrate code to use ZOAU v1.3.0. - (https://github.com/ansible-collections/ibm_zos_core/pull/1256). diff --git a/changelogs/fragments/1257-zoau-import-zos_apf.yml b/changelogs/fragments/1257-zoau-import-zos_apf.yml deleted file mode 100644 index 71b46ba1b..000000000 --- a/changelogs/fragments/1257-zoau-import-zos_apf.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_apf - Updated ZOAU imports from the module to capture traceback. - (https://github.com/ansible-collections/ibm_zos_core/pull/1257). diff --git a/changelogs/fragments/1261-job-submit-non-utf8-chars.yml b/changelogs/fragments/1261-job-submit-non-utf8-chars.yml deleted file mode 100644 index 7f322afe4..000000000 --- a/changelogs/fragments/1261-job-submit-non-utf8-chars.yml +++ /dev/null @@ -1,9 +0,0 @@ -bugfixes: - - module_utils/job.py - job output containing non-printable characters would - crash modules. Fix now handles the error gracefully and returns a message - to the user inside `content` of the `ddname` that failed. - (https://github.com/ansible-collections/ibm_zos_core/pull/1261). -trivial: - - zos_job_submit - add test case to validate a bugfix in ZOAU v1.3.0 that - handles non-UTF8 characters correctly in a job's output. - (https://github.com/ansible-collections/ibm_zos_core/pull/1261). \ No newline at end of file diff --git a/changelogs/fragments/1265_Migrate_zos_backup_restore.yml b/changelogs/fragments/1265_Migrate_zos_backup_restore.yml deleted file mode 100644 index 9afe4afc3..000000000 --- a/changelogs/fragments/1265_Migrate_zos_backup_restore.yml +++ /dev/null @@ -1,7 +0,0 @@ -trivial: - - zos_backup_restore - Refactor zos_backup_restore module and functional tests for ZOAU v1.3.0. - (https://github.com/ansible-collections/ibm_zos_core/pull/1265). -minor_changes: - - zos_backup_restore - Add tmp_hlq option to the user interface to override the default high level qualifier - (HLQ) for temporary and backup. - (https://github.com/ansible-collections/ibm_zos_core/pull/1265). \ No newline at end of file diff --git a/changelogs/fragments/1270-quick-fix-len-of-volumes-work-around.yml b/changelogs/fragments/1270-quick-fix-len-of-volumes-work-around.yml deleted file mode 100644 index 1f6ba201d..000000000 --- a/changelogs/fragments/1270-quick-fix-len-of-volumes-work-around.yml +++ /dev/null @@ -1,5 +0,0 @@ -trivial: - - module_utils/data_set.py - len(volme) was always called on receiving - DatasetVerificationError from Dataset.create() even though volumes=None was - a valid possible outcome. The fix adds a null check to the conditional. - (https://github.com/ansible-collections/ibm_zos_core/pull/1270). \ No newline at end of file diff --git a/changelogs/fragments/1286-update-zos_archive-zos_unarchive-docs.yml b/changelogs/fragments/1286-update-zos_archive-zos_unarchive-docs.yml deleted file mode 100644 index ef213b06f..000000000 --- a/changelogs/fragments/1286-update-zos_archive-zos_unarchive-docs.yml +++ /dev/null @@ -1,5 +0,0 @@ -trivial: - - zos_archive - Updated examples to use path instead of src. - (https://github.com/ansible-collections/ibm_zos_core/pull/1286). - - zos_unarchive - Updated examples and return dict to use path instead of src. - (https://github.com/ansible-collections/ibm_zos_core/pull/1286). \ No newline at end of file diff --git a/changelogs/fragments/1292-doc-zos_tso_command-example.yml b/changelogs/fragments/1292-doc-zos_tso_command-example.yml deleted file mode 100644 index 6ed868be7..000000000 --- a/changelogs/fragments/1292-doc-zos_tso_command-example.yml +++ /dev/null @@ -1,4 +0,0 @@ -trivial: - - zos_tso_command - Added an example on how to chain multiple TSO commands such - that they are invoked together when dependent on each other. - (https://github.com/ansible-collections/ibm_zos_core/pull/1293). \ No newline at end of file diff --git a/changelogs/fragments/1295-doc-zos_ping-scp.yml b/changelogs/fragments/1295-doc-zos_ping-scp.yml deleted file mode 100644 index a9477150d..000000000 --- a/changelogs/fragments/1295-doc-zos_ping-scp.yml +++ /dev/null @@ -1,7 +0,0 @@ -trivial: - - zos_ping - Update zos_ping documentation to instruct users how - to fall back to legacy SCP when using OpenSSH 9.0 or later. - (https://github.com/ansible-collections/ibm_zos_core/pull/1295). - - zos_ping - Update zos_ping REXX source to check for python - version 3.10 or later. - (https://github.com/ansible-collections/ibm_zos_core/pull/1295). \ No newline at end of file diff --git a/changelogs/fragments/1298-Remove_local_charset_from_zos_fetch.yml b/changelogs/fragments/1298-Remove_local_charset_from_zos_fetch.yml deleted file mode 100644 index ca1ea840e..000000000 --- a/changelogs/fragments/1298-Remove_local_charset_from_zos_fetch.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_fetch - Remove argument not documented. - (https://github.com/ansible-collections/ibm_zos_core/pull/1298). \ No newline at end of file diff --git a/changelogs/fragments/1307-update-sanity-zos_copy.yml b/changelogs/fragments/1307-update-sanity-zos_copy.yml deleted file mode 100644 index 858f0b64c..000000000 --- a/changelogs/fragments/1307-update-sanity-zos_copy.yml +++ /dev/null @@ -1,10 +0,0 @@ -minor_changes: - - zos_copy - Documented `group` and `owner` options. - (https://github.com/ansible-collections/ibm_zos_core/pull/1307). - -trivial: - - zos_copy - Removed many of the variables that were passed from the - action plugin to the module, reimplementing the logic inside the - module instead. Removed the use of temp_path variable inside zos_copy - in favor of using remote_src to deal with files copied to remote. - (https://github.com/ansible-collections/ibm_zos_core/pull/1307). \ No newline at end of file diff --git a/changelogs/fragments/1320-Zos_mvs_raw_ignores_tmp_hlq.yml b/changelogs/fragments/1320-Zos_mvs_raw_ignores_tmp_hlq.yml deleted file mode 100644 index 058faf66e..000000000 --- a/changelogs/fragments/1320-Zos_mvs_raw_ignores_tmp_hlq.yml +++ /dev/null @@ -1,5 +0,0 @@ -bugfixes: - - zos_mvs_raw - The module ignored the value of `tmp_hlq` option when creating temporary data sets. - Fix now honors the value if provided and uses it as High Level Qualifier for temporary data sets created - during the module execution. - (https://github.com/ansible-collections/ibm_zos_core/pull/1320). \ No newline at end of file diff --git a/changelogs/fragments/1322-update-docstring-encode.yml b/changelogs/fragments/1322-update-docstring-encode.yml deleted file mode 100644 index dd5eb5389..000000000 --- a/changelogs/fragments/1322-update-docstring-encode.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - encode - Updated docstrings to numpy style for visual aid to developers. - (https://github.com/ansible-collections/ibm_zos_core/pull/1322). \ No newline at end of file diff --git a/changelogs/fragments/1323-Update_docstring-dd_statement.yml b/changelogs/fragments/1323-Update_docstring-dd_statement.yml deleted file mode 100644 index 6d94b2a94..000000000 --- a/changelogs/fragments/1323-Update_docstring-dd_statement.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - dd_statement - Updated docstrings to numpy style for visual aid to developers. - (https://github.com/ansible-collections/ibm_zos_core/pull/1323). \ No newline at end of file diff --git a/changelogs/fragments/1331-update-docstring-ickdsf.yml b/changelogs/fragments/1331-update-docstring-ickdsf.yml deleted file mode 100644 index 545ba95c1..000000000 --- a/changelogs/fragments/1331-update-docstring-ickdsf.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - ickdsf - Updated docstrings to numpy style for visual aid to developers. - (https://github.com/ansible-collections/ibm_zos_core/pull/1331). \ No newline at end of file diff --git a/changelogs/fragments/1332-update-docstring-import_handler.yml b/changelogs/fragments/1332-update-docstring-import_handler.yml deleted file mode 100644 index 5b32cd32e..000000000 --- a/changelogs/fragments/1332-update-docstring-import_handler.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - import_handler - Updated docstrings to numpy style for visual aid to developers. - (https://github.com/ansible-collections/ibm_zos_core/pull/1332). \ No newline at end of file diff --git a/changelogs/fragments/1333-update-docstring-job.yml b/changelogs/fragments/1333-update-docstring-job.yml deleted file mode 100644 index 124ef2cae..000000000 --- a/changelogs/fragments/1333-update-docstring-job.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - job - Updated docstrings to numpy style for visual aid to developers. - (https://github.com/ansible-collections/ibm_zos_core/pull/1333). \ No newline at end of file diff --git a/changelogs/fragments/1334-update-docstring-mcs_cmd.yml b/changelogs/fragments/1334-update-docstring-mcs_cmd.yml deleted file mode 100644 index ac2ad367f..000000000 --- a/changelogs/fragments/1334-update-docstring-mcs_cmd.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - mvs_cmd - Updated docstrings to numpy style for visual aid to developers. - (https://github.com/ansible-collections/ibm_zos_core/pull/1334). \ No newline at end of file diff --git a/changelogs/fragments/1335-update-docstring-template.yml b/changelogs/fragments/1335-update-docstring-template.yml deleted file mode 100644 index 9020c18ae..000000000 --- a/changelogs/fragments/1335-update-docstring-template.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - template - Updated docstrings to numpy style for visual aid to developers. - (https://github.com/ansible-collections/ibm_zos_core/pull/1335). \ No newline at end of file diff --git a/changelogs/fragments/1336-update-docstring-validation.yml b/changelogs/fragments/1336-update-docstring-validation.yml deleted file mode 100644 index 547103d46..000000000 --- a/changelogs/fragments/1336-update-docstring-validation.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - validation - Updated docstrings to numpy style for visual aid to developers. - (https://github.com/ansible-collections/ibm_zos_core/pull/1336). \ No newline at end of file diff --git a/changelogs/fragments/1337-update-docstring-vtoc.yml b/changelogs/fragments/1337-update-docstring-vtoc.yml deleted file mode 100644 index 71974c682..000000000 --- a/changelogs/fragments/1337-update-docstring-vtoc.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - vtoc - Updated docstrings to numpy style for visual aid to developers. - (https://github.com/ansible-collections/ibm_zos_core/pull/1337). \ No newline at end of file diff --git a/changelogs/fragments/1338-update-docstring-zoau_version_checker.yml b/changelogs/fragments/1338-update-docstring-zoau_version_checker.yml deleted file mode 100644 index 66d62760c..000000000 --- a/changelogs/fragments/1338-update-docstring-zoau_version_checker.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zoau_version_checker - Updated docstrings to numpy style for visual aid to developers. - (https://github.com/ansible-collections/ibm_zos_core/pull/1338). \ No newline at end of file diff --git a/changelogs/fragments/1340-Work_around_fix_false_positive.yml b/changelogs/fragments/1340-Work_around_fix_false_positive.yml deleted file mode 100644 index 8e8360808..000000000 --- a/changelogs/fragments/1340-Work_around_fix_false_positive.yml +++ /dev/null @@ -1,4 +0,0 @@ -bugfixes: - - zos_blockinfile - Using double quotation marks inside a block resulted in a false - positive result with ZOAU 1.3. Fix now handles this special case to avoid false negatives. - (https://github.com/ansible-collections/ibm_zos_core/pull/1340). \ No newline at end of file diff --git a/changelogs/fragments/1342-update-docstring-zos_backup_restore.yml b/changelogs/fragments/1342-update-docstring-zos_backup_restore.yml deleted file mode 100644 index 07e529c72..000000000 --- a/changelogs/fragments/1342-update-docstring-zos_backup_restore.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_backup_restore - Updated docstrings to numpy style for visual aid to developers. - (https://github.com/ansible-collections/ibm_zos_core/pull/1342). \ No newline at end of file diff --git a/changelogs/fragments/1343-update-docstring-zos_blockinline.yml b/changelogs/fragments/1343-update-docstring-zos_blockinline.yml deleted file mode 100644 index 570caa06f..000000000 --- a/changelogs/fragments/1343-update-docstring-zos_blockinline.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_blockinline - Updated docstrings to numpy style for visual aid to developers. - (https://github.com/ansible-collections/ibm_zos_core/pull/1343). \ No newline at end of file diff --git a/changelogs/fragments/1344-update-docstring-zos_copy.yml b/changelogs/fragments/1344-update-docstring-zos_copy.yml deleted file mode 100644 index 90ecb9e24..000000000 --- a/changelogs/fragments/1344-update-docstring-zos_copy.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_copy - Updated docstrings to numpy style for visual aid to developers. - (https://github.com/ansible-collections/ibm_zos_core/pull/1344). \ No newline at end of file diff --git a/changelogs/fragments/1345-max_rc_more_than_0_doesn_not_put_change_as_true.yml b/changelogs/fragments/1345-max_rc_more_than_0_doesn_not_put_change_as_true.yml deleted file mode 100644 index a09b8fa64..000000000 --- a/changelogs/fragments/1345-max_rc_more_than_0_doesn_not_put_change_as_true.yml +++ /dev/null @@ -1,5 +0,0 @@ -bugfixes: - - zos_job_submit - when the argument max_rc was different than 0 the changed response returned - as false. Fix now return a changed response as true when the rc is not 0 and max_rc is above - or equal to the value of the job. - (https://github.com/ansible-collections/ibm_zos_core/pull/1345). \ No newline at end of file diff --git a/changelogs/fragments/1347-update-docstring-zos_data_set.yml b/changelogs/fragments/1347-update-docstring-zos_data_set.yml deleted file mode 100644 index 581ab1aa9..000000000 --- a/changelogs/fragments/1347-update-docstring-zos_data_set.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_data_set - Updated docstrings to numpy style for visual aid to developers. - (https://github.com/ansible-collections/ibm_zos_core/pull/1347). \ No newline at end of file diff --git a/changelogs/fragments/1348-update-docstring-zos_encode.yml b/changelogs/fragments/1348-update-docstring-zos_encode.yml deleted file mode 100644 index de9c11c17..000000000 --- a/changelogs/fragments/1348-update-docstring-zos_encode.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_encode - Updated docstrings to numpy style for visual aid to developers. - (https://github.com/ansible-collections/ibm_zos_core/pull/1348). \ No newline at end of file diff --git a/changelogs/fragments/1349-update-docstring-zos_fetch.yml b/changelogs/fragments/1349-update-docstring-zos_fetch.yml deleted file mode 100644 index a38504c36..000000000 --- a/changelogs/fragments/1349-update-docstring-zos_fetch.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_fetch - Updated docstrings to numpy style for visual aid to developers. - (https://github.com/ansible-collections/ibm_zos_core/pull/1349). \ No newline at end of file diff --git a/changelogs/fragments/1350-update-docstring-zos_find.yml b/changelogs/fragments/1350-update-docstring-zos_find.yml deleted file mode 100644 index 48c1fbce1..000000000 --- a/changelogs/fragments/1350-update-docstring-zos_find.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_find - Updated docstrings to numpy style for visual aid to developers. - (https://github.com/ansible-collections/ibm_zos_core/pull/1350). \ No newline at end of file diff --git a/changelogs/fragments/1351-update-docstring-zos_gather_facts.yml b/changelogs/fragments/1351-update-docstring-zos_gather_facts.yml deleted file mode 100644 index 31fe8dfda..000000000 --- a/changelogs/fragments/1351-update-docstring-zos_gather_facts.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_gather_facts - Updated docstrings to numpy style for visual aid to developers. - (https://github.com/ansible-collections/ibm_zos_core/pull/1351). \ No newline at end of file diff --git a/changelogs/fragments/1352-update-docstring-zos_job_output.yml b/changelogs/fragments/1352-update-docstring-zos_job_output.yml deleted file mode 100644 index 78aac0cac..000000000 --- a/changelogs/fragments/1352-update-docstring-zos_job_output.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_job_output - Updated docstrings to numpy style for visual aid to developers. - (https://github.com/ansible-collections/ibm_zos_core/pull/1352). \ No newline at end of file diff --git a/changelogs/fragments/1353-update-docstring-zos_job_query.yml b/changelogs/fragments/1353-update-docstring-zos_job_query.yml deleted file mode 100644 index 550be9107..000000000 --- a/changelogs/fragments/1353-update-docstring-zos_job_query.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_job_query - Updated docstrings to numpy style for visual aid to developers. - (https://github.com/ansible-collections/ibm_zos_core/pull/1353). \ No newline at end of file diff --git a/changelogs/fragments/1354-update-docstring-zos_job_submit.yml b/changelogs/fragments/1354-update-docstring-zos_job_submit.yml deleted file mode 100644 index c2c0a4b99..000000000 --- a/changelogs/fragments/1354-update-docstring-zos_job_submit.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_job_submit - Updated docstrings to numpy style for visual aid to developers. - (https://github.com/ansible-collections/ibm_zos_core/pull/1354). \ No newline at end of file diff --git a/changelogs/fragments/1355-update-docstring-zos_lineinfile.yml b/changelogs/fragments/1355-update-docstring-zos_lineinfile.yml deleted file mode 100644 index 3840b2862..000000000 --- a/changelogs/fragments/1355-update-docstring-zos_lineinfile.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_lineinfile - Updated docstrings to numpy style for visual aid to developers. - (https://github.com/ansible-collections/ibm_zos_core/pull/1355). \ No newline at end of file diff --git a/changelogs/fragments/1356-update-docstring-zos_mount.yml b/changelogs/fragments/1356-update-docstring-zos_mount.yml deleted file mode 100644 index a2c09caa5..000000000 --- a/changelogs/fragments/1356-update-docstring-zos_mount.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_mount - Updated docstrings to numpy style for visual aid to developers. - (https://github.com/ansible-collections/ibm_zos_core/pull/1356). \ No newline at end of file diff --git a/changelogs/fragments/1361-update-docstring-zos_operator.yml b/changelogs/fragments/1361-update-docstring-zos_operator.yml deleted file mode 100644 index a1b928f14..000000000 --- a/changelogs/fragments/1361-update-docstring-zos_operator.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_operator - Updated docstrings to numpy style for visual aid to developers. - (https://github.com/ansible-collections/ibm_zos_core/pull/1361). \ No newline at end of file diff --git a/changelogs/fragments/1362-update-docstring-file.yml b/changelogs/fragments/1362-update-docstring-file.yml deleted file mode 100644 index 3a86d6032..000000000 --- a/changelogs/fragments/1362-update-docstring-file.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - file - Updated docstrings to numpy style for visual aid to developers. - (https://github.com/ansible-collections/ibm_zos_core/pull/1362). \ No newline at end of file diff --git a/changelogs/fragments/1363-update-docstring-system.yml b/changelogs/fragments/1363-update-docstring-system.yml deleted file mode 100644 index 461a4c9b9..000000000 --- a/changelogs/fragments/1363-update-docstring-system.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - system - Updated docstrings to numpy style for visual aid to developers. - (https://github.com/ansible-collections/ibm_zos_core/pull/1363). diff --git a/changelogs/fragments/1374-enhancement-zos-find-gdg-gds-special-chars.yml b/changelogs/fragments/1374-enhancement-zos-find-gdg-gds-special-chars.yml deleted file mode 100644 index 33d43e56e..000000000 --- a/changelogs/fragments/1374-enhancement-zos-find-gdg-gds-special-chars.yml +++ /dev/null @@ -1,3 +0,0 @@ -minor_changes: - - zos_find - added support for GDG/GDS and special characters - (https://github.com/ansible-collections/ibm_zos_core/pull/1518). diff --git a/changelogs/fragments/1380-enhancement-add-sybols-zos_apf.yml b/changelogs/fragments/1380-enhancement-add-sybols-zos_apf.yml deleted file mode 100644 index 1cfcf96a7..000000000 --- a/changelogs/fragments/1380-enhancement-add-sybols-zos_apf.yml +++ /dev/null @@ -1,3 +0,0 @@ -minor_changes: - - zos_apf - Change input to auto-escape 'library' names containing symbols - (https://github.com/ansible-collections/ibm_zos_core/pull/1493). diff --git a/changelogs/fragments/1384-update-docstring-backup.yml b/changelogs/fragments/1384-update-docstring-backup.yml deleted file mode 100644 index 7c5689c61..000000000 --- a/changelogs/fragments/1384-update-docstring-backup.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - backup - Updated docstrings to numpy style for visual aid to developers. - (https://github.com/ansible-collections/ibm_zos_core/pull/1384). \ No newline at end of file diff --git a/changelogs/fragments/1385-update-docstring-better_arg_parser.yml b/changelogs/fragments/1385-update-docstring-better_arg_parser.yml deleted file mode 100644 index 1b4a0b0f3..000000000 --- a/changelogs/fragments/1385-update-docstring-better_arg_parser.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - better_arg_parser - Updated docstrings to numpy style for visual aid to developers. - (https://github.com/ansible-collections/ibm_zos_core/pull/1385). \ No newline at end of file diff --git a/changelogs/fragments/1386-gdg-symbols-support.yml b/changelogs/fragments/1386-gdg-symbols-support.yml deleted file mode 100644 index d920172b8..000000000 --- a/changelogs/fragments/1386-gdg-symbols-support.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - module_utils/data_set.py - Added new functions to support GDG and symbols. - (https://github.com/ansible-collections/ibm_zos_core/pull/1467). \ No newline at end of file diff --git a/changelogs/fragments/1387-update-docstring-copy.yml b/changelogs/fragments/1387-update-docstring-copy.yml deleted file mode 100644 index 6891259f0..000000000 --- a/changelogs/fragments/1387-update-docstring-copy.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - copy - Updated docstrings to numpy style for visual aid to developers. - (https://github.com/ansible-collections/ibm_zos_core/pull/1387). \ No newline at end of file diff --git a/changelogs/fragments/1388-lowercase-choices.yml b/changelogs/fragments/1388-lowercase-choices.yml deleted file mode 100644 index 0f14f42fe..000000000 --- a/changelogs/fragments/1388-lowercase-choices.yml +++ /dev/null @@ -1,87 +0,0 @@ -breaking_changes: - - zos_archive - option ``terse_pack`` no longer accepts uppercase choices, - users should replace them with lowercase ones. - Suboption ``type`` of ``dest_data_set`` no longer accepts uppercase - choices, users should replace them with lowercase ones. - Suboption ``space_type`` of ``dest_data_set`` no longer accepts - uppercase choices, users should replace them with lowercase ones. - Suboption ``record_format`` of ``dest_data_set`` no longer accepts - uppercase choices, users should replace them with lowercase ones. - (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - - zos_backup_restore - option ``space_type`` no longer accepts uppercase - choices, users should replace them with lowercase ones. - (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - - zos_copy - suboption ``type`` of ``dest_data_set`` no longer accepts - uppercase choices, users should replace them with lowercase ones. - Suboption ``space_type`` of ``dest_data_set`` no longer accepts - uppercase choices, users should replace them with lowercase ones. - Suboption ``record_format`` of ``dest_data_set`` no longer accepts - uppercase choices, users should replace them with lowercase ones. - (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - - zos_data_set - option ``type`` no longer accepts uppercase choices, - users should replace them with lowercase ones. - Option ``space_type`` no longer accepts uppercase choices, users - should replace them with lowercase ones. - Option ``record_format`` no longer accepts uppercase choices, users - should replace them with lowercase ones. - Options inside ``batch`` no longer accept uppercase choices, users should - replace them with lowercase ones. - (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - - zos_job_submit - option ``location`` no longer accepts uppercase choices, - users should replace them with lowercase ones. - (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - - zos_mount - option ``fs_type`` no longer accepts uppercase choices, - users should replace them with lowercase ones. - Option ``unmount_opts`` no longer accepts uppercase choices, users - should replace them with lowercase ones. - Option ``mount_opts`` no longer accepts uppercase choices, users - should replace them with lowercase ones. - Option ``tag_untagged`` no longer accepts uppercase choices, users - should replace them with lowercase ones. - Option ``automove`` no longer accepts uppercase choices, users - should replace them with lowercase ones. - (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - - zos_mvs_raw - suboption ``type`` of ``dd_data_set`` no longer accepts - uppercase choices, users should replace them with lowercase ones. - Suboptions ``disposition_normal`` and ``disposition_abnormal`` of - ``dd_data_set`` no longer accept ``catlg`` and ``uncatlg`` as choices. - This also applies when defining a ``dd_data_set`` inside ``dd_concat``. - Suboption ``space_type`` of ``dd_data_set`` no longer accepts - uppercase choices, users should replace them with lowercase ones. - Suboption ``record_format`` of ``dd_data_set`` no longer accepts - uppercase choices, users should replace them with lowercase ones. - Suboption ``record_format`` of ``dd_unix`` no longer accepts - uppercase choices, users should replace them with lowercase ones. - Options inside ``dd_concat`` no longer accept uppercase choices, - users should replace them with lowercase ones. - (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - - zos_unarchive - suboption ``type`` of ``dest_data_set`` no longer accepts - uppercase choices, users should replace them with lowercase ones. - Suboption ``space_type`` of ``dest_data_set`` no longer accepts - uppercase choices, users should replace them with lowercase ones. - Suboption ``record_format`` of ``dest_data_set`` no longer accepts - uppercase choices, users should replace them with lowercase ones. - (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - -trivial: - - zos_blockinfile - updated tests to use lowercase options when calling - another module in the collection. - (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - - zos_find - updated tests to use lowercase options when calling - another module in the collection. - (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - - zos_lineinfile - updated tests to use lowercase options when calling - another module in the collection. - (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - - zos_encode - updated tests to use lowercase options when calling - another module in the collection. - (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - - zos_fetch - updated tests to use lowercase options when calling - another module in the collection. - (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - - zos_job_output - updated tests to use lowercase options when calling - another module in the collection. - (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - - zos_job_query - updated tests to use lowercase options when calling - another module in the collection. - (https://github.com/ansible-collections/ibm_zos_core/pull/1388). \ No newline at end of file diff --git a/changelogs/fragments/1390-update-docstring-zos_script.yml b/changelogs/fragments/1390-update-docstring-zos_script.yml deleted file mode 100644 index 792bf9698..000000000 --- a/changelogs/fragments/1390-update-docstring-zos_script.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_script - Updated docstrings to numpy style for visual aid to developers. - (https://github.com/ansible-collections/ibm_zos_core/pull/1390). \ No newline at end of file diff --git a/changelogs/fragments/1391-update-docstring-zos_tso_command.yml b/changelogs/fragments/1391-update-docstring-zos_tso_command.yml deleted file mode 100644 index c435799d4..000000000 --- a/changelogs/fragments/1391-update-docstring-zos_tso_command.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_tso_command - Updated docstrings to numpy style for visual aid to developers. - (https://github.com/ansible-collections/ibm_zos_core/pull/1391). \ No newline at end of file diff --git a/changelogs/fragments/1392-update-docstring-zos_volume_init.yml b/changelogs/fragments/1392-update-docstring-zos_volume_init.yml deleted file mode 100644 index 4536f186c..000000000 --- a/changelogs/fragments/1392-update-docstring-zos_volume_init.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_volume_init - Updated docstrings to numpy style for visual aid to developers. - (https://github.com/ansible-collections/ibm_zos_core/pull/1392). \ No newline at end of file diff --git a/changelogs/fragments/1393-update-docstring-zos_apf.yml b/changelogs/fragments/1393-update-docstring-zos_apf.yml deleted file mode 100644 index 8a89b7aa0..000000000 --- a/changelogs/fragments/1393-update-docstring-zos_apf.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_apf - Updated docstrings to numpy style for visual aid to developers. - (https://github.com/ansible-collections/ibm_zos_core/pull/1393). \ No newline at end of file diff --git a/changelogs/fragments/1394-Update_docstring-zos_operator_action_query.yml b/changelogs/fragments/1394-Update_docstring-zos_operator_action_query.yml deleted file mode 100644 index 25c34fd89..000000000 --- a/changelogs/fragments/1394-Update_docstring-zos_operator_action_query.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_operator_action_query - Updated docstrings to numpy style for visual aid to developers. - (https://github.com/ansible-collections/ibm_zos_core/pull/1394). \ No newline at end of file diff --git a/changelogs/fragments/1415-Update_docstring-zos_archive.yml b/changelogs/fragments/1415-Update_docstring-zos_archive.yml deleted file mode 100644 index 77f607a62..000000000 --- a/changelogs/fragments/1415-Update_docstring-zos_archive.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_archive - Updated docstrings to numpy style for visual aid to developers. - (https://github.com/ansible-collections/ibm_zos_core/pull/1415). \ No newline at end of file diff --git a/changelogs/fragments/1443-zos_find-filter-size.yml b/changelogs/fragments/1443-zos_find-filter-size.yml deleted file mode 100644 index a5a8ce029..000000000 --- a/changelogs/fragments/1443-zos_find-filter-size.yml +++ /dev/null @@ -1,4 +0,0 @@ -bugfixes: - - zos_find - Filter size failed if a PDS/E matched the pattern. Fix now gets the correct size - for PDS/Es. - (https://github.com/ansible-collections/ibm_zos_core/pull/1443). \ No newline at end of file diff --git a/changelogs/fragments/1470-redesign_mvs_raw.yml b/changelogs/fragments/1470-redesign_mvs_raw.yml deleted file mode 100644 index 5fc3ae6dd..000000000 --- a/changelogs/fragments/1470-redesign_mvs_raw.yml +++ /dev/null @@ -1,3 +0,0 @@ -minor_changes: - - zos_mvs_raw - Redesign the wrappers of dd clases to use properly the arguments. - (https://github.com/ansible-collections/ibm_zos_core/pull/1470). \ No newline at end of file diff --git a/changelogs/fragments/1484-update-ac-tool-ansible-lint.yml b/changelogs/fragments/1484-update-ac-tool-ansible-lint.yml deleted file mode 100644 index bb5f247a9..000000000 --- a/changelogs/fragments/1484-update-ac-tool-ansible-lint.yml +++ /dev/null @@ -1,4 +0,0 @@ -trivial: - - ac - Update ac tool with ansible-lint 6.22, update mount tables, add - support so any venv can be started. - (https://github.com/ansible-collections/ibm_zos_core/pull/1484). \ No newline at end of file diff --git a/changelogs/fragments/1488-zos_copy-refactor-force.yml b/changelogs/fragments/1488-zos_copy-refactor-force.yml deleted file mode 100644 index ec91f11aa..000000000 --- a/changelogs/fragments/1488-zos_copy-refactor-force.yml +++ /dev/null @@ -1,4 +0,0 @@ -trivial: - - zos_copy - use keyword argument force when copying data sets, instead - of a dictionary. - (https://github.com/ansible-collections/ibm_zos_core/pull/1488). \ No newline at end of file diff --git a/changelogs/fragments/1495-default-values-data-set-class.yml b/changelogs/fragments/1495-default-values-data-set-class.yml deleted file mode 100644 index eb8118ad2..000000000 --- a/changelogs/fragments/1495-default-values-data-set-class.yml +++ /dev/null @@ -1,4 +0,0 @@ -trivial: - - module_utils/data_set.py - add default values to the init method of - MVSDataSet. - (https://github.com/ansible-collections/ibm_zos_core/pull/1495). \ No newline at end of file diff --git a/changelogs/fragments/1496-fix-gds-resolve.yml b/changelogs/fragments/1496-fix-gds-resolve.yml deleted file mode 100644 index 17683da96..000000000 --- a/changelogs/fragments/1496-fix-gds-resolve.yml +++ /dev/null @@ -1,4 +0,0 @@ -trivial: - - module_utils/data_set.py - resolve_gds_absolute_name was returning a ZOAU dataset type instead - of string. - (https://github.com/ansible-collections/ibm_zos_core/pull/1496). \ No newline at end of file diff --git a/changelogs/fragments/1497-gdg-support-zos-job-submit.yml b/changelogs/fragments/1497-gdg-support-zos-job-submit.yml deleted file mode 100644 index d03309289..000000000 --- a/changelogs/fragments/1497-gdg-support-zos-job-submit.yml +++ /dev/null @@ -1,4 +0,0 @@ -minor_changes: - - zos_job_submit - add support for generation data groups and generation - data sets as sources for jobs. - (https://github.com/ansible-collections/ibm_zos_core/pull/1497) \ No newline at end of file diff --git a/changelogs/fragments/1504-zos_data_set-gdg-support.yml b/changelogs/fragments/1504-zos_data_set-gdg-support.yml deleted file mode 100644 index 42becb638..000000000 --- a/changelogs/fragments/1504-zos_data_set-gdg-support.yml +++ /dev/null @@ -1,5 +0,0 @@ -minor_changes: - - zos_data_set - Added support for GDG and GDS relative name notation to create, delete, - catalog and uncatalog a data set. Added support for data set names with special characters - like $, /#, /- and @. - (https://github.com/ansible-collections/ibm_zos_core/pull/1504). \ No newline at end of file diff --git a/changelogs/fragments/1507-zos_operator-docs.yml b/changelogs/fragments/1507-zos_operator-docs.yml deleted file mode 100644 index e12e66eb6..000000000 --- a/changelogs/fragments/1507-zos_operator-docs.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_operator - Added a doc entry to inform users they need to escape certain special chars. - (https://github.com/ansible-collections/ibm_zos_core/pull/1507). \ No newline at end of file diff --git a/changelogs/fragments/1511-zos_archive_unarchive-gdg-support.yml b/changelogs/fragments/1511-zos_archive_unarchive-gdg-support.yml deleted file mode 100644 index e94c81ec2..000000000 --- a/changelogs/fragments/1511-zos_archive_unarchive-gdg-support.yml +++ /dev/null @@ -1,7 +0,0 @@ -minor_changes: - - zos_archive - Added support for GDG and GDS relative name notation to archive data sets. - Added support for data set names with special characters like $, /#, /- and @. - (https://github.com/ansible-collections/ibm_zos_core/pull/1511). - - zos_unarchive - Added support for data set names with special characters - like $, /#, /- and @. - (https://github.com/ansible-collections/ibm_zos_core/pull/1511). \ No newline at end of file diff --git a/changelogs/fragments/1512-bugfix-zos_job_submit-error-type.yml b/changelogs/fragments/1512-bugfix-zos_job_submit-error-type.yml deleted file mode 100644 index 5078064ec..000000000 --- a/changelogs/fragments/1512-bugfix-zos_job_submit-error-type.yml +++ /dev/null @@ -1,5 +0,0 @@ -bugfixes: - - zos_job_submit - Was not propagating any error types UnicodeDecodeError, - JSONDecodeError, TypeError, KeyError when encountered, now the error - message shares the type error. - (https://github.com/ansible-collections/ibm_zos_core/pull/1560). \ No newline at end of file diff --git a/changelogs/fragments/1515-gdg_batch_creation.yml b/changelogs/fragments/1515-gdg_batch_creation.yml deleted file mode 100644 index 019705699..000000000 --- a/changelogs/fragments/1515-gdg_batch_creation.yml +++ /dev/null @@ -1,4 +0,0 @@ -trivial: - - zos_data_set - Batch mode when type=gdg failed asking for limit option. - Fix now accepts limit as part of batch option. - (https://github.com/ansible-collections/ibm_zos_core/pull/1515). \ No newline at end of file diff --git a/changelogs/fragments/1516-lineinfile_blockinfile_gdgsgds_and_special_character_support.yml b/changelogs/fragments/1516-lineinfile_blockinfile_gdgsgds_and_special_character_support.yml deleted file mode 100644 index 6a32a484a..000000000 --- a/changelogs/fragments/1516-lineinfile_blockinfile_gdgsgds_and_special_character_support.yml +++ /dev/null @@ -1,9 +0,0 @@ -minor_changes: - - zos_lineinfile - Added support for GDG and GDS relative name notation to use a data set. - And backup in new generations. Added support for data set names with special characters - like $, /#, /- and @. - (https://github.com/ansible-collections/ibm_zos_core/pull/1516). - - zos_blockinfile - Added support for GDG and GDS relative name notation to use a data set. - And backup in new generations. Added support for data set names with special characters - like $, /#, /- and @. - (https://github.com/ansible-collections/ibm_zos_core/pull/1516). \ No newline at end of file diff --git a/changelogs/fragments/1519-zos_fetch-gdg-support.yml b/changelogs/fragments/1519-zos_fetch-gdg-support.yml deleted file mode 100644 index dd9126ddf..000000000 --- a/changelogs/fragments/1519-zos_fetch-gdg-support.yml +++ /dev/null @@ -1,4 +0,0 @@ -minor_changes: - - zos_fetch - add support for fetching generation data groups and - generation data sets. - (https://github.com/ansible-collections/ibm_zos_core/pull/1519) \ No newline at end of file diff --git a/changelogs/fragments/1525-mvs_raw_support_gdg_gds_special_character.yml b/changelogs/fragments/1525-mvs_raw_support_gdg_gds_special_character.yml deleted file mode 100644 index 969347bdc..000000000 --- a/changelogs/fragments/1525-mvs_raw_support_gdg_gds_special_character.yml +++ /dev/null @@ -1,4 +0,0 @@ -minor_changes: - - zos_mvs_raw - Added support for GDG and GDS relative name notation to use a data set. - Added support for data set names with special characters like $, /#, /- and @. - (https://github.com/ansible-collections/ibm_zos_core/pull/1525). \ No newline at end of file diff --git a/changelogs/fragments/1527-zos_backup-gdg.yml b/changelogs/fragments/1527-zos_backup-gdg.yml deleted file mode 100644 index 9d84127b9..000000000 --- a/changelogs/fragments/1527-zos_backup-gdg.yml +++ /dev/null @@ -1,5 +0,0 @@ -minor_changes: - - zos_data_set - Added support for GDS relative name notation to include or exclude data sets when - operation is backup. Added support for data set names with special characters - like $, /#, and @. - (https://github.com/ansible-collections/ibm_zos_core/pull/1527). \ No newline at end of file diff --git a/changelogs/fragments/1531-zos_encode_gdg_support.yml b/changelogs/fragments/1531-zos_encode_gdg_support.yml deleted file mode 100644 index 44735aff2..000000000 --- a/changelogs/fragments/1531-zos_encode_gdg_support.yml +++ /dev/null @@ -1,4 +0,0 @@ -minor_changes: - - zos_encode - add support for encoding generation data sets (GDS), as well - as using a GDS for backup. - (https://github.com/ansible-collections/ibm_zos_core/pull/1531). \ No newline at end of file diff --git a/changelogs/fragments/1535-raise-error-in-module-util-data_set-function-data_set_cataloged.yml b/changelogs/fragments/1535-raise-error-in-module-util-data_set-function-data_set_cataloged.yml deleted file mode 100644 index b1501b050..000000000 --- a/changelogs/fragments/1535-raise-error-in-module-util-data_set-function-data_set_cataloged.yml +++ /dev/null @@ -1,5 +0,0 @@ -bugfixes: - - module_util/data_set.py - DataSet.data_set_cataloged function previously only returned - True or False, but failed to account for exceptions which occurred during the LISTCAT. - The fix now raises an MVSCmdExecError if the return code from LISTCAT is too high. - (https://github.com/ansible-collections/ibm_zos_core/pull/1535). diff --git a/changelogs/fragments/1541-output_mvs_raw_gds_positive_was_false_positive.yml b/changelogs/fragments/1541-output_mvs_raw_gds_positive_was_false_positive.yml deleted file mode 100644 index 3ed2efe0a..000000000 --- a/changelogs/fragments/1541-output_mvs_raw_gds_positive_was_false_positive.yml +++ /dev/null @@ -1,3 +0,0 @@ -minor_changes: - - zos_mvs_raw - Added support for GDG and GDS relative positive name notation to use a data set. - (https://github.com/ansible-collections/ibm_zos_core/pull/1541). \ No newline at end of file diff --git a/changelogs/fragments/1543-mvs_raw_fix_verbose_and_first_character.yml b/changelogs/fragments/1543-mvs_raw_fix_verbose_and_first_character.yml deleted file mode 100644 index 0cbcc202e..000000000 --- a/changelogs/fragments/1543-mvs_raw_fix_verbose_and_first_character.yml +++ /dev/null @@ -1,4 +0,0 @@ -bugfixes: - - zos_mvs_raw - DD_output first character from each line was missing. Change now includes the first character - of each line. - (https://github.com/ansible-collections/ibm_zos_core/pull/1543). diff --git a/changelogs/fragments/1550-lower_case_idcams_utility.yml b/changelogs/fragments/1550-lower_case_idcams_utility.yml deleted file mode 100644 index 121ca56ae..000000000 --- a/changelogs/fragments/1550-lower_case_idcams_utility.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_operator - Pass capital letters to the command to idcams utility. - (https://github.com/ansible-collections/ibm_zos_core/pull/1550). \ No newline at end of file diff --git a/changelogs/fragments/1552-readme-support-updates.yml b/changelogs/fragments/1552-readme-support-updates.yml deleted file mode 100644 index 43611e88e..000000000 --- a/changelogs/fragments/1552-readme-support-updates.yml +++ /dev/null @@ -1,10 +0,0 @@ -trivial: - - README - updated formatting and how it was written to be clearer. - (https://github.com/ansible-collections/ibm_zos_core/pull/1559). - - - release_notes.rst - removed known issue that was incorrect for v1.9.x. - (https://github.com/ansible-collections/ibm_zos_core/pull/1559). - - - releases_maintenance.rst - Added new lifecycle stages and supported - shell types. - (https://github.com/ansible-collections/ibm_zos_core/pull/1559). diff --git a/changelogs/fragments/1553-Console_parallel.yml b/changelogs/fragments/1553-Console_parallel.yml deleted file mode 100644 index 3c879a0ce..000000000 --- a/changelogs/fragments/1553-Console_parallel.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_operator - Validate the use of two terminals with a parallel call of zos_operator. - (https://github.com/ansible-collections/ibm_zos_core/pull/1553). \ No newline at end of file diff --git a/changelogs/fragments/1561-remove_deep_copy.yml b/changelogs/fragments/1561-remove_deep_copy.yml deleted file mode 100644 index b6cdd4c75..000000000 --- a/changelogs/fragments/1561-remove_deep_copy.yml +++ /dev/null @@ -1,10 +0,0 @@ -minor_changes: - - zos_job_submit - Improved the copy to remote mechanic to avoid using deepcopy that could - result in failure for some systems. - (https://github.com/ansible-collections/ibm_zos_core/pull/1561). - - zos_script - Improved the copy to remote mechanic to avoid using deepcopy that could - result in failure for some systems. - (https://github.com/ansible-collections/ibm_zos_core/pull/1561). - - zos_unarchive - Improved the copy to remote mechanic to avoid using deepcopy that could - result in failure for some systems. - (https://github.com/ansible-collections/ibm_zos_core/pull/1561). \ No newline at end of file diff --git a/changelogs/fragments/1563-zos_tso_command-gdg-support.yml b/changelogs/fragments/1563-zos_tso_command-gdg-support.yml deleted file mode 100644 index aadbbfa60..000000000 --- a/changelogs/fragments/1563-zos_tso_command-gdg-support.yml +++ /dev/null @@ -1,4 +0,0 @@ -minor_changes: - - zos_mvs_raw - Added support for GDG and GDS relative name notation to use a data set name. - Added support for data set names with special characters like $, /#, /- and @. - (https://github.com/ansible-collections/ibm_zos_core/pull/1563). \ No newline at end of file diff --git a/changelogs/fragments/1564-zos_copy_gdg_support.yml b/changelogs/fragments/1564-zos_copy_gdg_support.yml deleted file mode 100644 index b9f908bdb..000000000 --- a/changelogs/fragments/1564-zos_copy_gdg_support.yml +++ /dev/null @@ -1,4 +0,0 @@ -minor_changes: - - zos_copy - add support for copying generation data sets (GDS) and - generation data groups (GDG), as well as using a GDS for backup. - (https://github.com/ansible-collections/ibm_zos_core/pull/1564). \ No newline at end of file diff --git a/changelogs/fragments/1565-remove-deprecated-pipes-library.yml b/changelogs/fragments/1565-remove-deprecated-pipes-library.yml deleted file mode 100644 index e031caa50..000000000 --- a/changelogs/fragments/1565-remove-deprecated-pipes-library.yml +++ /dev/null @@ -1,11 +0,0 @@ -trivial: - - zos_find - remove deprecated library pipes in favor of shlex. - (https://github.com/ansible-collections/ibm_zos_core/pull/1565). - - zos_mvs_raw - remove deprecated library pipes in favor of shlex. - (https://github.com/ansible-collections/ibm_zos_core/pull/1565). - - module_utils/backup.py - remove deprecated library pipes in favor of shlex. - (https://github.com/ansible-collections/ibm_zos_core/pull/1565). - - module_utils/copy.py - remove deprecated library pipes in favor of shlex. - (https://github.com/ansible-collections/ibm_zos_core/pull/1565). - - module_utils/encode.py - remove deprecated library pipes in favor of shlex. - (https://github.com/ansible-collections/ibm_zos_core/pull/1565). diff --git a/changelogs/fragments/1570-compute-member-name-zos_copy.yml b/changelogs/fragments/1570-compute-member-name-zos_copy.yml deleted file mode 100644 index d57a94a8f..000000000 --- a/changelogs/fragments/1570-compute-member-name-zos_copy.yml +++ /dev/null @@ -1,6 +0,0 @@ -bugfixes: - - zos_copy - a regression in version 1.4.0 made the module stop automatically - computing member names when copying a single file into a PDS/E. Fix now - lets a user copy a single file into a PDS/E without adding a member in the - dest option. - (https://github.com/ansible-collections/ibm_zos_core/pull/1570). \ No newline at end of file diff --git a/changelogs/fragments/1623-zos_copy-avoid-opercmd.yml b/changelogs/fragments/1623-zos_copy-avoid-opercmd.yml deleted file mode 100644 index 4b7546fcb..000000000 --- a/changelogs/fragments/1623-zos_copy-avoid-opercmd.yml +++ /dev/null @@ -1,5 +0,0 @@ -bugfixes: - - zos_copy - module would use opercmd to check if a non existent - destination data set is locked. Fix now only checks if the destination - is already present. - (https://github.com/ansible-collections/ibm_zos_core/pull/1623). \ No newline at end of file diff --git a/changelogs/fragments/1631-enabler-zos_mount-special-character-support.yml b/changelogs/fragments/1631-enabler-zos_mount-special-character-support.yml deleted file mode 100644 index 7880390a1..000000000 --- a/changelogs/fragments/1631-enabler-zos_mount-special-character-support.yml +++ /dev/null @@ -1,4 +0,0 @@ -minor_changes: - - zos_mount - Added support for data set names with special characters ($, /#, /- and @). - This is for both src and backup data set names. - (https://github.com/ansible-collections/ibm_zos_core/pull/1631). \ No newline at end of file diff --git a/changelogs/fragments/1632-Validate_to_restore_keep_orginial_hlq.yml b/changelogs/fragments/1632-Validate_to_restore_keep_orginial_hlq.yml deleted file mode 100644 index 1bd6b4d6a..000000000 --- a/changelogs/fragments/1632-Validate_to_restore_keep_orginial_hlq.yml +++ /dev/null @@ -1,5 +0,0 @@ -minor_changes: - - zos_backup_restore - Redefines the default behavior of module option `hlq`. - When option `operation` is set to `restore` and the `hlq` is not provided, - the original high level qualifiers in a backup will be used for a restore. - (https://github.com/ansible-collections/ibm_zos_core/pull/1632). diff --git a/changelogs/fragments/1633-zos_mvs_raw_tests_portability.yml b/changelogs/fragments/1633-zos_mvs_raw_tests_portability.yml deleted file mode 100644 index b6ecfca69..000000000 --- a/changelogs/fragments/1633-zos_mvs_raw_tests_portability.yml +++ /dev/null @@ -1,4 +0,0 @@ -trivial: - - test_zos_mvs_raw_func.py - Remove the use of hard coded dataset - names. - (https://github.com/ansible-collections/ibm_zos_core/pull/1633). \ No newline at end of file diff --git a/changelogs/fragments/1634-updates-for-non-utf8-depr-warning.yml b/changelogs/fragments/1634-updates-for-non-utf8-depr-warning.yml deleted file mode 100644 index 4cb001049..000000000 --- a/changelogs/fragments/1634-updates-for-non-utf8-depr-warning.yml +++ /dev/null @@ -1,25 +0,0 @@ -minor_changes: - - zos_mvs_raw - Un-mappable chars in stdout/stderr streams are now replaced with the replacement character. - (https://github.com/ansible-collections/ibm_zos_core/pull/1634). - - zos_script - Un-mappable chars in stdout/stderr streams are now replaced with the replacement character. - (https://github.com/ansible-collections/ibm_zos_core/pull/1634). - - zos_tso_command - Un-mappable chars in stdout/stderr streams are now replaced with the replacement character. - (https://github.com/ansible-collections/ibm_zos_core/pull/1634). - -trivial: - - zos_archive - Un-mappable chars in stdout/stderr streams are now replaced with the replacement character. - (https://github.com/ansible-collections/ibm_zos_core/pull/1634). - - zos_blockinfile - Un-mappable chars in stdout/stderr streams are now replaced with the replacement character. - (https://github.com/ansible-collections/ibm_zos_core/pull/1634). - - zos_copy - Un-mappable chars in stdout/stderr streams are now replaced with the replacement character. - (https://github.com/ansible-collections/ibm_zos_core/pull/1634). - - zos_fetch - Un-mappable chars in stdout/stderr streams are now replaced with the replacement character. - (https://github.com/ansible-collections/ibm_zos_core/pull/1634). - - zos_find - Un-mappable chars in stdout/stderr streams are now replaced with the replacement character. - (https://github.com/ansible-collections/ibm_zos_core/pull/1634). - - zos_lineinfile - Un-mappable chars in stdout/stderr streams are now replaced with the replacement character. - (https://github.com/ansible-collections/ibm_zos_core/pull/1634). - - zos_mount - Un-mappable chars in stdout/stderr streams are now replaced with the replacement character. - (https://github.com/ansible-collections/ibm_zos_core/pull/1634). - - zos_unarchive - Un-mappable chars in stdout/stderr streams are now replaced with the replacement character. - (https://github.com/ansible-collections/ibm_zos_core/pull/1634). \ No newline at end of file diff --git a/changelogs/fragments/1635-backup_restore_portability.yml b/changelogs/fragments/1635-backup_restore_portability.yml deleted file mode 100644 index dd5bf061a..000000000 --- a/changelogs/fragments/1635-backup_restore_portability.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - test_zos_backup_restore - Remove the use of hard coded hlq and files names. - (https://github.com/ansible-collections/ibm_zos_core/pull/1635). \ No newline at end of file diff --git a/changelogs/fragments/1639-zos_tso_command_portability.yml b/changelogs/fragments/1639-zos_tso_command_portability.yml deleted file mode 100644 index 8dbb40376..000000000 --- a/changelogs/fragments/1639-zos_tso_command_portability.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - test_zos_tso_command_func.py - Remove the use of hard coded user. - (https://github.com/ansible-collections/ibm_zos_core/pull/1639). \ No newline at end of file diff --git a/changelogs/fragments/1640-quick-fix-ansible-core:2.17-verbosity-issue b/changelogs/fragments/1640-quick-fix-ansible-core:2.17-verbosity-issue deleted file mode 100644 index 0b8ffe9a6..000000000 --- a/changelogs/fragments/1640-quick-fix-ansible-core:2.17-verbosity-issue +++ /dev/null @@ -1,7 +0,0 @@ -bugfixes: - - zos_copy - module would fail when an internal SFTP command wrote output to - stderr. Fix sets default value of existing module option `ignore_sftp_error` to True - (https://github.com/ansible-collections/ibm_zos_core/pull/1640). - - zos_fetch - module would fail when an internal SFTP command wrote output to - stderr. Fix sets default value of existing module option `ignore_sftp_error` to True - (https://github.com/ansible-collections/ibm_zos_core/pull/1640). \ No newline at end of file diff --git a/changelogs/fragments/1641-case-sensitivity-zos_operator.yml b/changelogs/fragments/1641-case-sensitivity-zos_operator.yml deleted file mode 100644 index 1079776f9..000000000 --- a/changelogs/fragments/1641-case-sensitivity-zos_operator.yml +++ /dev/null @@ -1,4 +0,0 @@ -minor_changes: - - zos_operator - Added new option ``case_sensitive`` to module, allowing users - to control how case in a command is handled by it. - (https://github.com/ansible-collections/ibm_zos_core/pull/1641) \ No newline at end of file diff --git a/changelogs/fragments/1642-Ensure_portability_zos_encode.yml b/changelogs/fragments/1642-Ensure_portability_zos_encode.yml deleted file mode 100644 index d1dd95b6e..000000000 --- a/changelogs/fragments/1642-Ensure_portability_zos_encode.yml +++ /dev/null @@ -1,4 +0,0 @@ -trivial: - - test_zos_encode_func.py - Remove the use of hard coded dataset - and file names. - (https://github.com/ansible-collections/ibm_zos_core/pull/1642). \ No newline at end of file diff --git a/changelogs/fragments/1643-Validate_parameter_recover_to_tolerate_enqueue.yml b/changelogs/fragments/1643-Validate_parameter_recover_to_tolerate_enqueue.yml deleted file mode 100644 index b7eb789f0..000000000 --- a/changelogs/fragments/1643-Validate_parameter_recover_to_tolerate_enqueue.yml +++ /dev/null @@ -1,5 +0,0 @@ -bugfixes: - - zos_backup_restore - When a recoverable error was encountered and ``recover=True``, - the module would ignore the option and fail. Fix now does not fail when a recoverable - error is raised when ``recover=True``. - (https://github.com/ansible-collections/ibm_zos_core/pull/1643). diff --git a/changelogs/fragments/1647-doc-backup-restore-racf-class.yml b/changelogs/fragments/1647-doc-backup-restore-racf-class.yml deleted file mode 100644 index 07c0abcf6..000000000 --- a/changelogs/fragments/1647-doc-backup-restore-racf-class.yml +++ /dev/null @@ -1,5 +0,0 @@ -trivial: - - zos_backup_restore - Added supplemental documentation explaining the RACF class - requirement when using module option restore. - (https://github.com/ansible-collections/ibm_zos_core/pull/1647). - diff --git a/changelogs/fragments/1654-zos_apf_tests_change_temphlq.yml b/changelogs/fragments/1654-zos_apf_tests_change_temphlq.yml deleted file mode 100644 index c36a77176..000000000 --- a/changelogs/fragments/1654-zos_apf_tests_change_temphlq.yml +++ /dev/null @@ -1,4 +0,0 @@ -trivial: - - test_zos_apf_func - Change temp HLQ used in the test suite for a - standardized one. - (https://github.com/ansible-collections/ibm_zos_core/pull/1654). \ No newline at end of file diff --git a/changelogs/fragments/1656-zos_find_portability.yml b/changelogs/fragments/1656-zos_find_portability.yml deleted file mode 100644 index ca32a4580..000000000 --- a/changelogs/fragments/1656-zos_find_portability.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - test_zos_find_func.py - Remove the use of hard coded dataset names. - (https://github.com/ansible-collections/ibm_zos_core/pull/1656). \ No newline at end of file diff --git a/changelogs/fragments/1657-test_fetch_portability.yml b/changelogs/fragments/1657-test_fetch_portability.yml deleted file mode 100644 index 97c5bbc5b..000000000 --- a/changelogs/fragments/1657-test_fetch_portability.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_fetch - Remove the use of hard coded dataset and files names. - (https://github.com/ansible-collections/ibm_zos_core/pull/1657). \ No newline at end of file diff --git a/changelogs/fragments/1658-job_submit_portability.yml b/changelogs/fragments/1658-job_submit_portability.yml deleted file mode 100644 index 83aeb281d..000000000 --- a/changelogs/fragments/1658-job_submit_portability.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_job_submit - Remove the use of hard coded dataset and files names. - (https://github.com/ansible-collections/ibm_zos_core/pull/1658). \ No newline at end of file diff --git a/changelogs/fragments/1661-job-owner-valid-characters.yml b/changelogs/fragments/1661-job-owner-valid-characters.yml deleted file mode 100644 index 7632ca96d..000000000 --- a/changelogs/fragments/1661-job-owner-valid-characters.yml +++ /dev/null @@ -1,9 +0,0 @@ -bugfixes: - - zos_job_output - RACF user names containing a ``@``, ``$``, or ``#`` - raised an invalid argument error. Fix now allows the use of all valid - characters for a RACF user. - (https://github.com/ansible-collections/ibm_zos_core/pull/1661). - - zos_job_query - RACF user names containing a ``@``, ``$``, or ``#`` - raised an invalid argument error. Fix now allows the use of all valid - characters for a RACF user. - (https://github.com/ansible-collections/ibm_zos_core/pull/1661). \ No newline at end of file diff --git a/changelogs/fragments/1664-portability-zos_copy.yml b/changelogs/fragments/1664-portability-zos_copy.yml deleted file mode 100644 index 267027273..000000000 --- a/changelogs/fragments/1664-portability-zos_copy.yml +++ /dev/null @@ -1,11 +0,0 @@ -trivial: - - zos_copy - Remove the use of hard coded dataset and files names using a module_utils function. - (https://github.com/ansible-collections/ibm_zos_core/pull/1664). - - zos_backup_restore - Remove the use of hard coded dataset and files names using a module_utils function. - (https://github.com/ansible-collections/ibm_zos_core/pull/1664). - - zos_encode - Remove the use of hard coded dataset and files names using a module_utils function. - (https://github.com/ansible-collections/ibm_zos_core/pull/1664). - - zos_fetch - Remove the use of hard coded dataset and files names using a module_utils function. - (https://github.com/ansible-collections/ibm_zos_core/pull/1664). - - zos_job_submit - Remove the use of hard coded dataset and files names using a module_utils function. - (https://github.com/ansible-collections/ibm_zos_core/pull/1664). \ No newline at end of file diff --git a/changelogs/fragments/1673-return-job-type.yml b/changelogs/fragments/1673-return-job-type.yml deleted file mode 100644 index 1da8ca503..000000000 --- a/changelogs/fragments/1673-return-job-type.yml +++ /dev/null @@ -1,10 +0,0 @@ -minor_changes: - - zos_job_query - Added address space type used by jobs in return JSON - as `content_type`. - (https://github.com/ansible-collections/ibm_zos_core/pull/1673). - - zos_job_output - Added address space type used by jobs in return JSON - as `content_type`. - (https://github.com/ansible-collections/ibm_zos_core/pull/1673). - - zos_job_submit - Added address space type used by jobs in return JSON - as `content_type`. - (https://github.com/ansible-collections/ibm_zos_core/pull/1673). \ No newline at end of file diff --git a/changelogs/fragments/1676-portability_zos_blockinfile.yml b/changelogs/fragments/1676-portability_zos_blockinfile.yml deleted file mode 100644 index bb0ee4b9c..000000000 --- a/changelogs/fragments/1676-portability_zos_blockinfile.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_blockinfile- Remove the use of hard coded dataset and files names. - (https://github.com/ansible-collections/ibm_zos_core/pull/1676). \ No newline at end of file diff --git a/changelogs/fragments/1677-zos_job_query_portability.yaml b/changelogs/fragments/1677-zos_job_query_portability.yaml deleted file mode 100644 index 799721b28..000000000 --- a/changelogs/fragments/1677-zos_job_query_portability.yaml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_copy - Remove the use of hard coded job id for test cases. - (https://github.com/ansible-collections/ibm_zos_core/pull/1667). \ No newline at end of file diff --git a/changelogs/fragments/1684-Add_validation_for_marker_begin_end.yml b/changelogs/fragments/1684-Add_validation_for_marker_begin_end.yml deleted file mode 100644 index 067a17784..000000000 --- a/changelogs/fragments/1684-Add_validation_for_marker_begin_end.yml +++ /dev/null @@ -1,4 +0,0 @@ -bugfixes: - - zos_blockinfile - Previously module was not able to delete a block when 'marker_begin' and 'marker_end' - were set to the same value. Fix introduces a requirement for 'marker_begin' and 'marker_end' to have different values. - (https://github.com/ansible-collections/ibm_zos_core/pull/1684). \ No newline at end of file diff --git a/changelogs/fragments/1687-lineinfile_portability.yml b/changelogs/fragments/1687-lineinfile_portability.yml deleted file mode 100644 index df00fc9a2..000000000 --- a/changelogs/fragments/1687-lineinfile_portability.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_lineinfile- Remove the use of hard coded dataset and files names. - (https://github.com/ansible-collections/ibm_zos_core/pull/1687). \ No newline at end of file diff --git a/changelogs/fragments/1689-add-non-utf8-testcase.yml b/changelogs/fragments/1689-add-non-utf8-testcase.yml deleted file mode 100644 index 7ad1c8190..000000000 --- a/changelogs/fragments/1689-add-non-utf8-testcase.yml +++ /dev/null @@ -1,4 +0,0 @@ -trivial: - - zos_job_submit - Added a test case for non-printable UTF8 characters - to validate ZOAU changes. This covers the full EBCDIC range. - (https://github.com/ansible-collections/ibm_zos_core/pull/1689). \ No newline at end of file diff --git a/changelogs/fragments/1691-zos-mvs-raw-base64-mode.yml b/changelogs/fragments/1691-zos-mvs-raw-base64-mode.yml deleted file mode 100644 index 7064e914a..000000000 --- a/changelogs/fragments/1691-zos-mvs-raw-base64-mode.yml +++ /dev/null @@ -1,4 +0,0 @@ -bugfixes: - - zos_mvs_raw - base64 sub-option for return_content under option for retrieving - DD output did not return base64. Fix now returns the base64 encoded contents of the DD. - (https://github.com/ansible-collections/ibm_zos_core/pull/1691). \ No newline at end of file diff --git a/changelogs/fragments/1695-tmp_hlq_when_calling_mvscmd.yml b/changelogs/fragments/1695-tmp_hlq_when_calling_mvscmd.yml deleted file mode 100644 index 2539501ea..000000000 --- a/changelogs/fragments/1695-tmp_hlq_when_calling_mvscmd.yml +++ /dev/null @@ -1,31 +0,0 @@ -bugfixes: - - zos_apf - The ``tmp_hlq`` option was previously ignored and default - values were used instead. Fix now honors the value set in the module option. - (https://github.com/ansible-collections/ibm_zos_core/pull/1695). - - zos_archive - The ``tmp_hlq`` option was previously ignored and default - values were used instead. Fix now honors the value set in the module option. - (https://github.com/ansible-collections/ibm_zos_core/pull/1695). - - zos_blockinfile - The ``tmp_hlq`` option was previously ignored and default - values were used instead. Fix now honors the value set in the module option. - (https://github.com/ansible-collections/ibm_zos_core/pull/1695). - - zos_copy - The ``tmp_hlq`` option was previously ignored and default - values were used instead. Fix now honors the value set in the module option. - (https://github.com/ansible-collections/ibm_zos_core/pull/1695). - - zos_data_set - The ``tmp_hlq`` option was previously ignored and default - values were used instead. Fix now honors the value set in the module option. - (https://github.com/ansible-collections/ibm_zos_core/pull/1695). - - zos_encode - The ``tmp_hlq`` option was previously ignored and default - values were used instead. Fix now honors the value set in the module option. - (https://github.com/ansible-collections/ibm_zos_core/pull/1695). - - zos_fetch - The ``tmp_hlq`` option was previously ignored and default - values were used instead. Fix now honors the value set in the module option. - (https://github.com/ansible-collections/ibm_zos_core/pull/1695). - - zos_lineinfile - The ``tmp_hlq`` option was previously ignored and default - values were used instead. Fix now honors the value set in the module option. - (https://github.com/ansible-collections/ibm_zos_core/pull/1695). - - zos_mount - The ``tmp_hlq`` option was previously ignored and default - values were used instead. Fix now honors the value set in the module option. - (https://github.com/ansible-collections/ibm_zos_core/pull/1695). - - zos_unarchive - The ``tmp_hlq`` option was previously ignored and default - values were used instead. Fix now honors the value set in the module option. - (https://github.com/ansible-collections/ibm_zos_core/pull/1695). \ No newline at end of file diff --git a/changelogs/fragments/1698-multiple-args-zos_script.yml b/changelogs/fragments/1698-multiple-args-zos_script.yml deleted file mode 100644 index 636c882f6..000000000 --- a/changelogs/fragments/1698-multiple-args-zos_script.yml +++ /dev/null @@ -1,5 +0,0 @@ -bugfixes: - - zos_script - The module would discard command line arguments in a command, - except for the first one. Fix now makes sure that all arguments are - passed to the remote command that gets executed. - (https://github.com/ansible-collections/ibm_zos_core/pull/1698). \ No newline at end of file diff --git a/changelogs/fragments/1739-tmp_files_not_use_tmp_folder.yml b/changelogs/fragments/1739-tmp_files_not_use_tmp_folder.yml deleted file mode 100644 index 6cf07266b..000000000 --- a/changelogs/fragments/1739-tmp_files_not_use_tmp_folder.yml +++ /dev/null @@ -1,5 +0,0 @@ -bugfixes: - - zos_copy - Previously, the module ignored the value of ``remote_tmp`` set in Ansible configuration file - and used the ``/tmp/`` directory. Fix now uses the value of ``remote_tmp`` or the default value ``~/.ansible/tmp`` - if none is given. - (https://github.com/ansible-collections/ibm_zos_core/pull/1739). diff --git a/changelogs/fragments/1761-system-subsystem-job_query.yml b/changelogs/fragments/1761-system-subsystem-job_query.yml deleted file mode 100644 index 36d8abb02..000000000 --- a/changelogs/fragments/1761-system-subsystem-job_query.yml +++ /dev/null @@ -1,3 +0,0 @@ -bugfixes: - - zos_job_query - Module was not returning values for system and subsystem. Fix now returns these values. - (https://github.com/ansible-collections/ibm_zos_core/pull/1761). diff --git a/changelogs/fragments/1766-zos_copy-racf-uacc-updates.yml b/changelogs/fragments/1766-zos_copy-racf-uacc-updates.yml deleted file mode 100644 index fba927f0d..000000000 --- a/changelogs/fragments/1766-zos_copy-racf-uacc-updates.yml +++ /dev/null @@ -1,12 +0,0 @@ -bugfixes: - - zos_copy - Improve module zos_copy error handling when the user does not have - universal access authority set to UACC(READ) for SAF Profile - 'MVS.MCSOPER.ZOAU' and SAF Class OPERCMDS. The module now handles the exception - and returns an informative message. - (https://github.com/ansible-collections/ibm_zos_core/pull/1766). -trivial: - - pipeline - Deliver a new users.py framework that allows functional test cases to - request a managed user type where this user can have limited access to some SAF - profile, or saf class as well as user id's with specific patterns such as including - supported special characters such as '@', '#', etc. - (https://github.com/ansible-collections/ibm_zos_core/pull/1766). diff --git a/changelogs/fragments/692-changelog-lint-ac-tool.yml b/changelogs/fragments/692-changelog-lint-ac-tool.yml deleted file mode 100644 index cbf6bab7d..000000000 --- a/changelogs/fragments/692-changelog-lint-ac-tool.yml +++ /dev/null @@ -1,8 +0,0 @@ -trivial: - - ac - Added new command ac-changelog into ac tool to run changelog - fragments lint and changelog release generation. - (https://github.com/ansible-collections/ibm_zos_core/pull/1304). - - - workflows/ac_changelog - Added new github action that will lint - changelog fragments upon a new pull request. - (https://github.com/ansible-collections/ibm_zos_core/pull/1304). \ No newline at end of file diff --git a/changelogs/fragments/828-adds-concurrent-executor.yml b/changelogs/fragments/828-adds-concurrent-executor.yml deleted file mode 100644 index 9da97836e..000000000 --- a/changelogs/fragments/828-adds-concurrent-executor.yml +++ /dev/null @@ -1,7 +0,0 @@ -trivial: - - ce.py - Adds the concurrent executor capable of running - test cases concurrently against a pool of managed nodes. - (https://github.com/ansible-collections/ibm_zos_core/pull/828). - - zinventory-raw - a new pytest fixture that can accept a JSON - vs a configuration file. - (https://github.com/ansible-collections/ibm_zos_core/pull/828). diff --git a/changelogs/fragments/971-bug-job_submit-can-stacktrace.yml b/changelogs/fragments/971-bug-job_submit-can-stacktrace.yml deleted file mode 100644 index e02daed4c..000000000 --- a/changelogs/fragments/971-bug-job_submit-can-stacktrace.yml +++ /dev/null @@ -1,6 +0,0 @@ -trivial: - - job.py - generalized resolution of query_exception that may be thrown. - This should prevent the stack trace. - (https://github.com/ansible-collections/ibm_zos_core/pull/1383). - - test_zos_job_submit.py - Removed code that was hiding if a duration was not returned. - (https://github.com/ansible-collections/ibm_zos_core/pull/1383). diff --git a/changelogs/fragments/992-fix-sanity4to6.yml b/changelogs/fragments/992-fix-sanity4to6.yml deleted file mode 100644 index 3d9637c63..000000000 --- a/changelogs/fragments/992-fix-sanity4to6.yml +++ /dev/null @@ -1,7 +0,0 @@ -trivial: - - zos_data_set.py - Corrected references to input variable definitions - (https://github.com/ansible-collections/ibm_zos_core/pull/1285). - - data_set.py - Updated exception handler to match what was returned. - (https://github.com/ansible-collections/ibm_zos_core/pull/1285). - - test_zos_data_set_func.py - Removed test of discontinued function. - (https://github.com/ansible-collections/ibm_zos_core/pull/1285). From 090617617509f18a37a62da25ee3a9f467609260 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Sun, 27 Oct 2024 15:50:16 -0700 Subject: [PATCH 489/495] Add timeouts to job submits Signed-off-by: ddimatos <dimatos@gmail.com> --- tests/functional/modules/test_zos_data_set_func.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/tests/functional/modules/test_zos_data_set_func.py b/tests/functional/modules/test_zos_data_set_func.py index f210377d9..3d227ab35 100644 --- a/tests/functional/modules/test_zos_data_set_func.py +++ b/tests/functional/modules/test_zos_data_set_func.py @@ -221,7 +221,7 @@ def test_data_set_present_when_uncataloged(ansible_zos_module, jcl, volumes_on_s hosts.all.file(path=TEMP_PATH, state="directory") hosts.all.shell(cmd=ECHO_COMMAND.format(quote(jcl.format(volume_1, dataset)), TEMP_PATH)) results = hosts.all.zos_job_submit( - src=TEMP_PATH + "/SAMPLE", location="uss" + src=TEMP_PATH + "/SAMPLE", location="uss", wait_time_s=30 ) # verify data set creation was successful for result in results.contacted.values(): @@ -267,7 +267,7 @@ def test_data_set_replacement_when_uncataloged(ansible_zos_module, jcl, volumes_ hosts.all.file(path=TEMP_PATH, state="directory") hosts.all.shell(cmd=ECHO_COMMAND.format(quote(jcl.format(volume, dataset)), TEMP_PATH)) results = hosts.all.zos_job_submit( - src=TEMP_PATH + "/SAMPLE", location="uss" + src=TEMP_PATH + "/SAMPLE", location="uss", wait_time_s=30 ) # verify data set creation was successful for result in results.contacted.values(): @@ -315,7 +315,7 @@ def test_data_set_absent_when_uncataloged(ansible_zos_module, jcl, volumes_on_sy hosts.all.file(path=TEMP_PATH, state="directory") hosts.all.shell(cmd=ECHO_COMMAND.format(quote(jcl.format(volume_1, dataset)), TEMP_PATH)) results = hosts.all.zos_job_submit( - src=TEMP_PATH + "/SAMPLE", location="uss" + src=TEMP_PATH + "/SAMPLE", location="uss", wait_time_s=30 ) # verify data set creation was successful for result in results.contacted.values(): @@ -356,7 +356,7 @@ def test_data_set_absent_when_uncataloged_and_same_name_cataloged_is_present( hosts.all.file(path=TEMP_PATH, state="directory") hosts.all.shell(cmd=ECHO_COMMAND.format(quote(jcl.format(volume_1, dataset)), TEMP_PATH)) - results = hosts.all.zos_job_submit(src=TEMP_PATH + "/SAMPLE", location="uss") + results = hosts.all.zos_job_submit(src=TEMP_PATH + "/SAMPLE", location="uss", wait_time_s=30) # verify data set creation was successful for result in results.contacted.values(): @@ -371,7 +371,7 @@ def test_data_set_absent_when_uncataloged_and_same_name_cataloged_is_present( hosts.all.file(path=TEMP_PATH + "/SAMPLE", state="absent") hosts.all.shell(cmd=ECHO_COMMAND.format(quote(jcl.format(volume_2, dataset)), TEMP_PATH)) - results = hosts.all.zos_job_submit(src=TEMP_PATH + "/SAMPLE", location="uss") + results = hosts.all.zos_job_submit(src=TEMP_PATH + "/SAMPLE", location="uss", wait_time_s=30) # verify data set creation was successful for result in results.contacted.values(): From fbd8f44e90cdc21b1f86b94af1709eb04192c9b1 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Sun, 27 Oct 2024 15:52:50 -0700 Subject: [PATCH 490/495] Add IDs to test so pytest can collect it without escaping special chars Signed-off-by: ddimatos <dimatos@gmail.com> --- tests/unit/test_zos_backup_restore_unit.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/tests/unit/test_zos_backup_restore_unit.py b/tests/unit/test_zos_backup_restore_unit.py index 5920febdb..c6e8ce9c8 100644 --- a/tests/unit/test_zos_backup_restore_unit.py +++ b/tests/unit/test_zos_backup_restore_unit.py @@ -241,7 +241,10 @@ def test_invalid_sms_classes(zos_backup_restore_mocker, sms_class): @pytest.mark.parametrize( "volume", - ["0000000", "", "@!!&$", "HELLOWORLD"], + [pytest.param("0000000", id='six_zeros'), + pytest.param("", id='empty_string'), + pytest.param("@!!&$", id='symbols'), + pytest.param("HELLOWORLD", id='hello_world')] ) def test_invalid_volumes(zos_backup_restore_mocker, volume): valid_args = dict( From 6ca1be95af12a2b2db6923e7139f50afa972337c Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Sun, 27 Oct 2024 23:16:17 -0700 Subject: [PATCH 491/495] Merged two lines in notes Signed-off-by: ddimatos <dimatos@gmail.com> --- docs/source/release_notes.rst | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/docs/source/release_notes.rst b/docs/source/release_notes.rst index 74916ee33..e4d0fc865 100644 --- a/docs/source/release_notes.rst +++ b/docs/source/release_notes.rst @@ -37,9 +37,10 @@ Bugfixes - ``zos_backup_restore`` - when a recoverable error was encountered and **recover = True**, the module would fail. The change now allows the module to recover. -- ``zos_blockinfile`` - when the modules **marker_begin** and **marker_end** were set to the same value, the module would not delete the block. Now the module requires the **marker_begin** and **marker_end** to have different values. +- ``zos_blockinfile`` -- ``zos_blockinfile`` - module option **tmp_hlq** was previously ignored and default values were used. Now the module uses the value set in the option.. + - when the modules **marker_begin** and **marker_end** were set to the same value, the module would not delete the block. Now the module requires the **marker_begin** and **marker_end** to have different values. + - module option **tmp_hlq** was previously ignored and default values were used. Now the module uses the value set in the option.. - ``zos_copy`` From 0a394cd5d14fab17e6d47bb902e727f9c4c2a1fa Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Sun, 27 Oct 2024 23:17:54 -0700 Subject: [PATCH 492/495] Fixed missing space Signed-off-by: ddimatos <dimatos@gmail.com> --- docs/source/release_notes.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/source/release_notes.rst b/docs/source/release_notes.rst index e4d0fc865..099aab59a 100644 --- a/docs/source/release_notes.rst +++ b/docs/source/release_notes.rst @@ -57,6 +57,7 @@ Bugfixes - ``zos_job_output`` - module would raise an invalid argument error for a user ID that contained **@**, **$**, or **#**. Now the module supports RACF user naming conventions. - ``zos_job_query`` + - module did not return values for properties **system** and **subsystem**. Now the module returns these values. - module would raise an invalid argument error for a user ID that contained **@**, **$**, or **#**. Now the module supports RACF user naming conventions. From d25c7e3740c387b83dfd34eed5c78cfe5397e858 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Mon, 28 Oct 2024 13:29:52 -0700 Subject: [PATCH 493/495] Update the modules tmp_hlq description Signed-off-by: ddimatos <dimatos@gmail.com> --- docs/source/modules/zos_backup_restore.rst | 13 +++---------- plugins/modules/zos_backup_restore.py | 9 +++++++-- 2 files changed, 10 insertions(+), 12 deletions(-) diff --git a/docs/source/modules/zos_backup_restore.rst b/docs/source/modules/zos_backup_restore.rst index 456bba84c..9b8759e42 100644 --- a/docs/source/modules/zos_backup_restore.rst +++ b/docs/source/modules/zos_backup_restore.rst @@ -207,18 +207,11 @@ hlq tmp_hlq - Override the default high level qualifier (HLQ) for temporary data sets. + Override the default high level qualifier (HLQ) for temporary data sets used in the modules operation. - If original HLQ is not available, then the value of ``TMPHLQ`` is used. + If *tmp_hlq* is set, this value will be applied to all temporary data sets. - | **required**: False - | **type**: str - - -tmp_hlq - Override the default high level qualifier (HLQ) for temporary and backup data sets. - - The default HLQ is the Ansible user that executes the module and if that is not available, then the value of ``TMPHLQ`` is used. + If *tmp_hlq* is not set, the value will be the username who submits the ansible task, this is the default behavior. If the username can not be identified, the value ``TMPHLQ`` is used. | **required**: False | **type**: str diff --git a/plugins/modules/zos_backup_restore.py b/plugins/modules/zos_backup_restore.py index cd2e0b00d..c58ca4197 100644 --- a/plugins/modules/zos_backup_restore.py +++ b/plugins/modules/zos_backup_restore.py @@ -191,8 +191,13 @@ required: false tmp_hlq: description: - - Override the default high level qualifier (HLQ) for temporary data sets. - - If original HLQ is not available, then the value of C(TMPHLQ) is used. + - Override the default high level qualifier (HLQ) for temporary + data sets used in the modules operation. + - If I(tmp_hlq) is set, this value will be applied to all temporary + data sets. + - If I(tmp_hlq) is not set, the value will be the username who submits + the ansible task, this is the default behavior. If the username can + not be identified, the value C(TMPHLQ) is used. required: false type: str notes: From 0ae284ed11374f47101df4032f81030579eca4e9 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Mon, 28 Oct 2024 13:40:40 -0700 Subject: [PATCH 494/495] Update module doc to better explain the sftp error option Signed-off-by: ddimatos <dimatos@gmail.com> --- docs/source/modules/zos_copy.rst | 2 +- docs/source/modules/zos_fetch.rst | 2 +- plugins/modules/zos_copy.py | 2 +- plugins/modules/zos_fetch.py | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/docs/source/modules/zos_copy.rst b/docs/source/modules/zos_copy.rst index 8e8cb42bf..9f5e74a46 100644 --- a/docs/source/modules/zos_copy.rst +++ b/docs/source/modules/zos_copy.rst @@ -185,7 +185,7 @@ force_lock ignore_sftp_stderr - During data transfer through SFTP, the SFTP command directs content to stderr. By default, the module essentially ignores the stderr stream produced by SFTP and continues execution. The user is able to override this behavior by setting this parameter to ``false``. By doing so, any content written to stderr is considered an error by Ansible and will have module fail. + During data transfer through SFTP, the SFTP command directs content to stderr. By default, the module essentially ignores the stderr stream produced by SFTP and continues execution. The user is able to override this behavior by setting this parameter to ``false``. By doing so, any content written to stderr is considered an error by Ansible and will cause the module to fail. When Ansible verbosity is set to greater than 3, either through the command line interface (CLI) using **-vvvv** or through environment variables such as **verbosity = 4**, then this parameter will automatically be set to ``true``. diff --git a/docs/source/modules/zos_fetch.rst b/docs/source/modules/zos_fetch.rst index e3f0df325..0c424fd02 100644 --- a/docs/source/modules/zos_fetch.rst +++ b/docs/source/modules/zos_fetch.rst @@ -126,7 +126,7 @@ tmp_hlq ignore_sftp_stderr - During data transfer through SFTP, the SFTP command directs content to stderr. By default, the module essentially ignores the stderr stream produced by SFTP and continues execution. The user is able to override this behavior by setting this parameter to ``false``. By doing so, any content written to stderr is considered an error by Ansible and will have module fail. + During data transfer through SFTP, the SFTP command directs content to stderr. By default, the module essentially ignores the stderr stream produced by SFTP and continues execution. The user is able to override this behavior by setting this parameter to ``false``. By doing so, any content written to stderr is considered an error by Ansible and will cause the module to fail. When Ansible verbosity is set to greater than 3, either through the command line interface (CLI) using **-vvvv** or through environment variables such as **verbosity = 4**, then this parameter will automatically be set to ``true``. diff --git a/plugins/modules/zos_copy.py b/plugins/modules/zos_copy.py index 31552860e..7e48d8d3f 100644 --- a/plugins/modules/zos_copy.py +++ b/plugins/modules/zos_copy.py @@ -206,7 +206,7 @@ produced by SFTP and continues execution. The user is able to override this behavior by setting this parameter to C(false). By doing so, any content written to stderr is considered an error by Ansible and will - have module fail. + cause the module to fail. - When Ansible verbosity is set to greater than 3, either through the command line interface (CLI) using B(-vvvv) or through environment variables such as B(verbosity = 4), then this parameter will diff --git a/plugins/modules/zos_fetch.py b/plugins/modules/zos_fetch.py index cd00b4216..7096feffc 100644 --- a/plugins/modules/zos_fetch.py +++ b/plugins/modules/zos_fetch.py @@ -127,7 +127,7 @@ produced by SFTP and continues execution. The user is able to override this behavior by setting this parameter to C(false). By doing so, any content written to stderr is considered an error by Ansible and will - have module fail. + cause the module to fail. - When Ansible verbosity is set to greater than 3, either through the command line interface (CLI) using B(-vvvv) or through environment variables such as B(verbosity = 4), then this parameter will From b9adb7522d14bdd522af9a2de459f944fc5965f0 Mon Sep 17 00:00:00 2001 From: ddimatos <dimatos@gmail.com> Date: Mon, 28 Oct 2024 22:30:44 -0700 Subject: [PATCH 495/495] doc updates that came from the PR Signed-off-by: ddimatos <dimatos@gmail.com> --- docs/source/modules/zos_blockinfile.rst | 6 +++--- plugins/modules/zos_blockinfile.py | 6 +++--- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/docs/source/modules/zos_blockinfile.rst b/docs/source/modules/zos_blockinfile.rst index e9cd6f472..72c6889f9 100644 --- a/docs/source/modules/zos_blockinfile.rst +++ b/docs/source/modules/zos_blockinfile.rst @@ -104,7 +104,7 @@ insertbefore marker_begin This will be inserted at ``{mark}`` in the opening ansible block marker. - Value needs to be different from marker_end. + Value needs to be different from *marker_end*. | **required**: False | **type**: str @@ -114,7 +114,7 @@ marker_begin marker_end This will be inserted at ``{mark}`` in the closing ansible block marker. - Value must be different from marker_end. + Value must be different from *marker_begin*. | **required**: False | **type**: str @@ -128,7 +128,7 @@ backup The backup file name will be returned on either success or failure of module execution such that data can be retrieved. - Use generation data set (GDS) relative positive name. ``e.g. SOME.CREATION(+1``) + Use generation data set (GDS) relative positive name. e.g. *SOME.CREATION(+1*). | **required**: False | **type**: bool diff --git a/plugins/modules/zos_blockinfile.py b/plugins/modules/zos_blockinfile.py index ef4abd68f..f322c4a73 100644 --- a/plugins/modules/zos_blockinfile.py +++ b/plugins/modules/zos_blockinfile.py @@ -96,7 +96,7 @@ marker_begin: description: - This will be inserted at C({mark}) in the opening ansible block marker. - - Value needs to be different from marker_end. + - Value needs to be different from I(marker_end). required: false type: str default: BEGIN @@ -104,7 +104,7 @@ required: false description: - This will be inserted at C({mark}) in the closing ansible block marker. - - Value must be different from marker_end. + - Value must be different from I(marker_begin). type: str default: END backup: @@ -114,7 +114,7 @@ - When set to C(true), the module creates a backup file or data set. - The backup file name will be returned on either success or failure of module execution such that data can be retrieved. - - Use generation data set (GDS) relative positive name. C(e.g. SOME.CREATION(+1)) + - Use generation data set (GDS) relative positive name. e.g. I(SOME.CREATION(+1)). required: false type: bool default: false